snowpark-connect 0.20.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of snowpark-connect might be problematic. See the package registry's advisory page for more details.
- snowflake/snowpark_connect/__init__.py +23 -0
- snowflake/snowpark_connect/analyze_plan/__init__.py +3 -0
- snowflake/snowpark_connect/analyze_plan/map_tree_string.py +38 -0
- snowflake/snowpark_connect/column_name_handler.py +735 -0
- snowflake/snowpark_connect/config.py +576 -0
- snowflake/snowpark_connect/constants.py +47 -0
- snowflake/snowpark_connect/control_server.py +52 -0
- snowflake/snowpark_connect/dataframe_name_handler.py +54 -0
- snowflake/snowpark_connect/date_time_format_mapping.py +399 -0
- snowflake/snowpark_connect/empty_dataframe.py +18 -0
- snowflake/snowpark_connect/error/__init__.py +11 -0
- snowflake/snowpark_connect/error/error_mapping.py +6174 -0
- snowflake/snowpark_connect/error/error_utils.py +321 -0
- snowflake/snowpark_connect/error/exceptions.py +24 -0
- snowflake/snowpark_connect/execute_plan/__init__.py +3 -0
- snowflake/snowpark_connect/execute_plan/map_execution_command.py +204 -0
- snowflake/snowpark_connect/execute_plan/map_execution_root.py +173 -0
- snowflake/snowpark_connect/execute_plan/utils.py +183 -0
- snowflake/snowpark_connect/expression/__init__.py +3 -0
- snowflake/snowpark_connect/expression/literal.py +90 -0
- snowflake/snowpark_connect/expression/map_cast.py +343 -0
- snowflake/snowpark_connect/expression/map_expression.py +293 -0
- snowflake/snowpark_connect/expression/map_extension.py +104 -0
- snowflake/snowpark_connect/expression/map_sql_expression.py +633 -0
- snowflake/snowpark_connect/expression/map_udf.py +142 -0
- snowflake/snowpark_connect/expression/map_unresolved_attribute.py +241 -0
- snowflake/snowpark_connect/expression/map_unresolved_extract_value.py +85 -0
- snowflake/snowpark_connect/expression/map_unresolved_function.py +9450 -0
- snowflake/snowpark_connect/expression/map_unresolved_star.py +218 -0
- snowflake/snowpark_connect/expression/map_update_fields.py +164 -0
- snowflake/snowpark_connect/expression/map_window_function.py +258 -0
- snowflake/snowpark_connect/expression/typer.py +125 -0
- snowflake/snowpark_connect/includes/__init__.py +0 -0
- snowflake/snowpark_connect/includes/jars/antlr4-runtime-4.9.3.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-cli-1.5.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-codec-1.16.1.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-collections-3.2.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-collections4-4.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-compiler-3.1.9.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-compress-1.26.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-crypto-1.1.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-dbcp-1.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-io-2.16.1.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-lang-2.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-lang3-3.12.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-logging-1.1.3.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-math3-3.6.1.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-pool-1.5.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-text-1.10.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/hadoop-client-api-3.3.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-annotations-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-core-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-core-asl-1.9.13.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-databind-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-dataformat-yaml-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-datatype-jsr310-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-mapper-asl-1.9.13.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-module-scala_2.12-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-ast_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-core_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-jackson_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-scalap_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/kryo-shaded-4.0.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-1.2-api-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-api-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-core-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-slf4j2-impl-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/paranamer-2.8.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-collection-compat_2.12-2.7.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-compiler-2.12.18.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-library-2.12.18.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-parser-combinators_2.12-2.3.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-reflect-2.12.18.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-xml_2.12-2.1.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/slf4j-api-2.0.7.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-catalyst_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-common-utils_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-core_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-graphx_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-hive-thriftserver_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-hive_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-kubernetes_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-kvstore_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-launcher_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-mesos_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-mllib-local_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-mllib_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-network-common_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-network-shuffle_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-repl_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-sketch_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-sql-api_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-sql_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-streaming_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-tags_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-unsafe_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-yarn_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/python/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/__init__.py +173 -0
- snowflake/snowpark_connect/includes/python/pyspark/_globals.py +71 -0
- snowflake/snowpark_connect/includes/python/pyspark/_typing.pyi +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/accumulators.py +341 -0
- snowflake/snowpark_connect/includes/python/pyspark/broadcast.py +383 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/__init__.py +8 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/cloudpickle.py +948 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/cloudpickle_fast.py +844 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/compat.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/conf.py +276 -0
- snowflake/snowpark_connect/includes/python/pyspark/context.py +2601 -0
- snowflake/snowpark_connect/includes/python/pyspark/daemon.py +218 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/__init__.py +70 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/error_classes.py +889 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/base.py +228 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/captured.py +307 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/connect.py +190 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/tests/test_errors.py +60 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/utils.py +116 -0
- snowflake/snowpark_connect/includes/python/pyspark/files.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/find_spark_home.py +95 -0
- snowflake/snowpark_connect/includes/python/pyspark/install.py +203 -0
- snowflake/snowpark_connect/includes/python/pyspark/instrumentation_utils.py +190 -0
- snowflake/snowpark_connect/includes/python/pyspark/java_gateway.py +248 -0
- snowflake/snowpark_connect/includes/python/pyspark/join.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/__init__.py +71 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/_typing.pyi +84 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/base.py +414 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/classification.py +4332 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/clustering.py +2188 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/common.py +146 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/__init__.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/base.py +346 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/classification.py +382 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/evaluation.py +291 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/feature.py +258 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/functions.py +77 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/io_utils.py +335 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/pipeline.py +262 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/summarizer.py +120 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/tuning.py +579 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/util.py +173 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/deepspeed_distributor.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/tests/test_deepspeed_distributor.py +306 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/dl_util.py +150 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/evaluation.py +1166 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/feature.py +7474 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/fpm.py +543 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/functions.py +842 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/image.py +271 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/linalg/__init__.py +1382 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/model_cache.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/param/__init__.py +602 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/param/_shared_params_code_gen.py +368 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/param/shared.py +878 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/pipeline.py +451 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/recommendation.py +748 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/regression.py +3335 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/stat.py +523 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_classification.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_evaluation.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_feature.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_function.py +114 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_pipeline.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_summarizer.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_tuning.py +46 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py +238 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py +194 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py +156 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py +78 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py +292 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py +152 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_algorithms.py +456 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_base.py +96 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_dl_util.py +186 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_evaluation.py +77 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_feature.py +401 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_functions.py +528 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_image.py +82 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_linalg.py +409 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_model_cache.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_param.py +441 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_persistence.py +546 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_pipeline.py +71 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_stat.py +52 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_training_summary.py +494 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_util.py +85 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_wrapper.py +138 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_basic.py +151 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_nested.py +97 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_pipeline.py +143 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tuning.py +551 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_basic.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_nested.py +96 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_pipeline.py +142 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/data.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/distributor.py +1133 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/log_communication.py +198 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_data_loader.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_distributor.py +561 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_log_communication.py +172 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/torch_run_process_wrapper.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tree.py +434 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tuning.py +1741 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/util.py +749 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/wrapper.py +465 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/__init__.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/_typing.pyi +33 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/classification.py +989 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/clustering.py +1318 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/common.py +174 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/evaluation.py +691 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/feature.py +1085 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/fpm.py +233 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/linalg/__init__.py +1653 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/linalg/distributed.py +1662 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/random.py +698 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/recommendation.py +389 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/regression.py +1067 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/KernelDensity.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/__init__.py +34 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/_statistics.py +409 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/distribution.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/test.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_algorithms.py +353 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_feature.py +192 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_linalg.py +680 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_stat.py +206 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_streaming_algorithms.py +471 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_util.py +108 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tree.py +888 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/util.py +659 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/__init__.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/_typing.py +52 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/accessors.py +989 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/base.py +1804 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/categorical.py +822 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/config.py +539 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/correlation.py +262 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/base.py +519 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/binary_ops.py +98 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/boolean_ops.py +426 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/categorical_ops.py +141 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/complex_ops.py +145 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/date_ops.py +127 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/datetime_ops.py +171 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/null_ops.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/num_ops.py +588 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/string_ops.py +154 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/timedelta_ops.py +101 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/udt_ops.py +29 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/datetimes.py +891 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/exceptions.py +150 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/extensions.py +388 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/frame.py +13738 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/generic.py +3560 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/groupby.py +4448 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/base.py +2783 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/category.py +773 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/datetimes.py +843 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/multi.py +1323 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/numeric.py +210 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/timedelta.py +197 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexing.py +1862 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/internal.py +1680 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/__init__.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/common.py +76 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/frame.py +63 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/general_functions.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/groupby.py +93 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/indexes.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/resample.py +101 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/scalars.py +29 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/series.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/window.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/mlflow.py +238 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/namespace.py +3807 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/numpy_compat.py +260 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/__init__.py +17 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/core.py +1213 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/matplotlib.py +928 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/plotly.py +261 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/resample.py +816 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/series.py +7440 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/sql_formatter.py +308 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/sql_processor.py +394 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/strings.py +2371 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/supported_api_gen.py +378 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_any_all.py +177 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_apply_func.py +575 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_binary_ops.py +235 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_combine.py +653 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_compute.py +463 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_corrwith.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cov.py +151 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cumulative.py +139 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_describe.py +458 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_eval.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_melt.py +202 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_missing_data.py +520 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_pivot.py +361 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_any_all.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_apply_func.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_binary_ops.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_combine.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_compute.py +60 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_corrwith.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cov.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cumulative.py +90 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_describe.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_eval.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_melt.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_missing_data.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_pivot.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_base.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_binary_ops.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_boolean_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_categorical_ops.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_complex_ops.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_date_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_datetime_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_null_ops.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_arithmetic.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_reverse.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_string_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_timedelta_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_udt_ops.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/testing_utils.py +226 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_align.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_basic_slow.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_cov_corrwith.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_frame.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_series.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_index.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_series.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_frame.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_series.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_attrs.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_constructor.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_conversion.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reindexing.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reshaping.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_spark.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_take.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_time_series.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_truncate.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_aggregate.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_apply_func.py +41 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_cumulative.py +67 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_describe.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_groupby.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_head_tail.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_index.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_missing_data.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_split_apply.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_stat.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_align.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_base.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_category.py +73 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reindex.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_rename.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reset_index.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_timedelta.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/test_parity_io.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_matplotlib.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_plotly.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_matplotlib.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_plotly.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_all_any.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_arg_ops.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_of.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_type.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_compute.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_conversion.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_cumulative.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_index.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_missing_data.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_series.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_sort.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_stat.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_categorical.py +66 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_config.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_csv.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_conversion.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_spark_io.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_default_index.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ewm.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_expanding.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_extension.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_frame_spark.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_generic_functions.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexing.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexops_spark.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_internal.py +41 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_namespace.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_numpy_compat.py +60 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_expanding.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_rolling.py +84 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_repr.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_resample.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_reshape.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_rolling.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_scalars.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_conversion.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_datetime.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_string.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_spark_functions.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_sql.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_stats.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_typedef.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_utils.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_window.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_base.py +107 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_binary_ops.py +224 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_boolean_ops.py +825 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_categorical_ops.py +562 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_complex_ops.py +368 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_date_ops.py +257 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_datetime_ops.py +260 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_null_ops.py +178 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_arithmetic.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py +497 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_reverse.py +140 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_string_ops.py +354 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_timedelta_ops.py +219 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_udt_ops.py +192 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/testing_utils.py +228 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_align.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_basic_slow.py +198 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_cov_corrwith.py +181 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_frame.py +103 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_series.py +141 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_index.py +109 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_series.py +136 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_frame.py +125 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_series.py +217 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_attrs.py +384 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_constructor.py +598 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_conversion.py +73 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reindexing.py +869 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reshaping.py +487 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_spark.py +309 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_take.py +156 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_time_series.py +149 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_truncate.py +163 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_aggregate.py +311 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_apply_func.py +524 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_cumulative.py +419 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_describe.py +144 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_groupby.py +979 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_head_tail.py +234 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_index.py +206 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_missing_data.py +421 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_split_apply.py +187 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_stat.py +397 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_align.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_base.py +2743 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_category.py +484 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_datetime.py +276 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_indexing.py +432 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reindex.py +310 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_rename.py +257 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reset_index.py +160 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_timedelta.py +128 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/test_io.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot.py +170 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_matplotlib.py +547 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_plotly.py +285 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot.py +106 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_matplotlib.py +409 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_plotly.py +247 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_all_any.py +105 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_arg_ops.py +197 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_of.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_type.py +227 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_compute.py +634 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_conversion.py +88 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_cumulative.py +139 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_index.py +475 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_missing_data.py +265 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_series.py +818 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_sort.py +162 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_stat.py +780 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_categorical.py +741 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_config.py +160 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_csv.py +453 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_conversion.py +281 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_spark_io.py +487 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_default_index.py +109 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ewm.py +434 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_expanding.py +253 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_extension.py +152 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_frame_spark.py +162 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_generic_functions.py +234 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexing.py +1339 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexops_spark.py +82 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_internal.py +124 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_namespace.py +638 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_numpy_compat.py +200 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames.py +1355 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby.py +655 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_expanding.py +113 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_rolling.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_repr.py +192 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_resample.py +346 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_reshape.py +495 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_rolling.py +263 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_scalars.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_conversion.py +85 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_datetime.py +364 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_string.py +362 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_spark_functions.py +46 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_sql.py +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_stats.py +581 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_typedef.py +447 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_utils.py +301 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_window.py +465 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/typedef/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/typedef/typehints.py +874 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/usage_logging/__init__.py +143 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/usage_logging/usage_logger.py +132 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/utils.py +1063 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/window.py +2702 -0
- snowflake/snowpark_connect/includes/python/pyspark/profiler.py +489 -0
- snowflake/snowpark_connect/includes/python/pyspark/py.typed +1 -0
- snowflake/snowpark_connect/includes/python/pyspark/python/pyspark/shell.py +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/rdd.py +5518 -0
- snowflake/snowpark_connect/includes/python/pyspark/rddsampler.py +115 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/__init__.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/information.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/profile.py +317 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/requests.py +539 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/tests/test_resources.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/resultiterable.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/serializers.py +681 -0
- snowflake/snowpark_connect/includes/python/pyspark/shell.py +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/shuffle.py +854 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/__init__.py +75 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/_typing.pyi +80 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/avro/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/avro/functions.py +188 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/catalog.py +1270 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/column.py +1431 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/conf.py +99 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/_typing.py +90 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/avro/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/avro/functions.py +107 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/catalog.py +356 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/__init__.py +22 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/artifact.py +412 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/core.py +1689 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/reattach.py +340 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/column.py +514 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/conf.py +128 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/conversion.py +490 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/dataframe.py +2172 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/expressions.py +1056 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/functions.py +3937 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/group.py +418 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/plan.py +2289 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/__init__.py +25 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/base_pb2.py +203 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/base_pb2.pyi +2718 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/base_pb2_grpc.py +423 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/catalog_pb2.py +109 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/catalog_pb2.pyi +1130 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/commands_pb2.py +141 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/commands_pb2.pyi +1766 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/common_pb2.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/common_pb2.pyi +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/example_plugins_pb2.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/example_plugins_pb2.pyi +112 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/expressions_pb2.py +107 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/expressions_pb2.pyi +1507 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/relations_pb2.py +195 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/relations_pb2.pyi +3613 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/types_pb2.py +95 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/types_pb2.pyi +980 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/protobuf/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/protobuf/functions.py +166 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/readwriter.py +861 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/session.py +952 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/__init__.py +22 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/query.py +295 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/readwriter.py +618 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/worker/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/worker/foreach_batch_worker.py +87 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/worker/listener_worker.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/types.py +301 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/udf.py +296 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/udtf.py +200 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/utils.py +58 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/window.py +266 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/context.py +818 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/dataframe.py +5973 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/functions.py +15889 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/group.py +547 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/observation.py +152 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/__init__.pyi +344 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/protocols/__init__.pyi +17 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/protocols/frame.pyi +20 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/protocols/series.pyi +20 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/conversion.py +671 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/functions.py +480 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/functions.pyi +132 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/group_ops.py +523 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/map_ops.py +216 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/serializers.py +1019 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/typehints.py +172 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/types.py +972 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/utils.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/protobuf/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/protobuf/functions.py +334 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/readwriter.py +2159 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/session.py +2088 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/sql_formatter.py +84 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/listener.py +1050 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/query.py +746 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/readwriter.py +1652 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/state.py +288 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_artifact.py +420 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_client.py +358 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach_batch.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_listener.py +116 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_streaming.py +35 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_basic.py +3612 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_column.py +1042 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_function.py +2381 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_plan.py +1060 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow.py +163 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_map.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_python_udf.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_catalog.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_column.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_conf.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_dataframe.py +96 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_datasources.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_errors.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_functions.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_group.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_cogrouped_map.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map.py +74 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map_with_state.py +62 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_map.py +58 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf.py +70 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_grouped_agg.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_scalar.py +68 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_window.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_readwriter.py +46 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_serde.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_types.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udf.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udtf.py +163 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_session.py +181 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_utils.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py +623 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py +869 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py +342 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_map.py +436 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf.py +363 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py +592 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py +1503 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py +392 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py +375 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py +411 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming.py +401 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach.py +295 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach_batch.py +106 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_listener.py +558 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow.py +1346 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_map.py +182 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_python_udf.py +202 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_catalog.py +503 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_column.py +225 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_conf.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_context.py +201 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_dataframe.py +1931 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_datasources.py +256 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_errors.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_functions.py +1349 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_group.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_pandas_sqlmetrics.py +68 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_readwriter.py +283 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_serde.py +155 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_session.py +412 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_types.py +1581 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf.py +961 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf_profiler.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udtf.py +1456 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_utils.py +1686 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/types.py +2558 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/udf.py +714 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/udtf.py +325 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/utils.py +339 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/window.py +492 -0
- snowflake/snowpark_connect/includes/python/pyspark/statcounter.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/status.py +112 -0
- snowflake/snowpark_connect/includes/python/pyspark/storagelevel.py +97 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/__init__.py +22 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/context.py +471 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/dstream.py +933 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/kinesis.py +205 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/listener.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_context.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_dstream.py +706 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_kinesis.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_listener.py +160 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/util.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/taskcontext.py +502 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/connectutils.py +199 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/mllibutils.py +30 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/mlutils.py +275 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/objects.py +121 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/pandasutils.py +714 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/sqlutils.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/streamingutils.py +178 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/utils.py +636 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_appsubmit.py +306 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_broadcast.py +196 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_conf.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_context.py +346 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_daemon.py +89 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_install_spark.py +124 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_join.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_memory_profiler.py +167 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_pin_thread.py +194 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_profiler.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_rdd.py +939 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddbarrier.py +52 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddsampler.py +66 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_readwrite.py +368 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_serializers.py +257 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_shuffle.py +267 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_stage_sched.py +153 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_statcounter.py +130 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_taskcontext.py +350 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_util.py +97 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_worker.py +271 -0
- snowflake/snowpark_connect/includes/python/pyspark/traceback_utils.py +81 -0
- snowflake/snowpark_connect/includes/python/pyspark/util.py +416 -0
- snowflake/snowpark_connect/includes/python/pyspark/version.py +19 -0
- snowflake/snowpark_connect/includes/python/pyspark/worker.py +1307 -0
- snowflake/snowpark_connect/includes/python/pyspark/worker_util.py +46 -0
- snowflake/snowpark_connect/proto/__init__.py +10 -0
- snowflake/snowpark_connect/proto/control_pb2.py +35 -0
- snowflake/snowpark_connect/proto/control_pb2.pyi +38 -0
- snowflake/snowpark_connect/proto/control_pb2_grpc.py +183 -0
- snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2.py +35 -0
- snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2.pyi +53 -0
- snowflake/snowpark_connect/proto/snowflake_rdd_pb2.pyi +39 -0
- snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.py +47 -0
- snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.pyi +111 -0
- snowflake/snowpark_connect/relation/__init__.py +3 -0
- snowflake/snowpark_connect/relation/catalogs/__init__.py +12 -0
- snowflake/snowpark_connect/relation/catalogs/abstract_spark_catalog.py +287 -0
- snowflake/snowpark_connect/relation/catalogs/snowflake_catalog.py +467 -0
- snowflake/snowpark_connect/relation/catalogs/utils.py +51 -0
- snowflake/snowpark_connect/relation/io_utils.py +76 -0
- snowflake/snowpark_connect/relation/map_aggregate.py +322 -0
- snowflake/snowpark_connect/relation/map_catalog.py +151 -0
- snowflake/snowpark_connect/relation/map_column_ops.py +1068 -0
- snowflake/snowpark_connect/relation/map_crosstab.py +48 -0
- snowflake/snowpark_connect/relation/map_extension.py +412 -0
- snowflake/snowpark_connect/relation/map_join.py +341 -0
- snowflake/snowpark_connect/relation/map_local_relation.py +326 -0
- snowflake/snowpark_connect/relation/map_map_partitions.py +146 -0
- snowflake/snowpark_connect/relation/map_relation.py +253 -0
- snowflake/snowpark_connect/relation/map_row_ops.py +716 -0
- snowflake/snowpark_connect/relation/map_sample_by.py +35 -0
- snowflake/snowpark_connect/relation/map_show_string.py +50 -0
- snowflake/snowpark_connect/relation/map_sql.py +1874 -0
- snowflake/snowpark_connect/relation/map_stats.py +324 -0
- snowflake/snowpark_connect/relation/map_subquery_alias.py +32 -0
- snowflake/snowpark_connect/relation/map_udtf.py +288 -0
- snowflake/snowpark_connect/relation/read/__init__.py +7 -0
- snowflake/snowpark_connect/relation/read/jdbc_read_dbapi.py +668 -0
- snowflake/snowpark_connect/relation/read/map_read.py +367 -0
- snowflake/snowpark_connect/relation/read/map_read_csv.py +142 -0
- snowflake/snowpark_connect/relation/read/map_read_jdbc.py +108 -0
- snowflake/snowpark_connect/relation/read/map_read_json.py +344 -0
- snowflake/snowpark_connect/relation/read/map_read_parquet.py +194 -0
- snowflake/snowpark_connect/relation/read/map_read_socket.py +59 -0
- snowflake/snowpark_connect/relation/read/map_read_table.py +109 -0
- snowflake/snowpark_connect/relation/read/map_read_text.py +106 -0
- snowflake/snowpark_connect/relation/read/reader_config.py +399 -0
- snowflake/snowpark_connect/relation/read/utils.py +155 -0
- snowflake/snowpark_connect/relation/stage_locator.py +161 -0
- snowflake/snowpark_connect/relation/utils.py +219 -0
- snowflake/snowpark_connect/relation/write/__init__.py +3 -0
- snowflake/snowpark_connect/relation/write/jdbc_write_dbapi.py +339 -0
- snowflake/snowpark_connect/relation/write/map_write.py +436 -0
- snowflake/snowpark_connect/relation/write/map_write_jdbc.py +48 -0
- snowflake/snowpark_connect/resources/java_udfs-1.0-SNAPSHOT.jar +0 -0
- snowflake/snowpark_connect/resources_initializer.py +75 -0
- snowflake/snowpark_connect/server.py +1136 -0
- snowflake/snowpark_connect/start_server.py +32 -0
- snowflake/snowpark_connect/tcm.py +8 -0
- snowflake/snowpark_connect/type_mapping.py +1003 -0
- snowflake/snowpark_connect/typed_column.py +94 -0
- snowflake/snowpark_connect/utils/__init__.py +3 -0
- snowflake/snowpark_connect/utils/artifacts.py +48 -0
- snowflake/snowpark_connect/utils/attribute_handling.py +72 -0
- snowflake/snowpark_connect/utils/cache.py +84 -0
- snowflake/snowpark_connect/utils/concurrent.py +124 -0
- snowflake/snowpark_connect/utils/context.py +390 -0
- snowflake/snowpark_connect/utils/describe_query_cache.py +231 -0
- snowflake/snowpark_connect/utils/interrupt.py +85 -0
- snowflake/snowpark_connect/utils/io_utils.py +35 -0
- snowflake/snowpark_connect/utils/pandas_udtf_utils.py +117 -0
- snowflake/snowpark_connect/utils/profiling.py +47 -0
- snowflake/snowpark_connect/utils/session.py +180 -0
- snowflake/snowpark_connect/utils/snowpark_connect_logging.py +38 -0
- snowflake/snowpark_connect/utils/telemetry.py +513 -0
- snowflake/snowpark_connect/utils/udf_cache.py +392 -0
- snowflake/snowpark_connect/utils/udf_helper.py +328 -0
- snowflake/snowpark_connect/utils/udf_utils.py +310 -0
- snowflake/snowpark_connect/utils/udtf_helper.py +420 -0
- snowflake/snowpark_connect/utils/udtf_utils.py +799 -0
- snowflake/snowpark_connect/utils/xxhash64.py +247 -0
- snowflake/snowpark_connect/version.py +6 -0
- snowpark_connect-0.20.2.data/scripts/snowpark-connect +71 -0
- snowpark_connect-0.20.2.data/scripts/snowpark-session +11 -0
- snowpark_connect-0.20.2.data/scripts/snowpark-submit +354 -0
- snowpark_connect-0.20.2.dist-info/METADATA +37 -0
- snowpark_connect-0.20.2.dist-info/RECORD +879 -0
- snowpark_connect-0.20.2.dist-info/WHEEL +5 -0
- snowpark_connect-0.20.2.dist-info/licenses/LICENSE.txt +202 -0
- snowpark_connect-0.20.2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,2718 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
3
|
+
# contributor license agreements. See the NOTICE file distributed with
|
|
4
|
+
# this work for additional information regarding copyright ownership.
|
|
5
|
+
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
6
|
+
# (the "License"); you may not use this file except in compliance with
|
|
7
|
+
# the License. You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
14
|
+
# See the License for the specific language governing permissions and
|
|
15
|
+
# limitations under the License.
|
|
16
|
+
#
|
|
17
|
+
"""
|
|
18
|
+
@generated by mypy-protobuf. Do not edit manually!
|
|
19
|
+
isort:skip_file
|
|
20
|
+
|
|
21
|
+
Licensed to the Apache Software Foundation (ASF) under one or more
|
|
22
|
+
contributor license agreements. See the NOTICE file distributed with
|
|
23
|
+
this work for additional information regarding copyright ownership.
|
|
24
|
+
The ASF licenses this file to You under the Apache License, Version 2.0
|
|
25
|
+
(the "License"); you may not use this file except in compliance with
|
|
26
|
+
the License. You may obtain a copy of the License at
|
|
27
|
+
|
|
28
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
29
|
+
|
|
30
|
+
Unless required by applicable law or agreed to in writing, software
|
|
31
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
|
32
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
33
|
+
See the License for the specific language governing permissions and
|
|
34
|
+
limitations under the License.
|
|
35
|
+
"""
|
|
36
|
+
import builtins
|
|
37
|
+
import collections.abc
|
|
38
|
+
import google.protobuf.any_pb2
|
|
39
|
+
import google.protobuf.descriptor
|
|
40
|
+
import google.protobuf.internal.containers
|
|
41
|
+
import google.protobuf.internal.enum_type_wrapper
|
|
42
|
+
import google.protobuf.message
|
|
43
|
+
import pyspark.sql.connect.proto.commands_pb2
|
|
44
|
+
import pyspark.sql.connect.proto.common_pb2
|
|
45
|
+
import pyspark.sql.connect.proto.expressions_pb2
|
|
46
|
+
import pyspark.sql.connect.proto.relations_pb2
|
|
47
|
+
import pyspark.sql.connect.proto.types_pb2
|
|
48
|
+
import sys
|
|
49
|
+
import typing
|
|
50
|
+
|
|
51
|
+
if sys.version_info >= (3, 10):
|
|
52
|
+
import typing as typing_extensions
|
|
53
|
+
else:
|
|
54
|
+
import typing_extensions
|
|
55
|
+
|
|
56
|
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
|
57
|
+
|
|
58
|
+
class Plan(google.protobuf.message.Message):
|
|
59
|
+
"""A [[Plan]] is the structure that carries the runtime information for the execution from the
|
|
60
|
+
client to the server. A [[Plan]] can either be of the type [[Relation]] which is a reference
|
|
61
|
+
to the underlying logical plan or it can be of the [[Command]] type that is used to execute
|
|
62
|
+
commands on the server.
|
|
63
|
+
"""
|
|
64
|
+
|
|
65
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
66
|
+
|
|
67
|
+
ROOT_FIELD_NUMBER: builtins.int
|
|
68
|
+
COMMAND_FIELD_NUMBER: builtins.int
|
|
69
|
+
@property
|
|
70
|
+
def root(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: ...
|
|
71
|
+
@property
|
|
72
|
+
def command(self) -> pyspark.sql.connect.proto.commands_pb2.Command: ...
|
|
73
|
+
def __init__(
|
|
74
|
+
self,
|
|
75
|
+
*,
|
|
76
|
+
root: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
|
|
77
|
+
command: pyspark.sql.connect.proto.commands_pb2.Command | None = ...,
|
|
78
|
+
) -> None: ...
|
|
79
|
+
def HasField(
|
|
80
|
+
self,
|
|
81
|
+
field_name: typing_extensions.Literal[
|
|
82
|
+
"command", b"command", "op_type", b"op_type", "root", b"root"
|
|
83
|
+
],
|
|
84
|
+
) -> builtins.bool: ...
|
|
85
|
+
def ClearField(
|
|
86
|
+
self,
|
|
87
|
+
field_name: typing_extensions.Literal[
|
|
88
|
+
"command", b"command", "op_type", b"op_type", "root", b"root"
|
|
89
|
+
],
|
|
90
|
+
) -> None: ...
|
|
91
|
+
def WhichOneof(
|
|
92
|
+
self, oneof_group: typing_extensions.Literal["op_type", b"op_type"]
|
|
93
|
+
) -> typing_extensions.Literal["root", "command"] | None: ...
|
|
94
|
+
|
|
95
|
+
global___Plan = Plan
|
|
96
|
+
|
|
97
|
+
class UserContext(google.protobuf.message.Message):
|
|
98
|
+
"""User Context is used to refer to one particular user session that is executing
|
|
99
|
+
queries in the backend.
|
|
100
|
+
"""
|
|
101
|
+
|
|
102
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
103
|
+
|
|
104
|
+
USER_ID_FIELD_NUMBER: builtins.int
|
|
105
|
+
USER_NAME_FIELD_NUMBER: builtins.int
|
|
106
|
+
EXTENSIONS_FIELD_NUMBER: builtins.int
|
|
107
|
+
user_id: builtins.str
|
|
108
|
+
user_name: builtins.str
|
|
109
|
+
@property
|
|
110
|
+
def extensions(
|
|
111
|
+
self,
|
|
112
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
|
|
113
|
+
google.protobuf.any_pb2.Any
|
|
114
|
+
]:
|
|
115
|
+
"""To extend the existing user context message that is used to identify incoming requests,
|
|
116
|
+
Spark Connect leverages the Any protobuf type that can be used to inject arbitrary other
|
|
117
|
+
messages into this message. Extensions are stored as a `repeated` type to be able to
|
|
118
|
+
handle multiple active extensions.
|
|
119
|
+
"""
|
|
120
|
+
def __init__(
|
|
121
|
+
self,
|
|
122
|
+
*,
|
|
123
|
+
user_id: builtins.str = ...,
|
|
124
|
+
user_name: builtins.str = ...,
|
|
125
|
+
extensions: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...,
|
|
126
|
+
) -> None: ...
|
|
127
|
+
def ClearField(
|
|
128
|
+
self,
|
|
129
|
+
field_name: typing_extensions.Literal[
|
|
130
|
+
"extensions", b"extensions", "user_id", b"user_id", "user_name", b"user_name"
|
|
131
|
+
],
|
|
132
|
+
) -> None: ...
|
|
133
|
+
|
|
134
|
+
global___UserContext = UserContext
|
|
135
|
+
|
|
136
|
+
class AnalyzePlanRequest(google.protobuf.message.Message):
|
|
137
|
+
"""Request to perform plan analyze, optionally to explain the plan."""
|
|
138
|
+
|
|
139
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
140
|
+
|
|
141
|
+
class Schema(google.protobuf.message.Message):
|
|
142
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
143
|
+
|
|
144
|
+
PLAN_FIELD_NUMBER: builtins.int
|
|
145
|
+
@property
|
|
146
|
+
def plan(self) -> global___Plan:
|
|
147
|
+
"""(Required) The logical plan to be analyzed."""
|
|
148
|
+
def __init__(
|
|
149
|
+
self,
|
|
150
|
+
*,
|
|
151
|
+
plan: global___Plan | None = ...,
|
|
152
|
+
) -> None: ...
|
|
153
|
+
def HasField(
|
|
154
|
+
self, field_name: typing_extensions.Literal["plan", b"plan"]
|
|
155
|
+
) -> builtins.bool: ...
|
|
156
|
+
def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ...
|
|
157
|
+
|
|
158
|
+
class Explain(google.protobuf.message.Message):
|
|
159
|
+
"""Explains the input plan based on a configurable mode."""
|
|
160
|
+
|
|
161
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
162
|
+
|
|
163
|
+
class _ExplainMode:
|
|
164
|
+
ValueType = typing.NewType("ValueType", builtins.int)
|
|
165
|
+
V: typing_extensions.TypeAlias = ValueType
|
|
166
|
+
|
|
167
|
+
class _ExplainModeEnumTypeWrapper(
|
|
168
|
+
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
|
|
169
|
+
AnalyzePlanRequest.Explain._ExplainMode.ValueType
|
|
170
|
+
],
|
|
171
|
+
builtins.type,
|
|
172
|
+
): # noqa: F821
|
|
173
|
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
|
174
|
+
EXPLAIN_MODE_UNSPECIFIED: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 0
|
|
175
|
+
EXPLAIN_MODE_SIMPLE: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 1
|
|
176
|
+
"""Generates only physical plan."""
|
|
177
|
+
EXPLAIN_MODE_EXTENDED: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 2
|
|
178
|
+
"""Generates parsed logical plan, analyzed logical plan, optimized logical plan and physical plan.
|
|
179
|
+
Parsed Logical plan is a unresolved plan that extracted from the query. Analyzed logical plans
|
|
180
|
+
transforms which translates unresolvedAttribute and unresolvedRelation into fully typed objects.
|
|
181
|
+
The optimized logical plan transforms through a set of optimization rules, resulting in the
|
|
182
|
+
physical plan.
|
|
183
|
+
"""
|
|
184
|
+
EXPLAIN_MODE_CODEGEN: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 3
|
|
185
|
+
"""Generates code for the statement, if any and a physical plan."""
|
|
186
|
+
EXPLAIN_MODE_COST: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 4
|
|
187
|
+
"""If plan node statistics are available, generates a logical plan and also the statistics."""
|
|
188
|
+
EXPLAIN_MODE_FORMATTED: AnalyzePlanRequest.Explain._ExplainMode.ValueType # 5
|
|
189
|
+
"""Generates a physical plan outline and also node details."""
|
|
190
|
+
|
|
191
|
+
class ExplainMode(_ExplainMode, metaclass=_ExplainModeEnumTypeWrapper):
|
|
192
|
+
"""Plan explanation mode."""
|
|
193
|
+
|
|
194
|
+
EXPLAIN_MODE_UNSPECIFIED: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 0
|
|
195
|
+
EXPLAIN_MODE_SIMPLE: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 1
|
|
196
|
+
"""Generates only physical plan."""
|
|
197
|
+
EXPLAIN_MODE_EXTENDED: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 2
|
|
198
|
+
"""Generates parsed logical plan, analyzed logical plan, optimized logical plan and physical plan.
|
|
199
|
+
Parsed Logical plan is a unresolved plan that extracted from the query. Analyzed logical plans
|
|
200
|
+
transforms which translates unresolvedAttribute and unresolvedRelation into fully typed objects.
|
|
201
|
+
The optimized logical plan transforms through a set of optimization rules, resulting in the
|
|
202
|
+
physical plan.
|
|
203
|
+
"""
|
|
204
|
+
EXPLAIN_MODE_CODEGEN: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 3
|
|
205
|
+
"""Generates code for the statement, if any and a physical plan."""
|
|
206
|
+
EXPLAIN_MODE_COST: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 4
|
|
207
|
+
"""If plan node statistics are available, generates a logical plan and also the statistics."""
|
|
208
|
+
EXPLAIN_MODE_FORMATTED: AnalyzePlanRequest.Explain.ExplainMode.ValueType # 5
|
|
209
|
+
"""Generates a physical plan outline and also node details."""
|
|
210
|
+
|
|
211
|
+
PLAN_FIELD_NUMBER: builtins.int
|
|
212
|
+
EXPLAIN_MODE_FIELD_NUMBER: builtins.int
|
|
213
|
+
@property
|
|
214
|
+
def plan(self) -> global___Plan:
|
|
215
|
+
"""(Required) The logical plan to be analyzed."""
|
|
216
|
+
explain_mode: global___AnalyzePlanRequest.Explain.ExplainMode.ValueType
|
|
217
|
+
"""(Required) For analyzePlan rpc calls, configure the mode to explain plan in strings."""
|
|
218
|
+
def __init__(
|
|
219
|
+
self,
|
|
220
|
+
*,
|
|
221
|
+
plan: global___Plan | None = ...,
|
|
222
|
+
explain_mode: global___AnalyzePlanRequest.Explain.ExplainMode.ValueType = ...,
|
|
223
|
+
) -> None: ...
|
|
224
|
+
def HasField(
|
|
225
|
+
self, field_name: typing_extensions.Literal["plan", b"plan"]
|
|
226
|
+
) -> builtins.bool: ...
|
|
227
|
+
def ClearField(
|
|
228
|
+
self,
|
|
229
|
+
field_name: typing_extensions.Literal["explain_mode", b"explain_mode", "plan", b"plan"],
|
|
230
|
+
) -> None: ...
|
|
231
|
+
|
|
232
|
+
class TreeString(google.protobuf.message.Message):
|
|
233
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
234
|
+
|
|
235
|
+
PLAN_FIELD_NUMBER: builtins.int
|
|
236
|
+
LEVEL_FIELD_NUMBER: builtins.int
|
|
237
|
+
@property
|
|
238
|
+
def plan(self) -> global___Plan:
|
|
239
|
+
"""(Required) The logical plan to be analyzed."""
|
|
240
|
+
level: builtins.int
|
|
241
|
+
"""(Optional) Max level of the schema."""
|
|
242
|
+
def __init__(
|
|
243
|
+
self,
|
|
244
|
+
*,
|
|
245
|
+
plan: global___Plan | None = ...,
|
|
246
|
+
level: builtins.int | None = ...,
|
|
247
|
+
) -> None: ...
|
|
248
|
+
def HasField(
|
|
249
|
+
self,
|
|
250
|
+
field_name: typing_extensions.Literal[
|
|
251
|
+
"_level", b"_level", "level", b"level", "plan", b"plan"
|
|
252
|
+
],
|
|
253
|
+
) -> builtins.bool: ...
|
|
254
|
+
def ClearField(
|
|
255
|
+
self,
|
|
256
|
+
field_name: typing_extensions.Literal[
|
|
257
|
+
"_level", b"_level", "level", b"level", "plan", b"plan"
|
|
258
|
+
],
|
|
259
|
+
) -> None: ...
|
|
260
|
+
def WhichOneof(
|
|
261
|
+
self, oneof_group: typing_extensions.Literal["_level", b"_level"]
|
|
262
|
+
) -> typing_extensions.Literal["level"] | None: ...
|
|
263
|
+
|
|
264
|
+
class IsLocal(google.protobuf.message.Message):
|
|
265
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
266
|
+
|
|
267
|
+
PLAN_FIELD_NUMBER: builtins.int
|
|
268
|
+
@property
|
|
269
|
+
def plan(self) -> global___Plan:
|
|
270
|
+
"""(Required) The logical plan to be analyzed."""
|
|
271
|
+
def __init__(
|
|
272
|
+
self,
|
|
273
|
+
*,
|
|
274
|
+
plan: global___Plan | None = ...,
|
|
275
|
+
) -> None: ...
|
|
276
|
+
def HasField(
|
|
277
|
+
self, field_name: typing_extensions.Literal["plan", b"plan"]
|
|
278
|
+
) -> builtins.bool: ...
|
|
279
|
+
def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ...
|
|
280
|
+
|
|
281
|
+
class IsStreaming(google.protobuf.message.Message):
|
|
282
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
283
|
+
|
|
284
|
+
PLAN_FIELD_NUMBER: builtins.int
|
|
285
|
+
@property
|
|
286
|
+
def plan(self) -> global___Plan:
|
|
287
|
+
"""(Required) The logical plan to be analyzed."""
|
|
288
|
+
def __init__(
|
|
289
|
+
self,
|
|
290
|
+
*,
|
|
291
|
+
plan: global___Plan | None = ...,
|
|
292
|
+
) -> None: ...
|
|
293
|
+
def HasField(
|
|
294
|
+
self, field_name: typing_extensions.Literal["plan", b"plan"]
|
|
295
|
+
) -> builtins.bool: ...
|
|
296
|
+
def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ...
|
|
297
|
+
|
|
298
|
+
class InputFiles(google.protobuf.message.Message):
|
|
299
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
300
|
+
|
|
301
|
+
PLAN_FIELD_NUMBER: builtins.int
|
|
302
|
+
@property
|
|
303
|
+
def plan(self) -> global___Plan:
|
|
304
|
+
"""(Required) The logical plan to be analyzed."""
|
|
305
|
+
def __init__(
|
|
306
|
+
self,
|
|
307
|
+
*,
|
|
308
|
+
plan: global___Plan | None = ...,
|
|
309
|
+
) -> None: ...
|
|
310
|
+
def HasField(
|
|
311
|
+
self, field_name: typing_extensions.Literal["plan", b"plan"]
|
|
312
|
+
) -> builtins.bool: ...
|
|
313
|
+
def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ...
|
|
314
|
+
|
|
315
|
+
class SparkVersion(google.protobuf.message.Message):
|
|
316
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
317
|
+
|
|
318
|
+
def __init__(
|
|
319
|
+
self,
|
|
320
|
+
) -> None: ...
|
|
321
|
+
|
|
322
|
+
class DDLParse(google.protobuf.message.Message):
|
|
323
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
324
|
+
|
|
325
|
+
DDL_STRING_FIELD_NUMBER: builtins.int
|
|
326
|
+
ddl_string: builtins.str
|
|
327
|
+
"""(Required) The DDL formatted string to be parsed."""
|
|
328
|
+
def __init__(
|
|
329
|
+
self,
|
|
330
|
+
*,
|
|
331
|
+
ddl_string: builtins.str = ...,
|
|
332
|
+
) -> None: ...
|
|
333
|
+
def ClearField(
|
|
334
|
+
self, field_name: typing_extensions.Literal["ddl_string", b"ddl_string"]
|
|
335
|
+
) -> None: ...
|
|
336
|
+
|
|
337
|
+
class SameSemantics(google.protobuf.message.Message):
|
|
338
|
+
"""Returns `true` when the logical query plans are equal and therefore return same results."""
|
|
339
|
+
|
|
340
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
341
|
+
|
|
342
|
+
TARGET_PLAN_FIELD_NUMBER: builtins.int
|
|
343
|
+
OTHER_PLAN_FIELD_NUMBER: builtins.int
|
|
344
|
+
@property
|
|
345
|
+
def target_plan(self) -> global___Plan:
|
|
346
|
+
"""(Required) The plan to be compared."""
|
|
347
|
+
@property
|
|
348
|
+
def other_plan(self) -> global___Plan:
|
|
349
|
+
"""(Required) The other plan to be compared."""
|
|
350
|
+
def __init__(
|
|
351
|
+
self,
|
|
352
|
+
*,
|
|
353
|
+
target_plan: global___Plan | None = ...,
|
|
354
|
+
other_plan: global___Plan | None = ...,
|
|
355
|
+
) -> None: ...
|
|
356
|
+
def HasField(
|
|
357
|
+
self,
|
|
358
|
+
field_name: typing_extensions.Literal[
|
|
359
|
+
"other_plan", b"other_plan", "target_plan", b"target_plan"
|
|
360
|
+
],
|
|
361
|
+
) -> builtins.bool: ...
|
|
362
|
+
def ClearField(
|
|
363
|
+
self,
|
|
364
|
+
field_name: typing_extensions.Literal[
|
|
365
|
+
"other_plan", b"other_plan", "target_plan", b"target_plan"
|
|
366
|
+
],
|
|
367
|
+
) -> None: ...
|
|
368
|
+
|
|
369
|
+
class SemanticHash(google.protobuf.message.Message):
    """Request payload carrying the logical plan for which a hashCode is computed."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PLAN_FIELD_NUMBER: builtins.int
    @property
    def plan(self) -> global___Plan:
        """(Required) The logical plan to get a hashCode."""
    def __init__(
        self,
        *,
        plan: global___Plan | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["plan", b"plan"]
    ) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["plan", b"plan"]) -> None: ...
|
|
385
|
+
|
|
386
|
+
class Persist(google.protobuf.message.Message):
    """Request to persist a relation, optionally with an explicit storage level."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RELATION_FIELD_NUMBER: builtins.int
    STORAGE_LEVEL_FIELD_NUMBER: builtins.int
    @property
    def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation:
        """(Required) The logical plan to persist."""
    @property
    def storage_level(self) -> pyspark.sql.connect.proto.common_pb2.StorageLevel:
        """(Optional) The storage level."""
    def __init__(
        self,
        *,
        relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
        storage_level: pyspark.sql.connect.proto.common_pb2.StorageLevel | None = ...,
    ) -> None: ...
    # "_storage_level" is the synthetic oneof wrapping the optional
    # storage_level field (see WhichOneof below).
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_storage_level",
            b"_storage_level",
            "relation",
            b"relation",
            "storage_level",
            b"storage_level",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_storage_level",
            b"_storage_level",
            "relation",
            b"relation",
            "storage_level",
            b"storage_level",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_storage_level", b"_storage_level"]
    ) -> typing_extensions.Literal["storage_level"] | None: ...
|
|
428
|
+
|
|
429
|
+
class Unpersist(google.protobuf.message.Message):
    """Request to unpersist a relation, optionally blocking until blocks are freed."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RELATION_FIELD_NUMBER: builtins.int
    BLOCKING_FIELD_NUMBER: builtins.int
    @property
    def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation:
        """(Required) The logical plan to unpersist."""
    blocking: builtins.bool
    """(Optional) Whether to block until all blocks are deleted."""
    def __init__(
        self,
        *,
        relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
        blocking: builtins.bool | None = ...,
    ) -> None: ...
    # "_blocking" is the synthetic oneof wrapping the optional blocking field.
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_blocking", b"_blocking", "blocking", b"blocking", "relation", b"relation"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_blocking", b"_blocking", "blocking", b"blocking", "relation", b"relation"
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_blocking", b"_blocking"]
    ) -> typing_extensions.Literal["blocking"] | None: ...
|
|
460
|
+
|
|
461
|
+
class GetStorageLevel(google.protobuf.message.Message):
    """Request asking for the storage level of a relation."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RELATION_FIELD_NUMBER: builtins.int
    @property
    def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation:
        """(Required) The logical plan to get the storage level."""
    def __init__(
        self,
        *,
        relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["relation", b"relation"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["relation", b"relation"]
    ) -> None: ...
|
|
479
|
+
|
|
480
|
+
# Wire field numbers of AnalyzePlanRequest.
SESSION_ID_FIELD_NUMBER: builtins.int
USER_CONTEXT_FIELD_NUMBER: builtins.int
CLIENT_TYPE_FIELD_NUMBER: builtins.int
SCHEMA_FIELD_NUMBER: builtins.int
EXPLAIN_FIELD_NUMBER: builtins.int
TREE_STRING_FIELD_NUMBER: builtins.int
IS_LOCAL_FIELD_NUMBER: builtins.int
IS_STREAMING_FIELD_NUMBER: builtins.int
INPUT_FILES_FIELD_NUMBER: builtins.int
SPARK_VERSION_FIELD_NUMBER: builtins.int
DDL_PARSE_FIELD_NUMBER: builtins.int
SAME_SEMANTICS_FIELD_NUMBER: builtins.int
SEMANTIC_HASH_FIELD_NUMBER: builtins.int
PERSIST_FIELD_NUMBER: builtins.int
UNPERSIST_FIELD_NUMBER: builtins.int
GET_STORAGE_LEVEL_FIELD_NUMBER: builtins.int
session_id: builtins.str
"""(Required)

The session_id specifies a spark session for a user id (which is specified
by user_context.user_id). The session_id is set by the client to be able to
collate streaming responses from different queries within the dedicated session.
The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
"""
@property
def user_context(self) -> global___UserContext:
    """(Required) User context"""
client_type: builtins.str
"""Provides optional information about the client sending the request. This field
can be used for language or version specific information and is only intended for
logging purposes and will not be interpreted by the server.
"""
# Accessors for the members of the "analyze" oneof: exactly one of these
# sub-requests is set per message (see WhichOneof("analyze") below).
@property
def schema(self) -> global___AnalyzePlanRequest.Schema: ...
@property
def explain(self) -> global___AnalyzePlanRequest.Explain: ...
@property
def tree_string(self) -> global___AnalyzePlanRequest.TreeString: ...
@property
def is_local(self) -> global___AnalyzePlanRequest.IsLocal: ...
@property
def is_streaming(self) -> global___AnalyzePlanRequest.IsStreaming: ...
@property
def input_files(self) -> global___AnalyzePlanRequest.InputFiles: ...
@property
def spark_version(self) -> global___AnalyzePlanRequest.SparkVersion: ...
@property
def ddl_parse(self) -> global___AnalyzePlanRequest.DDLParse: ...
@property
def same_semantics(self) -> global___AnalyzePlanRequest.SameSemantics: ...
@property
def semantic_hash(self) -> global___AnalyzePlanRequest.SemanticHash: ...
@property
def persist(self) -> global___AnalyzePlanRequest.Persist: ...
@property
def unpersist(self) -> global___AnalyzePlanRequest.Unpersist: ...
@property
def get_storage_level(self) -> global___AnalyzePlanRequest.GetStorageLevel: ...
|
|
538
|
+
# Keyword-only constructor; every field is optional at construction time.
def __init__(
    self,
    *,
    session_id: builtins.str = ...,
    user_context: global___UserContext | None = ...,
    client_type: builtins.str | None = ...,
    schema: global___AnalyzePlanRequest.Schema | None = ...,
    explain: global___AnalyzePlanRequest.Explain | None = ...,
    tree_string: global___AnalyzePlanRequest.TreeString | None = ...,
    is_local: global___AnalyzePlanRequest.IsLocal | None = ...,
    is_streaming: global___AnalyzePlanRequest.IsStreaming | None = ...,
    input_files: global___AnalyzePlanRequest.InputFiles | None = ...,
    spark_version: global___AnalyzePlanRequest.SparkVersion | None = ...,
    ddl_parse: global___AnalyzePlanRequest.DDLParse | None = ...,
    same_semantics: global___AnalyzePlanRequest.SameSemantics | None = ...,
    semantic_hash: global___AnalyzePlanRequest.SemanticHash | None = ...,
    persist: global___AnalyzePlanRequest.Persist | None = ...,
    unpersist: global___AnalyzePlanRequest.Unpersist | None = ...,
    get_storage_level: global___AnalyzePlanRequest.GetStorageLevel | None = ...,
) -> None: ...
|
|
558
|
+
# Presence check; accepts message fields, the "analyze" oneof name, and the
# "_client_type" synthetic oneof for the optional client_type field.
def HasField(
    self,
    field_name: typing_extensions.Literal[
        "_client_type",
        b"_client_type",
        "analyze",
        b"analyze",
        "client_type",
        b"client_type",
        "ddl_parse",
        b"ddl_parse",
        "explain",
        b"explain",
        "get_storage_level",
        b"get_storage_level",
        "input_files",
        b"input_files",
        "is_local",
        b"is_local",
        "is_streaming",
        b"is_streaming",
        "persist",
        b"persist",
        "same_semantics",
        b"same_semantics",
        "schema",
        b"schema",
        "semantic_hash",
        b"semantic_hash",
        "spark_version",
        b"spark_version",
        "tree_string",
        b"tree_string",
        "unpersist",
        b"unpersist",
        "user_context",
        b"user_context",
    ],
) -> builtins.bool: ...
|
|
597
|
+
# Same name set as HasField plus "session_id" (scalar fields are clearable
# even though they have no presence semantics).
def ClearField(
    self,
    field_name: typing_extensions.Literal[
        "_client_type",
        b"_client_type",
        "analyze",
        b"analyze",
        "client_type",
        b"client_type",
        "ddl_parse",
        b"ddl_parse",
        "explain",
        b"explain",
        "get_storage_level",
        b"get_storage_level",
        "input_files",
        b"input_files",
        "is_local",
        b"is_local",
        "is_streaming",
        b"is_streaming",
        "persist",
        b"persist",
        "same_semantics",
        b"same_semantics",
        "schema",
        b"schema",
        "semantic_hash",
        b"semantic_hash",
        "session_id",
        b"session_id",
        "spark_version",
        b"spark_version",
        "tree_string",
        b"tree_string",
        "unpersist",
        b"unpersist",
        "user_context",
        b"user_context",
    ],
) -> None: ...
|
|
638
|
+
# Overload for the synthetic oneof of the optional client_type field.
@typing.overload
def WhichOneof(
    self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
) -> typing_extensions.Literal["client_type"] | None: ...
# Overload for the "analyze" oneof: returns the name of whichever analysis
# sub-request is populated, or None if none is set.
@typing.overload
def WhichOneof(
    self, oneof_group: typing_extensions.Literal["analyze", b"analyze"]
) -> typing_extensions.Literal[
    "schema",
    "explain",
    "tree_string",
    "is_local",
    "is_streaming",
    "input_files",
    "spark_version",
    "ddl_parse",
    "same_semantics",
    "semantic_hash",
    "persist",
    "unpersist",
    "get_storage_level",
] | None: ...
|
|
660
|
+
|
|
661
|
+
# Module-level alias used by forward references within this stub.
global___AnalyzePlanRequest = AnalyzePlanRequest
|
|
662
|
+
|
|
663
|
+
class AnalyzePlanResponse(google.protobuf.message.Message):
    """Response to performing analysis of the query. Contains relevant metadata to be able to
    reason about the performance.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class Schema(google.protobuf.message.Message):
        """Result payload carrying the analyzed schema as a DataType."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        SCHEMA_FIELD_NUMBER: builtins.int
        @property
        def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
        def __init__(
            self,
            *,
            schema: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["schema", b"schema"]
        ) -> builtins.bool: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["schema", b"schema"]
        ) -> None: ...

    class Explain(google.protobuf.message.Message):
        """Result payload carrying the explain output as a string."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        EXPLAIN_STRING_FIELD_NUMBER: builtins.int
        explain_string: builtins.str
        def __init__(
            self,
            *,
            explain_string: builtins.str = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["explain_string", b"explain_string"]
        ) -> None: ...

    class TreeString(google.protobuf.message.Message):
        """Result payload carrying the plan's tree-string rendering."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        TREE_STRING_FIELD_NUMBER: builtins.int
        tree_string: builtins.str
        def __init__(
            self,
            *,
            tree_string: builtins.str = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["tree_string", b"tree_string"]
        ) -> None: ...

    class IsLocal(google.protobuf.message.Message):
        """Boolean result for the is_local analysis."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        IS_LOCAL_FIELD_NUMBER: builtins.int
        is_local: builtins.bool
        def __init__(
            self,
            *,
            is_local: builtins.bool = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["is_local", b"is_local"]
        ) -> None: ...

    class IsStreaming(google.protobuf.message.Message):
        """Boolean result for the is_streaming analysis."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        IS_STREAMING_FIELD_NUMBER: builtins.int
        is_streaming: builtins.bool
        def __init__(
            self,
            *,
            is_streaming: builtins.bool = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["is_streaming", b"is_streaming"]
        ) -> None: ...

    class InputFiles(google.protobuf.message.Message):
        """Result payload listing the input files of the dataset."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        FILES_FIELD_NUMBER: builtins.int
        @property
        def files(
            self,
        ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
            """A best-effort snapshot of the files that compose this Dataset"""
        def __init__(
            self,
            *,
            files: collections.abc.Iterable[builtins.str] | None = ...,
        ) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["files", b"files"]) -> None: ...

    class SparkVersion(google.protobuf.message.Message):
        """Result payload carrying the server's Spark version string."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        VERSION_FIELD_NUMBER: builtins.int
        version: builtins.str
        def __init__(
            self,
            *,
            version: builtins.str = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["version", b"version"]
        ) -> None: ...

    class DDLParse(google.protobuf.message.Message):
        """Result payload carrying the DataType parsed from a DDL string."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        PARSED_FIELD_NUMBER: builtins.int
        @property
        def parsed(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
        def __init__(
            self,
            *,
            parsed: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["parsed", b"parsed"]
        ) -> builtins.bool: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["parsed", b"parsed"]
        ) -> None: ...

    class SameSemantics(google.protobuf.message.Message):
        """Boolean result of the same_semantics comparison."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        RESULT_FIELD_NUMBER: builtins.int
        result: builtins.bool
        def __init__(
            self,
            *,
            result: builtins.bool = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["result", b"result"]
        ) -> None: ...

    class SemanticHash(google.protobuf.message.Message):
        """Integer result of the semantic_hash computation."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        RESULT_FIELD_NUMBER: builtins.int
        result: builtins.int
        def __init__(
            self,
            *,
            result: builtins.int = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["result", b"result"]
        ) -> None: ...

    class Persist(google.protobuf.message.Message):
        # Empty message: the persist response carries no fields.
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        def __init__(
            self,
        ) -> None: ...

    class Unpersist(google.protobuf.message.Message):
        # Empty message: the unpersist response carries no fields.
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        def __init__(
            self,
        ) -> None: ...

    class GetStorageLevel(google.protobuf.message.Message):
        """Result payload carrying the relation's storage level."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        STORAGE_LEVEL_FIELD_NUMBER: builtins.int
        @property
        def storage_level(self) -> pyspark.sql.connect.proto.common_pb2.StorageLevel:
            """(Required) The StorageLevel as a result of get_storage_level request."""
        def __init__(
            self,
            *,
            storage_level: pyspark.sql.connect.proto.common_pb2.StorageLevel | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["storage_level", b"storage_level"]
        ) -> builtins.bool: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["storage_level", b"storage_level"]
        ) -> None: ...

    # Wire field numbers of AnalyzePlanResponse.
    SESSION_ID_FIELD_NUMBER: builtins.int
    SCHEMA_FIELD_NUMBER: builtins.int
    EXPLAIN_FIELD_NUMBER: builtins.int
    TREE_STRING_FIELD_NUMBER: builtins.int
    IS_LOCAL_FIELD_NUMBER: builtins.int
    IS_STREAMING_FIELD_NUMBER: builtins.int
    INPUT_FILES_FIELD_NUMBER: builtins.int
    SPARK_VERSION_FIELD_NUMBER: builtins.int
    DDL_PARSE_FIELD_NUMBER: builtins.int
    SAME_SEMANTICS_FIELD_NUMBER: builtins.int
    SEMANTIC_HASH_FIELD_NUMBER: builtins.int
    PERSIST_FIELD_NUMBER: builtins.int
    UNPERSIST_FIELD_NUMBER: builtins.int
    GET_STORAGE_LEVEL_FIELD_NUMBER: builtins.int
    session_id: builtins.str
    # Accessors for the members of the "result" oneof: one result payload per
    # response (see WhichOneof("result") below).
    @property
    def schema(self) -> global___AnalyzePlanResponse.Schema: ...
    @property
    def explain(self) -> global___AnalyzePlanResponse.Explain: ...
    @property
    def tree_string(self) -> global___AnalyzePlanResponse.TreeString: ...
    @property
    def is_local(self) -> global___AnalyzePlanResponse.IsLocal: ...
    @property
    def is_streaming(self) -> global___AnalyzePlanResponse.IsStreaming: ...
    @property
    def input_files(self) -> global___AnalyzePlanResponse.InputFiles: ...
    @property
    def spark_version(self) -> global___AnalyzePlanResponse.SparkVersion: ...
    @property
    def ddl_parse(self) -> global___AnalyzePlanResponse.DDLParse: ...
    @property
    def same_semantics(self) -> global___AnalyzePlanResponse.SameSemantics: ...
    @property
    def semantic_hash(self) -> global___AnalyzePlanResponse.SemanticHash: ...
    @property
    def persist(self) -> global___AnalyzePlanResponse.Persist: ...
    @property
    def unpersist(self) -> global___AnalyzePlanResponse.Unpersist: ...
    @property
    def get_storage_level(self) -> global___AnalyzePlanResponse.GetStorageLevel: ...
    def __init__(
        self,
        *,
        session_id: builtins.str = ...,
        schema: global___AnalyzePlanResponse.Schema | None = ...,
        explain: global___AnalyzePlanResponse.Explain | None = ...,
        tree_string: global___AnalyzePlanResponse.TreeString | None = ...,
        is_local: global___AnalyzePlanResponse.IsLocal | None = ...,
        is_streaming: global___AnalyzePlanResponse.IsStreaming | None = ...,
        input_files: global___AnalyzePlanResponse.InputFiles | None = ...,
        spark_version: global___AnalyzePlanResponse.SparkVersion | None = ...,
        ddl_parse: global___AnalyzePlanResponse.DDLParse | None = ...,
        same_semantics: global___AnalyzePlanResponse.SameSemantics | None = ...,
        semantic_hash: global___AnalyzePlanResponse.SemanticHash | None = ...,
        persist: global___AnalyzePlanResponse.Persist | None = ...,
        unpersist: global___AnalyzePlanResponse.Unpersist | None = ...,
        get_storage_level: global___AnalyzePlanResponse.GetStorageLevel | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "ddl_parse",
            b"ddl_parse",
            "explain",
            b"explain",
            "get_storage_level",
            b"get_storage_level",
            "input_files",
            b"input_files",
            "is_local",
            b"is_local",
            "is_streaming",
            b"is_streaming",
            "persist",
            b"persist",
            "result",
            b"result",
            "same_semantics",
            b"same_semantics",
            "schema",
            b"schema",
            "semantic_hash",
            b"semantic_hash",
            "spark_version",
            b"spark_version",
            "tree_string",
            b"tree_string",
            "unpersist",
            b"unpersist",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "ddl_parse",
            b"ddl_parse",
            "explain",
            b"explain",
            "get_storage_level",
            b"get_storage_level",
            "input_files",
            b"input_files",
            "is_local",
            b"is_local",
            "is_streaming",
            b"is_streaming",
            "persist",
            b"persist",
            "result",
            b"result",
            "same_semantics",
            b"same_semantics",
            "schema",
            b"schema",
            "semantic_hash",
            b"semantic_hash",
            "session_id",
            b"session_id",
            "spark_version",
            b"spark_version",
            "tree_string",
            b"tree_string",
            "unpersist",
            b"unpersist",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["result", b"result"]
    ) -> typing_extensions.Literal[
        "schema",
        "explain",
        "tree_string",
        "is_local",
        "is_streaming",
        "input_files",
        "spark_version",
        "ddl_parse",
        "same_semantics",
        "semantic_hash",
        "persist",
        "unpersist",
        "get_storage_level",
    ] | None: ...
|
|
997
|
+
|
|
998
|
+
# Module-level alias used by forward references within this stub.
global___AnalyzePlanResponse = AnalyzePlanResponse
|
|
999
|
+
|
|
1000
|
+
class ExecutePlanRequest(google.protobuf.message.Message):
    """A request to be executed by the service."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class RequestOption(google.protobuf.message.Message):
        """Wrapper for a single per-request option; the concrete option lives in the
        "request_option" oneof (reattach_options or an extension Any).
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        REATTACH_OPTIONS_FIELD_NUMBER: builtins.int
        EXTENSION_FIELD_NUMBER: builtins.int
        @property
        def reattach_options(self) -> global___ReattachOptions: ...
        @property
        def extension(self) -> google.protobuf.any_pb2.Any:
            """Extension type for request options"""
        def __init__(
            self,
            *,
            reattach_options: global___ReattachOptions | None = ...,
            extension: google.protobuf.any_pb2.Any | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "extension",
                b"extension",
                "reattach_options",
                b"reattach_options",
                "request_option",
                b"request_option",
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "extension",
                b"extension",
                "reattach_options",
                b"reattach_options",
                "request_option",
                b"request_option",
            ],
        ) -> None: ...
        def WhichOneof(
            self, oneof_group: typing_extensions.Literal["request_option", b"request_option"]
        ) -> typing_extensions.Literal["reattach_options", "extension"] | None: ...

    # Wire field numbers of ExecutePlanRequest.
    SESSION_ID_FIELD_NUMBER: builtins.int
    USER_CONTEXT_FIELD_NUMBER: builtins.int
    OPERATION_ID_FIELD_NUMBER: builtins.int
    PLAN_FIELD_NUMBER: builtins.int
    CLIENT_TYPE_FIELD_NUMBER: builtins.int
    REQUEST_OPTIONS_FIELD_NUMBER: builtins.int
    TAGS_FIELD_NUMBER: builtins.int
    session_id: builtins.str
    """(Required)

    The session_id specifies a spark session for a user id (which is specified
    by user_context.user_id). The session_id is set by the client to be able to
    collate streaming responses from different queries within the dedicated session.
    The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
    """
    @property
    def user_context(self) -> global___UserContext:
        """(Required) User context

        user_context.user_id and session+id both identify a unique remote spark session on the
        server side.
        """
    operation_id: builtins.str
    """(Optional)
    Provide an id for this request. If not provided, it will be generated by the server.
    It is returned in every ExecutePlanResponse.operation_id of the ExecutePlan response stream.
    The id must be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
    """
    @property
    def plan(self) -> global___Plan:
        """(Required) The logical plan to be executed / analyzed."""
    client_type: builtins.str
    """Provides optional information about the client sending the request. This field
    can be used for language or version specific information and is only intended for
    logging purposes and will not be interpreted by the server.
    """
    @property
    def request_options(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___ExecutePlanRequest.RequestOption
    ]:
        """Repeated element for options that can be passed to the request. This element is currently
        unused but allows to pass in an extension value used for arbitrary options.
        """
    @property
    def tags(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """Tags to tag the given execution with.
        Tags cannot contain ',' character and cannot be empty strings.
        Used by Interrupt with interrupt.tag.
        """
    def __init__(
        self,
        *,
        session_id: builtins.str = ...,
        user_context: global___UserContext | None = ...,
        operation_id: builtins.str | None = ...,
        plan: global___Plan | None = ...,
        client_type: builtins.str | None = ...,
        request_options: collections.abc.Iterable[global___ExecutePlanRequest.RequestOption]
        | None = ...,
        tags: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    # "_client_type" and "_operation_id" are the synthetic oneofs wrapping the
    # corresponding optional fields.
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_client_type",
            b"_client_type",
            "_operation_id",
            b"_operation_id",
            "client_type",
            b"client_type",
            "operation_id",
            b"operation_id",
            "plan",
            b"plan",
            "user_context",
            b"user_context",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_client_type",
            b"_client_type",
            "_operation_id",
            b"_operation_id",
            "client_type",
            b"client_type",
            "operation_id",
            b"operation_id",
            "plan",
            b"plan",
            "request_options",
            b"request_options",
            "session_id",
            b"session_id",
            "tags",
            b"tags",
            "user_context",
            b"user_context",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
    ) -> typing_extensions.Literal["client_type"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_operation_id", b"_operation_id"]
    ) -> typing_extensions.Literal["operation_id"] | None: ...
|
|
1160
|
+
|
|
1161
|
+
# Module-level alias used by forward references within this stub.
global___ExecutePlanRequest = ExecutePlanRequest
|
|
1162
|
+
|
|
1163
|
+
class ExecutePlanResponse(google.protobuf.message.Message):
|
|
1164
|
+
"""The response of a query, can be one or more for each request. Responses belonging to the
|
|
1165
|
+
same input query, carry the same `session_id`.
|
|
1166
|
+
"""
|
|
1167
|
+
|
|
1168
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1169
|
+
|
|
1170
|
+
class SqlCommandResult(google.protobuf.message.Message):
|
|
1171
|
+
"""A SQL command returns an opaque Relation that can be directly used as input for the next
|
|
1172
|
+
call.
|
|
1173
|
+
"""
|
|
1174
|
+
|
|
1175
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1176
|
+
|
|
1177
|
+
RELATION_FIELD_NUMBER: builtins.int
|
|
1178
|
+
@property
|
|
1179
|
+
def relation(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: ...
|
|
1180
|
+
def __init__(
|
|
1181
|
+
self,
|
|
1182
|
+
*,
|
|
1183
|
+
relation: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
|
|
1184
|
+
) -> None: ...
|
|
1185
|
+
def HasField(
|
|
1186
|
+
self, field_name: typing_extensions.Literal["relation", b"relation"]
|
|
1187
|
+
) -> builtins.bool: ...
|
|
1188
|
+
def ClearField(
|
|
1189
|
+
self, field_name: typing_extensions.Literal["relation", b"relation"]
|
|
1190
|
+
) -> None: ...
|
|
1191
|
+
|
|
1192
|
+
class ArrowBatch(google.protobuf.message.Message):
|
|
1193
|
+
"""Batch results of metrics."""
|
|
1194
|
+
|
|
1195
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1196
|
+
|
|
1197
|
+
ROW_COUNT_FIELD_NUMBER: builtins.int
|
|
1198
|
+
DATA_FIELD_NUMBER: builtins.int
|
|
1199
|
+
row_count: builtins.int
|
|
1200
|
+
data: builtins.bytes
|
|
1201
|
+
def __init__(
|
|
1202
|
+
self,
|
|
1203
|
+
*,
|
|
1204
|
+
row_count: builtins.int = ...,
|
|
1205
|
+
data: builtins.bytes = ...,
|
|
1206
|
+
) -> None: ...
|
|
1207
|
+
def ClearField(
|
|
1208
|
+
self, field_name: typing_extensions.Literal["data", b"data", "row_count", b"row_count"]
|
|
1209
|
+
) -> None: ...
|
|
1210
|
+
|
|
1211
|
+
class Metrics(google.protobuf.message.Message):
|
|
1212
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1213
|
+
|
|
1214
|
+
class MetricObject(google.protobuf.message.Message):
|
|
1215
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1216
|
+
|
|
1217
|
+
class ExecutionMetricsEntry(google.protobuf.message.Message):
|
|
1218
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1219
|
+
|
|
1220
|
+
KEY_FIELD_NUMBER: builtins.int
|
|
1221
|
+
VALUE_FIELD_NUMBER: builtins.int
|
|
1222
|
+
key: builtins.str
|
|
1223
|
+
@property
|
|
1224
|
+
def value(self) -> global___ExecutePlanResponse.Metrics.MetricValue: ...
|
|
1225
|
+
def __init__(
|
|
1226
|
+
self,
|
|
1227
|
+
*,
|
|
1228
|
+
key: builtins.str = ...,
|
|
1229
|
+
value: global___ExecutePlanResponse.Metrics.MetricValue | None = ...,
|
|
1230
|
+
) -> None: ...
|
|
1231
|
+
def HasField(
|
|
1232
|
+
self, field_name: typing_extensions.Literal["value", b"value"]
|
|
1233
|
+
) -> builtins.bool: ...
|
|
1234
|
+
def ClearField(
|
|
1235
|
+
self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
|
|
1236
|
+
) -> None: ...
|
|
1237
|
+
|
|
1238
|
+
NAME_FIELD_NUMBER: builtins.int
|
|
1239
|
+
PLAN_ID_FIELD_NUMBER: builtins.int
|
|
1240
|
+
PARENT_FIELD_NUMBER: builtins.int
|
|
1241
|
+
EXECUTION_METRICS_FIELD_NUMBER: builtins.int
|
|
1242
|
+
name: builtins.str
|
|
1243
|
+
plan_id: builtins.int
|
|
1244
|
+
parent: builtins.int
|
|
1245
|
+
@property
|
|
1246
|
+
def execution_metrics(
|
|
1247
|
+
self,
|
|
1248
|
+
) -> google.protobuf.internal.containers.MessageMap[
|
|
1249
|
+
builtins.str, global___ExecutePlanResponse.Metrics.MetricValue
|
|
1250
|
+
]: ...
|
|
1251
|
+
def __init__(
|
|
1252
|
+
self,
|
|
1253
|
+
*,
|
|
1254
|
+
name: builtins.str = ...,
|
|
1255
|
+
plan_id: builtins.int = ...,
|
|
1256
|
+
parent: builtins.int = ...,
|
|
1257
|
+
execution_metrics: collections.abc.Mapping[
|
|
1258
|
+
builtins.str, global___ExecutePlanResponse.Metrics.MetricValue
|
|
1259
|
+
]
|
|
1260
|
+
| None = ...,
|
|
1261
|
+
) -> None: ...
|
|
1262
|
+
def ClearField(
|
|
1263
|
+
self,
|
|
1264
|
+
field_name: typing_extensions.Literal[
|
|
1265
|
+
"execution_metrics",
|
|
1266
|
+
b"execution_metrics",
|
|
1267
|
+
"name",
|
|
1268
|
+
b"name",
|
|
1269
|
+
"parent",
|
|
1270
|
+
b"parent",
|
|
1271
|
+
"plan_id",
|
|
1272
|
+
b"plan_id",
|
|
1273
|
+
],
|
|
1274
|
+
) -> None: ...
|
|
1275
|
+
|
|
1276
|
+
class MetricValue(google.protobuf.message.Message):
|
|
1277
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1278
|
+
|
|
1279
|
+
NAME_FIELD_NUMBER: builtins.int
|
|
1280
|
+
VALUE_FIELD_NUMBER: builtins.int
|
|
1281
|
+
METRIC_TYPE_FIELD_NUMBER: builtins.int
|
|
1282
|
+
name: builtins.str
|
|
1283
|
+
value: builtins.int
|
|
1284
|
+
metric_type: builtins.str
|
|
1285
|
+
def __init__(
|
|
1286
|
+
self,
|
|
1287
|
+
*,
|
|
1288
|
+
name: builtins.str = ...,
|
|
1289
|
+
value: builtins.int = ...,
|
|
1290
|
+
metric_type: builtins.str = ...,
|
|
1291
|
+
) -> None: ...
|
|
1292
|
+
def ClearField(
|
|
1293
|
+
self,
|
|
1294
|
+
field_name: typing_extensions.Literal[
|
|
1295
|
+
"metric_type", b"metric_type", "name", b"name", "value", b"value"
|
|
1296
|
+
],
|
|
1297
|
+
) -> None: ...
|
|
1298
|
+
|
|
1299
|
+
METRICS_FIELD_NUMBER: builtins.int
|
|
1300
|
+
@property
|
|
1301
|
+
def metrics(
|
|
1302
|
+
self,
|
|
1303
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
|
|
1304
|
+
global___ExecutePlanResponse.Metrics.MetricObject
|
|
1305
|
+
]: ...
|
|
1306
|
+
def __init__(
|
|
1307
|
+
self,
|
|
1308
|
+
*,
|
|
1309
|
+
metrics: collections.abc.Iterable[global___ExecutePlanResponse.Metrics.MetricObject]
|
|
1310
|
+
| None = ...,
|
|
1311
|
+
) -> None: ...
|
|
1312
|
+
def ClearField(
|
|
1313
|
+
self, field_name: typing_extensions.Literal["metrics", b"metrics"]
|
|
1314
|
+
) -> None: ...
|
|
1315
|
+
|
|
1316
|
+
class ObservedMetrics(google.protobuf.message.Message):
|
|
1317
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1318
|
+
|
|
1319
|
+
NAME_FIELD_NUMBER: builtins.int
|
|
1320
|
+
VALUES_FIELD_NUMBER: builtins.int
|
|
1321
|
+
name: builtins.str
|
|
1322
|
+
@property
|
|
1323
|
+
def values(
|
|
1324
|
+
self,
|
|
1325
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
|
|
1326
|
+
pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
|
|
1327
|
+
]: ...
|
|
1328
|
+
def __init__(
|
|
1329
|
+
self,
|
|
1330
|
+
*,
|
|
1331
|
+
name: builtins.str = ...,
|
|
1332
|
+
values: collections.abc.Iterable[
|
|
1333
|
+
pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
|
|
1334
|
+
]
|
|
1335
|
+
| None = ...,
|
|
1336
|
+
) -> None: ...
|
|
1337
|
+
def ClearField(
|
|
1338
|
+
self, field_name: typing_extensions.Literal["name", b"name", "values", b"values"]
|
|
1339
|
+
) -> None: ...
|
|
1340
|
+
|
|
1341
|
+
class ResultComplete(google.protobuf.message.Message):
|
|
1342
|
+
"""If present, in a reattachable execution this means that after server sends onComplete,
|
|
1343
|
+
the execution is complete. If the server sends onComplete without sending a ResultComplete,
|
|
1344
|
+
it means that there is more, and the client should use ReattachExecute RPC to continue.
|
|
1345
|
+
"""
|
|
1346
|
+
|
|
1347
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1348
|
+
|
|
1349
|
+
def __init__(
|
|
1350
|
+
self,
|
|
1351
|
+
) -> None: ...
|
|
1352
|
+
|
|
1353
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
1354
|
+
OPERATION_ID_FIELD_NUMBER: builtins.int
|
|
1355
|
+
RESPONSE_ID_FIELD_NUMBER: builtins.int
|
|
1356
|
+
ARROW_BATCH_FIELD_NUMBER: builtins.int
|
|
1357
|
+
SQL_COMMAND_RESULT_FIELD_NUMBER: builtins.int
|
|
1358
|
+
WRITE_STREAM_OPERATION_START_RESULT_FIELD_NUMBER: builtins.int
|
|
1359
|
+
STREAMING_QUERY_COMMAND_RESULT_FIELD_NUMBER: builtins.int
|
|
1360
|
+
GET_RESOURCES_COMMAND_RESULT_FIELD_NUMBER: builtins.int
|
|
1361
|
+
STREAMING_QUERY_MANAGER_COMMAND_RESULT_FIELD_NUMBER: builtins.int
|
|
1362
|
+
RESULT_COMPLETE_FIELD_NUMBER: builtins.int
|
|
1363
|
+
EXTENSION_FIELD_NUMBER: builtins.int
|
|
1364
|
+
METRICS_FIELD_NUMBER: builtins.int
|
|
1365
|
+
OBSERVED_METRICS_FIELD_NUMBER: builtins.int
|
|
1366
|
+
SCHEMA_FIELD_NUMBER: builtins.int
|
|
1367
|
+
session_id: builtins.str
|
|
1368
|
+
operation_id: builtins.str
|
|
1369
|
+
"""Identifies the ExecutePlan execution.
|
|
1370
|
+
If set by the client in ExecutePlanRequest.operationId, that value is returned.
|
|
1371
|
+
Otherwise generated by the server.
|
|
1372
|
+
It is an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
|
|
1373
|
+
"""
|
|
1374
|
+
response_id: builtins.str
|
|
1375
|
+
"""Identified the response in the stream.
|
|
1376
|
+
The id is an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
|
|
1377
|
+
"""
|
|
1378
|
+
@property
|
|
1379
|
+
def arrow_batch(self) -> global___ExecutePlanResponse.ArrowBatch: ...
|
|
1380
|
+
@property
|
|
1381
|
+
def sql_command_result(self) -> global___ExecutePlanResponse.SqlCommandResult:
|
|
1382
|
+
"""Special case for executing SQL commands."""
|
|
1383
|
+
@property
|
|
1384
|
+
def write_stream_operation_start_result(
|
|
1385
|
+
self,
|
|
1386
|
+
) -> pyspark.sql.connect.proto.commands_pb2.WriteStreamOperationStartResult:
|
|
1387
|
+
"""Response for a streaming query."""
|
|
1388
|
+
@property
|
|
1389
|
+
def streaming_query_command_result(
|
|
1390
|
+
self,
|
|
1391
|
+
) -> pyspark.sql.connect.proto.commands_pb2.StreamingQueryCommandResult:
|
|
1392
|
+
"""Response for commands on a streaming query."""
|
|
1393
|
+
@property
|
|
1394
|
+
def get_resources_command_result(
|
|
1395
|
+
self,
|
|
1396
|
+
) -> pyspark.sql.connect.proto.commands_pb2.GetResourcesCommandResult:
|
|
1397
|
+
"""Response for 'SparkContext.resources'."""
|
|
1398
|
+
@property
|
|
1399
|
+
def streaming_query_manager_command_result(
|
|
1400
|
+
self,
|
|
1401
|
+
) -> pyspark.sql.connect.proto.commands_pb2.StreamingQueryManagerCommandResult:
|
|
1402
|
+
"""Response for commands on the streaming query manager."""
|
|
1403
|
+
@property
|
|
1404
|
+
def result_complete(self) -> global___ExecutePlanResponse.ResultComplete:
|
|
1405
|
+
"""Response type informing if the stream is complete in reattachable execution."""
|
|
1406
|
+
@property
|
|
1407
|
+
def extension(self) -> google.protobuf.any_pb2.Any:
|
|
1408
|
+
"""Support arbitrary result objects."""
|
|
1409
|
+
@property
|
|
1410
|
+
def metrics(self) -> global___ExecutePlanResponse.Metrics:
|
|
1411
|
+
"""Metrics for the query execution. Typically, this field is only present in the last
|
|
1412
|
+
batch of results and then represent the overall state of the query execution.
|
|
1413
|
+
"""
|
|
1414
|
+
@property
|
|
1415
|
+
def observed_metrics(
|
|
1416
|
+
self,
|
|
1417
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
|
|
1418
|
+
global___ExecutePlanResponse.ObservedMetrics
|
|
1419
|
+
]:
|
|
1420
|
+
"""The metrics observed during the execution of the query plan."""
|
|
1421
|
+
@property
|
|
1422
|
+
def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
|
|
1423
|
+
"""(Optional) The Spark schema. This field is available when `collect` is called."""
|
|
1424
|
+
def __init__(
|
|
1425
|
+
self,
|
|
1426
|
+
*,
|
|
1427
|
+
session_id: builtins.str = ...,
|
|
1428
|
+
operation_id: builtins.str = ...,
|
|
1429
|
+
response_id: builtins.str = ...,
|
|
1430
|
+
arrow_batch: global___ExecutePlanResponse.ArrowBatch | None = ...,
|
|
1431
|
+
sql_command_result: global___ExecutePlanResponse.SqlCommandResult | None = ...,
|
|
1432
|
+
write_stream_operation_start_result: pyspark.sql.connect.proto.commands_pb2.WriteStreamOperationStartResult
|
|
1433
|
+
| None = ...,
|
|
1434
|
+
streaming_query_command_result: pyspark.sql.connect.proto.commands_pb2.StreamingQueryCommandResult
|
|
1435
|
+
| None = ...,
|
|
1436
|
+
get_resources_command_result: pyspark.sql.connect.proto.commands_pb2.GetResourcesCommandResult
|
|
1437
|
+
| None = ...,
|
|
1438
|
+
streaming_query_manager_command_result: pyspark.sql.connect.proto.commands_pb2.StreamingQueryManagerCommandResult
|
|
1439
|
+
| None = ...,
|
|
1440
|
+
result_complete: global___ExecutePlanResponse.ResultComplete | None = ...,
|
|
1441
|
+
extension: google.protobuf.any_pb2.Any | None = ...,
|
|
1442
|
+
metrics: global___ExecutePlanResponse.Metrics | None = ...,
|
|
1443
|
+
observed_metrics: collections.abc.Iterable[global___ExecutePlanResponse.ObservedMetrics]
|
|
1444
|
+
| None = ...,
|
|
1445
|
+
schema: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
|
|
1446
|
+
) -> None: ...
|
|
1447
|
+
def HasField(
|
|
1448
|
+
self,
|
|
1449
|
+
field_name: typing_extensions.Literal[
|
|
1450
|
+
"arrow_batch",
|
|
1451
|
+
b"arrow_batch",
|
|
1452
|
+
"extension",
|
|
1453
|
+
b"extension",
|
|
1454
|
+
"get_resources_command_result",
|
|
1455
|
+
b"get_resources_command_result",
|
|
1456
|
+
"metrics",
|
|
1457
|
+
b"metrics",
|
|
1458
|
+
"response_type",
|
|
1459
|
+
b"response_type",
|
|
1460
|
+
"result_complete",
|
|
1461
|
+
b"result_complete",
|
|
1462
|
+
"schema",
|
|
1463
|
+
b"schema",
|
|
1464
|
+
"sql_command_result",
|
|
1465
|
+
b"sql_command_result",
|
|
1466
|
+
"streaming_query_command_result",
|
|
1467
|
+
b"streaming_query_command_result",
|
|
1468
|
+
"streaming_query_manager_command_result",
|
|
1469
|
+
b"streaming_query_manager_command_result",
|
|
1470
|
+
"write_stream_operation_start_result",
|
|
1471
|
+
b"write_stream_operation_start_result",
|
|
1472
|
+
],
|
|
1473
|
+
) -> builtins.bool: ...
|
|
1474
|
+
def ClearField(
|
|
1475
|
+
self,
|
|
1476
|
+
field_name: typing_extensions.Literal[
|
|
1477
|
+
"arrow_batch",
|
|
1478
|
+
b"arrow_batch",
|
|
1479
|
+
"extension",
|
|
1480
|
+
b"extension",
|
|
1481
|
+
"get_resources_command_result",
|
|
1482
|
+
b"get_resources_command_result",
|
|
1483
|
+
"metrics",
|
|
1484
|
+
b"metrics",
|
|
1485
|
+
"observed_metrics",
|
|
1486
|
+
b"observed_metrics",
|
|
1487
|
+
"operation_id",
|
|
1488
|
+
b"operation_id",
|
|
1489
|
+
"response_id",
|
|
1490
|
+
b"response_id",
|
|
1491
|
+
"response_type",
|
|
1492
|
+
b"response_type",
|
|
1493
|
+
"result_complete",
|
|
1494
|
+
b"result_complete",
|
|
1495
|
+
"schema",
|
|
1496
|
+
b"schema",
|
|
1497
|
+
"session_id",
|
|
1498
|
+
b"session_id",
|
|
1499
|
+
"sql_command_result",
|
|
1500
|
+
b"sql_command_result",
|
|
1501
|
+
"streaming_query_command_result",
|
|
1502
|
+
b"streaming_query_command_result",
|
|
1503
|
+
"streaming_query_manager_command_result",
|
|
1504
|
+
b"streaming_query_manager_command_result",
|
|
1505
|
+
"write_stream_operation_start_result",
|
|
1506
|
+
b"write_stream_operation_start_result",
|
|
1507
|
+
],
|
|
1508
|
+
) -> None: ...
|
|
1509
|
+
def WhichOneof(
|
|
1510
|
+
self, oneof_group: typing_extensions.Literal["response_type", b"response_type"]
|
|
1511
|
+
) -> typing_extensions.Literal[
|
|
1512
|
+
"arrow_batch",
|
|
1513
|
+
"sql_command_result",
|
|
1514
|
+
"write_stream_operation_start_result",
|
|
1515
|
+
"streaming_query_command_result",
|
|
1516
|
+
"get_resources_command_result",
|
|
1517
|
+
"streaming_query_manager_command_result",
|
|
1518
|
+
"result_complete",
|
|
1519
|
+
"extension",
|
|
1520
|
+
] | None: ...
|
|
1521
|
+
|
|
1522
|
+
global___ExecutePlanResponse = ExecutePlanResponse
|
|
1523
|
+
|
|
1524
|
+
class KeyValue(google.protobuf.message.Message):
|
|
1525
|
+
"""The key-value pair for the config request and response."""
|
|
1526
|
+
|
|
1527
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1528
|
+
|
|
1529
|
+
KEY_FIELD_NUMBER: builtins.int
|
|
1530
|
+
VALUE_FIELD_NUMBER: builtins.int
|
|
1531
|
+
key: builtins.str
|
|
1532
|
+
"""(Required) The key."""
|
|
1533
|
+
value: builtins.str
|
|
1534
|
+
"""(Optional) The value."""
|
|
1535
|
+
def __init__(
|
|
1536
|
+
self,
|
|
1537
|
+
*,
|
|
1538
|
+
key: builtins.str = ...,
|
|
1539
|
+
value: builtins.str | None = ...,
|
|
1540
|
+
) -> None: ...
|
|
1541
|
+
def HasField(
|
|
1542
|
+
self, field_name: typing_extensions.Literal["_value", b"_value", "value", b"value"]
|
|
1543
|
+
) -> builtins.bool: ...
|
|
1544
|
+
def ClearField(
|
|
1545
|
+
self,
|
|
1546
|
+
field_name: typing_extensions.Literal[
|
|
1547
|
+
"_value", b"_value", "key", b"key", "value", b"value"
|
|
1548
|
+
],
|
|
1549
|
+
) -> None: ...
|
|
1550
|
+
def WhichOneof(
|
|
1551
|
+
self, oneof_group: typing_extensions.Literal["_value", b"_value"]
|
|
1552
|
+
) -> typing_extensions.Literal["value"] | None: ...
|
|
1553
|
+
|
|
1554
|
+
global___KeyValue = KeyValue
|
|
1555
|
+
|
|
1556
|
+
class ConfigRequest(google.protobuf.message.Message):
|
|
1557
|
+
"""Request to update or fetch the configurations."""
|
|
1558
|
+
|
|
1559
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1560
|
+
|
|
1561
|
+
class Operation(google.protobuf.message.Message):
|
|
1562
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1563
|
+
|
|
1564
|
+
SET_FIELD_NUMBER: builtins.int
|
|
1565
|
+
GET_FIELD_NUMBER: builtins.int
|
|
1566
|
+
GET_WITH_DEFAULT_FIELD_NUMBER: builtins.int
|
|
1567
|
+
GET_OPTION_FIELD_NUMBER: builtins.int
|
|
1568
|
+
GET_ALL_FIELD_NUMBER: builtins.int
|
|
1569
|
+
UNSET_FIELD_NUMBER: builtins.int
|
|
1570
|
+
IS_MODIFIABLE_FIELD_NUMBER: builtins.int
|
|
1571
|
+
@property
|
|
1572
|
+
def set(self) -> global___ConfigRequest.Set: ...
|
|
1573
|
+
@property
|
|
1574
|
+
def get(self) -> global___ConfigRequest.Get: ...
|
|
1575
|
+
@property
|
|
1576
|
+
def get_with_default(self) -> global___ConfigRequest.GetWithDefault: ...
|
|
1577
|
+
@property
|
|
1578
|
+
def get_option(self) -> global___ConfigRequest.GetOption: ...
|
|
1579
|
+
@property
|
|
1580
|
+
def get_all(self) -> global___ConfigRequest.GetAll: ...
|
|
1581
|
+
@property
|
|
1582
|
+
def unset(self) -> global___ConfigRequest.Unset: ...
|
|
1583
|
+
@property
|
|
1584
|
+
def is_modifiable(self) -> global___ConfigRequest.IsModifiable: ...
|
|
1585
|
+
def __init__(
|
|
1586
|
+
self,
|
|
1587
|
+
*,
|
|
1588
|
+
set: global___ConfigRequest.Set | None = ...,
|
|
1589
|
+
get: global___ConfigRequest.Get | None = ...,
|
|
1590
|
+
get_with_default: global___ConfigRequest.GetWithDefault | None = ...,
|
|
1591
|
+
get_option: global___ConfigRequest.GetOption | None = ...,
|
|
1592
|
+
get_all: global___ConfigRequest.GetAll | None = ...,
|
|
1593
|
+
unset: global___ConfigRequest.Unset | None = ...,
|
|
1594
|
+
is_modifiable: global___ConfigRequest.IsModifiable | None = ...,
|
|
1595
|
+
) -> None: ...
|
|
1596
|
+
def HasField(
|
|
1597
|
+
self,
|
|
1598
|
+
field_name: typing_extensions.Literal[
|
|
1599
|
+
"get",
|
|
1600
|
+
b"get",
|
|
1601
|
+
"get_all",
|
|
1602
|
+
b"get_all",
|
|
1603
|
+
"get_option",
|
|
1604
|
+
b"get_option",
|
|
1605
|
+
"get_with_default",
|
|
1606
|
+
b"get_with_default",
|
|
1607
|
+
"is_modifiable",
|
|
1608
|
+
b"is_modifiable",
|
|
1609
|
+
"op_type",
|
|
1610
|
+
b"op_type",
|
|
1611
|
+
"set",
|
|
1612
|
+
b"set",
|
|
1613
|
+
"unset",
|
|
1614
|
+
b"unset",
|
|
1615
|
+
],
|
|
1616
|
+
) -> builtins.bool: ...
|
|
1617
|
+
def ClearField(
|
|
1618
|
+
self,
|
|
1619
|
+
field_name: typing_extensions.Literal[
|
|
1620
|
+
"get",
|
|
1621
|
+
b"get",
|
|
1622
|
+
"get_all",
|
|
1623
|
+
b"get_all",
|
|
1624
|
+
"get_option",
|
|
1625
|
+
b"get_option",
|
|
1626
|
+
"get_with_default",
|
|
1627
|
+
b"get_with_default",
|
|
1628
|
+
"is_modifiable",
|
|
1629
|
+
b"is_modifiable",
|
|
1630
|
+
"op_type",
|
|
1631
|
+
b"op_type",
|
|
1632
|
+
"set",
|
|
1633
|
+
b"set",
|
|
1634
|
+
"unset",
|
|
1635
|
+
b"unset",
|
|
1636
|
+
],
|
|
1637
|
+
) -> None: ...
|
|
1638
|
+
def WhichOneof(
|
|
1639
|
+
self, oneof_group: typing_extensions.Literal["op_type", b"op_type"]
|
|
1640
|
+
) -> typing_extensions.Literal[
|
|
1641
|
+
"set", "get", "get_with_default", "get_option", "get_all", "unset", "is_modifiable"
|
|
1642
|
+
] | None: ...
|
|
1643
|
+
|
|
1644
|
+
class Set(google.protobuf.message.Message):
|
|
1645
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1646
|
+
|
|
1647
|
+
PAIRS_FIELD_NUMBER: builtins.int
|
|
1648
|
+
@property
|
|
1649
|
+
def pairs(
|
|
1650
|
+
self,
|
|
1651
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]:
|
|
1652
|
+
"""(Required) The config key-value pairs to set."""
|
|
1653
|
+
def __init__(
|
|
1654
|
+
self,
|
|
1655
|
+
*,
|
|
1656
|
+
pairs: collections.abc.Iterable[global___KeyValue] | None = ...,
|
|
1657
|
+
) -> None: ...
|
|
1658
|
+
def ClearField(self, field_name: typing_extensions.Literal["pairs", b"pairs"]) -> None: ...
|
|
1659
|
+
|
|
1660
|
+
class Get(google.protobuf.message.Message):
|
|
1661
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1662
|
+
|
|
1663
|
+
KEYS_FIELD_NUMBER: builtins.int
|
|
1664
|
+
@property
|
|
1665
|
+
def keys(
|
|
1666
|
+
self,
|
|
1667
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
1668
|
+
"""(Required) The config keys to get."""
|
|
1669
|
+
def __init__(
|
|
1670
|
+
self,
|
|
1671
|
+
*,
|
|
1672
|
+
keys: collections.abc.Iterable[builtins.str] | None = ...,
|
|
1673
|
+
) -> None: ...
|
|
1674
|
+
def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ...
|
|
1675
|
+
|
|
1676
|
+
class GetWithDefault(google.protobuf.message.Message):
|
|
1677
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1678
|
+
|
|
1679
|
+
PAIRS_FIELD_NUMBER: builtins.int
|
|
1680
|
+
@property
|
|
1681
|
+
def pairs(
|
|
1682
|
+
self,
|
|
1683
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]:
|
|
1684
|
+
"""(Required) The config key-value paris to get. The value will be used as the default value."""
|
|
1685
|
+
def __init__(
|
|
1686
|
+
self,
|
|
1687
|
+
*,
|
|
1688
|
+
pairs: collections.abc.Iterable[global___KeyValue] | None = ...,
|
|
1689
|
+
) -> None: ...
|
|
1690
|
+
def ClearField(self, field_name: typing_extensions.Literal["pairs", b"pairs"]) -> None: ...
|
|
1691
|
+
|
|
1692
|
+
class GetOption(google.protobuf.message.Message):
|
|
1693
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1694
|
+
|
|
1695
|
+
KEYS_FIELD_NUMBER: builtins.int
|
|
1696
|
+
@property
|
|
1697
|
+
def keys(
|
|
1698
|
+
self,
|
|
1699
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
1700
|
+
"""(Required) The config keys to get optionally."""
|
|
1701
|
+
def __init__(
|
|
1702
|
+
self,
|
|
1703
|
+
*,
|
|
1704
|
+
keys: collections.abc.Iterable[builtins.str] | None = ...,
|
|
1705
|
+
) -> None: ...
|
|
1706
|
+
def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ...
|
|
1707
|
+
|
|
1708
|
+
class GetAll(google.protobuf.message.Message):
|
|
1709
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1710
|
+
|
|
1711
|
+
PREFIX_FIELD_NUMBER: builtins.int
|
|
1712
|
+
prefix: builtins.str
|
|
1713
|
+
"""(Optional) The prefix of the config key to get."""
|
|
1714
|
+
def __init__(
|
|
1715
|
+
self,
|
|
1716
|
+
*,
|
|
1717
|
+
prefix: builtins.str | None = ...,
|
|
1718
|
+
) -> None: ...
|
|
1719
|
+
def HasField(
|
|
1720
|
+
self, field_name: typing_extensions.Literal["_prefix", b"_prefix", "prefix", b"prefix"]
|
|
1721
|
+
) -> builtins.bool: ...
|
|
1722
|
+
def ClearField(
|
|
1723
|
+
self, field_name: typing_extensions.Literal["_prefix", b"_prefix", "prefix", b"prefix"]
|
|
1724
|
+
) -> None: ...
|
|
1725
|
+
def WhichOneof(
|
|
1726
|
+
self, oneof_group: typing_extensions.Literal["_prefix", b"_prefix"]
|
|
1727
|
+
) -> typing_extensions.Literal["prefix"] | None: ...
|
|
1728
|
+
|
|
1729
|
+
class Unset(google.protobuf.message.Message):
|
|
1730
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1731
|
+
|
|
1732
|
+
KEYS_FIELD_NUMBER: builtins.int
|
|
1733
|
+
@property
|
|
1734
|
+
def keys(
|
|
1735
|
+
self,
|
|
1736
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
1737
|
+
"""(Required) The config keys to unset."""
|
|
1738
|
+
def __init__(
|
|
1739
|
+
self,
|
|
1740
|
+
*,
|
|
1741
|
+
keys: collections.abc.Iterable[builtins.str] | None = ...,
|
|
1742
|
+
) -> None: ...
|
|
1743
|
+
def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ...
|
|
1744
|
+
|
|
1745
|
+
class IsModifiable(google.protobuf.message.Message):
|
|
1746
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1747
|
+
|
|
1748
|
+
KEYS_FIELD_NUMBER: builtins.int
|
|
1749
|
+
@property
|
|
1750
|
+
def keys(
|
|
1751
|
+
self,
|
|
1752
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
1753
|
+
"""(Required) The config keys to check the config is modifiable."""
|
|
1754
|
+
def __init__(
|
|
1755
|
+
self,
|
|
1756
|
+
*,
|
|
1757
|
+
keys: collections.abc.Iterable[builtins.str] | None = ...,
|
|
1758
|
+
) -> None: ...
|
|
1759
|
+
def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys"]) -> None: ...
|
|
1760
|
+
|
|
1761
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
1762
|
+
USER_CONTEXT_FIELD_NUMBER: builtins.int
|
|
1763
|
+
OPERATION_FIELD_NUMBER: builtins.int
|
|
1764
|
+
CLIENT_TYPE_FIELD_NUMBER: builtins.int
|
|
1765
|
+
session_id: builtins.str
|
|
1766
|
+
"""(Required)
|
|
1767
|
+
|
|
1768
|
+
The session_id specifies a spark session for a user id (which is specified
|
|
1769
|
+
by user_context.user_id). The session_id is set by the client to be able to
|
|
1770
|
+
collate streaming responses from different queries within the dedicated session.
|
|
1771
|
+
The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
|
|
1772
|
+
"""
|
|
1773
|
+
@property
|
|
1774
|
+
def user_context(self) -> global___UserContext:
|
|
1775
|
+
"""(Required) User context"""
|
|
1776
|
+
@property
|
|
1777
|
+
def operation(self) -> global___ConfigRequest.Operation:
|
|
1778
|
+
"""(Required) The operation for the config."""
|
|
1779
|
+
client_type: builtins.str
|
|
1780
|
+
"""Provides optional information about the client sending the request. This field
|
|
1781
|
+
can be used for language or version specific information and is only intended for
|
|
1782
|
+
logging purposes and will not be interpreted by the server.
|
|
1783
|
+
"""
|
|
1784
|
+
def __init__(
|
|
1785
|
+
self,
|
|
1786
|
+
*,
|
|
1787
|
+
session_id: builtins.str = ...,
|
|
1788
|
+
user_context: global___UserContext | None = ...,
|
|
1789
|
+
operation: global___ConfigRequest.Operation | None = ...,
|
|
1790
|
+
client_type: builtins.str | None = ...,
|
|
1791
|
+
) -> None: ...
|
|
1792
|
+
def HasField(
|
|
1793
|
+
self,
|
|
1794
|
+
field_name: typing_extensions.Literal[
|
|
1795
|
+
"_client_type",
|
|
1796
|
+
b"_client_type",
|
|
1797
|
+
"client_type",
|
|
1798
|
+
b"client_type",
|
|
1799
|
+
"operation",
|
|
1800
|
+
b"operation",
|
|
1801
|
+
"user_context",
|
|
1802
|
+
b"user_context",
|
|
1803
|
+
],
|
|
1804
|
+
) -> builtins.bool: ...
|
|
1805
|
+
def ClearField(
|
|
1806
|
+
self,
|
|
1807
|
+
field_name: typing_extensions.Literal[
|
|
1808
|
+
"_client_type",
|
|
1809
|
+
b"_client_type",
|
|
1810
|
+
"client_type",
|
|
1811
|
+
b"client_type",
|
|
1812
|
+
"operation",
|
|
1813
|
+
b"operation",
|
|
1814
|
+
"session_id",
|
|
1815
|
+
b"session_id",
|
|
1816
|
+
"user_context",
|
|
1817
|
+
b"user_context",
|
|
1818
|
+
],
|
|
1819
|
+
) -> None: ...
|
|
1820
|
+
def WhichOneof(
|
|
1821
|
+
self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
|
|
1822
|
+
) -> typing_extensions.Literal["client_type"] | None: ...
|
|
1823
|
+
|
|
1824
|
+
global___ConfigRequest = ConfigRequest
|
|
1825
|
+
|
|
1826
|
+
class ConfigResponse(google.protobuf.message.Message):
|
|
1827
|
+
"""Response to the config request."""
|
|
1828
|
+
|
|
1829
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1830
|
+
|
|
1831
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
1832
|
+
PAIRS_FIELD_NUMBER: builtins.int
|
|
1833
|
+
WARNINGS_FIELD_NUMBER: builtins.int
|
|
1834
|
+
session_id: builtins.str
|
|
1835
|
+
@property
|
|
1836
|
+
def pairs(
|
|
1837
|
+
self,
|
|
1838
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]:
|
|
1839
|
+
"""(Optional) The result key-value pairs.
|
|
1840
|
+
|
|
1841
|
+
Available when the operation is 'Get', 'GetWithDefault', 'GetOption', 'GetAll'.
|
|
1842
|
+
Also available for the operation 'IsModifiable' with boolean string "true" and "false".
|
|
1843
|
+
"""
|
|
1844
|
+
@property
|
|
1845
|
+
def warnings(
|
|
1846
|
+
self,
|
|
1847
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
1848
|
+
"""(Optional)
|
|
1849
|
+
|
|
1850
|
+
Warning messages for deprecated or unsupported configurations.
|
|
1851
|
+
"""
|
|
1852
|
+
def __init__(
|
|
1853
|
+
self,
|
|
1854
|
+
*,
|
|
1855
|
+
session_id: builtins.str = ...,
|
|
1856
|
+
pairs: collections.abc.Iterable[global___KeyValue] | None = ...,
|
|
1857
|
+
warnings: collections.abc.Iterable[builtins.str] | None = ...,
|
|
1858
|
+
) -> None: ...
|
|
1859
|
+
def ClearField(
|
|
1860
|
+
self,
|
|
1861
|
+
field_name: typing_extensions.Literal[
|
|
1862
|
+
"pairs", b"pairs", "session_id", b"session_id", "warnings", b"warnings"
|
|
1863
|
+
],
|
|
1864
|
+
) -> None: ...
|
|
1865
|
+
|
|
1866
|
+
global___ConfigResponse = ConfigResponse
|
|
1867
|
+
|
|
1868
|
+
class AddArtifactsRequest(google.protobuf.message.Message):
|
|
1869
|
+
"""Request to transfer client-local artifacts."""
|
|
1870
|
+
|
|
1871
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1872
|
+
|
|
1873
|
+
class ArtifactChunk(google.protobuf.message.Message):
|
|
1874
|
+
"""A chunk of an Artifact."""
|
|
1875
|
+
|
|
1876
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1877
|
+
|
|
1878
|
+
DATA_FIELD_NUMBER: builtins.int
|
|
1879
|
+
CRC_FIELD_NUMBER: builtins.int
|
|
1880
|
+
data: builtins.bytes
|
|
1881
|
+
"""Data chunk."""
|
|
1882
|
+
crc: builtins.int
|
|
1883
|
+
"""CRC to allow server to verify integrity of the chunk."""
|
|
1884
|
+
def __init__(
|
|
1885
|
+
self,
|
|
1886
|
+
*,
|
|
1887
|
+
data: builtins.bytes = ...,
|
|
1888
|
+
crc: builtins.int = ...,
|
|
1889
|
+
) -> None: ...
|
|
1890
|
+
def ClearField(
|
|
1891
|
+
self, field_name: typing_extensions.Literal["crc", b"crc", "data", b"data"]
|
|
1892
|
+
) -> None: ...
|
|
1893
|
+
|
|
1894
|
+
class SingleChunkArtifact(google.protobuf.message.Message):
|
|
1895
|
+
"""An artifact that is contained in a single `ArtifactChunk`.
|
|
1896
|
+
Generally, this message represents tiny artifacts such as REPL-generated class files.
|
|
1897
|
+
"""
|
|
1898
|
+
|
|
1899
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1900
|
+
|
|
1901
|
+
NAME_FIELD_NUMBER: builtins.int
|
|
1902
|
+
DATA_FIELD_NUMBER: builtins.int
|
|
1903
|
+
name: builtins.str
|
|
1904
|
+
"""The name of the artifact is expected in the form of a "Relative Path" that is made up of a
|
|
1905
|
+
sequence of directories and the final file element.
|
|
1906
|
+
Examples of "Relative Path"s: "jars/test.jar", "classes/xyz.class", "abc.xyz", "a/b/X.jar".
|
|
1907
|
+
The server is expected to maintain the hierarchy of files as defined by their name. (i.e
|
|
1908
|
+
The relative path of the file on the server's filesystem will be the same as the name of
|
|
1909
|
+
the provided artifact)
|
|
1910
|
+
"""
|
|
1911
|
+
@property
|
|
1912
|
+
def data(self) -> global___AddArtifactsRequest.ArtifactChunk:
|
|
1913
|
+
"""A single data chunk."""
|
|
1914
|
+
def __init__(
|
|
1915
|
+
self,
|
|
1916
|
+
*,
|
|
1917
|
+
name: builtins.str = ...,
|
|
1918
|
+
data: global___AddArtifactsRequest.ArtifactChunk | None = ...,
|
|
1919
|
+
) -> None: ...
|
|
1920
|
+
def HasField(
|
|
1921
|
+
self, field_name: typing_extensions.Literal["data", b"data"]
|
|
1922
|
+
) -> builtins.bool: ...
|
|
1923
|
+
def ClearField(
|
|
1924
|
+
self, field_name: typing_extensions.Literal["data", b"data", "name", b"name"]
|
|
1925
|
+
) -> None: ...
|
|
1926
|
+
|
|
1927
|
+
class Batch(google.protobuf.message.Message):
|
|
1928
|
+
"""A number of `SingleChunkArtifact` batched into a single RPC."""
|
|
1929
|
+
|
|
1930
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1931
|
+
|
|
1932
|
+
ARTIFACTS_FIELD_NUMBER: builtins.int
|
|
1933
|
+
@property
|
|
1934
|
+
def artifacts(
|
|
1935
|
+
self,
|
|
1936
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
|
|
1937
|
+
global___AddArtifactsRequest.SingleChunkArtifact
|
|
1938
|
+
]: ...
|
|
1939
|
+
def __init__(
|
|
1940
|
+
self,
|
|
1941
|
+
*,
|
|
1942
|
+
artifacts: collections.abc.Iterable[global___AddArtifactsRequest.SingleChunkArtifact]
|
|
1943
|
+
| None = ...,
|
|
1944
|
+
) -> None: ...
|
|
1945
|
+
def ClearField(
|
|
1946
|
+
self, field_name: typing_extensions.Literal["artifacts", b"artifacts"]
|
|
1947
|
+
) -> None: ...
|
|
1948
|
+
|
|
1949
|
+
class BeginChunkedArtifact(google.protobuf.message.Message):
|
|
1950
|
+
"""Signals the beginning/start of a chunked artifact.
|
|
1951
|
+
A large artifact is transferred through a payload of `BeginChunkedArtifact` followed by a
|
|
1952
|
+
sequence of `ArtifactChunk`s.
|
|
1953
|
+
"""
|
|
1954
|
+
|
|
1955
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
1956
|
+
|
|
1957
|
+
NAME_FIELD_NUMBER: builtins.int
|
|
1958
|
+
TOTAL_BYTES_FIELD_NUMBER: builtins.int
|
|
1959
|
+
NUM_CHUNKS_FIELD_NUMBER: builtins.int
|
|
1960
|
+
INITIAL_CHUNK_FIELD_NUMBER: builtins.int
|
|
1961
|
+
name: builtins.str
|
|
1962
|
+
"""Name of the artifact undergoing chunking. Follows the same conventions as the `name` in
|
|
1963
|
+
the `Artifact` message.
|
|
1964
|
+
"""
|
|
1965
|
+
total_bytes: builtins.int
|
|
1966
|
+
"""Total size of the artifact in bytes."""
|
|
1967
|
+
num_chunks: builtins.int
|
|
1968
|
+
"""Number of chunks the artifact is split into.
|
|
1969
|
+
This includes the `initial_chunk`.
|
|
1970
|
+
"""
|
|
1971
|
+
@property
|
|
1972
|
+
def initial_chunk(self) -> global___AddArtifactsRequest.ArtifactChunk:
|
|
1973
|
+
"""The first/initial chunk."""
|
|
1974
|
+
def __init__(
|
|
1975
|
+
self,
|
|
1976
|
+
*,
|
|
1977
|
+
name: builtins.str = ...,
|
|
1978
|
+
total_bytes: builtins.int = ...,
|
|
1979
|
+
num_chunks: builtins.int = ...,
|
|
1980
|
+
initial_chunk: global___AddArtifactsRequest.ArtifactChunk | None = ...,
|
|
1981
|
+
) -> None: ...
|
|
1982
|
+
def HasField(
|
|
1983
|
+
self, field_name: typing_extensions.Literal["initial_chunk", b"initial_chunk"]
|
|
1984
|
+
) -> builtins.bool: ...
|
|
1985
|
+
def ClearField(
|
|
1986
|
+
self,
|
|
1987
|
+
field_name: typing_extensions.Literal[
|
|
1988
|
+
"initial_chunk",
|
|
1989
|
+
b"initial_chunk",
|
|
1990
|
+
"name",
|
|
1991
|
+
b"name",
|
|
1992
|
+
"num_chunks",
|
|
1993
|
+
b"num_chunks",
|
|
1994
|
+
"total_bytes",
|
|
1995
|
+
b"total_bytes",
|
|
1996
|
+
],
|
|
1997
|
+
) -> None: ...
|
|
1998
|
+
|
|
1999
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2000
|
+
USER_CONTEXT_FIELD_NUMBER: builtins.int
|
|
2001
|
+
CLIENT_TYPE_FIELD_NUMBER: builtins.int
|
|
2002
|
+
BATCH_FIELD_NUMBER: builtins.int
|
|
2003
|
+
BEGIN_CHUNK_FIELD_NUMBER: builtins.int
|
|
2004
|
+
CHUNK_FIELD_NUMBER: builtins.int
|
|
2005
|
+
session_id: builtins.str
|
|
2006
|
+
"""(Required)
|
|
2007
|
+
|
|
2008
|
+
The session_id specifies a spark session for a user id (which is specified
|
|
2009
|
+
by user_context.user_id). The session_id is set by the client to be able to
|
|
2010
|
+
collate streaming responses from different queries within the dedicated session.
|
|
2011
|
+
The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
|
|
2012
|
+
"""
|
|
2013
|
+
@property
|
|
2014
|
+
def user_context(self) -> global___UserContext:
|
|
2015
|
+
"""User context"""
|
|
2016
|
+
client_type: builtins.str
|
|
2017
|
+
"""Provides optional information about the client sending the request. This field
|
|
2018
|
+
can be used for language or version specific information and is only intended for
|
|
2019
|
+
logging purposes and will not be interpreted by the server.
|
|
2020
|
+
"""
|
|
2021
|
+
@property
|
|
2022
|
+
def batch(self) -> global___AddArtifactsRequest.Batch: ...
|
|
2023
|
+
@property
|
|
2024
|
+
def begin_chunk(self) -> global___AddArtifactsRequest.BeginChunkedArtifact:
|
|
2025
|
+
"""The metadata and the initial chunk of a large artifact chunked into multiple requests.
|
|
2026
|
+
The server side is notified about the total size of the large artifact as well as the
|
|
2027
|
+
number of chunks to expect.
|
|
2028
|
+
"""
|
|
2029
|
+
@property
|
|
2030
|
+
def chunk(self) -> global___AddArtifactsRequest.ArtifactChunk:
|
|
2031
|
+
"""A chunk of an artifact excluding metadata. This can be any chunk of a large artifact
|
|
2032
|
+
excluding the first chunk (which is included in `BeginChunkedArtifact`).
|
|
2033
|
+
"""
|
|
2034
|
+
def __init__(
|
|
2035
|
+
self,
|
|
2036
|
+
*,
|
|
2037
|
+
session_id: builtins.str = ...,
|
|
2038
|
+
user_context: global___UserContext | None = ...,
|
|
2039
|
+
client_type: builtins.str | None = ...,
|
|
2040
|
+
batch: global___AddArtifactsRequest.Batch | None = ...,
|
|
2041
|
+
begin_chunk: global___AddArtifactsRequest.BeginChunkedArtifact | None = ...,
|
|
2042
|
+
chunk: global___AddArtifactsRequest.ArtifactChunk | None = ...,
|
|
2043
|
+
) -> None: ...
|
|
2044
|
+
def HasField(
|
|
2045
|
+
self,
|
|
2046
|
+
field_name: typing_extensions.Literal[
|
|
2047
|
+
"_client_type",
|
|
2048
|
+
b"_client_type",
|
|
2049
|
+
"batch",
|
|
2050
|
+
b"batch",
|
|
2051
|
+
"begin_chunk",
|
|
2052
|
+
b"begin_chunk",
|
|
2053
|
+
"chunk",
|
|
2054
|
+
b"chunk",
|
|
2055
|
+
"client_type",
|
|
2056
|
+
b"client_type",
|
|
2057
|
+
"payload",
|
|
2058
|
+
b"payload",
|
|
2059
|
+
"user_context",
|
|
2060
|
+
b"user_context",
|
|
2061
|
+
],
|
|
2062
|
+
) -> builtins.bool: ...
|
|
2063
|
+
def ClearField(
|
|
2064
|
+
self,
|
|
2065
|
+
field_name: typing_extensions.Literal[
|
|
2066
|
+
"_client_type",
|
|
2067
|
+
b"_client_type",
|
|
2068
|
+
"batch",
|
|
2069
|
+
b"batch",
|
|
2070
|
+
"begin_chunk",
|
|
2071
|
+
b"begin_chunk",
|
|
2072
|
+
"chunk",
|
|
2073
|
+
b"chunk",
|
|
2074
|
+
"client_type",
|
|
2075
|
+
b"client_type",
|
|
2076
|
+
"payload",
|
|
2077
|
+
b"payload",
|
|
2078
|
+
"session_id",
|
|
2079
|
+
b"session_id",
|
|
2080
|
+
"user_context",
|
|
2081
|
+
b"user_context",
|
|
2082
|
+
],
|
|
2083
|
+
) -> None: ...
|
|
2084
|
+
@typing.overload
|
|
2085
|
+
def WhichOneof(
|
|
2086
|
+
self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
|
|
2087
|
+
) -> typing_extensions.Literal["client_type"] | None: ...
|
|
2088
|
+
@typing.overload
|
|
2089
|
+
def WhichOneof(
|
|
2090
|
+
self, oneof_group: typing_extensions.Literal["payload", b"payload"]
|
|
2091
|
+
) -> typing_extensions.Literal["batch", "begin_chunk", "chunk"] | None: ...
|
|
2092
|
+
|
|
2093
|
+
global___AddArtifactsRequest = AddArtifactsRequest
|
|
2094
|
+
|
|
2095
|
+
class AddArtifactsResponse(google.protobuf.message.Message):
|
|
2096
|
+
"""Response to adding an artifact. Contains relevant metadata to verify successful transfer of
|
|
2097
|
+
artifact(s).
|
|
2098
|
+
"""
|
|
2099
|
+
|
|
2100
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2101
|
+
|
|
2102
|
+
class ArtifactSummary(google.protobuf.message.Message):
|
|
2103
|
+
"""Metadata of an artifact."""
|
|
2104
|
+
|
|
2105
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2106
|
+
|
|
2107
|
+
NAME_FIELD_NUMBER: builtins.int
|
|
2108
|
+
IS_CRC_SUCCESSFUL_FIELD_NUMBER: builtins.int
|
|
2109
|
+
name: builtins.str
|
|
2110
|
+
is_crc_successful: builtins.bool
|
|
2111
|
+
"""Whether the CRC (Cyclic Redundancy Check) is successful on server verification.
|
|
2112
|
+
The server discards any artifact that fails the CRC.
|
|
2113
|
+
If false, the client may choose to resend the artifact specified by `name`.
|
|
2114
|
+
"""
|
|
2115
|
+
def __init__(
|
|
2116
|
+
self,
|
|
2117
|
+
*,
|
|
2118
|
+
name: builtins.str = ...,
|
|
2119
|
+
is_crc_successful: builtins.bool = ...,
|
|
2120
|
+
) -> None: ...
|
|
2121
|
+
def ClearField(
|
|
2122
|
+
self,
|
|
2123
|
+
field_name: typing_extensions.Literal[
|
|
2124
|
+
"is_crc_successful", b"is_crc_successful", "name", b"name"
|
|
2125
|
+
],
|
|
2126
|
+
) -> None: ...
|
|
2127
|
+
|
|
2128
|
+
ARTIFACTS_FIELD_NUMBER: builtins.int
|
|
2129
|
+
@property
|
|
2130
|
+
def artifacts(
|
|
2131
|
+
self,
|
|
2132
|
+
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
|
|
2133
|
+
global___AddArtifactsResponse.ArtifactSummary
|
|
2134
|
+
]:
|
|
2135
|
+
"""The list of artifact(s) seen by the server."""
|
|
2136
|
+
def __init__(
|
|
2137
|
+
self,
|
|
2138
|
+
*,
|
|
2139
|
+
artifacts: collections.abc.Iterable[global___AddArtifactsResponse.ArtifactSummary]
|
|
2140
|
+
| None = ...,
|
|
2141
|
+
) -> None: ...
|
|
2142
|
+
def ClearField(
|
|
2143
|
+
self, field_name: typing_extensions.Literal["artifacts", b"artifacts"]
|
|
2144
|
+
) -> None: ...
|
|
2145
|
+
|
|
2146
|
+
global___AddArtifactsResponse = AddArtifactsResponse
|
|
2147
|
+
|
|
2148
|
+
class ArtifactStatusesRequest(google.protobuf.message.Message):
|
|
2149
|
+
"""Request to get current statuses of artifacts at the server side."""
|
|
2150
|
+
|
|
2151
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2152
|
+
|
|
2153
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2154
|
+
USER_CONTEXT_FIELD_NUMBER: builtins.int
|
|
2155
|
+
CLIENT_TYPE_FIELD_NUMBER: builtins.int
|
|
2156
|
+
NAMES_FIELD_NUMBER: builtins.int
|
|
2157
|
+
session_id: builtins.str
|
|
2158
|
+
"""(Required)
|
|
2159
|
+
|
|
2160
|
+
The session_id specifies a spark session for a user id (which is specified
|
|
2161
|
+
by user_context.user_id). The session_id is set by the client to be able to
|
|
2162
|
+
collate streaming responses from different queries within the dedicated session.
|
|
2163
|
+
The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
|
|
2164
|
+
"""
|
|
2165
|
+
@property
|
|
2166
|
+
def user_context(self) -> global___UserContext:
|
|
2167
|
+
"""User context"""
|
|
2168
|
+
client_type: builtins.str
|
|
2169
|
+
"""Provides optional information about the client sending the request. This field
|
|
2170
|
+
can be used for language or version specific information and is only intended for
|
|
2171
|
+
logging purposes and will not be interpreted by the server.
|
|
2172
|
+
"""
|
|
2173
|
+
@property
|
|
2174
|
+
def names(
|
|
2175
|
+
self,
|
|
2176
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
2177
|
+
"""The name of the artifact is expected in the form of a "Relative Path" that is made up of a
|
|
2178
|
+
sequence of directories and the final file element.
|
|
2179
|
+
Examples of "Relative Path"s: "jars/test.jar", "classes/xyz.class", "abc.xyz", "a/b/X.jar".
|
|
2180
|
+
The server is expected to maintain the hierarchy of files as defined by their name. (i.e
|
|
2181
|
+
The relative path of the file on the server's filesystem will be the same as the name of
|
|
2182
|
+
the provided artifact)
|
|
2183
|
+
"""
|
|
2184
|
+
def __init__(
|
|
2185
|
+
self,
|
|
2186
|
+
*,
|
|
2187
|
+
session_id: builtins.str = ...,
|
|
2188
|
+
user_context: global___UserContext | None = ...,
|
|
2189
|
+
client_type: builtins.str | None = ...,
|
|
2190
|
+
names: collections.abc.Iterable[builtins.str] | None = ...,
|
|
2191
|
+
) -> None: ...
|
|
2192
|
+
def HasField(
|
|
2193
|
+
self,
|
|
2194
|
+
field_name: typing_extensions.Literal[
|
|
2195
|
+
"_client_type",
|
|
2196
|
+
b"_client_type",
|
|
2197
|
+
"client_type",
|
|
2198
|
+
b"client_type",
|
|
2199
|
+
"user_context",
|
|
2200
|
+
b"user_context",
|
|
2201
|
+
],
|
|
2202
|
+
) -> builtins.bool: ...
|
|
2203
|
+
def ClearField(
|
|
2204
|
+
self,
|
|
2205
|
+
field_name: typing_extensions.Literal[
|
|
2206
|
+
"_client_type",
|
|
2207
|
+
b"_client_type",
|
|
2208
|
+
"client_type",
|
|
2209
|
+
b"client_type",
|
|
2210
|
+
"names",
|
|
2211
|
+
b"names",
|
|
2212
|
+
"session_id",
|
|
2213
|
+
b"session_id",
|
|
2214
|
+
"user_context",
|
|
2215
|
+
b"user_context",
|
|
2216
|
+
],
|
|
2217
|
+
) -> None: ...
|
|
2218
|
+
def WhichOneof(
|
|
2219
|
+
self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
|
|
2220
|
+
) -> typing_extensions.Literal["client_type"] | None: ...
|
|
2221
|
+
|
|
2222
|
+
global___ArtifactStatusesRequest = ArtifactStatusesRequest
|
|
2223
|
+
|
|
2224
|
+
class ArtifactStatusesResponse(google.protobuf.message.Message):
|
|
2225
|
+
"""Response to checking artifact statuses."""
|
|
2226
|
+
|
|
2227
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2228
|
+
|
|
2229
|
+
class ArtifactStatus(google.protobuf.message.Message):
|
|
2230
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2231
|
+
|
|
2232
|
+
EXISTS_FIELD_NUMBER: builtins.int
|
|
2233
|
+
exists: builtins.bool
|
|
2234
|
+
"""Exists or not particular artifact at the server."""
|
|
2235
|
+
def __init__(
|
|
2236
|
+
self,
|
|
2237
|
+
*,
|
|
2238
|
+
exists: builtins.bool = ...,
|
|
2239
|
+
) -> None: ...
|
|
2240
|
+
def ClearField(
|
|
2241
|
+
self, field_name: typing_extensions.Literal["exists", b"exists"]
|
|
2242
|
+
) -> None: ...
|
|
2243
|
+
|
|
2244
|
+
class StatusesEntry(google.protobuf.message.Message):
|
|
2245
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2246
|
+
|
|
2247
|
+
KEY_FIELD_NUMBER: builtins.int
|
|
2248
|
+
VALUE_FIELD_NUMBER: builtins.int
|
|
2249
|
+
key: builtins.str
|
|
2250
|
+
@property
|
|
2251
|
+
def value(self) -> global___ArtifactStatusesResponse.ArtifactStatus: ...
|
|
2252
|
+
def __init__(
|
|
2253
|
+
self,
|
|
2254
|
+
*,
|
|
2255
|
+
key: builtins.str = ...,
|
|
2256
|
+
value: global___ArtifactStatusesResponse.ArtifactStatus | None = ...,
|
|
2257
|
+
) -> None: ...
|
|
2258
|
+
def HasField(
|
|
2259
|
+
self, field_name: typing_extensions.Literal["value", b"value"]
|
|
2260
|
+
) -> builtins.bool: ...
|
|
2261
|
+
def ClearField(
|
|
2262
|
+
self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
|
|
2263
|
+
) -> None: ...
|
|
2264
|
+
|
|
2265
|
+
STATUSES_FIELD_NUMBER: builtins.int
|
|
2266
|
+
@property
|
|
2267
|
+
def statuses(
|
|
2268
|
+
self,
|
|
2269
|
+
) -> google.protobuf.internal.containers.MessageMap[
|
|
2270
|
+
builtins.str, global___ArtifactStatusesResponse.ArtifactStatus
|
|
2271
|
+
]:
|
|
2272
|
+
"""A map of artifact names to their statuses."""
|
|
2273
|
+
def __init__(
|
|
2274
|
+
self,
|
|
2275
|
+
*,
|
|
2276
|
+
statuses: collections.abc.Mapping[
|
|
2277
|
+
builtins.str, global___ArtifactStatusesResponse.ArtifactStatus
|
|
2278
|
+
]
|
|
2279
|
+
| None = ...,
|
|
2280
|
+
) -> None: ...
|
|
2281
|
+
def ClearField(
|
|
2282
|
+
self, field_name: typing_extensions.Literal["statuses", b"statuses"]
|
|
2283
|
+
) -> None: ...
|
|
2284
|
+
|
|
2285
|
+
global___ArtifactStatusesResponse = ArtifactStatusesResponse
|
|
2286
|
+
|
|
2287
|
+
class InterruptRequest(google.protobuf.message.Message):
|
|
2288
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2289
|
+
|
|
2290
|
+
class _InterruptType:
|
|
2291
|
+
ValueType = typing.NewType("ValueType", builtins.int)
|
|
2292
|
+
V: typing_extensions.TypeAlias = ValueType
|
|
2293
|
+
|
|
2294
|
+
class _InterruptTypeEnumTypeWrapper(
|
|
2295
|
+
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
|
|
2296
|
+
InterruptRequest._InterruptType.ValueType
|
|
2297
|
+
],
|
|
2298
|
+
builtins.type,
|
|
2299
|
+
): # noqa: F821
|
|
2300
|
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
|
2301
|
+
INTERRUPT_TYPE_UNSPECIFIED: InterruptRequest._InterruptType.ValueType # 0
|
|
2302
|
+
INTERRUPT_TYPE_ALL: InterruptRequest._InterruptType.ValueType # 1
|
|
2303
|
+
"""Interrupt all running executions within the session with the provided session_id."""
|
|
2304
|
+
INTERRUPT_TYPE_TAG: InterruptRequest._InterruptType.ValueType # 2
|
|
2305
|
+
"""Interrupt all running executions within the session with the provided operation_tag."""
|
|
2306
|
+
INTERRUPT_TYPE_OPERATION_ID: InterruptRequest._InterruptType.ValueType # 3
|
|
2307
|
+
"""Interrupt the running execution within the session with the provided operation_id."""
|
|
2308
|
+
|
|
2309
|
+
class InterruptType(_InterruptType, metaclass=_InterruptTypeEnumTypeWrapper): ...
|
|
2310
|
+
INTERRUPT_TYPE_UNSPECIFIED: InterruptRequest.InterruptType.ValueType # 0
|
|
2311
|
+
INTERRUPT_TYPE_ALL: InterruptRequest.InterruptType.ValueType # 1
|
|
2312
|
+
"""Interrupt all running executions within the session with the provided session_id."""
|
|
2313
|
+
INTERRUPT_TYPE_TAG: InterruptRequest.InterruptType.ValueType # 2
|
|
2314
|
+
"""Interrupt all running executions within the session with the provided operation_tag."""
|
|
2315
|
+
INTERRUPT_TYPE_OPERATION_ID: InterruptRequest.InterruptType.ValueType # 3
|
|
2316
|
+
"""Interrupt the running execution within the session with the provided operation_id."""
|
|
2317
|
+
|
|
2318
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2319
|
+
USER_CONTEXT_FIELD_NUMBER: builtins.int
|
|
2320
|
+
CLIENT_TYPE_FIELD_NUMBER: builtins.int
|
|
2321
|
+
INTERRUPT_TYPE_FIELD_NUMBER: builtins.int
|
|
2322
|
+
OPERATION_TAG_FIELD_NUMBER: builtins.int
|
|
2323
|
+
OPERATION_ID_FIELD_NUMBER: builtins.int
|
|
2324
|
+
session_id: builtins.str
|
|
2325
|
+
"""(Required)
|
|
2326
|
+
|
|
2327
|
+
The session_id specifies a spark session for a user id (which is specified
|
|
2328
|
+
by user_context.user_id). The session_id is set by the client to be able to
|
|
2329
|
+
collate streaming responses from different queries within the dedicated session.
|
|
2330
|
+
The id should be an UUID string of the format `00112233-4455-6677-8899-aabbccddeeff`
|
|
2331
|
+
"""
|
|
2332
|
+
@property
|
|
2333
|
+
def user_context(self) -> global___UserContext:
|
|
2334
|
+
"""(Required) User context"""
|
|
2335
|
+
client_type: builtins.str
|
|
2336
|
+
"""Provides optional information about the client sending the request. This field
|
|
2337
|
+
can be used for language or version specific information and is only intended for
|
|
2338
|
+
logging purposes and will not be interpreted by the server.
|
|
2339
|
+
"""
|
|
2340
|
+
interrupt_type: global___InterruptRequest.InterruptType.ValueType
|
|
2341
|
+
"""(Required) The type of interrupt to execute."""
|
|
2342
|
+
operation_tag: builtins.str
|
|
2343
|
+
"""if interrupt_tag == INTERRUPT_TYPE_TAG, interrupt operation with this tag."""
|
|
2344
|
+
operation_id: builtins.str
|
|
2345
|
+
"""if interrupt_tag == INTERRUPT_TYPE_OPERATION_ID, interrupt operation with this operation_id."""
|
|
2346
|
+
def __init__(
|
|
2347
|
+
self,
|
|
2348
|
+
*,
|
|
2349
|
+
session_id: builtins.str = ...,
|
|
2350
|
+
user_context: global___UserContext | None = ...,
|
|
2351
|
+
client_type: builtins.str | None = ...,
|
|
2352
|
+
interrupt_type: global___InterruptRequest.InterruptType.ValueType = ...,
|
|
2353
|
+
operation_tag: builtins.str = ...,
|
|
2354
|
+
operation_id: builtins.str = ...,
|
|
2355
|
+
) -> None: ...
|
|
2356
|
+
def HasField(
|
|
2357
|
+
self,
|
|
2358
|
+
field_name: typing_extensions.Literal[
|
|
2359
|
+
"_client_type",
|
|
2360
|
+
b"_client_type",
|
|
2361
|
+
"client_type",
|
|
2362
|
+
b"client_type",
|
|
2363
|
+
"interrupt",
|
|
2364
|
+
b"interrupt",
|
|
2365
|
+
"operation_id",
|
|
2366
|
+
b"operation_id",
|
|
2367
|
+
"operation_tag",
|
|
2368
|
+
b"operation_tag",
|
|
2369
|
+
"user_context",
|
|
2370
|
+
b"user_context",
|
|
2371
|
+
],
|
|
2372
|
+
) -> builtins.bool: ...
|
|
2373
|
+
def ClearField(
|
|
2374
|
+
self,
|
|
2375
|
+
field_name: typing_extensions.Literal[
|
|
2376
|
+
"_client_type",
|
|
2377
|
+
b"_client_type",
|
|
2378
|
+
"client_type",
|
|
2379
|
+
b"client_type",
|
|
2380
|
+
"interrupt",
|
|
2381
|
+
b"interrupt",
|
|
2382
|
+
"interrupt_type",
|
|
2383
|
+
b"interrupt_type",
|
|
2384
|
+
"operation_id",
|
|
2385
|
+
b"operation_id",
|
|
2386
|
+
"operation_tag",
|
|
2387
|
+
b"operation_tag",
|
|
2388
|
+
"session_id",
|
|
2389
|
+
b"session_id",
|
|
2390
|
+
"user_context",
|
|
2391
|
+
b"user_context",
|
|
2392
|
+
],
|
|
2393
|
+
) -> None: ...
|
|
2394
|
+
@typing.overload
|
|
2395
|
+
def WhichOneof(
|
|
2396
|
+
self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
|
|
2397
|
+
) -> typing_extensions.Literal["client_type"] | None: ...
|
|
2398
|
+
@typing.overload
|
|
2399
|
+
def WhichOneof(
|
|
2400
|
+
self, oneof_group: typing_extensions.Literal["interrupt", b"interrupt"]
|
|
2401
|
+
) -> typing_extensions.Literal["operation_tag", "operation_id"] | None: ...
|
|
2402
|
+
|
|
2403
|
+
global___InterruptRequest = InterruptRequest
|
|
2404
|
+
|
|
2405
|
+
class InterruptResponse(google.protobuf.message.Message):
|
|
2406
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2407
|
+
|
|
2408
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2409
|
+
INTERRUPTED_IDS_FIELD_NUMBER: builtins.int
|
|
2410
|
+
session_id: builtins.str
|
|
2411
|
+
"""Session id in which the interrupt was running."""
|
|
2412
|
+
@property
|
|
2413
|
+
def interrupted_ids(
|
|
2414
|
+
self,
|
|
2415
|
+
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
|
2416
|
+
"""Operation ids of the executions which were interrupted."""
|
|
2417
|
+
def __init__(
|
|
2418
|
+
self,
|
|
2419
|
+
*,
|
|
2420
|
+
session_id: builtins.str = ...,
|
|
2421
|
+
interrupted_ids: collections.abc.Iterable[builtins.str] | None = ...,
|
|
2422
|
+
) -> None: ...
|
|
2423
|
+
def ClearField(
|
|
2424
|
+
self,
|
|
2425
|
+
field_name: typing_extensions.Literal[
|
|
2426
|
+
"interrupted_ids", b"interrupted_ids", "session_id", b"session_id"
|
|
2427
|
+
],
|
|
2428
|
+
) -> None: ...
|
|
2429
|
+
|
|
2430
|
+
global___InterruptResponse = InterruptResponse
|
|
2431
|
+
|
|
2432
|
+
class ReattachOptions(google.protobuf.message.Message):
|
|
2433
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2434
|
+
|
|
2435
|
+
REATTACHABLE_FIELD_NUMBER: builtins.int
|
|
2436
|
+
reattachable: builtins.bool
|
|
2437
|
+
"""If true, the request can be reattached to using ReattachExecute.
|
|
2438
|
+
ReattachExecute can be used either if the stream broke with a GRPC network error,
|
|
2439
|
+
or if the server closed the stream without sending a response with StreamStatus.complete=true.
|
|
2440
|
+
The server will keep a buffer of responses in case a response is lost, and
|
|
2441
|
+
ReattachExecute needs to back-track.
|
|
2442
|
+
|
|
2443
|
+
If false, the execution response stream will will not be reattachable, and all responses are
|
|
2444
|
+
immediately released by the server after being sent.
|
|
2445
|
+
"""
|
|
2446
|
+
def __init__(
|
|
2447
|
+
self,
|
|
2448
|
+
*,
|
|
2449
|
+
reattachable: builtins.bool = ...,
|
|
2450
|
+
) -> None: ...
|
|
2451
|
+
def ClearField(
|
|
2452
|
+
self, field_name: typing_extensions.Literal["reattachable", b"reattachable"]
|
|
2453
|
+
) -> None: ...
|
|
2454
|
+
|
|
2455
|
+
global___ReattachOptions = ReattachOptions
|
|
2456
|
+
|
|
2457
|
+
class ReattachExecuteRequest(google.protobuf.message.Message):
|
|
2458
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2459
|
+
|
|
2460
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2461
|
+
USER_CONTEXT_FIELD_NUMBER: builtins.int
|
|
2462
|
+
OPERATION_ID_FIELD_NUMBER: builtins.int
|
|
2463
|
+
CLIENT_TYPE_FIELD_NUMBER: builtins.int
|
|
2464
|
+
LAST_RESPONSE_ID_FIELD_NUMBER: builtins.int
|
|
2465
|
+
session_id: builtins.str
|
|
2466
|
+
"""(Required)
|
|
2467
|
+
|
|
2468
|
+
The session_id of the request to reattach to.
|
|
2469
|
+
This must be an id of existing session.
|
|
2470
|
+
"""
|
|
2471
|
+
@property
|
|
2472
|
+
def user_context(self) -> global___UserContext:
|
|
2473
|
+
"""(Required) User context
|
|
2474
|
+
|
|
2475
|
+
user_context.user_id and session+id both identify a unique remote spark session on the
|
|
2476
|
+
server side.
|
|
2477
|
+
"""
|
|
2478
|
+
operation_id: builtins.str
|
|
2479
|
+
"""(Required)
|
|
2480
|
+
Provide an id of the request to reattach to.
|
|
2481
|
+
This must be an id of existing operation.
|
|
2482
|
+
"""
|
|
2483
|
+
client_type: builtins.str
|
|
2484
|
+
"""Provides optional information about the client sending the request. This field
|
|
2485
|
+
can be used for language or version specific information and is only intended for
|
|
2486
|
+
logging purposes and will not be interpreted by the server.
|
|
2487
|
+
"""
|
|
2488
|
+
last_response_id: builtins.str
|
|
2489
|
+
"""(Optional)
|
|
2490
|
+
Last already processed response id from the response stream.
|
|
2491
|
+
After reattach, server will resume the response stream after that response.
|
|
2492
|
+
If not specified, server will restart the stream from the start.
|
|
2493
|
+
|
|
2494
|
+
Note: server controls the amount of responses that it buffers and it may drop responses,
|
|
2495
|
+
that are far behind the latest returned response, so this can't be used to arbitrarily
|
|
2496
|
+
scroll back the cursor. If the response is no longer available, this will result in an error.
|
|
2497
|
+
"""
|
|
2498
|
+
def __init__(
|
|
2499
|
+
self,
|
|
2500
|
+
*,
|
|
2501
|
+
session_id: builtins.str = ...,
|
|
2502
|
+
user_context: global___UserContext | None = ...,
|
|
2503
|
+
operation_id: builtins.str = ...,
|
|
2504
|
+
client_type: builtins.str | None = ...,
|
|
2505
|
+
last_response_id: builtins.str | None = ...,
|
|
2506
|
+
) -> None: ...
|
|
2507
|
+
def HasField(
|
|
2508
|
+
self,
|
|
2509
|
+
field_name: typing_extensions.Literal[
|
|
2510
|
+
"_client_type",
|
|
2511
|
+
b"_client_type",
|
|
2512
|
+
"_last_response_id",
|
|
2513
|
+
b"_last_response_id",
|
|
2514
|
+
"client_type",
|
|
2515
|
+
b"client_type",
|
|
2516
|
+
"last_response_id",
|
|
2517
|
+
b"last_response_id",
|
|
2518
|
+
"user_context",
|
|
2519
|
+
b"user_context",
|
|
2520
|
+
],
|
|
2521
|
+
) -> builtins.bool: ...
|
|
2522
|
+
def ClearField(
|
|
2523
|
+
self,
|
|
2524
|
+
field_name: typing_extensions.Literal[
|
|
2525
|
+
"_client_type",
|
|
2526
|
+
b"_client_type",
|
|
2527
|
+
"_last_response_id",
|
|
2528
|
+
b"_last_response_id",
|
|
2529
|
+
"client_type",
|
|
2530
|
+
b"client_type",
|
|
2531
|
+
"last_response_id",
|
|
2532
|
+
b"last_response_id",
|
|
2533
|
+
"operation_id",
|
|
2534
|
+
b"operation_id",
|
|
2535
|
+
"session_id",
|
|
2536
|
+
b"session_id",
|
|
2537
|
+
"user_context",
|
|
2538
|
+
b"user_context",
|
|
2539
|
+
],
|
|
2540
|
+
) -> None: ...
|
|
2541
|
+
@typing.overload
|
|
2542
|
+
def WhichOneof(
|
|
2543
|
+
self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
|
|
2544
|
+
) -> typing_extensions.Literal["client_type"] | None: ...
|
|
2545
|
+
@typing.overload
|
|
2546
|
+
def WhichOneof(
|
|
2547
|
+
self, oneof_group: typing_extensions.Literal["_last_response_id", b"_last_response_id"]
|
|
2548
|
+
) -> typing_extensions.Literal["last_response_id"] | None: ...
|
|
2549
|
+
|
|
2550
|
+
global___ReattachExecuteRequest = ReattachExecuteRequest
|
|
2551
|
+
|
|
2552
|
+
class ReleaseExecuteRequest(google.protobuf.message.Message):
|
|
2553
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2554
|
+
|
|
2555
|
+
class ReleaseAll(google.protobuf.message.Message):
|
|
2556
|
+
"""Release and close operation completely.
|
|
2557
|
+
This will also interrupt the query if it is running execution, and wait for it to be torn down.
|
|
2558
|
+
"""
|
|
2559
|
+
|
|
2560
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2561
|
+
|
|
2562
|
+
def __init__(
|
|
2563
|
+
self,
|
|
2564
|
+
) -> None: ...
|
|
2565
|
+
|
|
2566
|
+
class ReleaseUntil(google.protobuf.message.Message):
|
|
2567
|
+
"""Release all responses from the operation response stream up to and including
|
|
2568
|
+
the response with the given by response_id.
|
|
2569
|
+
While server determines by itself how much of a buffer of responses to keep, client providing
|
|
2570
|
+
explicit release calls will help reduce resource consumption.
|
|
2571
|
+
Noop if response_id not found in cached responses.
|
|
2572
|
+
"""
|
|
2573
|
+
|
|
2574
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2575
|
+
|
|
2576
|
+
RESPONSE_ID_FIELD_NUMBER: builtins.int
|
|
2577
|
+
response_id: builtins.str
|
|
2578
|
+
def __init__(
|
|
2579
|
+
self,
|
|
2580
|
+
*,
|
|
2581
|
+
response_id: builtins.str = ...,
|
|
2582
|
+
) -> None: ...
|
|
2583
|
+
def ClearField(
|
|
2584
|
+
self, field_name: typing_extensions.Literal["response_id", b"response_id"]
|
|
2585
|
+
) -> None: ...
|
|
2586
|
+
|
|
2587
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2588
|
+
USER_CONTEXT_FIELD_NUMBER: builtins.int
|
|
2589
|
+
OPERATION_ID_FIELD_NUMBER: builtins.int
|
|
2590
|
+
CLIENT_TYPE_FIELD_NUMBER: builtins.int
|
|
2591
|
+
RELEASE_ALL_FIELD_NUMBER: builtins.int
|
|
2592
|
+
RELEASE_UNTIL_FIELD_NUMBER: builtins.int
|
|
2593
|
+
session_id: builtins.str
|
|
2594
|
+
"""(Required)
|
|
2595
|
+
|
|
2596
|
+
The session_id of the request to reattach to.
|
|
2597
|
+
This must be an id of existing session.
|
|
2598
|
+
"""
|
|
2599
|
+
@property
|
|
2600
|
+
def user_context(self) -> global___UserContext:
|
|
2601
|
+
"""(Required) User context
|
|
2602
|
+
|
|
2603
|
+
user_context.user_id and session+id both identify a unique remote spark session on the
|
|
2604
|
+
server side.
|
|
2605
|
+
"""
|
|
2606
|
+
operation_id: builtins.str
|
|
2607
|
+
"""(Required)
|
|
2608
|
+
Provide an id of the request to reattach to.
|
|
2609
|
+
This must be an id of existing operation.
|
|
2610
|
+
"""
|
|
2611
|
+
client_type: builtins.str
|
|
2612
|
+
"""Provides optional information about the client sending the request. This field
|
|
2613
|
+
can be used for language or version specific information and is only intended for
|
|
2614
|
+
logging purposes and will not be interpreted by the server.
|
|
2615
|
+
"""
|
|
2616
|
+
@property
|
|
2617
|
+
def release_all(self) -> global___ReleaseExecuteRequest.ReleaseAll: ...
|
|
2618
|
+
@property
|
|
2619
|
+
def release_until(self) -> global___ReleaseExecuteRequest.ReleaseUntil: ...
|
|
2620
|
+
def __init__(
|
|
2621
|
+
self,
|
|
2622
|
+
*,
|
|
2623
|
+
session_id: builtins.str = ...,
|
|
2624
|
+
user_context: global___UserContext | None = ...,
|
|
2625
|
+
operation_id: builtins.str = ...,
|
|
2626
|
+
client_type: builtins.str | None = ...,
|
|
2627
|
+
release_all: global___ReleaseExecuteRequest.ReleaseAll | None = ...,
|
|
2628
|
+
release_until: global___ReleaseExecuteRequest.ReleaseUntil | None = ...,
|
|
2629
|
+
) -> None: ...
|
|
2630
|
+
def HasField(
|
|
2631
|
+
self,
|
|
2632
|
+
field_name: typing_extensions.Literal[
|
|
2633
|
+
"_client_type",
|
|
2634
|
+
b"_client_type",
|
|
2635
|
+
"client_type",
|
|
2636
|
+
b"client_type",
|
|
2637
|
+
"release",
|
|
2638
|
+
b"release",
|
|
2639
|
+
"release_all",
|
|
2640
|
+
b"release_all",
|
|
2641
|
+
"release_until",
|
|
2642
|
+
b"release_until",
|
|
2643
|
+
"user_context",
|
|
2644
|
+
b"user_context",
|
|
2645
|
+
],
|
|
2646
|
+
) -> builtins.bool: ...
|
|
2647
|
+
def ClearField(
|
|
2648
|
+
self,
|
|
2649
|
+
field_name: typing_extensions.Literal[
|
|
2650
|
+
"_client_type",
|
|
2651
|
+
b"_client_type",
|
|
2652
|
+
"client_type",
|
|
2653
|
+
b"client_type",
|
|
2654
|
+
"operation_id",
|
|
2655
|
+
b"operation_id",
|
|
2656
|
+
"release",
|
|
2657
|
+
b"release",
|
|
2658
|
+
"release_all",
|
|
2659
|
+
b"release_all",
|
|
2660
|
+
"release_until",
|
|
2661
|
+
b"release_until",
|
|
2662
|
+
"session_id",
|
|
2663
|
+
b"session_id",
|
|
2664
|
+
"user_context",
|
|
2665
|
+
b"user_context",
|
|
2666
|
+
],
|
|
2667
|
+
) -> None: ...
|
|
2668
|
+
@typing.overload
|
|
2669
|
+
def WhichOneof(
|
|
2670
|
+
self, oneof_group: typing_extensions.Literal["_client_type", b"_client_type"]
|
|
2671
|
+
) -> typing_extensions.Literal["client_type"] | None: ...
|
|
2672
|
+
@typing.overload
|
|
2673
|
+
def WhichOneof(
|
|
2674
|
+
self, oneof_group: typing_extensions.Literal["release", b"release"]
|
|
2675
|
+
) -> typing_extensions.Literal["release_all", "release_until"] | None: ...
|
|
2676
|
+
|
|
2677
|
+
global___ReleaseExecuteRequest = ReleaseExecuteRequest
|
|
2678
|
+
|
|
2679
|
+
class ReleaseExecuteResponse(google.protobuf.message.Message):
|
|
2680
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
|
2681
|
+
|
|
2682
|
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
|
2683
|
+
OPERATION_ID_FIELD_NUMBER: builtins.int
|
|
2684
|
+
session_id: builtins.str
|
|
2685
|
+
"""Session id in which the release was running."""
|
|
2686
|
+
operation_id: builtins.str
|
|
2687
|
+
"""Operation id of the operation on which the release executed.
|
|
2688
|
+
If the operation couldn't be found (because e.g. it was concurrently released), will be unset.
|
|
2689
|
+
Otherwise, it will be equal to the operation_id from request.
|
|
2690
|
+
"""
|
|
2691
|
+
def __init__(
|
|
2692
|
+
self,
|
|
2693
|
+
*,
|
|
2694
|
+
session_id: builtins.str = ...,
|
|
2695
|
+
operation_id: builtins.str | None = ...,
|
|
2696
|
+
) -> None: ...
|
|
2697
|
+
def HasField(
|
|
2698
|
+
self,
|
|
2699
|
+
field_name: typing_extensions.Literal[
|
|
2700
|
+
"_operation_id", b"_operation_id", "operation_id", b"operation_id"
|
|
2701
|
+
],
|
|
2702
|
+
) -> builtins.bool: ...
|
|
2703
|
+
def ClearField(
|
|
2704
|
+
self,
|
|
2705
|
+
field_name: typing_extensions.Literal[
|
|
2706
|
+
"_operation_id",
|
|
2707
|
+
b"_operation_id",
|
|
2708
|
+
"operation_id",
|
|
2709
|
+
b"operation_id",
|
|
2710
|
+
"session_id",
|
|
2711
|
+
b"session_id",
|
|
2712
|
+
],
|
|
2713
|
+
) -> None: ...
|
|
2714
|
+
def WhichOneof(
|
|
2715
|
+
self, oneof_group: typing_extensions.Literal["_operation_id", b"_operation_id"]
|
|
2716
|
+
) -> typing_extensions.Literal["operation_id"] | None: ...
|
|
2717
|
+
|
|
2718
|
+
global___ReleaseExecuteResponse = ReleaseExecuteResponse
|