snowpark-connect 0.23.0-py3-none-any.whl → 0.25.0-py3-none-any.whl

This diff shows the content of publicly released package versions as published to their public registries. It is provided for informational purposes only and reflects the changes between those versions.

Potentially problematic release: this version of snowpark-connect might be problematic.

Files changed (476)
  1. snowflake/snowpark_connect/column_name_handler.py +116 -4
  2. snowflake/snowpark_connect/config.py +13 -0
  3. snowflake/snowpark_connect/constants.py +0 -29
  4. snowflake/snowpark_connect/dataframe_container.py +6 -0
  5. snowflake/snowpark_connect/execute_plan/map_execution_command.py +56 -1
  6. snowflake/snowpark_connect/expression/function_defaults.py +207 -0
  7. snowflake/snowpark_connect/expression/literal.py +18 -2
  8. snowflake/snowpark_connect/expression/map_cast.py +5 -8
  9. snowflake/snowpark_connect/expression/map_expression.py +10 -1
  10. snowflake/snowpark_connect/expression/map_extension.py +12 -2
  11. snowflake/snowpark_connect/expression/map_sql_expression.py +23 -1
  12. snowflake/snowpark_connect/expression/map_udf.py +26 -8
  13. snowflake/snowpark_connect/expression/map_unresolved_attribute.py +199 -15
  14. snowflake/snowpark_connect/expression/map_unresolved_extract_value.py +44 -16
  15. snowflake/snowpark_connect/expression/map_unresolved_function.py +836 -365
  16. snowflake/snowpark_connect/expression/map_unresolved_star.py +3 -2
  17. snowflake/snowpark_connect/hidden_column.py +39 -0
  18. snowflake/snowpark_connect/includes/jars/hadoop-client-api-trimmed-3.3.4.jar +0 -0
  19. snowflake/snowpark_connect/includes/jars/{hadoop-client-api-3.3.4.jar → spark-connect-client-jvm_2.12-3.5.6.jar} +0 -0
  20. snowflake/snowpark_connect/relation/map_column_ops.py +18 -36
  21. snowflake/snowpark_connect/relation/map_extension.py +56 -15
  22. snowflake/snowpark_connect/relation/map_join.py +258 -62
  23. snowflake/snowpark_connect/relation/map_row_ops.py +2 -29
  24. snowflake/snowpark_connect/relation/map_sql.py +88 -11
  25. snowflake/snowpark_connect/relation/map_udtf.py +4 -2
  26. snowflake/snowpark_connect/relation/read/map_read.py +3 -3
  27. snowflake/snowpark_connect/relation/read/map_read_jdbc.py +1 -1
  28. snowflake/snowpark_connect/relation/read/map_read_json.py +8 -1
  29. snowflake/snowpark_connect/relation/read/map_read_table.py +1 -9
  30. snowflake/snowpark_connect/relation/read/reader_config.py +3 -1
  31. snowflake/snowpark_connect/relation/read/utils.py +6 -7
  32. snowflake/snowpark_connect/relation/utils.py +1 -170
  33. snowflake/snowpark_connect/relation/write/map_write.py +62 -53
  34. snowflake/snowpark_connect/resources_initializer.py +29 -1
  35. snowflake/snowpark_connect/server.py +18 -3
  36. snowflake/snowpark_connect/type_mapping.py +29 -25
  37. snowflake/snowpark_connect/typed_column.py +14 -0
  38. snowflake/snowpark_connect/utils/artifacts.py +23 -0
  39. snowflake/snowpark_connect/utils/context.py +6 -1
  40. snowflake/snowpark_connect/utils/scala_udf_utils.py +588 -0
  41. snowflake/snowpark_connect/utils/telemetry.py +6 -17
  42. snowflake/snowpark_connect/utils/udf_helper.py +2 -0
  43. snowflake/snowpark_connect/utils/udf_utils.py +38 -7
  44. snowflake/snowpark_connect/utils/udtf_utils.py +17 -3
  45. snowflake/snowpark_connect/version.py +1 -1
  46. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/METADATA +1 -1
  47. snowpark_connect-0.25.0.dist-info/RECORD +477 -0
  48. snowflake/snowpark_connect/includes/jars/scala-compiler-2.12.18.jar +0 -0
  49. snowflake/snowpark_connect/includes/jars/spark-kubernetes_2.12-3.5.6.jar +0 -0
  50. snowflake/snowpark_connect/includes/jars/spark-mllib_2.12-3.5.6.jar +0 -0
  51. snowflake/snowpark_connect/includes/jars/spark-streaming_2.12-3.5.6.jar +0 -0
  52. snowflake/snowpark_connect/includes/python/pyspark/errors/tests/__init__.py +0 -16
  53. snowflake/snowpark_connect/includes/python/pyspark/errors/tests/test_errors.py +0 -60
  54. snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/tests/test_deepspeed_distributor.py +0 -306
  55. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/__init__.py +0 -16
  56. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_classification.py +0 -53
  57. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_evaluation.py +0 -50
  58. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_feature.py +0 -43
  59. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_function.py +0 -114
  60. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_pipeline.py +0 -47
  61. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_summarizer.py +0 -43
  62. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_tuning.py +0 -46
  63. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py +0 -238
  64. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py +0 -194
  65. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py +0 -156
  66. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py +0 -184
  67. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py +0 -78
  68. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py +0 -292
  69. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py +0 -50
  70. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py +0 -152
  71. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_algorithms.py +0 -456
  72. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_base.py +0 -96
  73. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_dl_util.py +0 -186
  74. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_evaluation.py +0 -77
  75. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_feature.py +0 -401
  76. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_functions.py +0 -528
  77. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_image.py +0 -82
  78. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_linalg.py +0 -409
  79. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_model_cache.py +0 -55
  80. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_param.py +0 -441
  81. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_persistence.py +0 -546
  82. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_pipeline.py +0 -71
  83. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_stat.py +0 -52
  84. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_training_summary.py +0 -494
  85. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_util.py +0 -85
  86. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_wrapper.py +0 -138
  87. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/__init__.py +0 -16
  88. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_basic.py +0 -151
  89. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_nested.py +0 -97
  90. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_pipeline.py +0 -143
  91. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tuning.py +0 -551
  92. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_basic.py +0 -137
  93. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_nested.py +0 -96
  94. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_pipeline.py +0 -142
  95. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/__init__.py +0 -16
  96. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_data_loader.py +0 -137
  97. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_distributor.py +0 -561
  98. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_log_communication.py +0 -172
  99. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/__init__.py +0 -16
  100. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_algorithms.py +0 -353
  101. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_feature.py +0 -192
  102. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_linalg.py +0 -680
  103. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_stat.py +0 -206
  104. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_streaming_algorithms.py +0 -471
  105. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_util.py +0 -108
  106. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/__init__.py +0 -16
  107. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/__init__.py +0 -16
  108. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_any_all.py +0 -177
  109. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_apply_func.py +0 -575
  110. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_binary_ops.py +0 -235
  111. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_combine.py +0 -653
  112. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_compute.py +0 -463
  113. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_corrwith.py +0 -86
  114. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cov.py +0 -151
  115. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cumulative.py +0 -139
  116. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_describe.py +0 -458
  117. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_eval.py +0 -86
  118. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_melt.py +0 -202
  119. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_missing_data.py +0 -520
  120. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_pivot.py +0 -361
  121. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/__init__.py +0 -16
  122. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/__init__.py +0 -16
  123. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_any_all.py +0 -40
  124. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_apply_func.py +0 -42
  125. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_binary_ops.py +0 -40
  126. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_combine.py +0 -37
  127. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_compute.py +0 -60
  128. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_corrwith.py +0 -40
  129. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cov.py +0 -40
  130. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cumulative.py +0 -90
  131. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_describe.py +0 -40
  132. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_eval.py +0 -40
  133. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_melt.py +0 -40
  134. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_missing_data.py +0 -42
  135. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_pivot.py +0 -37
  136. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/__init__.py +0 -16
  137. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_base.py +0 -36
  138. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_binary_ops.py +0 -42
  139. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_boolean_ops.py +0 -47
  140. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_categorical_ops.py +0 -55
  141. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_complex_ops.py +0 -40
  142. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_date_ops.py +0 -47
  143. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_datetime_ops.py +0 -47
  144. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_null_ops.py +0 -42
  145. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_arithmetic.py +0 -43
  146. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_ops.py +0 -47
  147. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_reverse.py +0 -43
  148. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_string_ops.py +0 -47
  149. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_timedelta_ops.py +0 -47
  150. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_udt_ops.py +0 -40
  151. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/testing_utils.py +0 -226
  152. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/__init__.py +0 -16
  153. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_align.py +0 -39
  154. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_basic_slow.py +0 -55
  155. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_cov_corrwith.py +0 -39
  156. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_frame.py +0 -39
  157. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_series.py +0 -39
  158. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_index.py +0 -39
  159. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_series.py +0 -39
  160. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_frame.py +0 -43
  161. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_series.py +0 -43
  162. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/__init__.py +0 -16
  163. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_attrs.py +0 -40
  164. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_constructor.py +0 -39
  165. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_conversion.py +0 -42
  166. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reindexing.py +0 -42
  167. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reshaping.py +0 -37
  168. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_spark.py +0 -40
  169. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_take.py +0 -42
  170. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_time_series.py +0 -48
  171. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_truncate.py +0 -40
  172. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/__init__.py +0 -16
  173. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_aggregate.py +0 -40
  174. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_apply_func.py +0 -41
  175. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_cumulative.py +0 -67
  176. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_describe.py +0 -40
  177. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_groupby.py +0 -55
  178. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_head_tail.py +0 -40
  179. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_index.py +0 -38
  180. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_missing_data.py +0 -55
  181. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_split_apply.py +0 -39
  182. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_stat.py +0 -38
  183. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/__init__.py +0 -16
  184. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_align.py +0 -40
  185. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_base.py +0 -50
  186. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_category.py +0 -73
  187. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py +0 -39
  188. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing.py +0 -40
  189. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reindex.py +0 -40
  190. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_rename.py +0 -40
  191. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reset_index.py +0 -48
  192. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_timedelta.py +0 -39
  193. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/__init__.py +0 -16
  194. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/test_parity_io.py +0 -40
  195. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/__init__.py +0 -16
  196. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot.py +0 -45
  197. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_matplotlib.py +0 -45
  198. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_plotly.py +0 -49
  199. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot.py +0 -37
  200. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_matplotlib.py +0 -53
  201. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_plotly.py +0 -45
  202. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/__init__.py +0 -16
  203. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_all_any.py +0 -38
  204. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_arg_ops.py +0 -37
  205. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_of.py +0 -37
  206. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_type.py +0 -38
  207. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_compute.py +0 -37
  208. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_conversion.py +0 -40
  209. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_cumulative.py +0 -40
  210. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_index.py +0 -38
  211. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_missing_data.py +0 -40
  212. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_series.py +0 -37
  213. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_sort.py +0 -38
  214. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_stat.py +0 -38
  215. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_categorical.py +0 -66
  216. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_config.py +0 -37
  217. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_csv.py +0 -37
  218. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_conversion.py +0 -42
  219. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_spark_io.py +0 -39
  220. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_default_index.py +0 -49
  221. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ewm.py +0 -37
  222. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_expanding.py +0 -39
  223. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_extension.py +0 -49
  224. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_frame_spark.py +0 -53
  225. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_generic_functions.py +0 -43
  226. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexing.py +0 -49
  227. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexops_spark.py +0 -39
  228. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_internal.py +0 -41
  229. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_namespace.py +0 -39
  230. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_numpy_compat.py +0 -60
  231. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames.py +0 -48
  232. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby.py +0 -39
  233. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_expanding.py +0 -44
  234. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_rolling.py +0 -84
  235. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_repr.py +0 -37
  236. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_resample.py +0 -45
  237. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_reshape.py +0 -39
  238. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_rolling.py +0 -39
  239. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_scalars.py +0 -37
  240. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_conversion.py +0 -39
  241. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_datetime.py +0 -39
  242. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_string.py +0 -39
  243. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_spark_functions.py +0 -39
  244. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_sql.py +0 -43
  245. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_stats.py +0 -37
  246. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_typedef.py +0 -36
  247. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_utils.py +0 -37
  248. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_window.py +0 -39
  249. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/__init__.py +0 -16
  250. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_base.py +0 -107
  251. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_binary_ops.py +0 -224
  252. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_boolean_ops.py +0 -825
  253. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_categorical_ops.py +0 -562
  254. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_complex_ops.py +0 -368
  255. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_date_ops.py +0 -257
  256. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_datetime_ops.py +0 -260
  257. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_null_ops.py +0 -178
  258. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_arithmetic.py +0 -184
  259. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py +0 -497
  260. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_reverse.py +0 -140
  261. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_string_ops.py +0 -354
  262. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_timedelta_ops.py +0 -219
  263. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_udt_ops.py +0 -192
  264. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/testing_utils.py +0 -228
  265. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/__init__.py +0 -16
  266. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_align.py +0 -118
  267. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_basic_slow.py +0 -198
  268. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_cov_corrwith.py +0 -181
  269. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_frame.py +0 -103
  270. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_series.py +0 -141
  271. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_index.py +0 -109
  272. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_series.py +0 -136
  273. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_frame.py +0 -125
  274. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_series.py +0 -217
  275. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/__init__.py +0 -16
  276. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_attrs.py +0 -384
  277. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_constructor.py +0 -598
  278. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_conversion.py +0 -73
  279. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reindexing.py +0 -869
  280. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reshaping.py +0 -487
  281. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_spark.py +0 -309
  282. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_take.py +0 -156
  283. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_time_series.py +0 -149
  284. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_truncate.py +0 -163
  285. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/__init__.py +0 -16
  286. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_aggregate.py +0 -311
  287. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_apply_func.py +0 -524
  288. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_cumulative.py +0 -419
  289. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_describe.py +0 -144
  290. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_groupby.py +0 -979
  291. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_head_tail.py +0 -234
  292. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_index.py +0 -206
  293. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_missing_data.py +0 -421
  294. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_split_apply.py +0 -187
  295. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_stat.py +0 -397
  296. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/__init__.py +0 -16
  297. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_align.py +0 -100
  298. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_base.py +0 -2743
  299. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_category.py +0 -484
  300. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_datetime.py +0 -276
  301. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_indexing.py +0 -432
  302. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reindex.py +0 -310
  303. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_rename.py +0 -257
  304. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reset_index.py +0 -160
  305. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_timedelta.py +0 -128
  306. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/__init__.py +0 -16
  307. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/test_io.py +0 -137
  308. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/__init__.py +0 -16
  309. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot.py +0 -170
  310. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_matplotlib.py +0 -547
  311. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_plotly.py +0 -285
  312. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot.py +0 -106
  313. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_matplotlib.py +0 -409
  314. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_plotly.py +0 -247
  315. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/__init__.py +0 -16
  316. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_all_any.py +0 -105
  317. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_arg_ops.py +0 -197
  318. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_of.py +0 -137
  319. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_type.py +0 -227
  320. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_compute.py +0 -634
  321. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_conversion.py +0 -88
  322. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_cumulative.py +0 -139
  323. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_index.py +0 -475
  324. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_missing_data.py +0 -265
  325. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_series.py +0 -818
  326. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_sort.py +0 -162
  327. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_stat.py +0 -780
  328. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_categorical.py +0 -741
  329. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_config.py +0 -160
  330. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_csv.py +0 -453
  331. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_conversion.py +0 -281
  332. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_spark_io.py +0 -487
  333. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_default_index.py +0 -109
  334. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ewm.py +0 -434
  335. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_expanding.py +0 -253
  336. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_extension.py +0 -152
  337. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_frame_spark.py +0 -162
  338. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_generic_functions.py +0 -234
  339. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexing.py +0 -1339
  340. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexops_spark.py +0 -82
  341. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_internal.py +0 -124
  342. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_namespace.py +0 -638
  343. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_numpy_compat.py +0 -200
  344. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames.py +0 -1355
  345. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby.py +0 -655
  346. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_expanding.py +0 -113
  347. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_rolling.py +0 -118
  348. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_repr.py +0 -192
  349. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_resample.py +0 -346
  350. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_reshape.py +0 -495
  351. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_rolling.py +0 -263
  352. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_scalars.py +0 -59
  353. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_conversion.py +0 -85
  354. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_datetime.py +0 -364
  355. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_string.py +0 -362
  356. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_spark_functions.py +0 -46
  357. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_sql.py +0 -123
  358. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_stats.py +0 -581
  359. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_typedef.py +0 -447
  360. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_utils.py +0 -301
  361. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_window.py +0 -465
  362. snowflake/snowpark_connect/includes/python/pyspark/resource/tests/__init__.py +0 -16
  363. snowflake/snowpark_connect/includes/python/pyspark/resource/tests/test_resources.py +0 -83
  364. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/__init__.py +0 -16
  365. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/__init__.py +0 -16
  366. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/__init__.py +0 -16
  367. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_artifact.py +0 -420
  368. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_client.py +0 -358
  369. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/__init__.py +0 -16
  370. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach.py +0 -36
  371. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach_batch.py +0 -44
  372. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_listener.py +0 -116
  373. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_streaming.py +0 -35
  374. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_basic.py +0 -3612
  375. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_column.py +0 -1042
  376. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_function.py +0 -2381
  377. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_plan.py +0 -1060
  378. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow.py +0 -163
  379. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_map.py +0 -38
  380. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_python_udf.py +0 -48
  381. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_catalog.py +0 -36
  382. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_column.py +0 -55
  383. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_conf.py +0 -36
  384. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_dataframe.py +0 -96
  385. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_datasources.py +0 -44
  386. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_errors.py +0 -36
  387. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_functions.py +0 -59
  388. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_group.py +0 -36
  389. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_cogrouped_map.py +0 -59
  390. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map.py +0 -74
  391. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map_with_state.py +0 -62
  392. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_map.py +0 -58
  393. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf.py +0 -70
  394. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_grouped_agg.py +0 -50
  395. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_scalar.py +0 -68
  396. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_window.py +0 -40
  397. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_readwriter.py +0 -46
  398. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_serde.py +0 -44
  399. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_types.py +0 -100
  400. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udf.py +0 -100
  401. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udtf.py +0 -163
  402. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_session.py +0 -181
  403. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_utils.py +0 -42
  404. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/__init__.py +0 -16
  405. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py +0 -623
  406. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py +0 -869
  407. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py +0 -342
  408. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_map.py +0 -436
  409. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf.py +0 -363
  410. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py +0 -592
  411. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py +0 -1503
  412. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py +0 -392
  413. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py +0 -375
  414. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py +0 -411
  415. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/__init__.py +0 -16
  416. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming.py +0 -401
  417. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach.py +0 -295
  418. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach_batch.py +0 -106
  419. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_listener.py +0 -558
  420. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow.py +0 -1346
  421. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_map.py +0 -182
  422. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_python_udf.py +0 -202
  423. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_catalog.py +0 -503
  424. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_column.py +0 -225
  425. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_conf.py +0 -83
  426. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_context.py +0 -201
  427. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_dataframe.py +0 -1931
  428. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_datasources.py +0 -256
  429. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_errors.py +0 -69
  430. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_functions.py +0 -1349
  431. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_group.py +0 -53
  432. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_pandas_sqlmetrics.py +0 -68
  433. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_readwriter.py +0 -283
  434. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_serde.py +0 -155
  435. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_session.py +0 -412
  436. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_types.py +0 -1581
  437. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf.py +0 -961
  438. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf_profiler.py +0 -165
  439. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udtf.py +0 -1456
  440. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_utils.py +0 -1686
  441. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/__init__.py +0 -16
  442. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_context.py +0 -184
  443. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_dstream.py +0 -706
  444. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_kinesis.py +0 -118
  445. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_listener.py +0 -160
  446. snowflake/snowpark_connect/includes/python/pyspark/tests/__init__.py +0 -16
  447. snowflake/snowpark_connect/includes/python/pyspark/tests/test_appsubmit.py +0 -306
  448. snowflake/snowpark_connect/includes/python/pyspark/tests/test_broadcast.py +0 -196
  449. snowflake/snowpark_connect/includes/python/pyspark/tests/test_conf.py +0 -44
  450. snowflake/snowpark_connect/includes/python/pyspark/tests/test_context.py +0 -346
  451. snowflake/snowpark_connect/includes/python/pyspark/tests/test_daemon.py +0 -89
  452. snowflake/snowpark_connect/includes/python/pyspark/tests/test_install_spark.py +0 -124
  453. snowflake/snowpark_connect/includes/python/pyspark/tests/test_join.py +0 -69
  454. snowflake/snowpark_connect/includes/python/pyspark/tests/test_memory_profiler.py +0 -167
  455. snowflake/snowpark_connect/includes/python/pyspark/tests/test_pin_thread.py +0 -194
  456. snowflake/snowpark_connect/includes/python/pyspark/tests/test_profiler.py +0 -168
  457. snowflake/snowpark_connect/includes/python/pyspark/tests/test_rdd.py +0 -939
  458. snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddbarrier.py +0 -52
  459. snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddsampler.py +0 -66
  460. snowflake/snowpark_connect/includes/python/pyspark/tests/test_readwrite.py +0 -368
  461. snowflake/snowpark_connect/includes/python/pyspark/tests/test_serializers.py +0 -257
  462. snowflake/snowpark_connect/includes/python/pyspark/tests/test_shuffle.py +0 -267
  463. snowflake/snowpark_connect/includes/python/pyspark/tests/test_stage_sched.py +0 -153
  464. snowflake/snowpark_connect/includes/python/pyspark/tests/test_statcounter.py +0 -130
  465. snowflake/snowpark_connect/includes/python/pyspark/tests/test_taskcontext.py +0 -350
  466. snowflake/snowpark_connect/includes/python/pyspark/tests/test_util.py +0 -97
  467. snowflake/snowpark_connect/includes/python/pyspark/tests/test_worker.py +0 -271
  468. snowpark_connect-0.23.0.dist-info/RECORD +0 -893
  469. {snowpark_connect-0.23.0.data → snowpark_connect-0.25.0.data}/scripts/snowpark-connect +0 -0
  470. {snowpark_connect-0.23.0.data → snowpark_connect-0.25.0.data}/scripts/snowpark-session +0 -0
  471. {snowpark_connect-0.23.0.data → snowpark_connect-0.25.0.data}/scripts/snowpark-submit +0 -0
  472. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/WHEEL +0 -0
  473. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/licenses/LICENSE-binary +0 -0
  474. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/licenses/LICENSE.txt +0 -0
  475. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/licenses/NOTICE-binary +0 -0
  476. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/top_level.txt +0 -0
snowflake/snowpark_connect/includes/python/pyspark/tests/test_rdd.py (deleted)
@@ -1,939 +0,0 @@
1
- #
2
- # Licensed to the Apache Software Foundation (ASF) under one or more
3
- # contributor license agreements. See the NOTICE file distributed with
4
- # this work for additional information regarding copyright ownership.
5
- # The ASF licenses this file to You under the Apache License, Version 2.0
6
- # (the "License"); you may not use this file except in compliance with
7
- # the License. You may obtain a copy of the License at
8
- #
9
- # http://www.apache.org/licenses/LICENSE-2.0
10
- #
11
- # Unless required by applicable law or agreed to in writing, software
12
- # distributed under the License is distributed on an "AS IS" BASIS,
13
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
- # See the License for the specific language governing permissions and
15
- # limitations under the License.
16
- #
17
- from datetime import datetime, timedelta
18
- import hashlib
19
- import os
20
- import random
21
- import tempfile
22
- import time
23
- import unittest
24
- from glob import glob
25
-
26
- from py4j.protocol import Py4JJavaError
27
-
28
- from pyspark import shuffle, RDD
29
- from pyspark.resource import ExecutorResourceRequests, ResourceProfileBuilder, TaskResourceRequests
30
- from pyspark.serializers import (
31
- CloudPickleSerializer,
32
- BatchedSerializer,
33
- CPickleSerializer,
34
- MarshalSerializer,
35
- UTF8Deserializer,
36
- NoOpSerializer,
37
- )
38
- from pyspark.sql import SparkSession
39
- from pyspark.testing.utils import ReusedPySparkTestCase, SPARK_HOME, QuietTest, have_numpy
40
- from pyspark.testing.sqlutils import have_pandas
41
-
42
-
43
- global_func = lambda: "Hi" # noqa: E731
44
-
45
-
46
- class RDDTests(ReusedPySparkTestCase):
47
- def test_range(self):
48
- self.assertEqual(self.sc.range(1, 1).count(), 0)
49
- self.assertEqual(self.sc.range(1, 0, -1).count(), 1)
50
- self.assertEqual(self.sc.range(0, 1 << 40, 1 << 39).count(), 2)
51
-
52
- def test_id(self):
53
- rdd = self.sc.parallelize(range(10))
54
- id = rdd.id()
55
- self.assertEqual(id, rdd.id())
56
- rdd2 = rdd.map(str).filter(bool)
57
- id2 = rdd2.id()
58
- self.assertEqual(id + 1, id2)
59
- self.assertEqual(id2, rdd2.id())
60
-
61
- def test_empty_rdd(self):
62
- rdd = self.sc.emptyRDD()
63
- self.assertTrue(rdd.isEmpty())
64
-
65
- def test_sum(self):
66
- self.assertEqual(0, self.sc.emptyRDD().sum())
67
- self.assertEqual(6, self.sc.parallelize([1, 2, 3]).sum())
68
-
69
- def test_to_localiterator(self):
70
- rdd = self.sc.parallelize([1, 2, 3])
71
- it = rdd.toLocalIterator()
72
- self.assertEqual([1, 2, 3], sorted(it))
73
-
74
- rdd2 = rdd.repartition(1000)
75
- it2 = rdd2.toLocalIterator()
76
- self.assertEqual([1, 2, 3], sorted(it2))
77
-
78
- def test_to_localiterator_prefetch(self):
79
- # Test that we fetch the next partition in parallel
80
- # We do this by returning the current time and:
81
- # reading the first elem, waiting, and reading the second elem
82
- # If not in parallel then these would be at different times
83
- # But since they are being computed in parallel we see the time
84
- # is "close enough" to the same.
85
- rdd = self.sc.parallelize(range(2), 2)
86
- times1 = rdd.map(lambda x: datetime.now())
87
- times2 = rdd.map(lambda x: datetime.now())
88
- times_iter_prefetch = times1.toLocalIterator(prefetchPartitions=True)
89
- times_iter = times2.toLocalIterator(prefetchPartitions=False)
90
- times_prefetch_head = next(times_iter_prefetch)
91
- times_head = next(times_iter)
92
- time.sleep(2)
93
- times_next = next(times_iter)
94
- times_prefetch_next = next(times_iter_prefetch)
95
- self.assertTrue(times_next - times_head >= timedelta(seconds=2))
96
- self.assertTrue(times_prefetch_next - times_prefetch_head < timedelta(seconds=1))
97
-
98
- def test_save_as_textfile_with_unicode(self):
99
- # Regression test for SPARK-970
100
- x = "\u00A1Hola, mundo!"
101
- data = self.sc.parallelize([x])
102
- tempFile = tempfile.NamedTemporaryFile(delete=True)
103
- tempFile.close()
104
- data.saveAsTextFile(tempFile.name)
105
- raw_contents = b"".join(open(p, "rb").read() for p in glob(tempFile.name + "/part-0000*"))
106
- self.assertEqual(x, raw_contents.strip().decode("utf-8"))
107
-
108
- def test_save_as_textfile_with_utf8(self):
109
- x = "\u00A1Hola, mundo!"
110
- data = self.sc.parallelize([x.encode("utf-8")])
111
- tempFile = tempfile.NamedTemporaryFile(delete=True)
112
- tempFile.close()
113
- data.saveAsTextFile(tempFile.name)
114
- raw_contents = b"".join(open(p, "rb").read() for p in glob(tempFile.name + "/part-0000*"))
115
- self.assertEqual(x, raw_contents.strip().decode("utf8"))
116
-
117
- def test_transforming_cartesian_result(self):
118
- # Regression test for SPARK-1034
119
- rdd1 = self.sc.parallelize([1, 2])
120
- rdd2 = self.sc.parallelize([3, 4])
121
- cart = rdd1.cartesian(rdd2)
122
- cart.map(lambda x_y3: x_y3[0] + x_y3[1]).collect()
123
-
124
- def test_transforming_pickle_file(self):
125
- # Regression test for SPARK-2601
126
- data = self.sc.parallelize(["Hello", "World!"])
127
- tempFile = tempfile.NamedTemporaryFile(delete=True)
128
- tempFile.close()
129
- data.saveAsPickleFile(tempFile.name)
130
- pickled_file = self.sc.pickleFile(tempFile.name)
131
- pickled_file.map(lambda x: x).collect()
132
-
133
- def test_cartesian_on_textfile(self):
134
- # Regression test for
135
- path = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
136
- a = self.sc.textFile(path)
137
- result = a.cartesian(a).collect()
138
- (x, y) = result[0]
139
- self.assertEqual("Hello World!", x.strip())
140
- self.assertEqual("Hello World!", y.strip())
141
-
142
- def test_cartesian_chaining(self):
143
- # Tests for SPARK-16589
144
- rdd = self.sc.parallelize(range(10), 2)
145
- self.assertSetEqual(
146
- set(rdd.cartesian(rdd).cartesian(rdd).collect()),
147
- set([((x, y), z) for x in range(10) for y in range(10) for z in range(10)]),
148
- )
149
-
150
- self.assertSetEqual(
151
- set(rdd.cartesian(rdd.cartesian(rdd)).collect()),
152
- set([(x, (y, z)) for x in range(10) for y in range(10) for z in range(10)]),
153
- )
154
-
155
- self.assertSetEqual(
156
- set(rdd.cartesian(rdd.zip(rdd)).collect()),
157
- set([(x, (y, y)) for x in range(10) for y in range(10)]),
158
- )
159
-
160
- def test_zip_chaining(self):
161
- # Tests for SPARK-21985
162
- rdd = self.sc.parallelize("abc", 2)
163
- self.assertSetEqual(set(rdd.zip(rdd).zip(rdd).collect()), set([((x, x), x) for x in "abc"]))
164
- self.assertSetEqual(set(rdd.zip(rdd.zip(rdd)).collect()), set([(x, (x, x)) for x in "abc"]))
165
-
166
- def test_union_pair_rdd(self):
167
- # SPARK-31788: test if pair RDDs can be combined by union.
168
- rdd = self.sc.parallelize([1, 2])
169
- pair_rdd = rdd.zip(rdd)
170
- unionRDD = self.sc.union([pair_rdd, pair_rdd])
171
- self.assertEqual(set(unionRDD.collect()), set([(1, 1), (2, 2), (1, 1), (2, 2)]))
172
- self.assertEqual(unionRDD.count(), 4)
173
-
174
- def test_deleting_input_files(self):
175
- # Regression test for SPARK-1025
176
- tempFile = tempfile.NamedTemporaryFile(delete=False)
177
- tempFile.write(b"Hello World!")
178
- tempFile.close()
179
- data = self.sc.textFile(tempFile.name)
180
- filtered_data = data.filter(lambda x: True)
181
- self.assertEqual(1, filtered_data.count())
182
- os.unlink(tempFile.name)
183
- with QuietTest(self.sc):
184
- self.assertRaises(Exception, lambda: filtered_data.count())
185
-
186
- def test_sampling_default_seed(self):
187
- # Test for SPARK-3995 (default seed setting)
188
- data = self.sc.parallelize(range(1000), 1)
189
- subset = data.takeSample(False, 10)
190
- self.assertEqual(len(subset), 10)
191
-
192
- def test_aggregate_mutable_zero_value(self):
193
- # Test for SPARK-9021; uses aggregate and treeAggregate to build dict
194
- # representing a counter of ints
195
- from collections import defaultdict
196
-
197
- # Show that single or multiple partitions work
198
- data1 = self.sc.range(10, numSlices=1)
199
- data2 = self.sc.range(10, numSlices=2)
200
-
201
- def seqOp(x, y):
202
- x[y] += 1
203
- return x
204
-
205
- def comboOp(x, y):
206
- for key, val in y.items():
207
- x[key] += val
208
- return x
209
-
210
- counts1 = data1.aggregate(defaultdict(int), seqOp, comboOp)
211
- counts2 = data2.aggregate(defaultdict(int), seqOp, comboOp)
212
- counts3 = data1.treeAggregate(defaultdict(int), seqOp, comboOp, 2)
213
- counts4 = data2.treeAggregate(defaultdict(int), seqOp, comboOp, 2)
214
-
215
- ground_truth = defaultdict(int, dict((i, 1) for i in range(10)))
216
- self.assertEqual(counts1, ground_truth)
217
- self.assertEqual(counts2, ground_truth)
218
- self.assertEqual(counts3, ground_truth)
219
- self.assertEqual(counts4, ground_truth)
220
-
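The SPARK-9021 tests above hinge on the zero value being safe to mutate: if the same mutable object were reused for every partition instead of being copied, the partitions would contaminate each other. A minimal pure-Python sketch of that failure mode follows; the local aggregate helper, seq_op, and comb_op names are illustrative only, not PySpark internals.

from collections import defaultdict
import copy

partitions = [[1, 1, 2], [2, 3, 3]]

def aggregate(parts, zero, seq_op, comb_op, copy_zero=True):
    # copy_zero=True hands each partition its own zero value;
    # copy_zero=False reuses one shared mutable object, as in the original bug.
    partials = []
    for part in parts:
        acc = copy.deepcopy(zero) if copy_zero else zero
        for item in part:
            acc = seq_op(acc, item)
        partials.append(acc)
    result = copy.deepcopy(zero) if copy_zero else zero
    for partial in partials:
        result = comb_op(result, partial)
    return dict(result)

def seq_op(d, x):
    d[x] += 1
    return d

def comb_op(a, b):
    for k, v in b.items():
        a[k] += v
    return a

print(aggregate(partitions, defaultdict(int), seq_op, comb_op))         # {1: 2, 2: 2, 3: 2}
print(aggregate(partitions, defaultdict(int), seq_op, comb_op, False))  # counts inflated: the shared zero value was mutated by every partition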
221
- def test_aggregate_by_key_mutable_zero_value(self):
222
- # Test for SPARK-9021; uses aggregateByKey to make a pair RDD that
223
- # contains lists of all values for each key in the original RDD
224
-
225
- # list(range(...)) for Python 3.x compatibility (can't use * operator
226
- # on a range object)
227
- # list(zip(...)) for Python 3.x compatibility (want to parallelize a
228
- # collection, not a zip object)
229
- tuples = list(zip(list(range(10)) * 2, [1] * 20))
230
- # Show that single or multiple partitions work
231
- data1 = self.sc.parallelize(tuples, 1)
232
- data2 = self.sc.parallelize(tuples, 2)
233
-
234
- def seqOp(x, y):
235
- x.append(y)
236
- return x
237
-
238
- def comboOp(x, y):
239
- x.extend(y)
240
- return x
241
-
242
- values1 = data1.aggregateByKey([], seqOp, comboOp).collect()
243
- values2 = data2.aggregateByKey([], seqOp, comboOp).collect()
244
- # Sort lists to ensure clean comparison with ground_truth
245
- values1.sort()
246
- values2.sort()
247
-
248
- ground_truth = [(i, [1] * 2) for i in range(10)]
249
- self.assertEqual(values1, ground_truth)
250
- self.assertEqual(values2, ground_truth)
251
-
252
- def test_fold_mutable_zero_value(self):
253
- # Test for SPARK-9021; uses fold to merge an RDD of dict counters into
254
- # a single dict
255
- from collections import defaultdict
256
-
257
- counts1 = defaultdict(int, dict((i, 1) for i in range(10)))
258
- counts2 = defaultdict(int, dict((i, 1) for i in range(3, 8)))
259
- counts3 = defaultdict(int, dict((i, 1) for i in range(4, 7)))
260
- counts4 = defaultdict(int, dict((i, 1) for i in range(5, 6)))
261
- all_counts = [counts1, counts2, counts3, counts4]
262
- # Show that single or multiple partitions work
263
- data1 = self.sc.parallelize(all_counts, 1)
264
- data2 = self.sc.parallelize(all_counts, 2)
265
-
266
- def comboOp(x, y):
267
- for key, val in y.items():
268
- x[key] += val
269
- return x
270
-
271
- fold1 = data1.fold(defaultdict(int), comboOp)
272
- fold2 = data2.fold(defaultdict(int), comboOp)
273
-
274
- ground_truth = defaultdict(int)
275
- for counts in all_counts:
276
- for key, val in counts.items():
277
- ground_truth[key] += val
278
- self.assertEqual(fold1, ground_truth)
279
- self.assertEqual(fold2, ground_truth)
280
-
281
- def test_fold_by_key_mutable_zero_value(self):
282
- # Test for SPARK-9021; uses foldByKey to make a pair RDD that contains
283
- # lists of all values for each key in the original RDD
284
-
285
- tuples = [(i, range(i)) for i in range(10)] * 2
286
- # Show that single or multiple partitions work
287
- data1 = self.sc.parallelize(tuples, 1)
288
- data2 = self.sc.parallelize(tuples, 2)
289
-
290
- def comboOp(x, y):
291
- x.extend(y)
292
- return x
293
-
294
- values1 = data1.foldByKey([], comboOp).collect()
295
- values2 = data2.foldByKey([], comboOp).collect()
296
- # Sort lists to ensure clean comparison with ground_truth
297
- values1.sort()
298
- values2.sort()
299
-
300
- # list(range(...)) for Python 3.x compatibility
301
- ground_truth = [(i, list(range(i)) * 2) for i in range(10)]
302
- self.assertEqual(values1, ground_truth)
303
- self.assertEqual(values2, ground_truth)
304
-
305
- def test_aggregate_by_key(self):
306
- data = self.sc.parallelize([(1, 1), (1, 1), (3, 2), (5, 1), (5, 3)], 2)
307
-
308
- def seqOp(x, y):
309
- x.add(y)
310
- return x
311
-
312
- def combOp(x, y):
313
- x |= y
314
- return x
315
-
316
- sets = dict(data.aggregateByKey(set(), seqOp, combOp).collect())
317
- self.assertEqual(3, len(sets))
318
- self.assertEqual(set([1]), sets[1])
319
- self.assertEqual(set([2]), sets[3])
320
- self.assertEqual(set([1, 3]), sets[5])
321
-
322
- def test_itemgetter(self):
323
- rdd = self.sc.parallelize([range(10)])
324
- from operator import itemgetter
325
-
326
- self.assertEqual([1], rdd.map(itemgetter(1)).collect())
327
- self.assertEqual([(2, 3)], rdd.map(itemgetter(2, 3)).collect())
328
-
329
- def test_namedtuple_in_rdd(self):
330
- from collections import namedtuple
331
-
332
- Person = namedtuple("Person", "id firstName lastName")
333
- jon = Person(1, "Jon", "Doe")
334
- jane = Person(2, "Jane", "Doe")
335
- theDoes = self.sc.parallelize([jon, jane])
336
- self.assertEqual([jon, jane], theDoes.collect())
337
-
338
- def test_large_broadcast(self):
339
- N = 10000
340
- data = [[float(i) for i in range(300)] for i in range(N)]
341
- bdata = self.sc.broadcast(data) # 27MB
342
- m = self.sc.parallelize(range(1), 1).map(lambda x: len(bdata.value)).sum()
343
- self.assertEqual(N, m)
344
-
345
- def test_unpersist(self):
346
- N = 1000
347
- data = [[float(i) for i in range(300)] for i in range(N)]
348
- bdata = self.sc.broadcast(data) # 3MB
349
- bdata.unpersist()
350
- m = self.sc.parallelize(range(1), 1).map(lambda x: len(bdata.value)).sum()
351
- self.assertEqual(N, m)
352
- bdata.destroy(blocking=True)
353
- try:
354
- self.sc.parallelize(range(1), 1).map(lambda x: len(bdata.value)).sum()
355
- except Exception:
356
- pass
357
- else:
358
- raise AssertionError("job should fail after destroying the broadcast")
359
-
360
- def test_multiple_broadcasts(self):
361
- N = 1 << 21
362
- b1 = self.sc.broadcast(set(range(N))) # multiple blocks in JVM
363
- r = list(range(1 << 15))
364
- random.shuffle(r)
365
- s = str(r).encode()
366
- checksum = hashlib.md5(s).hexdigest()
367
- b2 = self.sc.broadcast(s)
368
- r = list(
369
- set(
370
- self.sc.parallelize(range(10), 10)
371
- .map(lambda x: (len(b1.value), hashlib.md5(b2.value).hexdigest()))
372
- .collect()
373
- )
374
- )
375
- self.assertEqual(1, len(r))
376
- size, csum = r[0]
377
- self.assertEqual(N, size)
378
- self.assertEqual(checksum, csum)
379
-
380
- random.shuffle(r)
381
- s = str(r).encode()
382
- checksum = hashlib.md5(s).hexdigest()
383
- b2 = self.sc.broadcast(s)
384
- r = list(
385
- set(
386
- self.sc.parallelize(range(10), 10)
387
- .map(lambda x: (len(b1.value), hashlib.md5(b2.value).hexdigest()))
388
- .collect()
389
- )
390
- )
391
- self.assertEqual(1, len(r))
392
- size, csum = r[0]
393
- self.assertEqual(N, size)
394
- self.assertEqual(checksum, csum)
395
-
396
- def test_multithread_broadcast_pickle(self):
397
- import threading
398
-
399
- b1 = self.sc.broadcast(list(range(3)))
400
- b2 = self.sc.broadcast(list(range(3)))
401
-
402
- def f1():
403
- return b1.value
404
-
405
- def f2():
406
- return b2.value
407
-
408
- funcs_num_pickled = {f1: None, f2: None}
409
-
410
- def do_pickle(f, sc):
411
- command = (f, None, sc.serializer, sc.serializer)
412
- ser = CloudPickleSerializer()
413
- ser.dumps(command)
414
-
415
- def process_vars(sc):
416
- broadcast_vars = list(sc._pickled_broadcast_vars)
417
- num_pickled = len(broadcast_vars)
418
- sc._pickled_broadcast_vars.clear()
419
- return num_pickled
420
-
421
- def run(f, sc):
422
- do_pickle(f, sc)
423
- funcs_num_pickled[f] = process_vars(sc)
424
-
425
- # pickle f1, adds b1 to sc._pickled_broadcast_vars in main thread local storage
426
- do_pickle(f1, self.sc)
427
-
428
- # run all for f2, should only add/count/clear b2 from worker thread local storage
429
- t = threading.Thread(target=run, args=(f2, self.sc))
430
- t.start()
431
- t.join()
432
-
433
- # count number of vars pickled in main thread, only b1 should be counted and cleared
434
- funcs_num_pickled[f1] = process_vars(self.sc)
435
-
436
- self.assertEqual(funcs_num_pickled[f1], 1)
437
- self.assertEqual(funcs_num_pickled[f2], 1)
438
- self.assertEqual(len(list(self.sc._pickled_broadcast_vars)), 0)
439
-
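The bookkeeping this test exercises is thread-local: each thread tracks the broadcast variables it pickled without seeing the other thread's entries. A small stand-alone sketch of that pattern with threading.local(), not PySpark's actual _pickled_broadcast_vars machinery:

import threading

local = threading.local()

def record(name):
    # Each thread lazily gets its own list; appends are invisible to other threads.
    if not hasattr(local, "pickled"):
        local.pickled = []
    local.pickled.append(name)
    return list(local.pickled)

record("b1")                        # main thread now holds ['b1']

seen_in_worker = []
t = threading.Thread(target=lambda: seen_in_worker.extend(record("b2")))
t.start()
t.join()

assert seen_in_worker == ["b2"]     # the worker thread never saw 'b1'
assert local.pickled == ["b1"]      # the main thread never saw 'b2'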
440
- def test_large_closure(self):
441
- N = 200000
442
- data = [float(i) for i in range(N)]
443
- rdd = self.sc.parallelize(range(1), 1).map(lambda x: len(data))
444
- self.assertEqual(N, rdd.first())
445
- # regression test for SPARK-6886
446
- self.assertEqual(1, rdd.map(lambda x: (x, 1)).groupByKey().count())
447
-
448
- def test_zip_with_different_serializers(self):
449
- a = self.sc.parallelize(range(5))
450
- b = self.sc.parallelize(range(100, 105))
451
- self.assertEqual(a.zip(b).collect(), [(0, 100), (1, 101), (2, 102), (3, 103), (4, 104)])
452
- a = a._reserialize(BatchedSerializer(CPickleSerializer(), 2))
453
- b = b._reserialize(MarshalSerializer())
454
- self.assertEqual(a.zip(b).collect(), [(0, 100), (1, 101), (2, 102), (3, 103), (4, 104)])
455
- # regression test for SPARK-4841
456
- path = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
457
- t = self.sc.textFile(path)
458
- cnt = t.count()
459
- self.assertEqual(cnt, t.zip(t).count())
460
- rdd = t.map(str)
461
- self.assertEqual(cnt, t.zip(rdd).count())
462
- # regression test for bug in _reserializer()
463
- self.assertEqual(cnt, t.zip(rdd).count())
464
-
465
- def test_zip_with_different_object_sizes(self):
466
- # regression test for SPARK-5973
467
- a = self.sc.parallelize(range(10000)).map(lambda i: "*" * i)
468
- b = self.sc.parallelize(range(10000, 20000)).map(lambda i: "*" * i)
469
- self.assertEqual(10000, a.zip(b).count())
470
-
471
- def test_zip_with_different_number_of_items(self):
472
- a = self.sc.parallelize(range(5), 2)
473
- # different number of partitions
474
- b = self.sc.parallelize(range(100, 106), 3)
475
- self.assertRaises(ValueError, lambda: a.zip(b))
476
- with QuietTest(self.sc):
477
- # different number of batched items in JVM
478
- b = self.sc.parallelize(range(100, 104), 2)
479
- self.assertRaises(Exception, lambda: a.zip(b).count())
480
- # different number of items in one pair
481
- b = self.sc.parallelize(range(100, 106), 2)
482
- self.assertRaises(Exception, lambda: a.zip(b).count())
483
- # same total number of items, but different distributions
484
- a = self.sc.parallelize([2, 3], 2).flatMap(range)
485
- b = self.sc.parallelize([3, 2], 2).flatMap(range)
486
- self.assertEqual(a.count(), b.count())
487
- self.assertRaises(Exception, lambda: a.zip(b).count())
488
-
489
- def test_count_approx_distinct(self):
490
- rdd = self.sc.parallelize(range(1000))
491
- self.assertTrue(950 < rdd.countApproxDistinct(0.03) < 1050)
492
- self.assertTrue(950 < rdd.map(float).countApproxDistinct(0.03) < 1050)
493
- self.assertTrue(950 < rdd.map(str).countApproxDistinct(0.03) < 1050)
494
- self.assertTrue(950 < rdd.map(lambda x: (x, -x)).countApproxDistinct(0.03) < 1050)
495
-
496
- rdd = self.sc.parallelize([i % 20 for i in range(1000)], 7)
497
- self.assertTrue(18 < rdd.countApproxDistinct() < 22)
498
- self.assertTrue(18 < rdd.map(float).countApproxDistinct() < 22)
499
- self.assertTrue(18 < rdd.map(str).countApproxDistinct() < 22)
500
- self.assertTrue(18 < rdd.map(lambda x: (x, -x)).countApproxDistinct() < 22)
501
-
502
- self.assertRaises(ValueError, lambda: rdd.countApproxDistinct(0.00000001))
503
-
504
- def test_histogram(self):
505
- # empty
506
- rdd = self.sc.parallelize([])
507
- self.assertEqual([0], rdd.histogram([0, 10])[1])
508
- self.assertEqual([0, 0], rdd.histogram([0, 4, 10])[1])
509
- self.assertRaises(ValueError, lambda: rdd.histogram(1))
510
-
511
- # out of range
512
- rdd = self.sc.parallelize([10.01, -0.01])
513
- self.assertEqual([0], rdd.histogram([0, 10])[1])
514
- self.assertEqual([0, 0], rdd.histogram((0, 4, 10))[1])
515
-
516
- # in range with one bucket
517
- rdd = self.sc.parallelize(range(1, 5))
518
- self.assertEqual([4], rdd.histogram([0, 10])[1])
519
- self.assertEqual([3, 1], rdd.histogram([0, 4, 10])[1])
520
-
521
- # in range with one bucket exact match
522
- self.assertEqual([4], rdd.histogram([1, 4])[1])
523
-
524
- # out of range with two buckets
525
- rdd = self.sc.parallelize([10.01, -0.01])
526
- self.assertEqual([0, 0], rdd.histogram([0, 5, 10])[1])
527
-
528
- # out of range with two uneven buckets
529
- rdd = self.sc.parallelize([10.01, -0.01])
530
- self.assertEqual([0, 0], rdd.histogram([0, 4, 10])[1])
531
-
532
- # in range with two buckets
533
- rdd = self.sc.parallelize([1, 2, 3, 5, 6])
534
- self.assertEqual([3, 2], rdd.histogram([0, 5, 10])[1])
535
-
536
- # in range with two buckets and None
537
- rdd = self.sc.parallelize([1, 2, 3, 5, 6, None, float("nan")])
538
- self.assertEqual([3, 2], rdd.histogram([0, 5, 10])[1])
539
-
540
- # in range with two uneven buckets
541
- rdd = self.sc.parallelize([1, 2, 3, 5, 6])
542
- self.assertEqual([3, 2], rdd.histogram([0, 5, 11])[1])
543
-
544
- # mixed range with two uneven buckets
545
- rdd = self.sc.parallelize([-0.01, 0.0, 1, 2, 3, 5, 6, 11.0, 11.01])
546
- self.assertEqual([4, 3], rdd.histogram([0, 5, 11])[1])
547
-
548
- # mixed range with four uneven buckets
549
- rdd = self.sc.parallelize([-0.01, 0.0, 1, 2, 3, 5, 6, 11.01, 12.0, 199.0, 200.0, 200.1])
550
- self.assertEqual([4, 2, 1, 3], rdd.histogram([0.0, 5.0, 11.0, 12.0, 200.0])[1])
551
-
552
- # mixed range with uneven buckets and NaN
553
- rdd = self.sc.parallelize(
554
- [-0.01, 0.0, 1, 2, 3, 5, 6, 11.01, 12.0, 199.0, 200.0, 200.1, None, float("nan")]
555
- )
556
- self.assertEqual([4, 2, 1, 3], rdd.histogram([0.0, 5.0, 11.0, 12.0, 200.0])[1])
557
-
558
- # out of range with infinite buckets
559
- rdd = self.sc.parallelize([10.01, -0.01, float("nan"), float("inf")])
560
- self.assertEqual([1, 2], rdd.histogram([float("-inf"), 0, float("inf")])[1])
561
-
562
- # invalid buckets
563
- self.assertRaises(ValueError, lambda: rdd.histogram([]))
564
- self.assertRaises(ValueError, lambda: rdd.histogram([1]))
565
- self.assertRaises(ValueError, lambda: rdd.histogram(0))
566
- self.assertRaises(TypeError, lambda: rdd.histogram({}))
567
-
568
- # without buckets
569
- rdd = self.sc.parallelize(range(1, 5))
570
- self.assertEqual(([1, 4], [4]), rdd.histogram(1))
571
-
572
- # without buckets single element
573
- rdd = self.sc.parallelize([1])
574
- self.assertEqual(([1, 1], [1]), rdd.histogram(1))
575
-
576
- # without buckets, no range
577
- rdd = self.sc.parallelize([1] * 4)
578
- self.assertEqual(([1, 1], [4]), rdd.histogram(1))
579
-
580
- # without buckets basic two
581
- rdd = self.sc.parallelize(range(1, 5))
582
- self.assertEqual(([1, 2.5, 4], [2, 2]), rdd.histogram(2))
583
-
584
- # without buckets with more requested than elements
585
- rdd = self.sc.parallelize([1, 2])
586
- buckets = [1 + 0.2 * i for i in range(6)]
587
- hist = [1, 0, 0, 0, 1]
588
- self.assertEqual((buckets, hist), rdd.histogram(5))
589
-
590
- # invalid RDDs
591
- rdd = self.sc.parallelize([1, float("inf")])
592
- self.assertRaises(ValueError, lambda: rdd.histogram(2))
593
- rdd = self.sc.parallelize([float("nan")])
594
- self.assertRaises(ValueError, lambda: rdd.histogram(2))
595
-
596
- # string
597
- rdd = self.sc.parallelize(["ab", "ac", "b", "bd", "ef"], 2)
598
- self.assertEqual([2, 2], rdd.histogram(["a", "b", "c"])[1])
599
- self.assertEqual((["ab", "ef"], [5]), rdd.histogram(1))
600
- self.assertRaises(TypeError, lambda: rdd.histogram(2))
601
-
602
- def test_repartitionAndSortWithinPartitions_asc(self):
603
- rdd = self.sc.parallelize([(0, 5), (3, 8), (2, 6), (0, 8), (3, 8), (1, 3)], 2)
604
-
605
- repartitioned = rdd.repartitionAndSortWithinPartitions(2, lambda key: key % 2, True)
606
- partitions = repartitioned.glom().collect()
607
- self.assertEqual(partitions[0], [(0, 5), (0, 8), (2, 6)])
608
- self.assertEqual(partitions[1], [(1, 3), (3, 8), (3, 8)])
609
-
610
- def test_repartitionAndSortWithinPartitions_desc(self):
611
- rdd = self.sc.parallelize([(0, 5), (3, 8), (2, 6), (0, 8), (3, 8), (1, 3)], 2)
612
-
613
- repartitioned = rdd.repartitionAndSortWithinPartitions(2, lambda key: key % 2, False)
614
- partitions = repartitioned.glom().collect()
615
- self.assertEqual(partitions[0], [(2, 6), (0, 5), (0, 8)])
616
- self.assertEqual(partitions[1], [(3, 8), (3, 8), (1, 3)])
617
-
618
- def test_repartition_no_skewed(self):
619
- num_partitions = 20
620
- a = self.sc.parallelize(range(int(1000)), 2)
621
- xs = a.repartition(num_partitions).glom().map(len).collect()
622
- zeros = len([x for x in xs if x == 0])
623
- self.assertEqual(0, zeros)
624
- xs = a.coalesce(num_partitions, True).glom().map(len).collect()
625
- zeros = len([x for x in xs if x == 0])
626
- self.assertTrue(zeros == 0)
627
-
628
- def test_repartition_on_textfile(self):
629
- path = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
630
- rdd = self.sc.textFile(path)
631
- result = rdd.repartition(1).collect()
632
- self.assertEqual("Hello World!", result[0])
633
-
634
- def test_distinct(self):
635
- rdd = self.sc.parallelize((1, 2, 3) * 10, 10)
636
- self.assertEqual(rdd.getNumPartitions(), 10)
637
- self.assertEqual(rdd.distinct().count(), 3)
638
- result = rdd.distinct(5)
639
- self.assertEqual(result.getNumPartitions(), 5)
640
- self.assertEqual(result.count(), 3)
641
-
642
- def test_external_group_by_key(self):
643
- self.sc._conf.set("spark.python.worker.memory", "1m")
644
- N = 2000001
645
- kv = self.sc.parallelize(range(N)).map(lambda x: (x % 3, x))
646
- gkv = kv.groupByKey().cache()
647
- self.assertEqual(3, gkv.count())
648
- filtered = gkv.filter(lambda kv: kv[0] == 1)
649
- self.assertEqual(1, filtered.count())
650
- self.assertEqual([(1, N // 3)], filtered.mapValues(len).collect())
651
- self.assertEqual(
652
- [(N // 3, N // 3)], filtered.values().map(lambda x: (len(x), len(list(x)))).collect()
653
- )
654
- result = filtered.collect()[0][1]
655
- self.assertEqual(N // 3, len(result))
656
- self.assertTrue(isinstance(result.data, shuffle.ExternalListOfList))
657
-
658
- def test_sort_on_empty_rdd(self):
659
- self.assertEqual([], self.sc.parallelize(zip([], [])).sortByKey().collect())
660
-
661
- def test_sample(self):
662
- rdd = self.sc.parallelize(range(0, 100), 4)
663
- wo = rdd.sample(False, 0.1, 2).collect()
664
- wo_dup = rdd.sample(False, 0.1, 2).collect()
665
- self.assertSetEqual(set(wo), set(wo_dup))
666
- wr = rdd.sample(True, 0.2, 5).collect()
667
- wr_dup = rdd.sample(True, 0.2, 5).collect()
668
- self.assertSetEqual(set(wr), set(wr_dup))
669
- wo_s10 = rdd.sample(False, 0.3, 10).collect()
670
- wo_s20 = rdd.sample(False, 0.3, 20).collect()
671
- self.assertNotEqual(set(wo_s10), set(wo_s20))
672
- wr_s11 = rdd.sample(True, 0.4, 11).collect()
673
- wr_s21 = rdd.sample(True, 0.4, 21).collect()
674
- self.assertNotEqual(set(wr_s11), set(wr_s21))
675
-
676
- def test_null_in_rdd(self):
677
- jrdd = self.sc._jvm.PythonUtils.generateRDDWithNull(self.sc._jsc)
678
- rdd = RDD(jrdd, self.sc, UTF8Deserializer())
679
- self.assertEqual(["a", None, "b"], rdd.collect())
680
- rdd = RDD(jrdd, self.sc, NoOpSerializer())
681
- self.assertEqual([b"a", None, b"b"], rdd.collect())
682
-
683
- def test_multiple_python_java_RDD_conversions(self):
684
- # Regression test for SPARK-5361
685
- data = [("1", {"director": "David Lean"}), ("2", {"director": "Andrew Dominik"})]
686
- data_rdd = self.sc.parallelize(data)
687
- data_java_rdd = data_rdd._to_java_object_rdd()
688
- data_python_rdd = self.sc._jvm.SerDeUtil.javaToPython(data_java_rdd)
689
- converted_rdd = RDD(data_python_rdd, self.sc)
690
- self.assertEqual(2, converted_rdd.count())
691
-
692
- # a second round of conversion between Python and Java RDDs used to throw exceptions
693
- data_java_rdd = converted_rdd._to_java_object_rdd()
694
- data_python_rdd = self.sc._jvm.SerDeUtil.javaToPython(data_java_rdd)
695
- converted_rdd = RDD(data_python_rdd, self.sc)
696
- self.assertEqual(2, converted_rdd.count())
697
-
698
- # Regression test for SPARK-6294
699
- def test_take_on_jrdd(self):
700
- rdd = self.sc.parallelize(range(1 << 20)).map(lambda x: str(x))
701
- rdd._jrdd.first()
702
-
703
- @unittest.skipIf(not have_numpy or not have_pandas, "NumPy or Pandas not installed")
704
- def test_take_on_jrdd_with_large_rows_should_not_cause_deadlock(self):
705
- # Regression test for SPARK-38677.
706
- #
707
- # Create a DataFrame with many columns, call a Python function on each row, and take only
708
- # the first result row.
709
- #
710
- # This produces large rows that trigger a deadlock involving the following three threads:
711
- #
712
- # 1. The Scala task executor thread. During task execution, this is responsible for reading
713
- # output produced by the Python process. However, in this case the task has finished
714
- # early, and this thread is no longer reading output produced by the Python process.
715
- # Instead, it is waiting for the Scala WriterThread to exit so that it can finish the
716
- # task.
717
- #
718
- # 2. The Scala WriterThread. This is trying to send a large row to the Python process, and
719
- # is waiting for the Python process to read that row.
720
- #
721
- # 3. The Python process. This is trying to send a large output to the Scala task executor
722
- # thread, and is waiting for that thread to read that output.
723
- #
724
- # For this test to succeed rather than hanging, the Scala MonitorThread must detect this
725
- # deadlock and kill the Python worker.
726
- import numpy as np
727
- import pandas as pd
728
-
729
- num_rows = 100000
730
- num_columns = 134
731
- data = np.zeros((num_rows, num_columns))
732
- columns = map(str, range(num_columns))
733
- df = SparkSession(self.sc).createDataFrame(pd.DataFrame(data, columns=columns))
734
- actual = CPickleSerializer().loads(df.rdd.map(list)._jrdd.first())
735
- expected = [list(data[0])]
736
- self.assertEqual(expected, actual)
737
-
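The three-way deadlock described in the comment above is, at its core, two sides of a pipe each blocked on a large write with nobody reading. A stand-alone sketch of the same pattern and the usual fix, using a plain subprocess rather than Spark (assumes a Unix-like system with `cat` on the PATH):

import subprocess
import threading

payload = b"x" * (1 << 22)  # 4 MiB, comfortably larger than a typical pipe buffer

proc = subprocess.Popen(["cat"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)

chunks = []
# Drain the child's stdout concurrently; without this reader, the blocking
# write below and the child's own blocked write would wait on each other.
reader = threading.Thread(target=lambda: chunks.append(proc.stdout.read()))
reader.start()

proc.stdin.write(payload)
proc.stdin.close()
reader.join()
proc.wait()
assert b"".join(chunks) == payload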
738
- def test_sortByKey_uses_all_partitions_not_only_first_and_last(self):
739
- # Regression test for SPARK-5969
740
- seq = [(i * 59 % 101, i) for i in range(101)] # unsorted sequence
741
- rdd = self.sc.parallelize(seq)
742
- for ascending in [True, False]:
743
- sort = rdd.sortByKey(ascending=ascending, numPartitions=5)
744
- self.assertEqual(sort.collect(), sorted(seq, reverse=not ascending))
745
- sizes = sort.glom().map(len).collect()
746
- for size in sizes:
747
- self.assertGreater(size, 0)
748
-
749
- def test_pipe_functions(self):
750
- data = ["1", "2", "3"]
751
- rdd = self.sc.parallelize(data)
752
- with QuietTest(self.sc):
753
- self.assertEqual([], rdd.pipe("java").collect())
754
- self.assertRaises(Py4JJavaError, rdd.pipe("java", checkCode=True).collect)
755
- result = rdd.pipe("cat").collect()
756
- result.sort()
757
- for x, y in zip(data, result):
758
- self.assertEqual(x, y)
759
- self.assertRaises(Py4JJavaError, rdd.pipe("grep 4", checkCode=True).collect)
760
- self.assertEqual([], rdd.pipe("grep 4").collect())
761
-
762
- def test_pipe_unicode(self):
763
- # Regression test for SPARK-20947
764
- data = ["\u6d4b\u8bd5", "1"]
765
- rdd = self.sc.parallelize(data)
766
- result = rdd.pipe("cat").collect()
767
- self.assertEqual(data, result)
768
-
769
- def test_stopiteration_in_user_code(self):
770
- def stopit(*x):
771
- raise StopIteration()
772
-
773
- seq_rdd = self.sc.parallelize(range(10))
774
- keyed_rdd = self.sc.parallelize((x % 2, x) for x in range(10))
775
- msg = "Caught StopIteration thrown from user's code; failing the task"
776
-
777
- self.assertRaisesRegex(Py4JJavaError, msg, seq_rdd.map(stopit).collect)
778
- self.assertRaisesRegex(Py4JJavaError, msg, seq_rdd.filter(stopit).collect)
779
- self.assertRaisesRegex(Py4JJavaError, msg, seq_rdd.foreach, stopit)
780
- self.assertRaisesRegex(Py4JJavaError, msg, seq_rdd.reduce, stopit)
781
- self.assertRaisesRegex(Py4JJavaError, msg, seq_rdd.fold, 0, stopit)
782
- self.assertRaisesRegex(Py4JJavaError, msg, seq_rdd.foreach, stopit)
783
- self.assertRaisesRegex(
784
- Py4JJavaError, msg, seq_rdd.cartesian(seq_rdd).flatMap(stopit).collect
785
- )
786
-
787
- # these methods call the user function both in the driver and in the executor
788
- # the exception raised is different according to where the StopIteration happens
789
- # RuntimeError is raised if in the driver
790
- # Py4JJavaError is raised if in the executor (wraps the RuntimeError raised in the worker)
791
- self.assertRaisesRegex(
792
- (Py4JJavaError, RuntimeError), msg, keyed_rdd.reduceByKeyLocally, stopit
793
- )
794
- self.assertRaisesRegex(
795
- (Py4JJavaError, RuntimeError), msg, seq_rdd.aggregate, 0, stopit, lambda *x: 1
796
- )
797
- self.assertRaisesRegex(
798
- (Py4JJavaError, RuntimeError), msg, seq_rdd.aggregate, 0, lambda *x: 1, stopit
799
- )
800
-
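The behaviour checked above relies on user functions being wrapped so that an escaping StopIteration is converted into a different error instead of silently ending iteration in the worker. A minimal sketch of such a wrapper, modeled on (not copied from) PySpark's guard; the error message is the one the test asserts on:

import functools

def fail_on_stopiteration(func):
    # Re-raise StopIteration from user code as RuntimeError so it cannot be
    # mistaken for normal exhaustion of the record iterator.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except StopIteration as exc:
            raise RuntimeError(
                "Caught StopIteration thrown from user's code; failing the task"
            ) from exc
    return wrapper

@fail_on_stopiteration
def stopit(*_):
    raise StopIteration()

try:
    stopit()
except RuntimeError as e:
    print(e)  # Caught StopIteration thrown from user's code; failing the task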
801
- def test_overwritten_global_func(self):
802
- # Regression test for SPARK-27000
803
- global global_func
804
- self.assertEqual(self.sc.parallelize([1]).map(lambda _: global_func()).first(), "Hi")
805
- global_func = lambda: "Yeah" # noqa: E731
806
- self.assertEqual(self.sc.parallelize([1]).map(lambda _: global_func()).first(), "Yeah")
807
-
808
- def test_to_local_iterator_failure(self):
809
- # SPARK-27548 toLocalIterator task failure not propagated to Python driver
810
-
811
- def fail(_):
812
- raise RuntimeError("local iterator error")
813
-
814
- rdd = self.sc.range(10).map(fail)
815
-
816
- with self.assertRaisesRegex(Exception, "local iterator error"):
817
- for _ in rdd.toLocalIterator():
818
- pass
819
-
820
- def test_to_local_iterator_collects_single_partition(self):
821
- # Test that partitions are not computed until requested by iteration
822
-
823
- def fail_last(x):
824
- if x == 9:
825
- raise RuntimeError("This should not be hit")
826
- return x
827
-
828
- rdd = self.sc.range(12, numSlices=4).map(fail_last)
829
- it = rdd.toLocalIterator()
830
-
831
- # Only consume the first 4 elements from partitions 1 and 2; this should not collect the last
832
- # partition, which would trigger the error
833
- for i in range(4):
834
- self.assertEqual(i, next(it))
835
-
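The laziness asserted above reduces to generator chaining: a partition's work only runs when iteration actually reaches it. A tiny model of that behaviour; the compute_partition helper is hypothetical, standing in for running a Spark job per partition:

def compute_partition(part_id):
    # Pretend computing the last partition is expensive or would fail.
    if part_id == 3:
        raise RuntimeError("This should not be hit")
    return range(part_id * 3, part_id * 3 + 3)

def to_local_iterator(num_partitions):
    for part_id in range(num_partitions):
        yield from compute_partition(part_id)  # computed only on demand

it = to_local_iterator(4)
print([next(it) for _ in range(4)])  # [0, 1, 2, 3]; partition 3 is never computed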
836
- def test_resourceprofile(self):
837
- rp_builder = ResourceProfileBuilder()
838
- ereqs = ExecutorResourceRequests().cores(2).memory("6g").memoryOverhead("1g")
839
- ereqs.pysparkMemory("2g").resource("gpu", 2, "testGpus", "nvidia.com")
840
- treqs = TaskResourceRequests().cpus(2).resource("gpu", 2)
841
-
842
- def assert_request_contents(exec_reqs, task_reqs):
843
- self.assertEqual(len(exec_reqs), 5)
844
- self.assertEqual(exec_reqs["cores"].amount, 2)
845
- self.assertEqual(exec_reqs["memory"].amount, 6144)
846
- self.assertEqual(exec_reqs["memoryOverhead"].amount, 1024)
847
- self.assertEqual(exec_reqs["pyspark.memory"].amount, 2048)
848
- self.assertEqual(exec_reqs["gpu"].amount, 2)
849
- self.assertEqual(exec_reqs["gpu"].discoveryScript, "testGpus")
850
- self.assertEqual(exec_reqs["gpu"].resourceName, "gpu")
851
- self.assertEqual(exec_reqs["gpu"].vendor, "nvidia.com")
852
- self.assertEqual(len(task_reqs), 2)
853
- self.assertEqual(task_reqs["cpus"].amount, 2.0)
854
- self.assertEqual(task_reqs["gpu"].amount, 2.0)
855
-
856
- assert_request_contents(ereqs.requests, treqs.requests)
857
- rp = rp_builder.require(ereqs).require(treqs).build
858
- assert_request_contents(rp.executorResources, rp.taskResources)
859
- rdd = self.sc.parallelize(range(10)).withResources(rp)
860
- return_rp = rdd.getResourceProfile()
861
- assert_request_contents(return_rp.executorResources, return_rp.taskResources)
862
- rddWithoutRp = self.sc.parallelize(range(10))
863
- self.assertEqual(rddWithoutRp.getResourceProfile(), None)
864
-
865
- def test_multiple_group_jobs(self):
866
- import threading
867
-
868
- group_a = "job_ids_to_cancel"
869
- group_b = "job_ids_to_run"
870
-
871
- threads = []
872
- thread_ids = range(4)
873
- thread_ids_to_cancel = [i for i in thread_ids if i % 2 == 0]
874
- thread_ids_to_run = [i for i in thread_ids if i % 2 != 0]
875
-
876
- # A list that records whether each job was cancelled.
877
- # The index of the list is the index of the thread the job runs in.
878
- is_job_cancelled = [False for _ in thread_ids]
879
-
880
- def run_job(job_group, index):
881
- """
882
- Executes a job with the group ``job_group``. Each job sleeps for 15 seconds
883
- and then exits.
884
- """
885
- try:
886
- self.sc.parallelize([15]).map(lambda x: time.sleep(x)).collectWithJobGroup(
887
- job_group, "test rdd collect with setting job group"
888
- )
889
- is_job_cancelled[index] = False
890
- except Exception:
891
- # Assume that exception means job cancellation.
892
- is_job_cancelled[index] = True
893
-
894
- # Test if job succeeded when not cancelled.
895
- run_job(group_a, 0)
896
- self.assertFalse(is_job_cancelled[0])
897
-
898
- # Run jobs
899
- for i in thread_ids_to_cancel:
900
- t = threading.Thread(target=run_job, args=(group_a, i))
901
- t.start()
902
- threads.append(t)
903
-
904
- for i in thread_ids_to_run:
905
- t = threading.Thread(target=run_job, args=(group_b, i))
906
- t.start()
907
- threads.append(t)
908
-
909
- # Wait to make sure all jobs are executed.
910
- time.sleep(3)
911
- # And then, cancel one job group.
912
- self.sc.cancelJobGroup(group_a)
913
-
914
- # Wait until all threads launching jobs are finished.
915
- for t in threads:
916
- t.join()
917
-
918
- for i in thread_ids_to_cancel:
919
- self.assertTrue(
920
- is_job_cancelled[i], "Thread {i}: Job in group A was not cancelled.".format(i=i)
921
- )
922
-
923
- for i in thread_ids_to_run:
924
- self.assertFalse(
925
- is_job_cancelled[i], "Thread {i}: Job in group B did not succeed.".format(i=i)
926
- )
927
-
928
-
929
- if __name__ == "__main__":
930
- import unittest
931
- from pyspark.tests.test_rdd import * # noqa: F401
932
-
933
- try:
934
- import xmlrunner
935
-
936
- testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
937
- except ImportError:
938
- testRunner = None
939
- unittest.main(testRunner=testRunner, verbosity=2)