snowpark-connect 0.23.0__py3-none-any.whl → 0.25.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

This version of snowpark-connect might be problematic.

Files changed (476)
  1. snowflake/snowpark_connect/column_name_handler.py +116 -4
  2. snowflake/snowpark_connect/config.py +13 -0
  3. snowflake/snowpark_connect/constants.py +0 -29
  4. snowflake/snowpark_connect/dataframe_container.py +6 -0
  5. snowflake/snowpark_connect/execute_plan/map_execution_command.py +56 -1
  6. snowflake/snowpark_connect/expression/function_defaults.py +207 -0
  7. snowflake/snowpark_connect/expression/literal.py +18 -2
  8. snowflake/snowpark_connect/expression/map_cast.py +5 -8
  9. snowflake/snowpark_connect/expression/map_expression.py +10 -1
  10. snowflake/snowpark_connect/expression/map_extension.py +12 -2
  11. snowflake/snowpark_connect/expression/map_sql_expression.py +23 -1
  12. snowflake/snowpark_connect/expression/map_udf.py +26 -8
  13. snowflake/snowpark_connect/expression/map_unresolved_attribute.py +199 -15
  14. snowflake/snowpark_connect/expression/map_unresolved_extract_value.py +44 -16
  15. snowflake/snowpark_connect/expression/map_unresolved_function.py +836 -365
  16. snowflake/snowpark_connect/expression/map_unresolved_star.py +3 -2
  17. snowflake/snowpark_connect/hidden_column.py +39 -0
  18. snowflake/snowpark_connect/includes/jars/hadoop-client-api-trimmed-3.3.4.jar +0 -0
  19. snowflake/snowpark_connect/includes/jars/{hadoop-client-api-3.3.4.jar → spark-connect-client-jvm_2.12-3.5.6.jar} +0 -0
  20. snowflake/snowpark_connect/relation/map_column_ops.py +18 -36
  21. snowflake/snowpark_connect/relation/map_extension.py +56 -15
  22. snowflake/snowpark_connect/relation/map_join.py +258 -62
  23. snowflake/snowpark_connect/relation/map_row_ops.py +2 -29
  24. snowflake/snowpark_connect/relation/map_sql.py +88 -11
  25. snowflake/snowpark_connect/relation/map_udtf.py +4 -2
  26. snowflake/snowpark_connect/relation/read/map_read.py +3 -3
  27. snowflake/snowpark_connect/relation/read/map_read_jdbc.py +1 -1
  28. snowflake/snowpark_connect/relation/read/map_read_json.py +8 -1
  29. snowflake/snowpark_connect/relation/read/map_read_table.py +1 -9
  30. snowflake/snowpark_connect/relation/read/reader_config.py +3 -1
  31. snowflake/snowpark_connect/relation/read/utils.py +6 -7
  32. snowflake/snowpark_connect/relation/utils.py +1 -170
  33. snowflake/snowpark_connect/relation/write/map_write.py +62 -53
  34. snowflake/snowpark_connect/resources_initializer.py +29 -1
  35. snowflake/snowpark_connect/server.py +18 -3
  36. snowflake/snowpark_connect/type_mapping.py +29 -25
  37. snowflake/snowpark_connect/typed_column.py +14 -0
  38. snowflake/snowpark_connect/utils/artifacts.py +23 -0
  39. snowflake/snowpark_connect/utils/context.py +6 -1
  40. snowflake/snowpark_connect/utils/scala_udf_utils.py +588 -0
  41. snowflake/snowpark_connect/utils/telemetry.py +6 -17
  42. snowflake/snowpark_connect/utils/udf_helper.py +2 -0
  43. snowflake/snowpark_connect/utils/udf_utils.py +38 -7
  44. snowflake/snowpark_connect/utils/udtf_utils.py +17 -3
  45. snowflake/snowpark_connect/version.py +1 -1
  46. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/METADATA +1 -1
  47. snowpark_connect-0.25.0.dist-info/RECORD +477 -0
  48. snowflake/snowpark_connect/includes/jars/scala-compiler-2.12.18.jar +0 -0
  49. snowflake/snowpark_connect/includes/jars/spark-kubernetes_2.12-3.5.6.jar +0 -0
  50. snowflake/snowpark_connect/includes/jars/spark-mllib_2.12-3.5.6.jar +0 -0
  51. snowflake/snowpark_connect/includes/jars/spark-streaming_2.12-3.5.6.jar +0 -0
  52. snowflake/snowpark_connect/includes/python/pyspark/errors/tests/__init__.py +0 -16
  53. snowflake/snowpark_connect/includes/python/pyspark/errors/tests/test_errors.py +0 -60
  54. snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/tests/test_deepspeed_distributor.py +0 -306
  55. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/__init__.py +0 -16
  56. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_classification.py +0 -53
  57. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_evaluation.py +0 -50
  58. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_feature.py +0 -43
  59. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_function.py +0 -114
  60. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_pipeline.py +0 -47
  61. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_summarizer.py +0 -43
  62. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_tuning.py +0 -46
  63. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py +0 -238
  64. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py +0 -194
  65. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py +0 -156
  66. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py +0 -184
  67. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py +0 -78
  68. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py +0 -292
  69. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py +0 -50
  70. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py +0 -152
  71. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_algorithms.py +0 -456
  72. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_base.py +0 -96
  73. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_dl_util.py +0 -186
  74. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_evaluation.py +0 -77
  75. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_feature.py +0 -401
  76. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_functions.py +0 -528
  77. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_image.py +0 -82
  78. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_linalg.py +0 -409
  79. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_model_cache.py +0 -55
  80. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_param.py +0 -441
  81. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_persistence.py +0 -546
  82. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_pipeline.py +0 -71
  83. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_stat.py +0 -52
  84. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_training_summary.py +0 -494
  85. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_util.py +0 -85
  86. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_wrapper.py +0 -138
  87. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/__init__.py +0 -16
  88. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_basic.py +0 -151
  89. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_nested.py +0 -97
  90. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_pipeline.py +0 -143
  91. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tuning.py +0 -551
  92. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_basic.py +0 -137
  93. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_nested.py +0 -96
  94. snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_pipeline.py +0 -142
  95. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/__init__.py +0 -16
  96. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_data_loader.py +0 -137
  97. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_distributor.py +0 -561
  98. snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_log_communication.py +0 -172
  99. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/__init__.py +0 -16
  100. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_algorithms.py +0 -353
  101. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_feature.py +0 -192
  102. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_linalg.py +0 -680
  103. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_stat.py +0 -206
  104. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_streaming_algorithms.py +0 -471
  105. snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_util.py +0 -108
  106. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/__init__.py +0 -16
  107. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/__init__.py +0 -16
  108. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_any_all.py +0 -177
  109. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_apply_func.py +0 -575
  110. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_binary_ops.py +0 -235
  111. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_combine.py +0 -653
  112. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_compute.py +0 -463
  113. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_corrwith.py +0 -86
  114. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cov.py +0 -151
  115. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cumulative.py +0 -139
  116. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_describe.py +0 -458
  117. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_eval.py +0 -86
  118. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_melt.py +0 -202
  119. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_missing_data.py +0 -520
  120. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_pivot.py +0 -361
  121. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/__init__.py +0 -16
  122. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/__init__.py +0 -16
  123. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_any_all.py +0 -40
  124. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_apply_func.py +0 -42
  125. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_binary_ops.py +0 -40
  126. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_combine.py +0 -37
  127. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_compute.py +0 -60
  128. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_corrwith.py +0 -40
  129. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cov.py +0 -40
  130. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cumulative.py +0 -90
  131. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_describe.py +0 -40
  132. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_eval.py +0 -40
  133. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_melt.py +0 -40
  134. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_missing_data.py +0 -42
  135. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_pivot.py +0 -37
  136. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/__init__.py +0 -16
  137. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_base.py +0 -36
  138. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_binary_ops.py +0 -42
  139. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_boolean_ops.py +0 -47
  140. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_categorical_ops.py +0 -55
  141. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_complex_ops.py +0 -40
  142. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_date_ops.py +0 -47
  143. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_datetime_ops.py +0 -47
  144. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_null_ops.py +0 -42
  145. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_arithmetic.py +0 -43
  146. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_ops.py +0 -47
  147. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_reverse.py +0 -43
  148. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_string_ops.py +0 -47
  149. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_timedelta_ops.py +0 -47
  150. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_udt_ops.py +0 -40
  151. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/testing_utils.py +0 -226
  152. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/__init__.py +0 -16
  153. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_align.py +0 -39
  154. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_basic_slow.py +0 -55
  155. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_cov_corrwith.py +0 -39
  156. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_frame.py +0 -39
  157. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_series.py +0 -39
  158. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_index.py +0 -39
  159. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_series.py +0 -39
  160. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_frame.py +0 -43
  161. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_series.py +0 -43
  162. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/__init__.py +0 -16
  163. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_attrs.py +0 -40
  164. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_constructor.py +0 -39
  165. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_conversion.py +0 -42
  166. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reindexing.py +0 -42
  167. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reshaping.py +0 -37
  168. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_spark.py +0 -40
  169. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_take.py +0 -42
  170. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_time_series.py +0 -48
  171. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_truncate.py +0 -40
  172. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/__init__.py +0 -16
  173. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_aggregate.py +0 -40
  174. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_apply_func.py +0 -41
  175. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_cumulative.py +0 -67
  176. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_describe.py +0 -40
  177. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_groupby.py +0 -55
  178. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_head_tail.py +0 -40
  179. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_index.py +0 -38
  180. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_missing_data.py +0 -55
  181. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_split_apply.py +0 -39
  182. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_stat.py +0 -38
  183. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/__init__.py +0 -16
  184. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_align.py +0 -40
  185. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_base.py +0 -50
  186. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_category.py +0 -73
  187. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py +0 -39
  188. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing.py +0 -40
  189. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reindex.py +0 -40
  190. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_rename.py +0 -40
  191. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reset_index.py +0 -48
  192. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_timedelta.py +0 -39
  193. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/__init__.py +0 -16
  194. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/test_parity_io.py +0 -40
  195. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/__init__.py +0 -16
  196. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot.py +0 -45
  197. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_matplotlib.py +0 -45
  198. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_plotly.py +0 -49
  199. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot.py +0 -37
  200. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_matplotlib.py +0 -53
  201. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_plotly.py +0 -45
  202. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/__init__.py +0 -16
  203. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_all_any.py +0 -38
  204. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_arg_ops.py +0 -37
  205. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_of.py +0 -37
  206. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_type.py +0 -38
  207. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_compute.py +0 -37
  208. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_conversion.py +0 -40
  209. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_cumulative.py +0 -40
  210. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_index.py +0 -38
  211. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_missing_data.py +0 -40
  212. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_series.py +0 -37
  213. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_sort.py +0 -38
  214. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_stat.py +0 -38
  215. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_categorical.py +0 -66
  216. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_config.py +0 -37
  217. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_csv.py +0 -37
  218. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_conversion.py +0 -42
  219. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_spark_io.py +0 -39
  220. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_default_index.py +0 -49
  221. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ewm.py +0 -37
  222. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_expanding.py +0 -39
  223. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_extension.py +0 -49
  224. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_frame_spark.py +0 -53
  225. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_generic_functions.py +0 -43
  226. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexing.py +0 -49
  227. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexops_spark.py +0 -39
  228. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_internal.py +0 -41
  229. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_namespace.py +0 -39
  230. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_numpy_compat.py +0 -60
  231. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames.py +0 -48
  232. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby.py +0 -39
  233. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_expanding.py +0 -44
  234. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_rolling.py +0 -84
  235. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_repr.py +0 -37
  236. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_resample.py +0 -45
  237. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_reshape.py +0 -39
  238. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_rolling.py +0 -39
  239. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_scalars.py +0 -37
  240. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_conversion.py +0 -39
  241. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_datetime.py +0 -39
  242. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_string.py +0 -39
  243. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_spark_functions.py +0 -39
  244. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_sql.py +0 -43
  245. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_stats.py +0 -37
  246. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_typedef.py +0 -36
  247. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_utils.py +0 -37
  248. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_window.py +0 -39
  249. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/__init__.py +0 -16
  250. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_base.py +0 -107
  251. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_binary_ops.py +0 -224
  252. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_boolean_ops.py +0 -825
  253. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_categorical_ops.py +0 -562
  254. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_complex_ops.py +0 -368
  255. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_date_ops.py +0 -257
  256. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_datetime_ops.py +0 -260
  257. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_null_ops.py +0 -178
  258. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_arithmetic.py +0 -184
  259. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py +0 -497
  260. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_reverse.py +0 -140
  261. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_string_ops.py +0 -354
  262. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_timedelta_ops.py +0 -219
  263. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_udt_ops.py +0 -192
  264. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/testing_utils.py +0 -228
  265. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/__init__.py +0 -16
  266. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_align.py +0 -118
  267. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_basic_slow.py +0 -198
  268. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_cov_corrwith.py +0 -181
  269. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_frame.py +0 -103
  270. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_series.py +0 -141
  271. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_index.py +0 -109
  272. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_series.py +0 -136
  273. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_frame.py +0 -125
  274. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_series.py +0 -217
  275. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/__init__.py +0 -16
  276. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_attrs.py +0 -384
  277. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_constructor.py +0 -598
  278. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_conversion.py +0 -73
  279. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reindexing.py +0 -869
  280. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reshaping.py +0 -487
  281. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_spark.py +0 -309
  282. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_take.py +0 -156
  283. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_time_series.py +0 -149
  284. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_truncate.py +0 -163
  285. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/__init__.py +0 -16
  286. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_aggregate.py +0 -311
  287. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_apply_func.py +0 -524
  288. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_cumulative.py +0 -419
  289. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_describe.py +0 -144
  290. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_groupby.py +0 -979
  291. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_head_tail.py +0 -234
  292. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_index.py +0 -206
  293. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_missing_data.py +0 -421
  294. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_split_apply.py +0 -187
  295. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_stat.py +0 -397
  296. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/__init__.py +0 -16
  297. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_align.py +0 -100
  298. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_base.py +0 -2743
  299. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_category.py +0 -484
  300. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_datetime.py +0 -276
  301. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_indexing.py +0 -432
  302. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reindex.py +0 -310
  303. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_rename.py +0 -257
  304. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reset_index.py +0 -160
  305. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_timedelta.py +0 -128
  306. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/__init__.py +0 -16
  307. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/test_io.py +0 -137
  308. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/__init__.py +0 -16
  309. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot.py +0 -170
  310. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_matplotlib.py +0 -547
  311. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_plotly.py +0 -285
  312. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot.py +0 -106
  313. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_matplotlib.py +0 -409
  314. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_plotly.py +0 -247
  315. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/__init__.py +0 -16
  316. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_all_any.py +0 -105
  317. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_arg_ops.py +0 -197
  318. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_of.py +0 -137
  319. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_type.py +0 -227
  320. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_compute.py +0 -634
  321. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_conversion.py +0 -88
  322. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_cumulative.py +0 -139
  323. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_index.py +0 -475
  324. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_missing_data.py +0 -265
  325. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_series.py +0 -818
  326. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_sort.py +0 -162
  327. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_stat.py +0 -780
  328. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_categorical.py +0 -741
  329. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_config.py +0 -160
  330. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_csv.py +0 -453
  331. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_conversion.py +0 -281
  332. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_spark_io.py +0 -487
  333. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_default_index.py +0 -109
  334. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ewm.py +0 -434
  335. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_expanding.py +0 -253
  336. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_extension.py +0 -152
  337. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_frame_spark.py +0 -162
  338. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_generic_functions.py +0 -234
  339. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexing.py +0 -1339
  340. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexops_spark.py +0 -82
  341. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_internal.py +0 -124
  342. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_namespace.py +0 -638
  343. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_numpy_compat.py +0 -200
  344. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames.py +0 -1355
  345. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby.py +0 -655
  346. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_expanding.py +0 -113
  347. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_rolling.py +0 -118
  348. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_repr.py +0 -192
  349. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_resample.py +0 -346
  350. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_reshape.py +0 -495
  351. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_rolling.py +0 -263
  352. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_scalars.py +0 -59
  353. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_conversion.py +0 -85
  354. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_datetime.py +0 -364
  355. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_string.py +0 -362
  356. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_spark_functions.py +0 -46
  357. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_sql.py +0 -123
  358. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_stats.py +0 -581
  359. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_typedef.py +0 -447
  360. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_utils.py +0 -301
  361. snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_window.py +0 -465
  362. snowflake/snowpark_connect/includes/python/pyspark/resource/tests/__init__.py +0 -16
  363. snowflake/snowpark_connect/includes/python/pyspark/resource/tests/test_resources.py +0 -83
  364. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/__init__.py +0 -16
  365. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/__init__.py +0 -16
  366. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/__init__.py +0 -16
  367. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_artifact.py +0 -420
  368. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_client.py +0 -358
  369. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/__init__.py +0 -16
  370. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach.py +0 -36
  371. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach_batch.py +0 -44
  372. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_listener.py +0 -116
  373. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_streaming.py +0 -35
  374. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_basic.py +0 -3612
  375. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_column.py +0 -1042
  376. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_function.py +0 -2381
  377. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_plan.py +0 -1060
  378. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow.py +0 -163
  379. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_map.py +0 -38
  380. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_python_udf.py +0 -48
  381. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_catalog.py +0 -36
  382. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_column.py +0 -55
  383. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_conf.py +0 -36
  384. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_dataframe.py +0 -96
  385. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_datasources.py +0 -44
  386. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_errors.py +0 -36
  387. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_functions.py +0 -59
  388. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_group.py +0 -36
  389. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_cogrouped_map.py +0 -59
  390. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map.py +0 -74
  391. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map_with_state.py +0 -62
  392. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_map.py +0 -58
  393. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf.py +0 -70
  394. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_grouped_agg.py +0 -50
  395. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_scalar.py +0 -68
  396. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_window.py +0 -40
  397. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_readwriter.py +0 -46
  398. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_serde.py +0 -44
  399. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_types.py +0 -100
  400. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udf.py +0 -100
  401. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udtf.py +0 -163
  402. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_session.py +0 -181
  403. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_utils.py +0 -42
  404. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/__init__.py +0 -16
  405. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py +0 -623
  406. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py +0 -869
  407. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py +0 -342
  408. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_map.py +0 -436
  409. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf.py +0 -363
  410. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py +0 -592
  411. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py +0 -1503
  412. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py +0 -392
  413. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py +0 -375
  414. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py +0 -411
  415. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/__init__.py +0 -16
  416. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming.py +0 -401
  417. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach.py +0 -295
  418. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach_batch.py +0 -106
  419. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_listener.py +0 -558
  420. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow.py +0 -1346
  421. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_map.py +0 -182
  422. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_python_udf.py +0 -202
  423. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_catalog.py +0 -503
  424. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_column.py +0 -225
  425. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_conf.py +0 -83
  426. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_context.py +0 -201
  427. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_dataframe.py +0 -1931
  428. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_datasources.py +0 -256
  429. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_errors.py +0 -69
  430. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_functions.py +0 -1349
  431. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_group.py +0 -53
  432. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_pandas_sqlmetrics.py +0 -68
  433. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_readwriter.py +0 -283
  434. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_serde.py +0 -155
  435. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_session.py +0 -412
  436. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_types.py +0 -1581
  437. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf.py +0 -961
  438. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf_profiler.py +0 -165
  439. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udtf.py +0 -1456
  440. snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_utils.py +0 -1686
  441. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/__init__.py +0 -16
  442. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_context.py +0 -184
  443. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_dstream.py +0 -706
  444. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_kinesis.py +0 -118
  445. snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_listener.py +0 -160
  446. snowflake/snowpark_connect/includes/python/pyspark/tests/__init__.py +0 -16
  447. snowflake/snowpark_connect/includes/python/pyspark/tests/test_appsubmit.py +0 -306
  448. snowflake/snowpark_connect/includes/python/pyspark/tests/test_broadcast.py +0 -196
  449. snowflake/snowpark_connect/includes/python/pyspark/tests/test_conf.py +0 -44
  450. snowflake/snowpark_connect/includes/python/pyspark/tests/test_context.py +0 -346
  451. snowflake/snowpark_connect/includes/python/pyspark/tests/test_daemon.py +0 -89
  452. snowflake/snowpark_connect/includes/python/pyspark/tests/test_install_spark.py +0 -124
  453. snowflake/snowpark_connect/includes/python/pyspark/tests/test_join.py +0 -69
  454. snowflake/snowpark_connect/includes/python/pyspark/tests/test_memory_profiler.py +0 -167
  455. snowflake/snowpark_connect/includes/python/pyspark/tests/test_pin_thread.py +0 -194
  456. snowflake/snowpark_connect/includes/python/pyspark/tests/test_profiler.py +0 -168
  457. snowflake/snowpark_connect/includes/python/pyspark/tests/test_rdd.py +0 -939
  458. snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddbarrier.py +0 -52
  459. snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddsampler.py +0 -66
  460. snowflake/snowpark_connect/includes/python/pyspark/tests/test_readwrite.py +0 -368
  461. snowflake/snowpark_connect/includes/python/pyspark/tests/test_serializers.py +0 -257
  462. snowflake/snowpark_connect/includes/python/pyspark/tests/test_shuffle.py +0 -267
  463. snowflake/snowpark_connect/includes/python/pyspark/tests/test_stage_sched.py +0 -153
  464. snowflake/snowpark_connect/includes/python/pyspark/tests/test_statcounter.py +0 -130
  465. snowflake/snowpark_connect/includes/python/pyspark/tests/test_taskcontext.py +0 -350
  466. snowflake/snowpark_connect/includes/python/pyspark/tests/test_util.py +0 -97
  467. snowflake/snowpark_connect/includes/python/pyspark/tests/test_worker.py +0 -271
  468. snowpark_connect-0.23.0.dist-info/RECORD +0 -893
  469. {snowpark_connect-0.23.0.data → snowpark_connect-0.25.0.data}/scripts/snowpark-connect +0 -0
  470. {snowpark_connect-0.23.0.data → snowpark_connect-0.25.0.data}/scripts/snowpark-session +0 -0
  471. {snowpark_connect-0.23.0.data → snowpark_connect-0.25.0.data}/scripts/snowpark-submit +0 -0
  472. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/WHEEL +0 -0
  473. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/licenses/LICENSE-binary +0 -0
  474. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/licenses/LICENSE.txt +0 -0
  475. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/licenses/NOTICE-binary +0 -0
  476. {snowpark_connect-0.23.0.dist-info → snowpark_connect-0.25.0.dist-info}/top_level.txt +0 -0
snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_types.py (deleted)
@@ -1,1581 +0,0 @@
1
- # -*- encoding: utf-8 -*-
2
- #
3
- # Licensed to the Apache Software Foundation (ASF) under one or more
4
- # contributor license agreements. See the NOTICE file distributed with
5
- # this work for additional information regarding copyright ownership.
6
- # The ASF licenses this file to You under the Apache License, Version 2.0
7
- # (the "License"); you may not use this file except in compliance with
8
- # the License. You may obtain a copy of the License at
9
- #
10
- # http://www.apache.org/licenses/LICENSE-2.0
11
- #
12
- # Unless required by applicable law or agreed to in writing, software
13
- # distributed under the License is distributed on an "AS IS" BASIS,
14
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
- # See the License for the specific language governing permissions and
16
- # limitations under the License.
17
- #
18
-
19
- import array
20
- import ctypes
21
- import datetime
22
- import os
23
- import pickle
24
- import sys
25
- import unittest
26
-
27
- from pyspark.sql import Row
28
- from pyspark.sql import functions as F
29
- from pyspark.errors import AnalysisException, PySparkTypeError, PySparkValueError
30
- from pyspark.sql.types import (
31
- ByteType,
32
- ShortType,
33
- IntegerType,
34
- FloatType,
35
- DateType,
36
- TimestampType,
37
- DayTimeIntervalType,
38
- YearMonthIntervalType,
39
- MapType,
40
- StringType,
41
- CharType,
42
- VarcharType,
43
- StructType,
44
- StructField,
45
- ArrayType,
46
- DoubleType,
47
- LongType,
48
- DecimalType,
49
- BinaryType,
50
- BooleanType,
51
- NullType,
52
- )
53
- from pyspark.sql.types import (
54
- _array_signed_int_typecode_ctype_mappings,
55
- _array_type_mappings,
56
- _array_unsigned_int_typecode_ctype_mappings,
57
- _infer_type,
58
- _make_type_verifier,
59
- _merge_type,
60
- )
61
- from pyspark.testing.objects import (
62
- ExamplePointUDT,
63
- PythonOnlyUDT,
64
- ExamplePoint,
65
- PythonOnlyPoint,
66
- MyObject,
67
- )
68
- from pyspark.testing.sqlutils import ReusedSQLTestCase
69
- from pyspark.testing.utils import PySparkErrorTestUtils
70
-
71
-
72
- class TypesTestsMixin:
73
- def test_apply_schema_to_row(self):
74
- df = self.spark.read.json(self.sc.parallelize(["""{"a":2}"""]))
75
- df2 = self.spark.createDataFrame(df.rdd.map(lambda x: x), df.schema)
76
- self.assertEqual(df.collect(), df2.collect())
77
-
78
- rdd = self.sc.parallelize(range(10)).map(lambda x: Row(a=x))
79
- df3 = self.spark.createDataFrame(rdd, df.schema)
80
- self.assertEqual(10, df3.count())
81
-
82
- def test_infer_schema_to_local(self):
83
- input = [{"a": 1}, {"b": "coffee"}]
84
- rdd = self.sc.parallelize(input)
85
- df = self.spark.createDataFrame(input)
86
- df2 = self.spark.createDataFrame(rdd, samplingRatio=1.0)
87
- self.assertEqual(df.schema, df2.schema)
88
-
89
- rdd = self.sc.parallelize(range(10)).map(lambda x: Row(a=x, b=None))
90
- df3 = self.spark.createDataFrame(rdd, df.schema)
91
- self.assertEqual(10, df3.count())
92
-
93
- def test_apply_schema_to_dict_and_rows(self):
94
- schema = StructType().add("a", IntegerType()).add("b", StringType())
95
- input = [{"a": 1}, {"b": "coffee"}]
96
- rdd = self.sc.parallelize(input)
97
- for verify in [False, True]:
98
- df = self.spark.createDataFrame(input, schema, verifySchema=verify)
99
- df2 = self.spark.createDataFrame(rdd, schema, verifySchema=verify)
100
- self.assertEqual(df.schema, df2.schema)
101
-
102
- rdd = self.sc.parallelize(range(10)).map(lambda x: Row(a=x, b=None))
103
- df3 = self.spark.createDataFrame(rdd, schema, verifySchema=verify)
104
- self.assertEqual(10, df3.count())
105
- input = [Row(a=x, b=str(x)) for x in range(10)]
106
- df4 = self.spark.createDataFrame(input, schema, verifySchema=verify)
107
- self.assertEqual(10, df4.count())
108
-
109
- def test_create_dataframe_schema_mismatch(self):
110
- rdd = self.sc.parallelize(range(3)).map(lambda i: Row(a=i))
111
- schema = StructType([StructField("a", IntegerType()), StructField("b", StringType())])
112
- df = self.spark.createDataFrame(rdd, schema)
113
- self.assertRaises(Exception, lambda: df.show())
114
-
115
- def test_infer_schema(self):
116
- d = [Row(l=[], d={}, s=None), Row(l=[Row(a=1, b="s")], d={"key": Row(c=1.0, d="2")}, s="")]
117
- rdd = self.sc.parallelize(d)
118
- df = self.spark.createDataFrame(rdd)
119
- self.assertEqual([], df.rdd.map(lambda r: r.l).first())
120
- self.assertEqual([None, ""], df.rdd.map(lambda r: r.s).collect())
121
-
122
- with self.tempView("test"):
123
- df.createOrReplaceTempView("test")
124
- result = self.spark.sql("SELECT l from test")
125
- self.assertEqual([], result.head()[0])
126
- # We set `spark.sql.ansi.enabled` to False for this case
127
- # since it occurs an error in ANSI mode if there is a list index
128
- # or key that does not exist.
129
- with self.sql_conf({"spark.sql.ansi.enabled": False}):
130
- result = self.spark.sql("SELECT l[0].a from test where d['key'].d = '2'")
131
- self.assertEqual(1, result.head()[0])
132
-
133
- df2 = self.spark.createDataFrame(rdd, samplingRatio=1.0)
134
- self.assertEqual(df.schema, df2.schema)
135
- self.assertEqual({}, df2.rdd.map(lambda r: r.d).first())
136
- self.assertEqual([None, ""], df2.rdd.map(lambda r: r.s).collect())
137
-
138
- with self.tempView("test2"):
139
- df2.createOrReplaceTempView("test2")
140
- result = self.spark.sql("SELECT l from test2")
141
- self.assertEqual([], result.head()[0])
142
- # We set `spark.sql.ansi.enabled` to False for this case
143
- # since it occurs an error in ANSI mode if there is a list index
144
- # or key that does not exist.
145
- with self.sql_conf({"spark.sql.ansi.enabled": False}):
146
- result = self.spark.sql("SELECT l[0].a from test2 where d['key'].d = '2'")
147
- self.assertEqual(1, result.head()[0])
148
-
149
- def test_infer_schema_specification(self):
150
- from decimal import Decimal
151
-
152
- class A:
153
- def __init__(self):
154
- self.a = 1
155
-
156
- data = [
157
- True,
158
- 1,
159
- "a",
160
- "a",
161
- datetime.date(1970, 1, 1),
162
- datetime.datetime(1970, 1, 1, 0, 0),
163
- datetime.timedelta(microseconds=123456678),
164
- 1.0,
165
- array.array("d", [1]),
166
- [1],
167
- (1,),
168
- {"a": 1},
169
- bytearray(1),
170
- Decimal(1),
171
- Row(a=1),
172
- Row("a")(1),
173
- A(),
174
- ]
175
-
176
- df = self.spark.createDataFrame([data])
177
- actual = list(map(lambda x: x.dataType.simpleString(), df.schema))
178
- expected = [
179
- "boolean",
180
- "bigint",
181
- "string",
182
- "string",
183
- "date",
184
- "timestamp",
185
- "interval day to second",
186
- "double",
187
- "array<double>",
188
- "array<bigint>",
189
- "struct<_1:bigint>",
190
- "map<string,bigint>",
191
- "binary",
192
- "decimal(38,18)",
193
- "struct<a:bigint>",
194
- "struct<a:bigint>",
195
- "struct<a:bigint>",
196
- ]
197
- self.assertEqual(actual, expected)
198
-
199
- actual = list(df.first())
200
- expected = [
201
- True,
202
- 1,
203
- "a",
204
- "a",
205
- datetime.date(1970, 1, 1),
206
- datetime.datetime(1970, 1, 1, 0, 0),
207
- datetime.timedelta(microseconds=123456678),
208
- 1.0,
209
- [1.0],
210
- [1],
211
- Row(_1=1),
212
- {"a": 1},
213
- bytearray(b"\x00"),
214
- Decimal("1.000000000000000000"),
215
- Row(a=1),
216
- Row(a=1),
217
- Row(a=1),
218
- ]
219
- self.assertEqual(actual, expected)
220
-
221
- with self.sql_conf({"spark.sql.timestampType": "TIMESTAMP_NTZ"}):
222
- with self.sql_conf({"spark.sql.session.timeZone": "America/Sao_Paulo"}):
223
- df = self.spark.createDataFrame([(datetime.datetime(1970, 1, 1, 0, 0),)])
224
- self.assertEqual(list(df.schema)[0].dataType.simpleString(), "timestamp_ntz")
225
- self.assertEqual(df.first()[0], datetime.datetime(1970, 1, 1, 0, 0))
226
-
227
- df = self.spark.createDataFrame(
228
- [
229
- (datetime.datetime(1970, 1, 1, 0, 0),),
230
- (datetime.datetime(1970, 1, 1, 0, 0, tzinfo=datetime.timezone.utc),),
231
- ]
232
- )
233
- self.assertEqual(list(df.schema)[0].dataType.simpleString(), "timestamp")
234
-
235
- def test_infer_schema_not_enough_names(self):
236
- df = self.spark.createDataFrame([["a", "b"]], ["col1"])
237
- self.assertEqual(df.columns, ["col1", "_2"])
238
-
239
- def test_infer_schema_upcast_int_to_string(self):
240
- df = self.spark.createDataFrame(
241
- self.spark.sparkContext.parallelize([[1, 1], ["x", 1]]),
242
- schema=["a", "b"],
243
- samplingRatio=0.99,
244
- )
245
- self.assertEqual([Row(a="1", b=1), Row(a="x", b=1)], df.collect())
246
-
247
- def test_infer_schema_upcast_float_to_string(self):
248
- df = self.spark.createDataFrame([[1.33, 1], ["2.1", 1]], schema=["a", "b"])
249
- self.assertEqual([Row(a="1.33", b=1), Row(a="2.1", b=1)], df.collect())
250
-
251
- def test_infer_schema_upcast_boolean_to_string(self):
252
- df = self.spark.createDataFrame([[True, 1], ["false", 1]], schema=["a", "b"])
253
- self.assertEqual([Row(a="true", b=1), Row(a="false", b=1)], df.collect())
254
-
255
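The three upcast tests above exercise one inference rule: when sampled values for a column conflict, PySpark falls back to string and stringifies the non-string values. A minimal sketch, again assuming `spark`:

    df = spark.createDataFrame([[True, 1], ["false", 1]], schema=["a", "b"])
    df.dtypes     # expected: [('a', 'string'), ('b', 'bigint')]
    df.collect()  # expected: [Row(a='true', b=1), Row(a='false', b=1)]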
- def test_infer_nested_schema(self):
256
- NestedRow = Row("f1", "f2")
257
- nestedRdd1 = self.sc.parallelize(
258
- [NestedRow([1, 2], {"row1": 1.0}), NestedRow([2, 3], {"row2": 2.0})]
259
- )
260
- df = self.spark.createDataFrame(nestedRdd1)
261
- self.assertEqual(Row(f1=[1, 2], f2={"row1": 1.0}), df.collect()[0])
262
-
263
- nestedRdd2 = self.sc.parallelize(
264
- [NestedRow([[1, 2], [2, 3]], [1, 2]), NestedRow([[2, 3], [3, 4]], [2, 3])]
265
- )
266
- df = self.spark.createDataFrame(nestedRdd2)
267
- self.assertEqual(Row(f1=[[1, 2], [2, 3]], f2=[1, 2]), df.collect()[0])
268
-
269
- from collections import namedtuple
270
-
271
- CustomRow = namedtuple("CustomRow", "field1 field2")
272
- rdd = self.sc.parallelize(
273
- [
274
- CustomRow(field1=1, field2="row1"),
275
- CustomRow(field1=2, field2="row2"),
276
- CustomRow(field1=3, field2="row3"),
277
- ]
278
- )
279
- df = self.spark.createDataFrame(rdd)
280
- self.assertEqual(Row(field1=1, field2="row1"), df.first())
281
-
282
- def test_infer_nested_dict_as_struct(self):
283
- # SPARK-35929: Test inferring nested dict as a struct type.
284
- NestedRow = Row("f1", "f2")
285
-
286
- with self.sql_conf({"spark.sql.pyspark.inferNestedDictAsStruct.enabled": True}):
287
- data = [
288
- NestedRow([{"payment": 200.5, "name": "A"}], [1, 2]),
289
- NestedRow([{"payment": 100.5, "name": "B"}], [2, 3]),
290
- ]
291
-
292
- df = self.spark.createDataFrame(data)
293
- self.assertEqual(Row(f1=[Row(payment=200.5, name="A")], f2=[1, 2]), df.first())
294
-
295
- def test_infer_nested_dict_as_struct_with_rdd(self):
296
- # SPARK-35929: Test inferring nested dict as a struct type.
297
- NestedRow = Row("f1", "f2")
298
-
299
- with self.sql_conf({"spark.sql.pyspark.inferNestedDictAsStruct.enabled": True}):
300
- data = [
301
- NestedRow([{"payment": 200.5, "name": "A"}], [1, 2]),
302
- NestedRow([{"payment": 100.5, "name": "B"}], [2, 3]),
303
- ]
304
-
305
- nestedRdd = self.sc.parallelize(data)
306
- df = self.spark.createDataFrame(nestedRdd)
307
- self.assertEqual(Row(f1=[Row(payment=200.5, name="A")], f2=[1, 2]), df.first())
308
-
309
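By default PySpark infers a Python dict as a MapType; the two tests above flip `spark.sql.pyspark.inferNestedDictAsStruct.enabled` so that dicts are inferred as structs instead. A minimal sketch, assuming `spark` and a session where the conf can be set at runtime:

    spark.conf.set("spark.sql.pyspark.inferNestedDictAsStruct.enabled", "true")
    df = spark.createDataFrame([Row(f1=[{"payment": 200.5, "name": "A"}])])
    df.schema.simpleString()  # expected: struct<f1:array<struct<payment:double,name:string>>>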
- def test_infer_array_merge_element_types(self):
310
- # SPARK-39168: Test inferring array element type from all values in array
311
- ArrayRow = Row("f1", "f2")
312
-
313
- data = [ArrayRow([1, None], [None, 2])]
314
-
315
- df = self.spark.createDataFrame(data)
316
- self.assertEqual(Row(f1=[1, None], f2=[None, 2]), df.first())
317
-
318
- # Test legacy behavior inferring only from the first element
319
- with self.sql_conf(
320
- {"spark.sql.pyspark.legacy.inferArrayTypeFromFirstElement.enabled": True}
321
- ):
322
- # Legacy: f2 schema inferred as an array of nulls, should raise error
323
- self.assertRaises(ValueError, lambda: self.spark.createDataFrame(data))
324
-
325
- # an array with only null values should raise an error
326
- data2 = [ArrayRow([1], [None])]
327
- self.assertRaises(ValueError, lambda: self.spark.createDataFrame(data2))
328
-
329
- # an array with no values should raise an error
330
- data3 = [ArrayRow([1], [])]
331
- self.assertRaises(ValueError, lambda: self.spark.createDataFrame(data3))
332
-
333
- # an array with conflicting types should raise an error
334
- # in this case this is ArrayType(StringType) and ArrayType(NullType)
335
- data4 = [ArrayRow([1, "1"], [None])]
336
- with self.assertRaisesRegex(ValueError, "types cannot be determined after inferring"):
337
- self.spark.createDataFrame(data4)
338
-
339
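The contrast being tested is between merged inference (SPARK-39168), which inspects every array element, and the legacy first-element-only rule. A minimal sketch, assuming `spark`:

    # Merged inference: [None, 2] yields array<bigint>.
    spark.createDataFrame([Row(f=[None, 2])]).schema.simpleString()  # struct<f:array<bigint>>
    # Under the legacy conf only the first element (None) is inspected, so the
    # element type cannot be determined and createDataFrame raises ValueError.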
- def test_infer_array_merge_element_types_with_rdd(self):
340
- # SPARK-39168: Test inferring array element type from all values in array
341
- ArrayRow = Row("f1", "f2")
342
-
343
- data = [ArrayRow([1, None], [None, 2])]
344
-
345
- rdd = self.sc.parallelize(data)
346
- df = self.spark.createDataFrame(rdd)
347
- self.assertEqual(Row(f1=[1, None], f2=[None, 2]), df.first())
348
-
349
- def test_infer_array_element_type_empty(self):
350
- # SPARK-39168: Test inferring array element type from all rows
351
- ArrayRow = Row("f1")
352
-
353
- data = [ArrayRow([]), ArrayRow([None]), ArrayRow([1])]
354
-
355
- rdd = self.sc.parallelize(data)
356
- df = self.spark.createDataFrame(rdd)
357
- rows = df.collect()
358
- self.assertEqual(Row(f1=[]), rows[0])
359
- self.assertEqual(Row(f1=[None]), rows[1])
360
- self.assertEqual(Row(f1=[1]), rows[2])
361
-
362
- df = self.spark.createDataFrame(data)
363
- rows = df.collect()
364
- self.assertEqual(Row(f1=[]), rows[0])
365
- self.assertEqual(Row(f1=[None]), rows[1])
366
- self.assertEqual(Row(f1=[1]), rows[2])
367
-
368
- def test_infer_array_element_type_with_struct(self):
369
- # SPARK-39168: Test inferring array of struct type from all struct values
370
- NestedRow = Row("f1")
371
-
372
- with self.sql_conf({"spark.sql.pyspark.inferNestedDictAsStruct.enabled": True}):
373
- data = [NestedRow([{"payment": 200.5}, {"name": "A"}])]
374
-
375
- nestedRdd = self.sc.parallelize(data)
376
- df = self.spark.createDataFrame(nestedRdd)
377
- self.assertEqual(
378
- Row(f1=[Row(payment=200.5, name=None), Row(payment=None, name="A")]), df.first()
379
- )
380
-
381
- df = self.spark.createDataFrame(data)
382
- self.assertEqual(
383
- Row(f1=[Row(payment=200.5, name=None), Row(payment=None, name="A")]), df.first()
384
- )
385
-
386
- # Test legacy behavior inferring only from the first element; excludes "name" field
387
- with self.sql_conf(
388
- {"spark.sql.pyspark.legacy.inferArrayTypeFromFirstElement.enabled": True}
389
- ):
390
- df = self.spark.createDataFrame(data)
391
- self.assertEqual(Row(f1=[Row(payment=200.5), Row(payment=None)]), df.first())
392
-
393
- def test_create_dataframe_from_dict_respects_schema(self):
394
- df = self.spark.createDataFrame([{"a": 1}], ["b"])
395
- self.assertEqual(df.columns, ["b"])
396
-
397
- def test_negative_decimal(self):
398
- try:
399
- self.spark.sql("set spark.sql.legacy.allowNegativeScaleOfDecimal=true")
400
- df = self.spark.createDataFrame([(1,), (11,)], ["value"])
401
- ret = df.select(F.col("value").cast(DecimalType(1, -1))).collect()
402
- actual = list(map(lambda r: int(r.value), ret))
403
- self.assertEqual(actual, [0, 10])
404
- finally:
405
- self.spark.sql("set spark.sql.legacy.allowNegativeScaleOfDecimal=false")
406
-
407
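A negative decimal scale rounds to the left of the decimal point, which is why the test expects 1 -> 0 and 11 -> 10. A rough sketch of the cast it performs, assuming `spark` plus the `F` and `DecimalType` imports this file already uses:

    spark.sql("set spark.sql.legacy.allowNegativeScaleOfDecimal=true")
    df = spark.createDataFrame([(11,)], ["value"])
    df.select(F.col("value").cast(DecimalType(1, -1))).collect()  # value rounds to 10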
- def test_create_dataframe_from_objects(self):
408
- data = [MyObject(1, "1"), MyObject(2, "2")]
409
- df = self.spark.createDataFrame(data)
410
- self.assertEqual(df.dtypes, [("key", "bigint"), ("value", "string")])
411
- self.assertEqual(df.first(), Row(key=1, value="1"))
412
-
413
- def test_apply_schema(self):
414
- from datetime import date, datetime, timedelta
415
-
416
- rdd = self.sc.parallelize(
417
- [
418
- (
419
- 127,
420
- -128,
421
- -32768,
422
- 32767,
423
- 2147483647,
424
- 1.0,
425
- date(2010, 1, 1),
426
- datetime(2010, 1, 1, 1, 1, 1),
427
- timedelta(days=1),
428
- {"a": 1},
429
- (2,),
430
- [1, 2, 3],
431
- None,
432
- )
433
- ]
434
- )
435
- schema = StructType(
436
- [
437
- StructField("byte1", ByteType(), False),
438
- StructField("byte2", ByteType(), False),
439
- StructField("short1", ShortType(), False),
440
- StructField("short2", ShortType(), False),
441
- StructField("int1", IntegerType(), False),
442
- StructField("float1", FloatType(), False),
443
- StructField("date1", DateType(), False),
444
- StructField("time1", TimestampType(), False),
445
- StructField("daytime1", DayTimeIntervalType(), False),
446
- StructField("map1", MapType(StringType(), IntegerType(), False), False),
447
- StructField("struct1", StructType([StructField("b", ShortType(), False)]), False),
448
- StructField("list1", ArrayType(ByteType(), False), False),
449
- StructField("null1", DoubleType(), True),
450
- ]
451
- )
452
- df = self.spark.createDataFrame(rdd, schema)
453
- results = df.rdd.map(
454
- lambda x: (
455
- x.byte1,
456
- x.byte2,
457
- x.short1,
458
- x.short2,
459
- x.int1,
460
- x.float1,
461
- x.date1,
462
- x.time1,
463
- x.daytime1,
464
- x.map1["a"],
465
- x.struct1.b,
466
- x.list1,
467
- x.null1,
468
- )
469
- )
470
- r = (
471
- 127,
472
- -128,
473
- -32768,
474
- 32767,
475
- 2147483647,
476
- 1.0,
477
- date(2010, 1, 1),
478
- datetime(2010, 1, 1, 1, 1, 1),
479
- timedelta(days=1),
480
- 1,
481
- 2,
482
- [1, 2, 3],
483
- None,
484
- )
485
- self.assertEqual(r, results.first())
486
-
487
- with self.tempView("table2"):
488
- df.createOrReplaceTempView("table2")
489
- r = self.spark.sql(
490
- "SELECT byte1 - 1 AS byte1, byte2 + 1 AS byte2, "
491
- + "short1 + 1 AS short1, short2 - 1 AS short2, int1 - 1 AS int1, "
492
- + "float1 + 1.5 as float1 FROM table2"
493
- ).first()
494
-
495
- self.assertEqual((126, -127, -32767, 32766, 2147483646, 2.5), tuple(r))
496
-
497
- def test_convert_row_to_dict(self):
498
- row = Row(l=[Row(a=1, b="s")], d={"key": Row(c=1.0, d="2")})
499
- self.assertEqual(1, row.asDict()["l"][0].a)
500
- df = self.spark.createDataFrame([row])
501
-
502
- with self.tempView("test"):
503
- df.createOrReplaceTempView("test")
504
- row = self.spark.sql("select l, d from test").head()
505
- self.assertEqual(1, row.asDict()["l"][0].a)
506
- self.assertEqual(1.0, row.asDict()["d"]["key"].c)
507
-
508
- def test_udt(self):
509
- from pyspark.sql.types import _parse_datatype_json_string, _infer_type, _make_type_verifier
510
-
511
- def check_datatype(datatype):
512
- pickled = pickle.loads(pickle.dumps(datatype))
513
- assert datatype == pickled
514
- scala_datatype = self.spark._jsparkSession.parseDataType(datatype.json())
515
- python_datatype = _parse_datatype_json_string(scala_datatype.json())
516
- assert datatype == python_datatype
517
-
518
- check_datatype(ExamplePointUDT())
519
- structtype_with_udt = StructType(
520
- [
521
- StructField("label", DoubleType(), False),
522
- StructField("point", ExamplePointUDT(), False),
523
- ]
524
- )
525
- check_datatype(structtype_with_udt)
526
- p = ExamplePoint(1.0, 2.0)
527
- self.assertEqual(_infer_type(p), ExamplePointUDT())
528
- _make_type_verifier(ExamplePointUDT())(ExamplePoint(1.0, 2.0))
529
- self.assertRaises(ValueError, lambda: _make_type_verifier(ExamplePointUDT())([1.0, 2.0]))
530
-
531
- check_datatype(PythonOnlyUDT())
532
- structtype_with_udt = StructType(
533
- [
534
- StructField("label", DoubleType(), False),
535
- StructField("point", PythonOnlyUDT(), False),
536
- ]
537
- )
538
- check_datatype(structtype_with_udt)
539
- p = PythonOnlyPoint(1.0, 2.0)
540
- self.assertEqual(_infer_type(p), PythonOnlyUDT())
541
- _make_type_verifier(PythonOnlyUDT())(PythonOnlyPoint(1.0, 2.0))
542
- self.assertRaises(ValueError, lambda: _make_type_verifier(PythonOnlyUDT())([1.0, 2.0]))
543
-
544
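The `check_datatype` helper above boils down to two round-trips: pickling, and serializing the type to JSON and parsing it back (the test routes the latter through the JVM). A Python-only sketch using the same private helper the test imports, assuming `ExamplePointUDT` is importable on the driver:

    import pickle
    from pyspark.sql.types import _parse_datatype_json_string
    udt = ExamplePointUDT()
    assert pickle.loads(pickle.dumps(udt)) == udt
    assert _parse_datatype_json_string(udt.json()) == udt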
- def test_simple_udt_in_df(self):
545
- schema = StructType().add("key", LongType()).add("val", PythonOnlyUDT())
546
- df = self.spark.createDataFrame(
547
- [(i % 3, PythonOnlyPoint(float(i), float(i))) for i in range(10)], schema=schema
548
- )
549
- df.collect()
550
-
551
- def test_nested_udt_in_df(self):
552
- schema = StructType().add("key", LongType()).add("val", ArrayType(PythonOnlyUDT()))
553
- df = self.spark.createDataFrame(
554
- [(i % 3, [PythonOnlyPoint(float(i), float(i))]) for i in range(10)], schema=schema
555
- )
556
- df.collect()
557
-
558
- schema = (
559
- StructType().add("key", LongType()).add("val", MapType(LongType(), PythonOnlyUDT()))
560
- )
561
- df = self.spark.createDataFrame(
562
- [(i % 3, {i % 3: PythonOnlyPoint(float(i + 1), float(i + 1))}) for i in range(10)],
563
- schema=schema,
564
- )
565
- df.collect()
566
-
567
- def test_complex_nested_udt_in_df(self):
568
- schema = StructType().add("key", LongType()).add("val", PythonOnlyUDT())
569
- df = self.spark.createDataFrame(
570
- [(i % 3, PythonOnlyPoint(float(i), float(i))) for i in range(10)], schema=schema
571
- )
572
- df.collect()
573
-
574
- gd = df.groupby("key").agg({"val": "collect_list"})
575
- gd.collect()
576
- udf = F.udf(lambda k, v: [(k, v[0])], ArrayType(df.schema))
577
- gd.select(udf(*gd)).collect()
578
-
579
- def test_udt_with_none(self):
580
- df = self.spark.range(0, 10, 1, 1)
581
-
582
- def myudf(x):
583
- if x > 0:
584
- return PythonOnlyPoint(float(x), float(x))
585
-
586
- self.spark.catalog.registerFunction("udf", myudf, PythonOnlyUDT())
587
- rows = [r[0] for r in df.selectExpr("udf(id)").take(2)]
588
- self.assertEqual(rows, [None, PythonOnlyPoint(1, 1)])
589
-
590
- def test_infer_schema_with_udt(self):
591
- row = Row(label=1.0, point=ExamplePoint(1.0, 2.0))
592
- df = self.spark.createDataFrame([row])
593
- schema = df.schema
594
- field = [f for f in schema.fields if f.name == "point"][0]
595
- self.assertEqual(type(field.dataType), ExamplePointUDT)
596
-
597
- with self.tempView("labeled_point"):
598
- df.createOrReplaceTempView("labeled_point")
599
- point = self.spark.sql("SELECT point FROM labeled_point").head().point
600
- self.assertEqual(point, ExamplePoint(1.0, 2.0))
601
-
602
- row = Row(label=1.0, point=PythonOnlyPoint(1.0, 2.0))
603
- df = self.spark.createDataFrame([row])
604
- schema = df.schema
605
- field = [f for f in schema.fields if f.name == "point"][0]
606
- self.assertEqual(type(field.dataType), PythonOnlyUDT)
607
-
608
- with self.tempView("labeled_point"):
609
- df.createOrReplaceTempView("labeled_point")
610
- point = self.spark.sql("SELECT point FROM labeled_point").head().point
611
- self.assertEqual(point, PythonOnlyPoint(1.0, 2.0))
612
-
613
- def test_infer_schema_with_udt_with_column_names(self):
614
- row = (1.0, ExamplePoint(1.0, 2.0))
615
- df = self.spark.createDataFrame([row], ["label", "point"])
616
- schema = df.schema
617
- field = [f for f in schema.fields if f.name == "point"][0]
618
- self.assertEqual(type(field.dataType), ExamplePointUDT)
619
-
620
- with self.tempView("labeled_point"):
621
- df.createOrReplaceTempView("labeled_point")
622
- point = self.spark.sql("SELECT point FROM labeled_point").head().point
623
- self.assertEqual(point, ExamplePoint(1.0, 2.0))
624
-
625
- row = (1.0, PythonOnlyPoint(1.0, 2.0))
626
- df = self.spark.createDataFrame([row], ["label", "point"])
627
- schema = df.schema
628
- field = [f for f in schema.fields if f.name == "point"][0]
629
- self.assertEqual(type(field.dataType), PythonOnlyUDT)
630
-
631
- with self.tempView("labeled_point"):
632
- df.createOrReplaceTempView("labeled_point")
633
- point = self.spark.sql("SELECT point FROM labeled_point").head().point
634
- self.assertEqual(point, PythonOnlyPoint(1.0, 2.0))
635
-
636
- def test_apply_schema_with_udt(self):
637
- row = (1.0, ExamplePoint(1.0, 2.0))
638
- schema = StructType(
639
- [
640
- StructField("label", DoubleType(), False),
641
- StructField("point", ExamplePointUDT(), False),
642
- ]
643
- )
644
- df = self.spark.createDataFrame([row], schema)
645
- point = df.head().point
646
- self.assertEqual(point, ExamplePoint(1.0, 2.0))
647
-
648
- row = (1.0, PythonOnlyPoint(1.0, 2.0))
649
- schema = StructType(
650
- [
651
- StructField("label", DoubleType(), False),
652
- StructField("point", PythonOnlyUDT(), False),
653
- ]
654
- )
655
- df = self.spark.createDataFrame([row], schema)
656
- point = df.head().point
657
- self.assertEqual(point, PythonOnlyPoint(1.0, 2.0))
658
-
659
- def test_apply_schema_with_nullable_udt(self):
660
- rows = [(1.0, ExamplePoint(1.0, 2.0)), (2.0, None)]
661
- schema = StructType(
662
- [
663
- StructField("label", DoubleType(), False),
664
- StructField("point", ExamplePointUDT(), True),
665
- ]
666
- )
667
- df = self.spark.createDataFrame(rows, schema)
668
- points = [row.point for row in df.collect()]
669
- self.assertEqual(points, [ExamplePoint(1.0, 2.0), None])
670
-
671
- rows = [(1.0, PythonOnlyPoint(1.0, 2.0)), (2.0, None)]
672
- schema = StructType(
673
- [
674
- StructField("label", DoubleType(), False),
675
- StructField("point", PythonOnlyUDT(), True),
676
- ]
677
- )
678
- df = self.spark.createDataFrame(rows, schema)
679
- points = [row.point for row in df.collect()]
680
- self.assertEqual(points, [PythonOnlyPoint(1.0, 2.0), None])
681
-
682
- def test_udf_with_udt(self):
683
- row = Row(label=1.0, point=ExamplePoint(1.0, 2.0))
684
- df = self.spark.createDataFrame([row])
685
- udf = F.udf(lambda p: p.y, DoubleType())
686
- self.assertEqual(2.0, df.select(udf(df.point)).first()[0])
687
- udf2 = F.udf(lambda p: ExamplePoint(p.x + 1, p.y + 1), ExamplePointUDT())
688
- self.assertEqual(ExamplePoint(2.0, 3.0), df.select(udf2(df.point)).first()[0])
689
-
690
- row = Row(label=1.0, point=PythonOnlyPoint(1.0, 2.0))
691
- df = self.spark.createDataFrame([row])
692
- udf = F.udf(lambda p: p.y, DoubleType())
693
- self.assertEqual(2.0, df.select(udf(df.point)).first()[0])
694
- udf2 = F.udf(lambda p: PythonOnlyPoint(p.x + 1, p.y + 1), PythonOnlyUDT())
695
- self.assertEqual(PythonOnlyPoint(2.0, 3.0), df.select(udf2(df.point)).first()[0])
696
-
697
- def test_rdd_with_udt(self):
698
- row = Row(label=1.0, point=ExamplePoint(1.0, 2.0))
699
- df = self.spark.createDataFrame([row])
700
- self.assertEqual(1.0, df.rdd.map(lambda r: r.point.x).first())
701
-
702
- row = Row(label=1.0, point=PythonOnlyPoint(1.0, 2.0))
703
- df = self.spark.createDataFrame([row])
704
- self.assertEqual(1.0, df.rdd.map(lambda r: r.point.x).first())
705
-
706
- def test_parquet_with_udt(self):
707
- row = Row(label=1.0, point=ExamplePoint(1.0, 2.0))
708
- df0 = self.spark.createDataFrame([row])
709
- output_dir = os.path.join(self.tempdir.name, "labeled_point")
710
- df0.write.parquet(output_dir)
711
- df1 = self.spark.read.parquet(output_dir)
712
- point = df1.head().point
713
- self.assertEqual(point, ExamplePoint(1.0, 2.0))
714
-
715
- row = Row(label=1.0, point=PythonOnlyPoint(1.0, 2.0))
716
- df0 = self.spark.createDataFrame([row])
717
- df0.write.parquet(output_dir, mode="overwrite")
718
- df1 = self.spark.read.parquet(output_dir)
719
- point = df1.head().point
720
- self.assertEqual(point, PythonOnlyPoint(1.0, 2.0))
721
-
722
- def test_union_with_udt(self):
723
- row1 = (1.0, ExamplePoint(1.0, 2.0))
724
- row2 = (2.0, ExamplePoint(3.0, 4.0))
725
- schema = StructType(
726
- [
727
- StructField("label", DoubleType(), False),
728
- StructField("point", ExamplePointUDT(), False),
729
- ]
730
- )
731
- df1 = self.spark.createDataFrame([row1], schema)
732
- df2 = self.spark.createDataFrame([row2], schema)
733
-
734
- result = df1.union(df2).orderBy("label").collect()
735
- self.assertEqual(
736
- result,
737
- [
738
- Row(label=1.0, point=ExamplePoint(1.0, 2.0)),
739
- Row(label=2.0, point=ExamplePoint(3.0, 4.0)),
740
- ],
741
- )
742
-
743
- def test_cast_to_string_with_udt(self):
744
- row = (ExamplePoint(1.0, 2.0), PythonOnlyPoint(3.0, 4.0))
745
- schema = StructType(
746
- [
747
- StructField("point", ExamplePointUDT(), False),
748
- StructField("pypoint", PythonOnlyUDT(), False),
749
- ]
750
- )
751
- df = self.spark.createDataFrame([row], schema)
752
-
753
- result = df.select(F.col("point").cast("string"), F.col("pypoint").cast("string")).head()
754
- self.assertEqual(result, Row(point="(1.0, 2.0)", pypoint="[3.0, 4.0]"))
755
-
756
- @unittest.skipIf(
757
- "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ, "SPARK-49787: Supported since Spark 4.0.0"
758
- )
759
- def test_cast_to_udt_with_udt(self):
760
- row = Row(point=ExamplePoint(1.0, 2.0), python_only_point=PythonOnlyPoint(1.0, 2.0))
761
- df = self.spark.createDataFrame([row])
762
- with self.assertRaises(AnalysisException):
763
- df.select(F.col("point").cast(PythonOnlyUDT())).collect()
764
- with self.assertRaises(AnalysisException):
765
- df.select(F.col("python_only_point").cast(ExamplePointUDT())).collect()
766
-
767
- def test_struct_type(self):
768
- struct1 = StructType().add("f1", StringType(), True).add("f2", StringType(), True, None)
769
- struct2 = StructType(
770
- [StructField("f1", StringType(), True), StructField("f2", StringType(), True, None)]
771
- )
772
- self.assertEqual(struct1.fieldNames(), struct2.names)
773
- self.assertEqual(struct1, struct2)
774
-
775
- struct1 = StructType().add("f1", StringType(), True).add("f2", StringType(), True, None)
776
- struct2 = StructType([StructField("f1", StringType(), True)])
777
- self.assertNotEqual(struct1.fieldNames(), struct2.names)
778
- self.assertNotEqual(struct1, struct2)
779
-
780
- struct1 = (
781
- StructType()
782
- .add(StructField("f1", StringType(), True))
783
- .add(StructField("f2", StringType(), True, None))
784
- )
785
- struct2 = StructType(
786
- [StructField("f1", StringType(), True), StructField("f2", StringType(), True, None)]
787
- )
788
- self.assertEqual(struct1.fieldNames(), struct2.names)
789
- self.assertEqual(struct1, struct2)
790
-
791
- struct1 = (
792
- StructType()
793
- .add(StructField("f1", StringType(), True))
794
- .add(StructField("f2", StringType(), True, None))
795
- )
796
- struct2 = StructType([StructField("f1", StringType(), True)])
797
- self.assertNotEqual(struct1.fieldNames(), struct2.names)
798
- self.assertNotEqual(struct1, struct2)
799
-
800
- # Catch exception raised during improper construction
801
- self.assertRaises(ValueError, lambda: StructType().add("name"))
802
-
803
- struct1 = StructType().add("f1", StringType(), True).add("f2", StringType(), True, None)
804
- for field in struct1:
805
- self.assertIsInstance(field, StructField)
806
-
807
- struct1 = StructType().add("f1", StringType(), True).add("f2", StringType(), True, None)
808
- self.assertEqual(len(struct1), 2)
809
-
810
- struct1 = StructType().add("f1", StringType(), True).add("f2", StringType(), True, None)
811
- self.assertIs(struct1["f1"], struct1.fields[0])
812
- self.assertIs(struct1[0], struct1.fields[0])
813
- self.assertEqual(struct1[0:1], StructType(struct1.fields[0:1]))
814
- self.assertRaises(KeyError, lambda: struct1["f9"])
815
- self.assertRaises(IndexError, lambda: struct1[9])
816
- self.assertRaises(TypeError, lambda: struct1[9.9])
817
-
818
- @unittest.skipIf(
819
- "SPARK_SKIP_CONNECT_COMPAT_TESTS" in os.environ, "Failed with different Client <> Server"
820
- )
821
- def test_parse_datatype_string(self):
822
- from pyspark.sql.types import _all_atomic_types, _parse_datatype_string
823
-
824
- for k, t in _all_atomic_types.items():
825
- if k != "varchar" and k != "char":
826
- self.assertEqual(t(), _parse_datatype_string(k))
827
- self.assertEqual(IntegerType(), _parse_datatype_string("int"))
828
- self.assertEqual(CharType(1), _parse_datatype_string("char(1)"))
829
- self.assertEqual(CharType(10), _parse_datatype_string("char( 10 )"))
830
- self.assertEqual(CharType(11), _parse_datatype_string("char( 11)"))
831
- self.assertEqual(VarcharType(1), _parse_datatype_string("varchar(1)"))
832
- self.assertEqual(VarcharType(10), _parse_datatype_string("varchar( 10 )"))
833
- self.assertEqual(VarcharType(11), _parse_datatype_string("varchar( 11)"))
834
- self.assertEqual(DecimalType(1, 1), _parse_datatype_string("decimal(1 ,1)"))
835
- self.assertEqual(DecimalType(10, 1), _parse_datatype_string("decimal( 10,1 )"))
836
- self.assertEqual(DecimalType(11, 1), _parse_datatype_string("decimal(11,1)"))
837
- self.assertEqual(ArrayType(IntegerType()), _parse_datatype_string("array<int >"))
838
- self.assertEqual(
839
- MapType(IntegerType(), DoubleType()), _parse_datatype_string("map< int, double >")
840
- )
841
- self.assertEqual(
842
- StructType([StructField("a", IntegerType()), StructField("c", DoubleType())]),
843
- _parse_datatype_string("struct<a:int, c:double >"),
844
- )
845
- self.assertEqual(
846
- StructType([StructField("a", IntegerType()), StructField("c", DoubleType())]),
847
- _parse_datatype_string("a:int, c:double"),
848
- )
849
- self.assertEqual(
850
- StructType([StructField("a", IntegerType()), StructField("c", DoubleType())]),
851
- _parse_datatype_string("a INT, c DOUBLE"),
852
- )
853
-
854
- def test_metadata_null(self):
855
- schema = StructType(
856
- [
857
- StructField("f1", StringType(), True, None),
858
- StructField("f2", StringType(), True, {"a": None}),
859
- ]
860
- )
861
- self.spark.createDataFrame([["a", "b"], ["c", "d"]], schema)
862
-
863
- def test_access_nested_types(self):
864
- df = self.spark.createDataFrame([Row(l=[1], r=Row(a=1, b="b"), d={"k": "v"})])
865
- self.assertEqual(1, df.select(df.l[0]).first()[0])
866
- self.assertEqual(1, df.select(df.l.getItem(0)).first()[0])
867
- self.assertEqual(1, df.select(df.r.a).first()[0])
868
- self.assertEqual("b", df.select(df.r.getField("b")).first()[0])
869
- self.assertEqual("v", df.select(df.d["k"]).first()[0])
870
- self.assertEqual("v", df.select(df.d.getItem("k")).first()[0])
871
-
872
- def test_infer_long_type(self):
873
- longrow = [Row(f1="a", f2=100000000000000)]
874
- df = self.sc.parallelize(longrow).toDF()
875
- self.assertEqual(df.schema.fields[1].dataType, LongType())
876
-
877
- # Saving this as Parquet caused issues as well.
878
- output_dir = os.path.join(self.tempdir.name, "infer_long_type")
879
- df.write.parquet(output_dir)
880
- df1 = self.spark.read.parquet(output_dir)
881
- self.assertEqual("a", df1.first().f1)
882
- self.assertEqual(100000000000000, df1.first().f2)
883
-
884
- self.assertEqual(_infer_type(1), LongType())
885
- self.assertEqual(_infer_type(2**10), LongType())
886
- self.assertEqual(_infer_type(2**20), LongType())
887
- self.assertEqual(_infer_type(2**31 - 1), LongType())
888
- self.assertEqual(_infer_type(2**31), LongType())
889
- self.assertEqual(_infer_type(2**61), LongType())
890
- self.assertEqual(_infer_type(2**71), LongType())
891
-
892
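Note the last two assertions: `_infer_type` maps every Python int to LongType, even values such as 2**71 that cannot fit in a signed 64-bit long, so out-of-range values presumably surface as errors only later, when rows are actually converted. A minimal sketch:

    from pyspark.sql.types import _infer_type, LongType
    assert _infer_type(2**71) == LongType()  # inference does not range-check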
- def test_infer_binary_type(self):
893
- binaryrow = [Row(f1="a", f2=b"abcd")]
894
- df = self.sc.parallelize(binaryrow).toDF()
895
- self.assertEqual(df.schema.fields[1].dataType, BinaryType())
896
-
897
- # Saving this as Parquet caused issues as well.
898
- output_dir = os.path.join(self.tempdir.name, "infer_binary_type")
899
- df.write.parquet(output_dir)
900
- df1 = self.spark.read.parquet(output_dir)
901
- self.assertEqual("a", df1.first().f1)
902
- self.assertEqual(b"abcd", df1.first().f2)
903
-
904
- self.assertEqual(_infer_type(b""), BinaryType())
905
- self.assertEqual(_infer_type(b"1234"), BinaryType())
906
-
907
- def test_merge_type(self):
908
- self.assertEqual(_merge_type(LongType(), NullType()), LongType())
909
- self.assertEqual(_merge_type(NullType(), LongType()), LongType())
910
-
911
- self.assertEqual(_merge_type(LongType(), LongType()), LongType())
912
-
913
- self.assertEqual(
914
- _merge_type(ArrayType(LongType()), ArrayType(LongType())), ArrayType(LongType())
915
- )
916
- with self.assertRaises(PySparkTypeError) as pe:
917
- _merge_type(ArrayType(LongType()), ArrayType(DoubleType()))
918
- self.check_error(
919
- exception=pe.exception,
920
- error_class="CANNOT_MERGE_TYPE",
921
- message_parameters={"data_type1": "LongType", "data_type2": "DoubleType"},
922
- )
923
-
924
- self.assertEqual(
925
- _merge_type(MapType(StringType(), LongType()), MapType(StringType(), LongType())),
926
- MapType(StringType(), LongType()),
927
- )
928
-
929
- self.assertEqual(
930
- _merge_type(MapType(StringType(), LongType()), MapType(DoubleType(), LongType())),
931
- MapType(StringType(), LongType()),
932
- )
933
-
934
- with self.assertRaises(PySparkTypeError) as pe:
935
- _merge_type(MapType(StringType(), LongType()), MapType(StringType(), DoubleType()))
936
- self.check_error(
937
- exception=pe.exception,
938
- error_class="CANNOT_MERGE_TYPE",
939
- message_parameters={"data_type1": "LongType", "data_type2": "DoubleType"},
940
- )
941
-
942
- self.assertEqual(
943
- _merge_type(
944
- StructType([StructField("f1", LongType()), StructField("f2", StringType())]),
945
- StructType([StructField("f1", LongType()), StructField("f2", StringType())]),
946
- ),
947
- StructType([StructField("f1", LongType()), StructField("f2", StringType())]),
948
- )
949
- with self.assertRaises(PySparkTypeError) as pe:
950
- _merge_type(
951
- StructType([StructField("f1", LongType()), StructField("f2", StringType())]),
952
- StructType([StructField("f1", DoubleType()), StructField("f2", StringType())]),
953
- )
954
- self.check_error(
955
- exception=pe.exception,
956
- error_class="CANNOT_MERGE_TYPE",
957
- message_parameters={"data_type1": "LongType", "data_type2": "DoubleType"},
958
- )
959
-
960
- self.assertEqual(
961
- _merge_type(
962
- StructType([StructField("f1", StructType([StructField("f2", LongType())]))]),
963
- StructType([StructField("f1", StructType([StructField("f2", LongType())]))]),
964
- ),
965
- StructType([StructField("f1", StructType([StructField("f2", LongType())]))]),
966
- )
967
- self.assertEqual(
968
- _merge_type(
969
- StructType([StructField("f1", StructType([StructField("f2", LongType())]))]),
970
- StructType([StructField("f1", StructType([StructField("f2", StringType())]))]),
971
- ),
972
- StructType([StructField("f1", StructType([StructField("f2", StringType())]))]),
973
- )
974
-
975
- self.assertEqual(
976
- _merge_type(
977
- StructType(
978
- [StructField("f1", ArrayType(LongType())), StructField("f2", StringType())]
979
- ),
980
- StructType(
981
- [StructField("f1", ArrayType(LongType())), StructField("f2", StringType())]
982
- ),
983
- ),
984
- StructType([StructField("f1", ArrayType(LongType())), StructField("f2", StringType())]),
985
- )
986
- with self.assertRaises(PySparkTypeError) as pe:
987
- _merge_type(
988
- StructType(
989
- [StructField("f1", ArrayType(LongType())), StructField("f2", StringType())]
990
- ),
991
- StructType(
992
- [StructField("f1", ArrayType(DoubleType())), StructField("f2", StringType())]
993
- ),
994
- )
995
- self.check_error(
996
- exception=pe.exception,
997
- error_class="CANNOT_MERGE_TYPE",
998
- message_parameters={"data_type1": "LongType", "data_type2": "DoubleType"},
999
- )
1000
-
1001
- self.assertEqual(
1002
- _merge_type(
1003
- StructType(
1004
- [
1005
- StructField("f1", MapType(StringType(), LongType())),
1006
- StructField("f2", StringType()),
1007
- ]
1008
- ),
1009
- StructType(
1010
- [
1011
- StructField("f1", MapType(StringType(), LongType())),
1012
- StructField("f2", StringType()),
1013
- ]
1014
- ),
1015
- ),
1016
- StructType(
1017
- [
1018
- StructField("f1", MapType(StringType(), LongType())),
1019
- StructField("f2", StringType()),
1020
- ]
1021
- ),
1022
- )
1023
- with self.assertRaises(PySparkTypeError) as pe:
1024
- _merge_type(
1025
- StructType(
1026
- [
1027
- StructField("f1", MapType(StringType(), LongType())),
1028
- StructField("f2", StringType()),
1029
- ]
1030
- ),
1031
- StructType(
1032
- [
1033
- StructField("f1", MapType(StringType(), DoubleType())),
1034
- StructField("f2", StringType()),
1035
- ]
1036
- ),
1037
- )
1038
- self.check_error(
1039
- exception=pe.exception,
1040
- error_class="CANNOT_MERGE_TYPE",
1041
- message_parameters={"data_type1": "LongType", "data_type2": "DoubleType"},
1042
- )
1043
-
1044
- self.assertEqual(
1045
- _merge_type(
1046
- StructType([StructField("f1", ArrayType(MapType(StringType(), LongType())))]),
1047
- StructType([StructField("f1", ArrayType(MapType(StringType(), LongType())))]),
1048
- ),
1049
- StructType([StructField("f1", ArrayType(MapType(StringType(), LongType())))]),
1050
- )
1051
- self.assertEqual(
1052
- _merge_type(
1053
- StructType([StructField("f1", ArrayType(MapType(StringType(), LongType())))]),
1054
- StructType([StructField("f1", ArrayType(MapType(DoubleType(), LongType())))]),
1055
- ),
1056
- StructType([StructField("f1", ArrayType(MapType(StringType(), LongType())))]),
1057
- )
1058
-
1059
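One subtlety the test above pins down: `_merge_type` treats map key conflicts permissively (the left-hand key type wins) but raises CANNOT_MERGE_TYPE for map value conflicts. A minimal sketch using the same private helper:

    from pyspark.sql.types import _merge_type, MapType, StringType, DoubleType, LongType
    _merge_type(MapType(StringType(), LongType()),
                MapType(DoubleType(), LongType()))  # ok: MapType(StringType(), LongType())
    # _merge_type(MapType(StringType(), LongType()),
    #             MapType(StringType(), DoubleType()))  # raises CANNOT_MERGE_TYPE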
- # test for SPARK-16542
1060
- def test_array_types(self):
1061
- # This test needs to make sure that the Scala type selected is at least
1062
- # as large as Python's types. This is necessary because Python's
1063
- # array types depend on the C implementation on the machine. Therefore there
1064
- # is no machine-independent correspondence between Python's array types
1065
- # and Scala types.
1066
- # See: https://docs.python.org/2/library/array.html
1067
-
1068
- def assertCollectSuccess(typecode, value):
1069
- row = Row(myarray=array.array(typecode, [value]))
1070
- df = self.spark.createDataFrame([row])
1071
- self.assertEqual(df.first()["myarray"][0], value)
1072
-
1073
- # supported string types
1074
- #
1075
- # String types in Python's array module are "u" for Py_UNICODE and "c" for char.
1076
- # "u" is slated for removal in Python 4, and "c" is not supported in Python 3.
1077
- supported_string_types = []
1078
- if sys.version_info[0] < 4:
1079
- supported_string_types += ["u"]
1080
- # test unicode
1081
- assertCollectSuccess("u", "a")
1082
-
1083
- # supported float and double
1084
- #
1085
- # Test max, min, and precision for float and double, assuming IEEE 754
1086
- # floating-point format.
1087
- supported_fractional_types = ["f", "d"]
1088
- assertCollectSuccess("f", ctypes.c_float(1e38).value)
1089
- assertCollectSuccess("f", ctypes.c_float(1e-38).value)
1090
- assertCollectSuccess("f", ctypes.c_float(1.123456).value)
1091
- assertCollectSuccess("d", sys.float_info.max)
1092
- assertCollectSuccess("d", sys.float_info.min)
1093
- assertCollectSuccess("d", sys.float_info.epsilon)
1094
-
1095
- # supported signed int types
1096
- #
1097
- # The size of C types changes with implementation, we need to make sure
1098
- # that there is no overflow error on the platform running this test.
1099
- supported_signed_int_types = list(
1100
- set(_array_signed_int_typecode_ctype_mappings.keys()).intersection(
1101
- set(_array_type_mappings.keys())
1102
- )
1103
- )
1104
- for t in supported_signed_int_types:
1105
- ctype = _array_signed_int_typecode_ctype_mappings[t]
1106
- max_val = 2 ** (ctypes.sizeof(ctype) * 8 - 1)
1107
- assertCollectSuccess(t, max_val - 1)
1108
- assertCollectSuccess(t, -max_val)
1109
-
1110
- # supported unsigned int types
1111
- #
1112
- # JVM does not have unsigned types. We need to be very careful to make
1113
- # sure that there is no overflow error.
1114
- supported_unsigned_int_types = list(
1115
- set(_array_unsigned_int_typecode_ctype_mappings.keys()).intersection(
1116
- set(_array_type_mappings.keys())
1117
- )
1118
- )
1119
- for t in supported_unsigned_int_types:
1120
- ctype = _array_unsigned_int_typecode_ctype_mappings[t]
1121
- assertCollectSuccess(t, 2 ** (ctypes.sizeof(ctype) * 8) - 1)
1122
-
1123
- # all supported types
1124
- #
1125
- # Make sure the types tested above:
1126
- # 1. are all supported types
1127
- # 2. cover all supported types
1128
- supported_types = (
1129
- supported_string_types
1130
- + supported_fractional_types
1131
- + supported_signed_int_types
1132
- + supported_unsigned_int_types
1133
- )
1134
- self.assertEqual(set(supported_types), set(_array_type_mappings.keys()))
1135
-
1136
- # all unsupported types
1137
- #
1138
- # The keys in _array_type_mappings are a complete list of all supported types,
1140
- # and types not in _array_type_mappings are considered unsupported.
1141
- # PyPy does not seem to have array.typecodes.
1141
- all_types = set(["b", "B", "u", "h", "H", "i", "I", "l", "L", "q", "Q", "f", "d"])
1142
- unsupported_types = all_types - set(supported_types)
1143
- # test unsupported types
1144
- for t in unsupported_types:
1145
- with self.assertRaises(PySparkTypeError) as pe:
1146
- a = array.array(t)
1147
- self.spark.createDataFrame([Row(myarray=a)]).collect()
1148
-
1149
- self.check_error(
1150
- exception=pe.exception,
1151
- error_class="CANNOT_INFER_TYPE_FOR_FIELD",
1152
- message_parameters={"field_name": "myarray"},
1153
- )
1154
-
1155
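The bounds in the loops above come straight from ctypes: the C size of each typecode determines the largest value a Python array can hold on the current platform, and Spark must choose a Scala type at least that wide. A minimal sketch of the signed bound, assuming typecode "i" maps to c_int as in the mapping tables:

    import ctypes
    bits = ctypes.sizeof(ctypes.c_int) * 8  # width of typecode "i" on this machine
    max_signed = 2 ** (bits - 1) - 1        # largest value array("i", ...) can store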
- def test_repr(self):
1156
- instances = [
1157
- NullType(),
1158
- StringType(),
1159
- CharType(10),
1160
- VarcharType(10),
1161
- BinaryType(),
1162
- BooleanType(),
1163
- DateType(),
1164
- TimestampType(),
1165
- DecimalType(),
1166
- DoubleType(),
1167
- FloatType(),
1168
- ByteType(),
1169
- IntegerType(),
1170
- LongType(),
1171
- ShortType(),
1172
- ArrayType(StringType()),
1173
- MapType(StringType(), IntegerType()),
1174
- StructField("f1", StringType(), True),
1175
- StructType([StructField("f1", StringType(), True)]),
1176
- ]
1177
- for instance in instances:
1178
- self.assertEqual(eval(repr(instance)), instance)
1179
-
1180
- def test_daytime_interval_type_constructor(self):
1181
- # SPARK-37277: Test constructors in day time interval.
1182
- self.assertEqual(DayTimeIntervalType().simpleString(), "interval day to second")
1183
- self.assertEqual(
1184
- DayTimeIntervalType(DayTimeIntervalType.DAY).simpleString(), "interval day"
1185
- )
1186
- self.assertEqual(
1187
- DayTimeIntervalType(
1188
- DayTimeIntervalType.HOUR, DayTimeIntervalType.SECOND
1189
- ).simpleString(),
1190
- "interval hour to second",
1191
- )
1192
-
1193
- with self.assertRaisesRegex(RuntimeError, "interval None to 3 is invalid"):
1194
- DayTimeIntervalType(endField=DayTimeIntervalType.SECOND)
1195
-
1196
- with self.assertRaisesRegex(RuntimeError, "interval 123 to 123 is invalid"):
1197
- DayTimeIntervalType(123)
1198
-
1199
- with self.assertRaisesRegex(RuntimeError, "interval 0 to 321 is invalid"):
1200
- DayTimeIntervalType(DayTimeIntervalType.DAY, 321)
1201
-
1202
- def test_daytime_interval_type(self):
1203
- # SPARK-37277: Support DayTimeIntervalType in createDataFrame
1204
- timedeltas = [
1205
- (datetime.timedelta(microseconds=123),),
1206
- (
1207
- datetime.timedelta(
1208
- days=1, seconds=23, microseconds=123, milliseconds=4, minutes=5, hours=11
1209
- ),
1210
- ),
1211
- (datetime.timedelta(microseconds=-123),),
1212
- (datetime.timedelta(days=-1),),
1213
- (datetime.timedelta(microseconds=388629894454999981),),
1214
- (datetime.timedelta(days=-1, seconds=86399, microseconds=999999),), # -1 microsecond
1215
- ]
1216
- df = self.spark.createDataFrame(timedeltas, schema="td interval day to second")
1217
- self.assertEqual(set(r.td for r in df.collect()), set(r[0] for r in timedeltas))
1218
-
1219
- exprs = [
1220
- "INTERVAL '1 02:03:04' DAY TO SECOND AS a",
1221
- "INTERVAL '1 02:03' DAY TO MINUTE AS b",
1222
- "INTERVAL '1 02' DAY TO HOUR AS c",
1223
- "INTERVAL '1' DAY AS d",
1224
- "INTERVAL '26:03:04' HOUR TO SECOND AS e",
1225
- "INTERVAL '26:03' HOUR TO MINUTE AS f",
1226
- "INTERVAL '26' HOUR AS g",
1227
- "INTERVAL '1563:04' MINUTE TO SECOND AS h",
1228
- "INTERVAL '1563' MINUTE AS i",
1229
- "INTERVAL '93784' SECOND AS j",
1230
- ]
1231
- df = self.spark.range(1).selectExpr(exprs)
1232
-
1233
- actual = list(df.first())
1234
- expected = [
1235
- datetime.timedelta(days=1, hours=2, minutes=3, seconds=4),
1236
- datetime.timedelta(days=1, hours=2, minutes=3),
1237
- datetime.timedelta(days=1, hours=2),
1238
- datetime.timedelta(days=1),
1239
- datetime.timedelta(hours=26, minutes=3, seconds=4),
1240
- datetime.timedelta(hours=26, minutes=3),
1241
- datetime.timedelta(hours=26),
1242
- datetime.timedelta(minutes=1563, seconds=4),
1243
- datetime.timedelta(minutes=1563),
1244
- datetime.timedelta(seconds=93784),
1245
- ]
1246
-
1247
- for n, (a, e) in enumerate(zip(actual, expected)):
1248
- self.assertEqual(a, e, "%s does not match with %s" % (exprs[n], expected[n]))
1249
-
1250
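The `# -1 microsecond` case above works because Python normalizes timedeltas so that only the `days` field may be negative; the three-argument form is simply the canonical representation of minus one microsecond:

    import datetime
    assert datetime.timedelta(days=-1, seconds=86399, microseconds=999999) == \
        datetime.timedelta(microseconds=-1)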
- def test_yearmonth_interval_type_constructor(self):
1251
- self.assertEqual(YearMonthIntervalType().simpleString(), "interval year to month")
1252
- self.assertEqual(
1253
- YearMonthIntervalType(YearMonthIntervalType.YEAR).simpleString(), "interval year"
1254
- )
1255
- self.assertEqual(
1256
- YearMonthIntervalType(
1257
- YearMonthIntervalType.YEAR, YearMonthIntervalType.MONTH
1258
- ).simpleString(),
1259
- "interval year to month",
1260
- )
1261
-
1262
- with self.assertRaisesRegex(RuntimeError, "interval None to 3 is invalid"):
1263
- YearMonthIntervalType(endField=3)
1264
-
1265
- with self.assertRaisesRegex(RuntimeError, "interval 123 to 123 is invalid"):
1266
- YearMonthIntervalType(123)
1267
-
1268
- with self.assertRaisesRegex(RuntimeError, "interval 0 to 321 is invalid"):
1269
- YearMonthIntervalType(YearMonthIntervalType.YEAR, 321)
1270
-
1271
- def test_yearmonth_interval_type(self):
1272
- schema1 = self.spark.sql("SELECT INTERVAL '10-8' YEAR TO MONTH AS interval").schema
1273
- self.assertEqual(schema1.fields[0].dataType, YearMonthIntervalType(0, 1))
1274
-
1275
- schema2 = self.spark.sql("SELECT INTERVAL '10' YEAR AS interval").schema
1276
- self.assertEqual(schema2.fields[0].dataType, YearMonthIntervalType(0, 0))
1277
-
1278
- schema3 = self.spark.sql("SELECT INTERVAL '8' MONTH AS interval").schema
1279
- self.assertEqual(schema3.fields[0].dataType, YearMonthIntervalType(1, 1))
1280
-
1281
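The `(0, 1)`, `(0, 0)` and `(1, 1)` pairs asserted above are (startField, endField) codes, with YEAR = 0 and MONTH = 1. A minimal sketch:

    YearMonthIntervalType(0, 1).simpleString()  # 'interval year to month'
    YearMonthIntervalType(0, 0).simpleString()  # 'interval year'
    YearMonthIntervalType(1, 1).simpleString()  # 'interval month'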
- def test_infer_array_element_type_with_struct(self):
1282
- # SPARK-48248: Nested array to respect legacy conf of inferArrayTypeFromFirstElement
1283
- with self.sql_conf(
1284
- {"spark.sql.pyspark.legacy.inferArrayTypeFromFirstElement.enabled": True}
1285
- ):
1286
- self.assertEqual([[1, None]], self.spark.createDataFrame([[[[1, "a"]]]]).first()[0])
1287
-
1288
-
1289
- class DataTypeTests(unittest.TestCase):
1290
- # regression test for SPARK-6055
1291
- def test_data_type_eq(self):
1292
- lt = LongType()
1293
- lt2 = pickle.loads(pickle.dumps(LongType()))
1294
- self.assertEqual(lt, lt2)
1295
-
1296
- # regression test for SPARK-7978
1297
- def test_decimal_type(self):
1298
- t1 = DecimalType()
1299
- t2 = DecimalType(10, 2)
1300
- self.assertTrue(t2 is not t1)
1301
- self.assertNotEqual(t1, t2)
1302
- t3 = DecimalType(8)
1303
- self.assertNotEqual(t2, t3)
1304
-
1305
- def test_char_type(self):
1306
- v1 = CharType(10)
1307
- v2 = CharType(20)
1308
- self.assertTrue(v2 is not v1)
1309
- self.assertNotEqual(v1, v2)
1310
- v3 = CharType(10)
1311
- self.assertEqual(v1, v3)
1312
- self.assertFalse(v1 is v3)
1313
-
1314
- def test_varchar_type(self):
1315
- v1 = VarcharType(10)
1316
- v2 = VarcharType(20)
1317
- self.assertTrue(v2 is not v1)
1318
- self.assertNotEqual(v1, v2)
1319
- v3 = VarcharType(10)
1320
- self.assertEqual(v1, v3)
1321
- self.assertFalse(v1 is v3)
1322
-
1323
- # regression test for SPARK-10392
1324
- def test_datetype_equal_zero(self):
1325
- dt = DateType()
1326
- self.assertEqual(dt.fromInternal(0), datetime.date(1970, 1, 1))
1327
-
1328
- # regression test for SPARK-17035
1329
- def test_timestamp_microsecond(self):
1330
- tst = TimestampType()
1331
- self.assertEqual(tst.toInternal(datetime.datetime.max) % 1000000, 999999)
1332
-
1333
- # regression test for SPARK-23299
1334
- def test_row_without_column_name(self):
1335
- row = Row("Alice", 11)
1336
- self.assertEqual(repr(row), "<Row('Alice', 11)>")
1337
-
1338
- # test __repr__ with unicode values
1339
- self.assertEqual(repr(Row("数", "量")), "<Row('数', '量')>")
1340
-
1341
- # SPARK-44643: test __repr__ with empty Row
1342
- def test_row_repr_with_empty_row(self):
1343
- self.assertEqual(repr(Row(a=Row())), "Row(a=<Row()>)")
1344
- self.assertEqual(repr(Row(Row())), "<Row(<Row()>)>")
1345
-
1346
- EmptyRow = Row()
1347
- self.assertEqual(repr(Row(a=EmptyRow())), "Row(a=Row())")
1348
- self.assertEqual(repr(Row(EmptyRow())), "<Row(Row())>")
1349
-
1350
- def test_empty_row(self):
1351
- row = Row()
1352
- self.assertEqual(len(row), 0)
1353
-
1354
- def test_struct_field_type_name(self):
1355
- struct_field = StructField("a", IntegerType())
1356
- self.assertRaises(TypeError, struct_field.typeName)
1357
-
1358
- def test_invalid_create_row(self):
1359
- row_class = Row("c1", "c2")
1360
- self.assertRaises(ValueError, lambda: row_class(1, 2, 3))
1361
-
1362
-
1363
- class DataTypeVerificationTests(unittest.TestCase, PySparkErrorTestUtils):
1364
- def test_verify_type_exception_msg(self):
1365
- with self.assertRaises(PySparkValueError) as pe:
1366
- _make_type_verifier(StringType(), nullable=False, name="test_name")(None)
1367
-
1368
- self.check_error(
1369
- exception=pe.exception,
1370
- error_class="CANNOT_BE_NONE",
1371
- message_parameters={
1372
- "arg_name": "obj",
1373
- },
1374
- )
1375
-
1376
- schema = StructType([StructField("a", StructType([StructField("b", IntegerType())]))])
1377
- with self.assertRaises(PySparkTypeError) as pe:
1378
- _make_type_verifier(schema)([["data"]])
1379
-
1380
- self.check_error(
1381
- exception=pe.exception,
1382
- error_class="CANNOT_ACCEPT_OBJECT_IN_TYPE",
1383
- message_parameters={
1384
- "data_type": "IntegerType()",
1385
- "obj_name": "data",
1386
- "obj_type": "str",
1387
- },
1388
- )
1389
-
1390
- def test_verify_type_ok_nullable(self):
1391
- obj = None
1392
- types = [IntegerType(), FloatType(), StringType(), StructType([])]
1393
- for data_type in types:
1394
- try:
1395
- _make_type_verifier(data_type, nullable=True)(obj)
1396
- except Exception:
1397
- self.fail("verify_type(%s, %s, nullable=True)" % (obj, data_type))
1398
-
1399
- def test_verify_type_not_nullable(self):
1400
- import array
1401
- import datetime
1402
- import decimal
1403
-
1404
- schema = StructType(
1405
- [
1406
- StructField("s", StringType(), nullable=False),
1407
- StructField("i", IntegerType(), nullable=True),
1408
- ]
1409
- )
1410
-
1411
- class MyObj:
1412
- def __init__(self, **kwargs):
1413
- for k, v in kwargs.items():
1414
- setattr(self, k, v)
1415
-
1416
- # obj, data_type
1417
- success_spec = [
1418
- # String
1419
- ("", StringType()),
1420
- (1, StringType()),
1421
- (1.0, StringType()),
1422
- ([], StringType()),
1423
- ({}, StringType()),
1424
- # Char
1425
- ("", CharType(10)),
1426
- (1, CharType(10)),
1427
- (1.0, CharType(10)),
1428
- ([], CharType(10)),
1429
- ({}, CharType(10)),
1430
- # Varchar
1431
- ("", VarcharType(10)),
1432
- (1, VarcharType(10)),
1433
- (1.0, VarcharType(10)),
1434
- ([], VarcharType(10)),
1435
- ({}, VarcharType(10)),
1436
- # UDT
1437
- (ExamplePoint(1.0, 2.0), ExamplePointUDT()),
1438
- # Boolean
1439
- (True, BooleanType()),
1440
- # Byte
1441
- (-(2**7), ByteType()),
1442
- (2**7 - 1, ByteType()),
1443
- # Short
1444
- (-(2**15), ShortType()),
1445
- (2**15 - 1, ShortType()),
1446
- # Integer
1447
- (-(2**31), IntegerType()),
1448
- (2**31 - 1, IntegerType()),
1449
- # Long
1450
- (-(2**63), LongType()),
1451
- (2**63 - 1, LongType()),
1452
- # Float & Double
1453
- (1.0, FloatType()),
1454
- (1.0, DoubleType()),
1455
- # Decimal
1456
- (decimal.Decimal("1.0"), DecimalType()),
1457
- # Binary
1458
- (bytearray([1, 2]), BinaryType()),
1459
- # Date/Timestamp
1460
- (datetime.date(2000, 1, 2), DateType()),
1461
- (datetime.datetime(2000, 1, 2, 3, 4), DateType()),
1462
- (datetime.datetime(2000, 1, 2, 3, 4), TimestampType()),
1463
- # Array
1464
- ([], ArrayType(IntegerType())),
1465
- (["1", None], ArrayType(StringType(), containsNull=True)),
1466
- ([1, 2], ArrayType(IntegerType())),
1467
- ((1, 2), ArrayType(IntegerType())),
1468
- (array.array("h", [1, 2]), ArrayType(IntegerType())),
1469
- # Map
1470
- ({}, MapType(StringType(), IntegerType())),
1471
- ({"a": 1}, MapType(StringType(), IntegerType())),
1472
- ({"a": None}, MapType(StringType(), IntegerType(), valueContainsNull=True)),
1473
- # Struct
1474
- ({"s": "a", "i": 1}, schema),
1475
- ({"s": "a", "i": None}, schema),
1476
- ({"s": "a"}, schema),
1477
- ({"s": "a", "f": 1.0}, schema),
1478
- (Row(s="a", i=1), schema),
1479
- (Row(s="a", i=None), schema),
1480
- (["a", 1], schema),
1481
- (["a", None], schema),
1482
- (("a", 1), schema),
1483
- (MyObj(s="a", i=1), schema),
1484
- (MyObj(s="a", i=None), schema),
1485
- (MyObj(s="a"), schema),
1486
- ]
1487
-
1488
- # obj, data_type, exception class
1489
- failure_spec = [
1490
- # String (match anything but None)
1491
- (None, StringType(), ValueError),
1492
- # CharType (match anything but None)
1493
- (None, CharType(10), ValueError),
1494
- # VarcharType (match anything but None)
1495
- (None, VarcharType(10), ValueError),
1496
- # UDT
1497
- (ExamplePoint(1.0, 2.0), PythonOnlyUDT(), ValueError),
1498
- # Boolean
1499
- (1, BooleanType(), TypeError),
1500
- ("True", BooleanType(), TypeError),
1501
- ([1], BooleanType(), TypeError),
1502
- # Byte
1503
- (-(2**7) - 1, ByteType(), ValueError),
1504
- (2**7, ByteType(), ValueError),
1505
- ("1", ByteType(), TypeError),
1506
- (1.0, ByteType(), TypeError),
1507
- # Short
1508
- (-(2**15) - 1, ShortType(), ValueError),
1509
- (2**15, ShortType(), ValueError),
1510
- # Integer
1511
- (-(2**31) - 1, IntegerType(), ValueError),
1512
- (2**31, IntegerType(), ValueError),
1513
- # Float & Double
1514
- (1, FloatType(), TypeError),
1515
- (1, DoubleType(), TypeError),
1516
- # Decimal
1517
- (1.0, DecimalType(), TypeError),
1518
- (1, DecimalType(), TypeError),
1519
- ("1.0", DecimalType(), TypeError),
1520
- # Binary
1521
- (1, BinaryType(), TypeError),
1522
- # Date/Timestamp
1523
- ("2000-01-02", DateType(), TypeError),
1524
- (946811040, TimestampType(), TypeError),
1525
- # Array
1526
- (["1", None], ArrayType(StringType(), containsNull=False), ValueError),
1527
- ([1, "2"], ArrayType(IntegerType()), TypeError),
1528
- # Map
1529
- ({"a": 1}, MapType(IntegerType(), IntegerType()), TypeError),
1530
- ({"a": "1"}, MapType(StringType(), IntegerType()), TypeError),
1531
- (
1532
- {"a": None},
1533
- MapType(StringType(), IntegerType(), valueContainsNull=False),
1534
- ValueError,
1535
- ),
1536
- # Struct
1537
- ({"s": "a", "i": "1"}, schema, TypeError),
1538
- (Row(s="a"), schema, ValueError), # Row can't have missing field
1539
- (Row(s="a", i="1"), schema, TypeError),
1540
- (["a"], schema, ValueError),
1541
- (["a", "1"], schema, TypeError),
1542
- (MyObj(s="a", i="1"), schema, TypeError),
1543
- (MyObj(s=None, i="1"), schema, ValueError),
1544
- ]
1545
-
1546
- # Check success cases
1547
- for obj, data_type in success_spec:
1548
- try:
1549
- _make_type_verifier(data_type, nullable=False)(obj)
1550
- except Exception:
1551
- self.fail("verify_type(%s, %s, nullable=False)" % (obj, data_type))
1552
-
1553
- # Check failure cases
1554
- for obj, data_type, exp in failure_spec:
1555
- msg = "verify_type(%s, %s, nullable=False) == %s" % (obj, data_type, exp)
1556
- with self.assertRaises(exp, msg=msg):
1557
- _make_type_verifier(data_type, nullable=False)(obj)
1558
-
1559
- def test_row_without_field_sorting(self):
1560
- r = Row(b=1, a=2)
1561
- TestRow = Row("b", "a")
1562
- expected = TestRow(1, 2)
1563
-
1564
- self.assertEqual(r, expected)
1565
- self.assertEqual(repr(r), "Row(b=1, a=2)")
1566
-
1567
-
1568
- class TypesTests(TypesTestsMixin, ReusedSQLTestCase):
1569
- pass
1570
-
1571
-
1572
- if __name__ == "__main__":
1573
- from pyspark.sql.tests.test_types import * # noqa: F401
1574
-
1575
- try:
1576
- import xmlrunner
1577
-
1578
- testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
1579
- except ImportError:
1580
- testRunner = None
1581
- unittest.main(testRunner=testRunner, verbosity=2)