snowpark-connect 0.20.2__py3-none-any.whl → 0.22.1__py3-none-any.whl

This diff reflects the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.

Files changed (84)
  1. snowflake/snowpark_connect/analyze_plan/map_tree_string.py +3 -2
  2. snowflake/snowpark_connect/column_name_handler.py +6 -65
  3. snowflake/snowpark_connect/config.py +47 -17
  4. snowflake/snowpark_connect/dataframe_container.py +242 -0
  5. snowflake/snowpark_connect/error/error_utils.py +25 -0
  6. snowflake/snowpark_connect/execute_plan/map_execution_command.py +13 -23
  7. snowflake/snowpark_connect/execute_plan/map_execution_root.py +9 -5
  8. snowflake/snowpark_connect/expression/map_extension.py +2 -1
  9. snowflake/snowpark_connect/expression/map_udf.py +4 -4
  10. snowflake/snowpark_connect/expression/map_unresolved_attribute.py +8 -7
  11. snowflake/snowpark_connect/expression/map_unresolved_function.py +481 -170
  12. snowflake/snowpark_connect/expression/map_unresolved_star.py +8 -8
  13. snowflake/snowpark_connect/expression/map_update_fields.py +1 -1
  14. snowflake/snowpark_connect/expression/typer.py +6 -6
  15. snowflake/snowpark_connect/proto/control_pb2.py +17 -16
  16. snowflake/snowpark_connect/proto/control_pb2.pyi +17 -17
  17. snowflake/snowpark_connect/proto/control_pb2_grpc.py +12 -63
  18. snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2.py +15 -14
  19. snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2.pyi +19 -14
  20. snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2_grpc.py +4 -0
  21. snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.py +27 -26
  22. snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.pyi +74 -68
  23. snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2_grpc.py +4 -0
  24. snowflake/snowpark_connect/relation/catalogs/abstract_spark_catalog.py +5 -5
  25. snowflake/snowpark_connect/relation/catalogs/snowflake_catalog.py +25 -17
  26. snowflake/snowpark_connect/relation/map_aggregate.py +170 -61
  27. snowflake/snowpark_connect/relation/map_catalog.py +2 -2
  28. snowflake/snowpark_connect/relation/map_column_ops.py +227 -145
  29. snowflake/snowpark_connect/relation/map_crosstab.py +25 -6
  30. snowflake/snowpark_connect/relation/map_extension.py +81 -56
  31. snowflake/snowpark_connect/relation/map_join.py +72 -63
  32. snowflake/snowpark_connect/relation/map_local_relation.py +35 -20
  33. snowflake/snowpark_connect/relation/map_map_partitions.py +24 -17
  34. snowflake/snowpark_connect/relation/map_relation.py +22 -16
  35. snowflake/snowpark_connect/relation/map_row_ops.py +232 -146
  36. snowflake/snowpark_connect/relation/map_sample_by.py +15 -8
  37. snowflake/snowpark_connect/relation/map_show_string.py +42 -5
  38. snowflake/snowpark_connect/relation/map_sql.py +141 -237
  39. snowflake/snowpark_connect/relation/map_stats.py +88 -39
  40. snowflake/snowpark_connect/relation/map_subquery_alias.py +13 -14
  41. snowflake/snowpark_connect/relation/map_udtf.py +10 -13
  42. snowflake/snowpark_connect/relation/read/map_read.py +8 -3
  43. snowflake/snowpark_connect/relation/read/map_read_csv.py +7 -7
  44. snowflake/snowpark_connect/relation/read/map_read_jdbc.py +7 -7
  45. snowflake/snowpark_connect/relation/read/map_read_json.py +19 -8
  46. snowflake/snowpark_connect/relation/read/map_read_parquet.py +7 -7
  47. snowflake/snowpark_connect/relation/read/map_read_socket.py +7 -3
  48. snowflake/snowpark_connect/relation/read/map_read_table.py +25 -16
  49. snowflake/snowpark_connect/relation/read/map_read_text.py +7 -7
  50. snowflake/snowpark_connect/relation/read/reader_config.py +1 -0
  51. snowflake/snowpark_connect/relation/utils.py +11 -5
  52. snowflake/snowpark_connect/relation/write/jdbc_write_dbapi.py +15 -12
  53. snowflake/snowpark_connect/relation/write/map_write.py +259 -56
  54. snowflake/snowpark_connect/relation/write/map_write_jdbc.py +3 -2
  55. snowflake/snowpark_connect/server.py +43 -4
  56. snowflake/snowpark_connect/type_mapping.py +6 -23
  57. snowflake/snowpark_connect/utils/cache.py +27 -22
  58. snowflake/snowpark_connect/utils/context.py +33 -17
  59. snowflake/snowpark_connect/utils/describe_query_cache.py +2 -9
  60. snowflake/snowpark_connect/utils/{attribute_handling.py → identifiers.py} +47 -0
  61. snowflake/snowpark_connect/utils/session.py +41 -38
  62. snowflake/snowpark_connect/utils/telemetry.py +214 -63
  63. snowflake/snowpark_connect/utils/udxf_import_utils.py +14 -0
  64. snowflake/snowpark_connect/version.py +1 -1
  65. snowflake/snowpark_decoder/__init__.py +0 -0
  66. snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.py +36 -0
  67. snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.pyi +156 -0
  68. snowflake/snowpark_decoder/dp_session.py +111 -0
  69. snowflake/snowpark_decoder/spark_decoder.py +76 -0
  70. {snowpark_connect-0.20.2.dist-info → snowpark_connect-0.22.1.dist-info}/METADATA +6 -4
  71. {snowpark_connect-0.20.2.dist-info → snowpark_connect-0.22.1.dist-info}/RECORD +83 -69
  72. snowpark_connect-0.22.1.dist-info/licenses/LICENSE-binary +568 -0
  73. snowpark_connect-0.22.1.dist-info/licenses/NOTICE-binary +1533 -0
  74. {snowpark_connect-0.20.2.dist-info → snowpark_connect-0.22.1.dist-info}/top_level.txt +1 -0
  75. spark/__init__.py +0 -0
  76. spark/connect/__init__.py +0 -0
  77. spark/connect/envelope_pb2.py +31 -0
  78. spark/connect/envelope_pb2.pyi +46 -0
  79. snowflake/snowpark_connect/includes/jars/jackson-mapper-asl-1.9.13.jar +0 -0
  80. {snowpark_connect-0.20.2.data → snowpark_connect-0.22.1.data}/scripts/snowpark-connect +0 -0
  81. {snowpark_connect-0.20.2.data → snowpark_connect-0.22.1.data}/scripts/snowpark-session +0 -0
  82. {snowpark_connect-0.20.2.data → snowpark_connect-0.22.1.data}/scripts/snowpark-submit +0 -0
  83. {snowpark_connect-0.20.2.dist-info → snowpark_connect-0.22.1.dist-info}/WHEEL +0 -0
  84. {snowpark_connect-0.20.2.dist-info → snowpark_connect-0.22.1.dist-info}/licenses/LICENSE.txt +0 -0
@@ -8,104 +8,110 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map

 DESCRIPTOR: _descriptor.FileDescriptor

-class Aggregate(_message.Message):
-    __slots__ = ["aggregate_expressions", "group_type", "grouping_expressions", "grouping_sets", "input", "pivot"]
-    class GroupType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
-        __slots__ = []
-    class GroupingSets(_message.Message):
-        __slots__ = ["grouping_set"]
-        GROUPING_SET_FIELD_NUMBER: _ClassVar[int]
-        grouping_set: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
-        def __init__(self, grouping_set: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ...) -> None: ...
-    class Pivot(_message.Message):
-        __slots__ = ["col", "values"]
-        COL_FIELD_NUMBER: _ClassVar[int]
-        VALUES_FIELD_NUMBER: _ClassVar[int]
-        col: _expressions_pb2.Expression
-        values: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression.Literal]
-        def __init__(self, col: _Optional[_Union[_expressions_pb2.Expression, _Mapping]] = ..., values: _Optional[_Iterable[_Union[_expressions_pb2.Expression.Literal, _Mapping]]] = ...) -> None: ...
-    AGGREGATE_EXPRESSIONS_FIELD_NUMBER: _ClassVar[int]
-    GROUPING_EXPRESSIONS_FIELD_NUMBER: _ClassVar[int]
-    GROUPING_SETS_FIELD_NUMBER: _ClassVar[int]
-    GROUP_TYPE_CUBE: Aggregate.GroupType
-    GROUP_TYPE_FIELD_NUMBER: _ClassVar[int]
-    GROUP_TYPE_GROUPBY: Aggregate.GroupType
-    GROUP_TYPE_GROUPING_SETS: Aggregate.GroupType
-    GROUP_TYPE_PIVOT: Aggregate.GroupType
-    GROUP_TYPE_ROLLUP: Aggregate.GroupType
-    GROUP_TYPE_UNSPECIFIED: Aggregate.GroupType
-    INPUT_FIELD_NUMBER: _ClassVar[int]
-    PIVOT_FIELD_NUMBER: _ClassVar[int]
-    aggregate_expressions: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
-    group_type: Aggregate.GroupType
-    grouping_expressions: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
-    grouping_sets: _containers.RepeatedCompositeFieldContainer[Aggregate.GroupingSets]
-    input: _relations_pb2.Relation
-    pivot: Aggregate.Pivot
-    def __init__(self, input: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., group_type: _Optional[_Union[Aggregate.GroupType, str]] = ..., grouping_expressions: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ..., aggregate_expressions: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ..., pivot: _Optional[_Union[Aggregate.Pivot, _Mapping]] = ..., grouping_sets: _Optional[_Iterable[_Union[Aggregate.GroupingSets, _Mapping]]] = ...) -> None: ...
-
 class Extension(_message.Message):
-    __slots__ = ["aggregate", "lateral_join", "rdd_map", "rdd_reduce", "subquery_column_aliases", "udtf_with_table_arguments"]
-    AGGREGATE_FIELD_NUMBER: _ClassVar[int]
-    LATERAL_JOIN_FIELD_NUMBER: _ClassVar[int]
+    __slots__ = ("rdd_map", "rdd_reduce", "subquery_column_aliases", "lateral_join", "udtf_with_table_arguments", "aggregate")
     RDD_MAP_FIELD_NUMBER: _ClassVar[int]
     RDD_REDUCE_FIELD_NUMBER: _ClassVar[int]
     SUBQUERY_COLUMN_ALIASES_FIELD_NUMBER: _ClassVar[int]
+    LATERAL_JOIN_FIELD_NUMBER: _ClassVar[int]
     UDTF_WITH_TABLE_ARGUMENTS_FIELD_NUMBER: _ClassVar[int]
-    aggregate: Aggregate
-    lateral_join: LateralJoin
+    AGGREGATE_FIELD_NUMBER: _ClassVar[int]
     rdd_map: RddMap
     rdd_reduce: RddReduce
     subquery_column_aliases: SubqueryColumnAliases
+    lateral_join: LateralJoin
     udtf_with_table_arguments: UDTFWithTableArguments
+    aggregate: Aggregate
     def __init__(self, rdd_map: _Optional[_Union[RddMap, _Mapping]] = ..., rdd_reduce: _Optional[_Union[RddReduce, _Mapping]] = ..., subquery_column_aliases: _Optional[_Union[SubqueryColumnAliases, _Mapping]] = ..., lateral_join: _Optional[_Union[LateralJoin, _Mapping]] = ..., udtf_with_table_arguments: _Optional[_Union[UDTFWithTableArguments, _Mapping]] = ..., aggregate: _Optional[_Union[Aggregate, _Mapping]] = ...) -> None: ...

-class LateralJoin(_message.Message):
-    __slots__ = ["left", "right"]
-    LEFT_FIELD_NUMBER: _ClassVar[int]
-    RIGHT_FIELD_NUMBER: _ClassVar[int]
-    left: _relations_pb2.Relation
-    right: _relations_pb2.Relation
-    def __init__(self, left: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., right: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ...) -> None: ...
-
 class RddMap(_message.Message):
-    __slots__ = ["func", "input"]
-    FUNC_FIELD_NUMBER: _ClassVar[int]
+    __slots__ = ("input", "func")
     INPUT_FIELD_NUMBER: _ClassVar[int]
-    func: bytes
+    FUNC_FIELD_NUMBER: _ClassVar[int]
     input: _relations_pb2.Relation
+    func: bytes
     def __init__(self, input: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., func: _Optional[bytes] = ...) -> None: ...

 class RddReduce(_message.Message):
-    __slots__ = ["func", "input"]
-    FUNC_FIELD_NUMBER: _ClassVar[int]
+    __slots__ = ("input", "func")
     INPUT_FIELD_NUMBER: _ClassVar[int]
-    func: bytes
+    FUNC_FIELD_NUMBER: _ClassVar[int]
     input: _relations_pb2.Relation
+    func: bytes
     def __init__(self, input: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., func: _Optional[bytes] = ...) -> None: ...

 class SubqueryColumnAliases(_message.Message):
-    __slots__ = ["aliases", "input"]
-    ALIASES_FIELD_NUMBER: _ClassVar[int]
+    __slots__ = ("input", "aliases")
     INPUT_FIELD_NUMBER: _ClassVar[int]
-    aliases: _containers.RepeatedScalarFieldContainer[str]
+    ALIASES_FIELD_NUMBER: _ClassVar[int]
     input: _relations_pb2.Relation
+    aliases: _containers.RepeatedScalarFieldContainer[str]
     def __init__(self, input: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., aliases: _Optional[_Iterable[str]] = ...) -> None: ...

-class TableArgumentInfo(_message.Message):
-    __slots__ = ["table_argument", "table_argument_idx"]
-    TABLE_ARGUMENT_FIELD_NUMBER: _ClassVar[int]
-    TABLE_ARGUMENT_IDX_FIELD_NUMBER: _ClassVar[int]
-    table_argument: _relations_pb2.Relation
-    table_argument_idx: int
-    def __init__(self, table_argument: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., table_argument_idx: _Optional[int] = ...) -> None: ...
+class LateralJoin(_message.Message):
+    __slots__ = ("left", "right")
+    LEFT_FIELD_NUMBER: _ClassVar[int]
+    RIGHT_FIELD_NUMBER: _ClassVar[int]
+    left: _relations_pb2.Relation
+    right: _relations_pb2.Relation
+    def __init__(self, left: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., right: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ...) -> None: ...

 class UDTFWithTableArguments(_message.Message):
-    __slots__ = ["arguments", "function_name", "table_arguments"]
-    ARGUMENTS_FIELD_NUMBER: _ClassVar[int]
+    __slots__ = ("function_name", "arguments", "table_arguments")
     FUNCTION_NAME_FIELD_NUMBER: _ClassVar[int]
+    ARGUMENTS_FIELD_NUMBER: _ClassVar[int]
     TABLE_ARGUMENTS_FIELD_NUMBER: _ClassVar[int]
-    arguments: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
     function_name: str
+    arguments: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
     table_arguments: _containers.RepeatedCompositeFieldContainer[TableArgumentInfo]
     def __init__(self, function_name: _Optional[str] = ..., arguments: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ..., table_arguments: _Optional[_Iterable[_Union[TableArgumentInfo, _Mapping]]] = ...) -> None: ...
+
+class TableArgumentInfo(_message.Message):
+    __slots__ = ("table_argument", "table_argument_idx")
+    TABLE_ARGUMENT_FIELD_NUMBER: _ClassVar[int]
+    TABLE_ARGUMENT_IDX_FIELD_NUMBER: _ClassVar[int]
+    table_argument: _relations_pb2.Relation
+    table_argument_idx: int
+    def __init__(self, table_argument: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., table_argument_idx: _Optional[int] = ...) -> None: ...
+
+class Aggregate(_message.Message):
+    __slots__ = ("input", "group_type", "grouping_expressions", "aggregate_expressions", "pivot", "grouping_sets")
+    class GroupType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+        __slots__ = ()
+        GROUP_TYPE_UNSPECIFIED: _ClassVar[Aggregate.GroupType]
+        GROUP_TYPE_GROUPBY: _ClassVar[Aggregate.GroupType]
+        GROUP_TYPE_ROLLUP: _ClassVar[Aggregate.GroupType]
+        GROUP_TYPE_CUBE: _ClassVar[Aggregate.GroupType]
+        GROUP_TYPE_PIVOT: _ClassVar[Aggregate.GroupType]
+        GROUP_TYPE_GROUPING_SETS: _ClassVar[Aggregate.GroupType]
+    GROUP_TYPE_UNSPECIFIED: Aggregate.GroupType
+    GROUP_TYPE_GROUPBY: Aggregate.GroupType
+    GROUP_TYPE_ROLLUP: Aggregate.GroupType
+    GROUP_TYPE_CUBE: Aggregate.GroupType
+    GROUP_TYPE_PIVOT: Aggregate.GroupType
+    GROUP_TYPE_GROUPING_SETS: Aggregate.GroupType
+    class Pivot(_message.Message):
+        __slots__ = ("col", "values")
+        COL_FIELD_NUMBER: _ClassVar[int]
+        VALUES_FIELD_NUMBER: _ClassVar[int]
+        col: _expressions_pb2.Expression
+        values: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression.Literal]
+        def __init__(self, col: _Optional[_Union[_expressions_pb2.Expression, _Mapping]] = ..., values: _Optional[_Iterable[_Union[_expressions_pb2.Expression.Literal, _Mapping]]] = ...) -> None: ...
+    class GroupingSets(_message.Message):
+        __slots__ = ("grouping_set",)
+        GROUPING_SET_FIELD_NUMBER: _ClassVar[int]
+        grouping_set: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
+        def __init__(self, grouping_set: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ...) -> None: ...
+    INPUT_FIELD_NUMBER: _ClassVar[int]
+    GROUP_TYPE_FIELD_NUMBER: _ClassVar[int]
+    GROUPING_EXPRESSIONS_FIELD_NUMBER: _ClassVar[int]
+    AGGREGATE_EXPRESSIONS_FIELD_NUMBER: _ClassVar[int]
+    PIVOT_FIELD_NUMBER: _ClassVar[int]
+    GROUPING_SETS_FIELD_NUMBER: _ClassVar[int]
+    input: _relations_pb2.Relation
+    group_type: Aggregate.GroupType
+    grouping_expressions: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
+    aggregate_expressions: _containers.RepeatedCompositeFieldContainer[_expressions_pb2.Expression]
+    pivot: Aggregate.Pivot
+    grouping_sets: _containers.RepeatedCompositeFieldContainer[Aggregate.GroupingSets]
+    def __init__(self, input: _Optional[_Union[_relations_pb2.Relation, _Mapping]] = ..., group_type: _Optional[_Union[Aggregate.GroupType, str]] = ..., grouping_expressions: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ..., aggregate_expressions: _Optional[_Iterable[_Union[_expressions_pb2.Expression, _Mapping]]] = ..., pivot: _Optional[_Union[Aggregate.Pivot, _Mapping]] = ..., grouping_sets: _Optional[_Iterable[_Union[Aggregate.GroupingSets, _Mapping]]] = ...) -> None: ...
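
The hunk above is the regenerated stub for snowflake_relation_ext_pb2.pyi (the +74/-68 entry in the file list); the messages keep the same fields, and only the stub layout and field ordering change. As a hedged illustration of what these generated classes describe, the sketch below builds an Aggregate extension message. The snowflake import path is taken from the file list, the pyspark proto modules are the standard Spark Connect ones referenced by the stub, and the empty Relation/Expression placeholders stand in for real plan nodes.

# Sketch only: field names and enum values come from the .pyi above; the
# placeholder Relation/Expression messages are not meaningful plans.
import pyspark.sql.connect.proto.expressions_pb2 as expressions_pb2
import pyspark.sql.connect.proto.relations_pb2 as relations_pb2

from snowflake.snowpark_connect.proto import snowflake_relation_ext_pb2 as ext_pb2

agg = ext_pb2.Aggregate(
    input=relations_pb2.Relation(),                       # child relation (placeholder)
    group_type=ext_pb2.Aggregate.GROUP_TYPE_GROUPBY,      # enum constant from the stub
    grouping_expressions=[expressions_pb2.Expression()],  # placeholder expressions
    aggregate_expressions=[expressions_pb2.Expression()],
)
ext = ext_pb2.Extension(aggregate=agg)                    # wrap in the Extension envelope
serialized = ext.SerializeToString()                      # standard protobuf round-trip
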
@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
@@ -9,10 +9,10 @@ import pandas
 import pyspark.sql.connect.proto.common_pb2 as common_proto
 import pyspark.sql.connect.proto.types_pb2 as types_proto

-from snowflake import snowpark
 from snowflake.snowpark._internal.analyzer.analyzer_utils import unquote_if_quoted
+from snowflake.snowpark_connect.dataframe_container import DataFrameContainer
 from snowflake.snowpark_connect.error.exceptions import MissingDatabase, MissingSchema
-from snowflake.snowpark_connect.utils.attribute_handling import (
+from snowflake.snowpark_connect.utils.identifiers import (
     split_fully_qualified_spark_name,
 )
 from snowflake.snowpark_connect.utils.session import get_or_create_snowpark_session
@@ -38,7 +38,7 @@ class AbstractSparkCatalog(ABC):
         schema: types_proto.DataType,
         description: str,
         **options: typing.Any,
-    ) -> snowpark.DataFrame:
+    ) -> DataFrameContainer:
         raise SnowparkConnectNotImplementedError("createTable is not implemented")

     @abstractmethod
@@ -110,7 +110,7 @@ class AbstractSparkCatalog(ABC):
     def dropGlobalTempView(
         self,
         spark_view_name: str,
-    ) -> snowpark.DataFrame:
+    ) -> DataFrameContainer:
         raise SnowparkConnectNotImplementedError(
             "dropGlobalTempView is not implemented"
         )
@@ -119,7 +119,7 @@ class AbstractSparkCatalog(ABC):
     def dropTempView(
         self,
         spark_view_name: str,
-    ) -> snowpark.DataFrame:
+    ) -> DataFrameContainer:
         raise SnowparkConnectNotImplementedError("dropTempView is not implemented")

     def cacheTable(
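
These abstract_spark_catalog.py hunks change the catalog contract: methods that used to return a bare snowpark.DataFrame now return a DataFrameContainer. Below is a hedged sketch of what an implementation now looks like; the MyCatalog class is hypothetical, and only the DataFrameContainer.create_with_column_mapping keyword arguments that appear in the SnowflakeCatalog hunks further down are assumed to exist.

# Hypothetical subclass for illustration; mirrors the call shape used by
# SnowflakeCatalog.dropTempView in the hunks below.
from snowflake.snowpark.types import BooleanType
from snowflake.snowpark_connect.dataframe_container import DataFrameContainer
from snowflake.snowpark_connect.relation.catalogs.abstract_spark_catalog import (
    AbstractSparkCatalog,
)
from snowflake.snowpark_connect.utils.session import get_or_create_snowpark_session


class MyCatalog(AbstractSparkCatalog):  # hypothetical, not part of the package
    def dropTempView(self, spark_view_name: str) -> DataFrameContainer:
        session = get_or_create_snowpark_session()
        # Report the drop result as a single boolean "value" column, Spark-style.
        result_df = session.createDataFrame([(True,)], schema=["value"])
        return DataFrameContainer.create_with_column_mapping(
            dataframe=result_df,
            spark_column_names=["value"],
            snowpark_column_names=["value"],
            snowpark_column_types=[BooleanType()],
        )
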
@@ -10,7 +10,6 @@ import pyspark.sql.connect.proto.common_pb2 as common_proto
 import pyspark.sql.connect.proto.types_pb2 as types_proto
 from snowflake.core.exceptions import NotFoundError

-from snowflake import snowpark
 from snowflake.snowpark import functions
 from snowflake.snowpark._internal.analyzer.analyzer_utils import (
     quote_name_without_upper_casing,
@@ -18,8 +17,11 @@ from snowflake.snowpark._internal.analyzer.analyzer_utils import (
 )
 from snowflake.snowpark.functions import lit
 from snowflake.snowpark.types import BooleanType, StringType
-from snowflake.snowpark_connect import column_name_handler
-from snowflake.snowpark_connect.config import auto_uppercase_ddl, global_config
+from snowflake.snowpark_connect.config import (
+    auto_uppercase_non_column_identifiers,
+    global_config,
+)
+from snowflake.snowpark_connect.dataframe_container import DataFrameContainer
 from snowflake.snowpark_connect.relation.catalogs.abstract_spark_catalog import (
     AbstractSparkCatalog,
     _get_current_snowflake_schema,
@@ -27,7 +29,7 @@ from snowflake.snowpark_connect.relation.catalogs.abstract_spark_catalog import
     _process_multi_layer_identifier,
 )
 from snowflake.snowpark_connect.type_mapping import proto_to_snowpark_type
-from snowflake.snowpark_connect.utils.attribute_handling import (
+from snowflake.snowpark_connect.utils.identifiers import (
     split_fully_qualified_spark_name,
 )
 from snowflake.snowpark_connect.utils.session import get_or_create_snowpark_session
@@ -40,7 +42,7 @@ from snowflake.snowpark_connect.utils.udf_cache import cached_udf
 def _normalize_identifier(identifier: str | None) -> str | None:
     if identifier is None:
         return None
-    return identifier.upper() if auto_uppercase_ddl() else identifier
+    return identifier.upper() if auto_uppercase_non_column_identifiers() else identifier


 def sf_quote(name: str | None) -> str | None:
@@ -345,7 +347,7 @@ class SnowflakeCatalog(AbstractSparkCatalog):
     def dropGlobalTempView(
         self,
         spark_view_name: str,
-    ) -> snowpark.DataFrame:
+    ) -> DataFrameContainer:
         session = get_or_create_snowpark_session()
         schema = global_config.spark_sql_globalTempDatabase
         result_df = session.sql(
@@ -358,14 +360,17 @@ class SnowflakeCatalog(AbstractSparkCatalog):
             )
         )
         columns = ["value"]
-        return column_name_handler.with_column_map(
-            result_df, columns, columns, [BooleanType()]
+        return DataFrameContainer.create_with_column_mapping(
+            dataframe=result_df,
+            spark_column_names=columns,
+            snowpark_column_names=columns,
+            snowpark_column_types=[BooleanType()],
         )

     def dropTempView(
         self,
         spark_view_name: str,
-    ) -> snowpark.DataFrame:
+    ) -> DataFrameContainer:
         """Drop the current temporary view."""
         session = get_or_create_snowpark_session()
         result = session.sql(
@@ -377,8 +382,11 @@ class SnowflakeCatalog(AbstractSparkCatalog):
         )
         result_df = session.createDataFrame([(view_was_dropped,)], schema=["value"])
         columns = ["value"]
-        return column_name_handler.with_column_map(
-            result_df, columns, columns, [BooleanType()]
+        return DataFrameContainer.create_with_column_mapping(
+            dataframe=result_df,
+            spark_column_names=columns,
+            snowpark_column_names=columns,
+            snowpark_column_types=[BooleanType()],
         )

     def createTable(
@@ -389,7 +397,7 @@ class SnowflakeCatalog(AbstractSparkCatalog):
         schema: types_proto.DataType,
         description: str,
         **options: typing.Any,
-    ) -> snowpark.DataFrame:
+    ) -> DataFrameContainer:
         """Create either an external, or a managed table.

         If path is supplied in which the data for this table exists. When path is specified, an external table is
@@ -422,11 +430,11 @@ class SnowflakeCatalog(AbstractSparkCatalog):
         table_name_parts = split_fully_qualified_spark_name(tableName)
         qualifiers = [table_name_parts for _ in columns]
         column_types = [f.datatype for f in sp_schema.fields]
-        return column_name_handler.with_column_map(
-            session.createDataFrame([], sp_schema),
-            columns,
-            columns,
-            column_types,
+        return DataFrameContainer.create_with_column_mapping(
+            dataframe=session.createDataFrame([], sp_schema),
+            spark_column_names=columns,
+            snowpark_column_names=columns,
+            snowpark_column_types=column_types,
             column_qualifiers=qualifiers,
         )

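
The createTable call site above is the only one in this diff that passes column_qualifiers. A minimal call-shape sketch follows; the example schema, table name, and the derivation of columns from the schema are illustrative assumptions, while the keyword arguments mirror the hunk.

# Call-shape sketch only: DataFrameContainer internals are not part of this
# diff, and the example schema/table name are made up for illustration.
from snowflake.snowpark.types import LongType, StringType, StructField, StructType
from snowflake.snowpark_connect.dataframe_container import DataFrameContainer
from snowflake.snowpark_connect.utils.identifiers import (
    split_fully_qualified_spark_name,
)
from snowflake.snowpark_connect.utils.session import get_or_create_snowpark_session

session = get_or_create_snowpark_session()
sp_schema = StructType([StructField("id", LongType()), StructField("name", StringType())])
columns = [field.name for field in sp_schema.fields]                 # assumed derivation
table_name_parts = split_fully_qualified_spark_name("db.schema.orders")

container = DataFrameContainer.create_with_column_mapping(
    dataframe=session.createDataFrame([], sp_schema),                 # empty frame, as above
    spark_column_names=columns,
    snowpark_column_names=columns,
    snowpark_column_types=[field.datatype for field in sp_schema.fields],
    column_qualifiers=[table_name_parts for _ in columns],            # one qualifier per column
)
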