snowpark-connect 0.26.0__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of snowpark-connect might be problematic.
Files changed (42)
  1. snowflake/snowpark_connect/column_name_handler.py +3 -93
  2. snowflake/snowpark_connect/config.py +99 -4
  3. snowflake/snowpark_connect/dataframe_container.py +0 -6
  4. snowflake/snowpark_connect/expression/map_expression.py +31 -1
  5. snowflake/snowpark_connect/expression/map_sql_expression.py +22 -18
  6. snowflake/snowpark_connect/expression/map_unresolved_attribute.py +22 -26
  7. snowflake/snowpark_connect/expression/map_unresolved_function.py +28 -10
  8. snowflake/snowpark_connect/expression/map_unresolved_star.py +2 -3
  9. snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.12-0.1.0.jar +0 -0
  10. snowflake/snowpark_connect/relation/map_extension.py +7 -1
  11. snowflake/snowpark_connect/relation/map_join.py +62 -258
  12. snowflake/snowpark_connect/relation/map_map_partitions.py +36 -77
  13. snowflake/snowpark_connect/relation/map_relation.py +8 -2
  14. snowflake/snowpark_connect/relation/map_show_string.py +2 -0
  15. snowflake/snowpark_connect/relation/map_sql.py +413 -15
  16. snowflake/snowpark_connect/relation/write/map_write.py +195 -114
  17. snowflake/snowpark_connect/resources_initializer.py +20 -5
  18. snowflake/snowpark_connect/server.py +20 -18
  19. snowflake/snowpark_connect/utils/artifacts.py +4 -5
  20. snowflake/snowpark_connect/utils/concurrent.py +4 -0
  21. snowflake/snowpark_connect/utils/context.py +41 -1
  22. snowflake/snowpark_connect/utils/describe_query_cache.py +57 -51
  23. snowflake/snowpark_connect/utils/identifiers.py +120 -0
  24. snowflake/snowpark_connect/utils/io_utils.py +21 -1
  25. snowflake/snowpark_connect/utils/pandas_udtf_utils.py +86 -2
  26. snowflake/snowpark_connect/utils/scala_udf_utils.py +34 -43
  27. snowflake/snowpark_connect/utils/session.py +16 -26
  28. snowflake/snowpark_connect/utils/telemetry.py +53 -0
  29. snowflake/snowpark_connect/utils/udf_utils.py +66 -103
  30. snowflake/snowpark_connect/utils/udtf_helper.py +17 -7
  31. snowflake/snowpark_connect/version.py +2 -3
  32. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/METADATA +2 -2
  33. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/RECORD +41 -42
  34. snowflake/snowpark_connect/hidden_column.py +0 -39
  35. {snowpark_connect-0.26.0.data → snowpark_connect-0.28.0.data}/scripts/snowpark-connect +0 -0
  36. {snowpark_connect-0.26.0.data → snowpark_connect-0.28.0.data}/scripts/snowpark-session +0 -0
  37. {snowpark_connect-0.26.0.data → snowpark_connect-0.28.0.data}/scripts/snowpark-submit +0 -0
  38. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/WHEEL +0 -0
  39. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/licenses/LICENSE-binary +0 -0
  40. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/licenses/LICENSE.txt +0 -0
  41. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/licenses/NOTICE-binary +0 -0
  42. {snowpark_connect-0.26.0.dist-info → snowpark_connect-0.28.0.dist-info}/top_level.txt +0 -0
@@ -8,7 +8,7 @@ from collections.abc import Sequence
 from typing import Any
 
 from snowflake import snowpark
-from snowflake.snowpark.exceptions import SnowparkClientException, SnowparkSQLException
+from snowflake.snowpark.exceptions import SnowparkClientException
 from snowflake.snowpark.session import _get_active_session
 from snowflake.snowpark_connect.constants import DEFAULT_CONNECTION_NAME
 from snowflake.snowpark_connect.utils.describe_query_cache import (
@@ -50,7 +50,10 @@ def _get_current_snowpark_session() -> snowpark.Session | None:
 
 def configure_snowpark_session(session: snowpark.Session):
     """Configure a snowpark session with required parameters and settings."""
-    from snowflake.snowpark_connect.config import global_config
+    from snowflake.snowpark_connect.config import (
+        get_cte_optimization_enabled,
+        global_config,
+    )
 
     logger.info(f"Configuring session {session}")
 
@@ -77,6 +80,14 @@ configure_snowpark_session(session: snowpark.Session):
     session.connection.arrow_number_to_decimal_setter = True
     session.custom_package_usage_config["enabled"] = True
 
+    # Configure CTE optimization based on session configuration
+    cte_optimization_enabled = get_cte_optimization_enabled()
+    session.cte_optimization_enabled = cte_optimization_enabled
+    logger.info(f"CTE optimization enabled: {cte_optimization_enabled}")
+
+    # Default query tag to be used unless overridden by user using AppName or spark.addTag()
+    query_tag = "SNOWPARK_CONNECT_QUERY"
+
     default_fallback_timezone = "UTC"
     if global_config.spark_sql_session_timeZone is None:
         try:
@@ -104,35 +115,14 @@ def configure_snowpark_session(session: snowpark.Session):
         "QUOTED_IDENTIFIERS_IGNORE_CASE": "false",
         "PYTHON_SNOWPARK_ENABLE_THREAD_SAFE_SESSION": "true",
         "PYTHON_SNOWPARK_USE_SCOPED_TEMP_OBJECTS": "false", # this is required for creating udfs from sproc
+        "ENABLE_STRUCTURED_TYPES_IN_SNOWPARK_CONNECT_RESPONSE": "true",
+        "QUERY_TAG": f"'{query_tag}'",
     }
 
     session.sql(
         f"ALTER SESSION SET {', '.join([f'{k} = {v}' for k, v in session_params.items()])}"
     ).collect()
 
-    # Rolling ahead in preparation of GS release 9.22 (ETA 8/5/2025). Once 9.22 is past rollback risk, merge this
-    # parameter with other in the session_params dictionary above
-    try:
-        session.sql(
-            "ALTER SESSION SET ENABLE_STRUCTURED_TYPES_IN_SNOWPARK_CONNECT_RESPONSE=true"
-        ).collect()
-    except SnowparkSQLException:
-        logger.debug(
-            "ENABLE_STRUCTURED_TYPES_IN_SNOWPARK_CONNECT_RESPONSE is not defined"
-        )
-    try:
-        session.sql(
-            "ALTER SESSION SET ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT=true"
-        ).collect()
-    except SnowparkSQLException:
-        logger.debug("ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT is not defined")
-    try:
-        session.sql(
-            "ALTER SESSION SET ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE=true"
-        ).collect()
-    except SnowparkSQLException:
-        logger.debug("ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE is not defined")
-
     # Instrument the snowpark session to use a cache for describe queries.
     instrument_session_for_describe_cache(session)
 
@@ -204,5 +194,5 @@ def set_query_tags(spark_tags: Sequence[str]) -> None:
     snowpark_session = get_or_create_snowpark_session()
     spark_tags_str = ",".join(sorted(spark_tags)) if spark_tags else None
 
-    if spark_tags_str != snowpark_session.query_tag:
+    if spark_tags_str and spark_tags_str != snowpark_session.query_tag:
        snowpark_session.query_tag = spark_tags_str
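
Note on the query-tag hunks above (they appear to come from utils/session.py): the session now starts with a default QUERY_TAG of SNOWPARK_CONNECT_QUERY, and set_query_tags only overwrites it when the joined Spark tag string is non-empty. A minimal client-side sketch, assuming a running Snowpark Connect endpoint (the URL is a placeholder; addTag/clearTags are the standard PySpark Connect session APIs):

# Hypothetical client usage against a Snowpark Connect endpoint (placeholder URL).
from pyspark.sql import SparkSession

spark = SparkSession.builder.remote("sc://localhost:15002").getOrCreate()

spark.addTag("nightly_etl")   # tags are sorted and joined -> Snowpark query_tag "nightly_etl"
spark.range(10).count()

spark.clearTags()             # an empty tag set now leaves the existing query tag in place
spark.range(10).count()       # instead of overwriting it, thanks to the new truthiness check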
@@ -88,6 +88,7 @@ RECORDED_CONFIG_KEYS = {
     "spark.sql.session.localRelationCacheThreshold",
     "spark.sql.mapKeyDedupPolicy",
     "snowpark.connect.sql.passthrough",
+    "snowpark.connect.cte.optimization_enabled",
     "snowpark.connect.iceberg.external_volume",
     "snowpark.connect.sql.identifiers.auto-uppercase",
     "snowpark.connect.udtf.compatibility_mode",
@@ -426,6 +427,58 @@ class Telemetry:
 
         summary["internal_queries"] += 1
 
+    @safe
+    def report_describe_query_cache_lookup(self):
+        """Report a describe query cache lookup."""
+        if self._not_in_request():
+            return
+
+        summary = self._request_summary.get()
+
+        if "describe_cache_lookups" not in summary:
+            summary["describe_cache_lookups"] = 0
+
+        summary["describe_cache_lookups"] += 1
+
+    @safe
+    def report_describe_query_cache_hit(self):
+        """Report a describe query cache hit."""
+        if self._not_in_request():
+            return
+
+        summary = self._request_summary.get()
+
+        if "describe_cache_hits" not in summary:
+            summary["describe_cache_hits"] = 0
+
+        summary["describe_cache_hits"] += 1
+
+    @safe
+    def report_describe_query_cache_expired(self):
+        """Report a describe query cache hit."""
+        if self._not_in_request():
+            return
+
+        summary = self._request_summary.get()
+
+        if "describe_cache_expired" not in summary:
+            summary["describe_cache_expired"] = 0
+
+        summary["describe_cache_expired"] += 1
+
+    @safe
+    def report_describe_query_cache_clear(self, query_prefix: str):
+        """Report a describe query cache clear."""
+        if self._not_in_request():
+            return
+
+        summary = self._request_summary.get()
+
+        if "describe_cache_clears" not in summary:
+            summary["describe_cache_clears"] = []
+
+        summary["describe_cache_clears"].append(query_prefix)
+
     @safe
     def report_udf_usage(self, udf_name: str):
         if self._not_in_request():
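
The four new reporters accumulate per-request counters in the same summary dictionary used for internal_queries. A small sketch of how a consumer of that summary might derive a cache hit rate (the literal numbers are illustrative, not taken from the package):

# Hypothetical post-processing of a request summary produced by the counters above.
summary = {"describe_cache_lookups": 40, "describe_cache_hits": 31, "describe_cache_expired": 2}
hit_rate = summary["describe_cache_hits"] / max(summary["describe_cache_lookups"], 1)
print(f"describe-cache hit rate: {hit_rate:.0%}")  # -> 78%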
@@ -6,7 +6,6 @@
 # So its dependencies are restricted to pandas, snowpark, and, pyspark
 import functools
 import inspect
-from typing import Iterator
 
 import pandas
 import pyspark.sql.connect.proto.expressions_pb2 as expressions_proto
@@ -198,111 +197,75 @@ class ProcessCommonInlineUserDefinedFunction:
 
         needs_struct_conversion = isinstance(self._original_return_type, StructType)
 
-        match self._called_from:
-            case "map_map_partitions":
-                column_names = self._input_column_names
-                eval_type = self._eval_type
+        if not needs_struct_conversion:
+            return snowpark_fn.udf(
+                create_null_safe_wrapper(callable_func),
+                return_type=self._return_type,
+                input_types=self._input_types,
+                name=self._udf_name,
+                replace=self._replace,
+                packages=packages,
+                imports=imports,
+                immutable=self._is_deterministic,
+            )
 
-                def wrapped_function(*args):
-                    result = callable_func(
-                        pandas.DataFrame(iter([list(args)]), columns=column_names)
-                    )
-                    if eval_type == MAP_IN_ARROW_EVAL_TYPE:
-                        if not isinstance(result, Iterator) and not hasattr(
-                            result, "__iter__"
-                        ):
-                            raise RuntimeError(
-                                f"snowpark_connect::UDF_RETURN_TYPE Return type of the user-defined function should be "
-                                f"iterator of pyarrow.RecordBatch, but is {type(result).__name__}"
-                            )
-                        import pyarrow as pa
-
-                        for elem in result:
-                            if not isinstance(elem, pa.RecordBatch):
-                                raise RuntimeError(
-                                    f"snowpark_connect::UDF_RETURN_TYPE Return type of the user-defined function should "
-                                    f"be iterator of pyarrow.RecordBatch, but is iterator of {type(elem).__name__}"
-                                )
-                    return result
-
-                udf_function = create_null_safe_wrapper(wrapped_function)
-                packages += ["pyarrow", "pandas"]
-            case _:
-                if not needs_struct_conversion:
-                    return snowpark_fn.udf(
-                        create_null_safe_wrapper(callable_func),
-                        return_type=self._return_type,
-                        input_types=self._input_types,
-                        name=self._udf_name,
-                        replace=self._replace,
-                        packages=packages,
-                        imports=imports,
-                        immutable=self._is_deterministic,
+        is_pandas_udf, _, return_types, _ = extract_return_input_types(
+            callable_func,
+            self._original_return_type,
+            self._input_types,
+            TempObjectType.FUNCTION,
+        )
+        if is_pandas_udf and isinstance(return_types, PandasDataFrameType):
+            # Snowpark Python UDFs only support returning a Pandas Series.
+            # We change the return type to make the input callable compatible with Snowpark Python UDFs,
+            # and then in the wrapper function we convert the pandas DataFrame of the
+            # original callable to a Pandas Series.
+            original_callable.__annotations__["return"] = pandas.Series
+
+        field_names = [field.name for field in self._original_return_type.fields]
+
+        def struct_wrapper(*args):
+            result = callable_func(*args)
+            if isinstance(result, (tuple, list)):
+                # Convert tuple/list to dict using struct field names
+                if len(result) == len(field_names):
+                    return dict(zip(field_names, result))
+            return result
+
+        def pandas_struct_wrapper(*args):
+            # inspired by the following snowpark modin code to handle Pandas int/bool/null data in Snowflake VariantType
+            # https://github.com/snowflakedb/snowpark-python/blob/e095d5a54f3a697416c3f1df87d239def47a5495/src/snowflake/snowpark/modin/plugin/_internal/apply_utils.py#L1309-L1366
+            def convert_to_snowflake_compatible_type(value):
+                import numpy as np
+                from pandas.api.types import is_scalar
+
+                if is_scalar(value) and pandas.isna(value):
+                    return None
+
+                return (
+                    int(value)
+                    if np.issubdtype(type(value), np.integer)
+                    else (
+                        bool(value) if np.issubdtype(type(value), np.bool_) else value
                     )
-
-                is_pandas_udf, _, return_types, _ = extract_return_input_types(
-                    callable_func,
-                    self._original_return_type,
-                    self._input_types,
-                    TempObjectType.FUNCTION,
                 )
-                if is_pandas_udf and isinstance(return_types, PandasDataFrameType):
-                    # Snowpark Python UDFs only support returning a Pandas Series.
-                    # We change the return type to make the input callable compatible with Snowpark Python UDFs,
-                    # and then in the wrapper function we convert the pandas DataFrame of the
-                    # original callable to a Pandas Series.
-                    original_callable.__annotations__["return"] = pandas.Series
-
-                field_names = [
-                    field.name for field in self._original_return_type.fields
-                ]
-
-                def struct_wrapper(*args):
-                    result = callable_func(*args)
-                    if isinstance(result, (tuple, list)):
-                        # Convert tuple/list to dict using struct field names
-                        if len(result) == len(field_names):
-                            return dict(zip(field_names, result))
-                    return result
-
-                def pandas_struct_wrapper(*args):
-                    # inspired by the following snowpark modin code to handle Pandas int/bool/null data in Snowflake VariantType
-                    # https://github.com/snowflakedb/snowpark-python/blob/e095d5a54f3a697416c3f1df87d239def47a5495/src/snowflake/snowpark/modin/plugin/_internal/apply_utils.py#L1309-L1366
-                    def convert_to_snowflake_compatible_type(value):
-                        import numpy as np
-                        from pandas.api.types import is_scalar
-
-                        if is_scalar(value) and pandas.isna(value):
-                            return None
-
-                        return (
-                            int(value)
-                            if np.issubdtype(type(value), np.integer)
-                            else (
-                                bool(value)
-                                if np.issubdtype(type(value), np.bool_)
-                                else value
-                            )
-                        )
-
-                    result = callable_func(*args)
-                    assert (
-                        len(result) == 1
-                    ), "Expected result to be a single row DataFrame"
-                    # df.applymap doesn't help here, the original type was preserved, hence we convert each value
-                    row_data = [
-                        convert_to_snowflake_compatible_type(value)
-                        for value in result.iloc[0].tolist()
-                    ]
-                    result = pandas.Series([dict(zip(field_names, row_data))])
-                    return result
-
-                if is_pandas_udf:
-                    udf_function = pandas_struct_wrapper
-                    if isinstance(return_types, PandasDataFrameType):
-                        udf_function.__annotations__ = original_callable.__annotations__
-                else:
-                    udf_function = create_null_safe_wrapper(struct_wrapper)
+
+            result = callable_func(*args)
+            assert len(result) == 1, "Expected result to be a single row DataFrame"
+            # df.applymap doesn't help here, the original type was preserved, hence we convert each value
+            row_data = [
+                convert_to_snowflake_compatible_type(value)
+                for value in result.iloc[0].tolist()
+            ]
+            result = pandas.Series([dict(zip(field_names, row_data))])
+            return result
+
+        if is_pandas_udf:
+            udf_function = pandas_struct_wrapper
+            if isinstance(return_types, PandasDataFrameType):
+                udf_function.__annotations__ = original_callable.__annotations__
+        else:
+            udf_function = create_null_safe_wrapper(struct_wrapper)
 
         return snowpark_fn.udf(
@@ -261,15 +261,25 @@ def create(session, b64_str, spark_column_names_json_str, input_schema_json_str,
     udf_proto = CommonInlineUserDefinedFunction()
     udf_proto.ParseFromString(restored_bytes)
 
+    if not input_schema_json_str:
+        raise ValueError("Input schema is required for pandas UDTF.")
+    if not return_schema_json_str:
+        raise ValueError("Return schema is required for pandas UDTF.")
+
     spark_column_names = json.loads(spark_column_names_json_str)
-    input_schema = StructType.fromJson(json.loads(input_schema_json_str)) if input_schema_json_str else None
-    return_schema = StructType.fromJson(json.loads(return_schema_json_str)) if return_schema_json_str else None
+    input_schema = StructType.fromJson(json.loads(input_schema_json_str))
+    return_schema = StructType.fromJson(json.loads(return_schema_json_str))
 
-    map_in_arrow_udtf = create_pandas_udtf(
-        udf_proto, spark_column_names,
-        input_schema, return_schema
-    )
-    return map_in_arrow_udtf.name
+    map_in_arrow = udf_proto.WhichOneof("function") == "python_udf" and udf_proto.python_udf.eval_type == 207
+    if map_in_arrow:
+        map_udtf = create_pandas_udtf_with_arrow(
+            udf_proto, spark_column_names, input_schema, return_schema
+        )
+    else:
+        map_udtf = create_pandas_udtf(
+            udf_proto, spark_column_names, input_schema, return_schema
+        )
+    return map_udtf.name
 $$;
 """
 session.sql(create_udtf_sproc_sql).collect()
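
Eval type 207 is PySpark's SQL_MAP_ARROW_ITER_UDF, the eval type produced by DataFrame.mapInArrow, which is why those protos are now routed to create_pandas_udtf_with_arrow. A sketch of the client call that would arrive with that eval type; the transformation itself is illustrative:

# Hypothetical mapInArrow call that reaches the branch above with eval_type 207.
import pyarrow as pa
import pyarrow.compute as pc

def double_ids(batches):
    # Receives an iterator of pyarrow.RecordBatch and must yield RecordBatches back.
    for batch in batches:
        yield pa.RecordBatch.from_arrays([pc.multiply(batch.column("id"), 2)], names=["id"])

spark.range(5).mapInArrow(double_ids, schema="id long").show()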
@@ -1,6 +1,5 @@
-#!/usr/bin/env python3
+# !/usr/bin/env python3
 #
 # Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
 #
-
-VERSION = (0, 26, 0)
+VERSION = (0,28,0)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: snowpark-connect
-Version: 0.26.0
+Version: 0.28.0
 Summary: Snowpark Connect for Spark
 Author: Snowflake, Inc
 License: Apache License, Version 2.0
@@ -16,7 +16,7 @@ Requires-Dist: jpype1
 Requires-Dist: protobuf<5.0,>=4.25.3
 Requires-Dist: s3fs>=2025.3.0
 Requires-Dist: snowflake.core<2,>=1.0.5
-Requires-Dist: snowflake-snowpark-python[pandas]<1.38.0,>=1.37.0
+Requires-Dist: snowflake-snowpark-python[pandas]<1.40.0,==1.39.0
 Requires-Dist: sqlglot>=26.3.8
 Requires-Dist: jaydebeapi
 Requires-Dist: aiobotocore~=2.23.0
@@ -1,20 +1,19 @@
 snowflake/snowpark_connect/__init__.py,sha256=Sml4x1LTNnxZyw6nnDeJrZWUi3eUAR46Rsw6N-wHUSA,605
-snowflake/snowpark_connect/column_name_handler.py,sha256=_bTrguwdiF_cqFvoihFU7f42lNqtJ3Af-9fPvSLbf_A,31919
-snowflake/snowpark_connect/config.py,sha256=KQlRIZsclZfzWe3Co4XBvT_WBeuC6xb9GNAnGEeOWPY,25094
+snowflake/snowpark_connect/column_name_handler.py,sha256=_Z2j4teR_3nsCLjMxhyChGmds_1v6tP51OfkEpmWXWk,27164
+snowflake/snowpark_connect/config.py,sha256=vfuM2TBuuoBe_y9Y_utPelU-9hLoVE-lJszmN8vT5qw,28145
 snowflake/snowpark_connect/constants.py,sha256=FBDxNUxdqWxnf6d5-eanHlYdFFyQqCqvNyZG-uOiO6Q,598
 snowflake/snowpark_connect/control_server.py,sha256=mz3huYo84hgqUB6maZxu3LYyGq7vVL1nv7-7-MjuSYY,1956
-snowflake/snowpark_connect/dataframe_container.py,sha256=sCzMxzb6UIJz1mVpwzSYeVBiqtSzZv5n_pn7FVhbRv0,9114
+snowflake/snowpark_connect/dataframe_container.py,sha256=0ozyUXrWErzM7Gltnb-i2o5ZyXVVeT_HCqpuYliQXwc,8798
 snowflake/snowpark_connect/dataframe_name_handler.py,sha256=aR-CpdGsN2d6tNW0H_F9P-FLe5hDU68zJwKjsrgeA2g,1682
 snowflake/snowpark_connect/date_time_format_mapping.py,sha256=qtQ-JTGR1VRWM2oxM1aYggE_g-BNouffeHxmCk89xkk,16809
 snowflake/snowpark_connect/empty_dataframe.py,sha256=aKO6JkYnArWCpLGcn9BzvTspw2k_c6eAM0mQImAY0J0,428
-snowflake/snowpark_connect/hidden_column.py,sha256=k56-e97vqQmoOFtOtIPy6rSu8mZAz-toKHoIBXYW3CU,1322
-snowflake/snowpark_connect/resources_initializer.py,sha256=njE4_L82evaCwFYPGBUX6OEVaHqww42oGXXmzUylt0I,4424
-snowflake/snowpark_connect/server.py,sha256=JRgj7PXJAPZ1LyoRs6heMh2ISXjhOMIxw3sipXlhQd8,50978
+snowflake/snowpark_connect/resources_initializer.py,sha256=GqBaiFrK9dWec3NLuNZuuxx5Ebw_ZCiMyFrS2KjK2fw,5084
+snowflake/snowpark_connect/server.py,sha256=Hi9FYtIHJXURgMS3On50b5BXJQjMv220_h_jttkHUkI,51015
 snowflake/snowpark_connect/start_server.py,sha256=udegO0rk2FeSnXsIcCIYQW3VRlGDjB_cU8lJ8xSzuM8,942
 snowflake/snowpark_connect/tcm.py,sha256=ftncZFbVO-uyWMhF1_HYKQykB7KobHEYoyQsYbQj1EM,203
 snowflake/snowpark_connect/type_mapping.py,sha256=6Hg-h1iVzVB_FnwG3Sjl-UGr2Itrs4LxVb2Pz5Ue-YA,41566
 snowflake/snowpark_connect/typed_column.py,sha256=Tavii8b4zMj5IWOvN6tlOVmC80W6eQct0pC_tF2nlhU,3867
-snowflake/snowpark_connect/version.py,sha256=n9kt8bTa93V973HaRfyqLqeVkvUhOJqglXJqOAdmsWw,121
+snowflake/snowpark_connect/version.py,sha256=h25IrA984uyrAk0fG8Iy3TJC7b-VG1YTDkoW-sfpxDU,118
 snowflake/snowpark_connect/analyze_plan/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/analyze_plan/map_tree_string.py,sha256=Q3ZD-Z7uForrF7W3mSAjwaiEcIv2KDXr5jPfVbromVg,1470
 snowflake/snowpark_connect/error/__init__.py,sha256=oQo6k4zztLmNF1c5IvJLcS99J6RWY9KBTN3RJ2pKimg,249
@@ -30,14 +29,14 @@ snowflake/snowpark_connect/expression/function_defaults.py,sha256=WEnzc_uzZZltcf
 snowflake/snowpark_connect/expression/hybrid_column_map.py,sha256=2jItaXnFnqcaOIiHKxscFLj7hi2zQE7F3xcher8Zo2U,7614
 snowflake/snowpark_connect/expression/literal.py,sha256=wk5NnLR85SLHe7GoAvSzMTW0B-3yhAiRz4F5SfU2REs,4459
 snowflake/snowpark_connect/expression/map_cast.py,sha256=uxNukg9op0i_sKLhY43fJJJ2-SKZ-8wkRMRBiPikJ0c,14799
-snowflake/snowpark_connect/expression/map_expression.py,sha256=cotjHDdKiHWbQZx3buBcISCnty8TYyUbbUat4W9qhIk,14119
+snowflake/snowpark_connect/expression/map_expression.py,sha256=04EZNHV7bb0KAIhbnmhIgssvafn3XTSE9Ax_uMishP8,15711
 snowflake/snowpark_connect/expression/map_extension.py,sha256=Qm1Jn3JunswD_hO7ornvKpR6WcM3UKO_JpZE6ovH3VM,4939
-snowflake/snowpark_connect/expression/map_sql_expression.py,sha256=bHl7_YrKnWy0k1gMz02HYF8lTZKoamQdv5nFCHEfbzI,25824
+snowflake/snowpark_connect/expression/map_sql_expression.py,sha256=ultWVVcCeXHUlNSkq4PEdeFuezLDELcemFJVSIJczbE,25923
 snowflake/snowpark_connect/expression/map_udf.py,sha256=_om_Kqxm_sm3RhwP4DZbGPUpArX90MoJQm-KwEWrTiQ,8034
-snowflake/snowpark_connect/expression/map_unresolved_attribute.py,sha256=xIJYxs1EJZNs1skX_xgshvcddsqO8jHgR9QUEvwXm9E,18520
+snowflake/snowpark_connect/expression/map_unresolved_attribute.py,sha256=cyyEtAsPqcCpCYulAobSbtPDV3xkCIM0_tq4wdMxuJM,18843
 snowflake/snowpark_connect/expression/map_unresolved_extract_value.py,sha256=A-m-RczZW6xHMjgYR5RV_vzMTpNBRoH3Tk_A1V8z_pk,5382
-snowflake/snowpark_connect/expression/map_unresolved_function.py,sha256=x8LRI_BXgnclTEfCkJp6O8CPkxsYTc4fYGK_zPGpGdM,477028
-snowflake/snowpark_connect/expression/map_unresolved_star.py,sha256=XNJurGS5RLAaIAcalGGBbPJujqg8YZru_RsoB61m0WQ,8865
+snowflake/snowpark_connect/expression/map_unresolved_function.py,sha256=5Ne94Mip18iXmiqpBNO9wVkiDULGhjxD2wU4ueEsSXs,478168
+snowflake/snowpark_connect/expression/map_unresolved_star.py,sha256=gecSxJ9flsn9chTXmYHP8nLzs_209xnVlyNwANDXRFg,8820
 snowflake/snowpark_connect/expression/map_update_fields.py,sha256=bMuZjCOE5wgrZv3ApsoiwEUv-lPgnMWUk-DFX11QuX0,6710
 snowflake/snowpark_connect/expression/map_window_function.py,sha256=apJxtWSnLogMG0F_En67JoGrGRGYQ0I8O0xPRk4TR_o,12152
 snowflake/snowpark_connect/expression/typer.py,sha256=aV2dmsqsJWhZzeJtu-_xLqVZ8REsU0hTYAEpCrbO5y8,4582
@@ -78,7 +77,7 @@ snowflake/snowpark_connect/includes/jars/log4j-core-2.20.0.jar,sha256=YTffhIza7Z
 snowflake/snowpark_connect/includes/jars/log4j-slf4j2-impl-2.20.0.jar,sha256=uN0-TqnP-hjbXzAc2MU5FYZi5pHv1HAaqHtNCZYb2LA,26430
 snowflake/snowpark_connect/includes/jars/paranamer-2.8.3.jar,sha256=qd8Tby6SazeoOKW04iJzQ8OnVdFXJLOoNQtK6ksViUU,37899
 snowflake/snowpark_connect/includes/jars/paranamer-2.8.jar,sha256=aIyxGKYCHYGROOhVIIyVYDFoi-S0eiS7YVvsxjrO3wc,34654
-snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.12-0.1.0.jar,sha256=cl-jS8dYqWuqawLz8zybq2QTbS9Uuf46k8WEU_Cexdo,3242
+snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.12-0.1.0.jar,sha256=2flk7VxMgrNtLKNE6owH8QDS4pQMmWNvSajkjkdtdQA,6613712
 snowflake/snowpark_connect/includes/jars/scala-collection-compat_2.12-2.7.0.jar,sha256=-2iGSgJIrwl51Fkhhp6xIwtZJqph5cCoc3w_c65YLpI,247466
 snowflake/snowpark_connect/includes/jars/scala-library-2.12.18.jar,sha256=5R5mNsADNZ4Qa-pK2Z3vcOYTwpAZDIyE8Q-VYN1bAK4,5433857
 snowflake/snowpark_connect/includes/jars/scala-parser-combinators_2.12-2.3.0.jar,sha256=Fb8T9c4On_QiRgPEQoEmXQUJrDHqgjxGqimlpS4JpXQ,186777
@@ -401,15 +400,15 @@ snowflake/snowpark_connect/relation/map_aggregate.py,sha256=KElVYO62V3jkU8ldTCfT
 snowflake/snowpark_connect/relation/map_catalog.py,sha256=mcx6An4fqHAxy2OhOC66Xe_0aRtYPDGkBEgMK5CfaXU,5822
 snowflake/snowpark_connect/relation/map_column_ops.py,sha256=grleA0S6jyia6T5IGbPOEdz0zI0TkkV7jENzlfzoQlQ,49017
 snowflake/snowpark_connect/relation/map_crosstab.py,sha256=H_J8-IARK6zMEUFrOjKif1St6M20gvBAnP0EuArFHGg,2422
-snowflake/snowpark_connect/relation/map_extension.py,sha256=OeaCtrmvRJBfn6TKJ-tb9ynoBs6QybGOsDJqaJMSkjw,22172
-snowflake/snowpark_connect/relation/map_join.py,sha256=oINd7AMa_O0bLPqsFFeKnZfE0anKa2RAR9QYh3qdbbY,25042
+snowflake/snowpark_connect/relation/map_extension.py,sha256=ItzB9AMG0OAwQhmO8bvIJxVToYhAyQu19pprone7feE,22473
+snowflake/snowpark_connect/relation/map_join.py,sha256=YGOy6nmpb0mqw8D3TzsV4bDlvaq6QFYPwfF5S6YgydU,15201
 snowflake/snowpark_connect/relation/map_local_relation.py,sha256=VBfwBT75GQUe01UOZptwcYsI7zZxaIMQyTOG6kmVuJ0,15219
-snowflake/snowpark_connect/relation/map_map_partitions.py,sha256=2vN7wfe0mR0qzMYeHOmWrgsdCq573j2qQSLnzp7pSFg,5881
-snowflake/snowpark_connect/relation/map_relation.py,sha256=gCko2zjUEBYkiw2GmJerGl-YcRAeONGTIF1GhoGhQl0,12468
+snowflake/snowpark_connect/relation/map_map_partitions.py,sha256=JGiPnqmFFNdHAx54dHyOUwo9JZzyggDd1yLBmu-hasQ,4307
+snowflake/snowpark_connect/relation/map_relation.py,sha256=etQ_SGXAAXNZxqfG6ouh6XVMSbvVLo2NS2Hhj3vOAas,12806
 snowflake/snowpark_connect/relation/map_row_ops.py,sha256=x1Jqircy4I0iiSljx3zbq0YxwGvGzPcXIY8_nhtl2PM,30528
 snowflake/snowpark_connect/relation/map_sample_by.py,sha256=8ALQbeUsB89sI3uiUFqG3w1A4TtOzOAL4umdKp6-c38,1530
-snowflake/snowpark_connect/relation/map_show_string.py,sha256=fuYCuThp7V3VPb5NETJvW0JDf9Xv2qCxdvBmlbHgF7c,3254
-snowflake/snowpark_connect/relation/map_sql.py,sha256=UphF9mWLV8csDLNlFTTC0Xq2-OfoHI1ztcyAE5X6d-M,83484
+snowflake/snowpark_connect/relation/map_show_string.py,sha256=GgKg0qp1pGqSC7TuFedTU4IYaIm-Fx23OJ1LfkcGOHw,3382
+snowflake/snowpark_connect/relation/map_sql.py,sha256=BvGVUG2fH_oyym8DVoMXnJy4Zzi_hSy-wa-x1xcapTE,101175
 snowflake/snowpark_connect/relation/map_stats.py,sha256=kqRYvix8RfluTKx1cAy9JhBUv6arYQHgfxpP1R4QwBM,13985
 snowflake/snowpark_connect/relation/map_subquery_alias.py,sha256=rHgE9XUzuWWkjNPtJz3Sxzz2aFo690paHKZh9frqPXk,1456
 snowflake/snowpark_connect/relation/map_udtf.py,sha256=cfDnbZ3TRJ6eb0EVResu6GL-OwQpaEabWLbrhgWnkRw,13316
@@ -433,30 +432,30 @@ snowflake/snowpark_connect/relation/read/reader_config.py,sha256=PMh1R5IjqqTwiAA
 snowflake/snowpark_connect/relation/read/utils.py,sha256=rIIM6d2WXHh7MLGyHNiRc9tS8b0dmyFQr7rHepIYJOU,4111
 snowflake/snowpark_connect/relation/write/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/relation/write/jdbc_write_dbapi.py,sha256=GI9FyGZuQQNV-6Q8Ob-Xr0im3iAPdH-Jkyx8bjwbOuE,11931
-snowflake/snowpark_connect/relation/write/map_write.py,sha256=UE5A-m1NWgbziGbUiZh48YCZHwogwTegAXEPEcFuepg,39361
+snowflake/snowpark_connect/relation/write/map_write.py,sha256=zHWjjzYxj4rNPzybFYPnCK3mUSJMjmXv2FMiMAG0krY,43014
 snowflake/snowpark_connect/relation/write/map_write_jdbc.py,sha256=1nOWRgjtZzfRwnSRGFP9V6mqBVlGhSBr2KHGHbe4JMU,1404
 snowflake/snowpark_connect/resources/java_udfs-1.0-SNAPSHOT.jar,sha256=tVyOp6tXxu9nm6SDufwQiGzfH3pnuh_7PowsMZxOolY,9773
 snowflake/snowpark_connect/utils/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
-snowflake/snowpark_connect/utils/artifacts.py,sha256=CDSO9ve_yHvT1CKjoLnwiQNarsM5cMUdypGgpRELtK0,2407
+snowflake/snowpark_connect/utils/artifacts.py,sha256=TkHZ2uNfZiphgtG91V1_c_h9yP9dP677BXUMymboCss,2498
 snowflake/snowpark_connect/utils/cache.py,sha256=bAyoNBW6Z1ui9BuppDywbQeG6fdju4L-owFHzySOTnk,3382
-snowflake/snowpark_connect/utils/concurrent.py,sha256=2UrQfJPWFrtpx131514t-9aXFDphbGI3N2pumLrltNk,3543
-snowflake/snowpark_connect/utils/context.py,sha256=nZVj1EMH-aP32cKFCcHqDut3lPtdt0A8DniBzMxCS4k,12132
-snowflake/snowpark_connect/utils/describe_query_cache.py,sha256=2VcPgGP9bUpdIhnN2s_MOG8oGHKX0hS0rT7Y26MJb3A,9001
+snowflake/snowpark_connect/utils/concurrent.py,sha256=BTbUmvupLzUSRd6L7kKk9yIXFdqlDOkXebVMaECRD-A,3653
+snowflake/snowpark_connect/utils/context.py,sha256=W9j9eC-lbGp7tfXWhnvI88CVOcLGspYEhEgxGPYVbYE,13288
+snowflake/snowpark_connect/utils/describe_query_cache.py,sha256=HByBsP-XJ1PAk414OQtwIqcFnqpC2zq-ZK_YxR4O5gg,9100
 snowflake/snowpark_connect/utils/env_utils.py,sha256=g__Uio5ae20Tm1evahIHdJUXQYPmjNUT_kYPSIy5JDU,1488
 snowflake/snowpark_connect/utils/external_udxf_cache.py,sha256=eSZHMbjTxnkg78IlbG5P1Vno6j5ag_FSI0c4Xi2UyPs,1044
-snowflake/snowpark_connect/utils/identifiers.py,sha256=YgtVIQGuUnnTiNdtRficdBwUICWaWkJltjOPnTnfrak,3881
+snowflake/snowpark_connect/utils/identifiers.py,sha256=A65-aQpuhTjOjGv0cmlE8fTNCI9_ONn-AYatKb_VfGM,7635
 snowflake/snowpark_connect/utils/interrupt.py,sha256=_awhdrzF1KQO-EQThneEcfMg3Zxed4p3HtMpkcAb6ek,2790
-snowflake/snowpark_connect/utils/io_utils.py,sha256=noBlKpJvzEA6iwLjFgBVGlCLlzjZ16-w0fsGimTyBAQ,1039
-snowflake/snowpark_connect/utils/pandas_udtf_utils.py,sha256=QwdLGLg5bX0JJTrWKfL4Ou4MIp443cryEbrZNBrulNE,4207
+snowflake/snowpark_connect/utils/io_utils.py,sha256=xu0AMrHy-qsY7TfdIxzWChf0hU_7bnvm3Ruk0XScRns,1781
+snowflake/snowpark_connect/utils/pandas_udtf_utils.py,sha256=3WA_9IVRZL8fnwIHo048LTg62-bPGfCDUZzYd-zjzQQ,7564
 snowflake/snowpark_connect/utils/profiling.py,sha256=ttdHzQUYarvTqJASLNuKFIax7ejO39Tv1mHKl0QjRkg,1519
-snowflake/snowpark_connect/utils/scala_udf_utils.py,sha256=YDHmhqsjgBeuKdv1TaInEUUOvrnbrU3_toQY_MfEygk,22895
-snowflake/snowpark_connect/utils/session.py,sha256=dxt5VoqsQqBZd3OaEpoPfA0U-uNFWDP-HWtrcoey7_w,7770
+snowflake/snowpark_connect/utils/scala_udf_utils.py,sha256=RFDDMmgQ_xBWk98kdfWaw4Hla3ZqYf3UAijU4uAUNdA,23011
+snowflake/snowpark_connect/utils/session.py,sha256=BWwpIbhdplQOhvY4GpbLgZ-BrJLidrWR6A_cJmGFCq8,7372
 snowflake/snowpark_connect/utils/snowpark_connect_logging.py,sha256=23bvbALGqixJ3Ap9QWM3OpcKNK-sog2mr9liSmvwqYU,1123
-snowflake/snowpark_connect/utils/telemetry.py,sha256=BRDX1eRXcww2SAiBlVr2fQVR2I6gH50qSCFkOop_x4M,20736
+snowflake/snowpark_connect/utils/telemetry.py,sha256=4TImrE4XY5rfiesHyPQQ0xtU63qnN1wUpDXUP12PAvA,22259
 snowflake/snowpark_connect/utils/udf_cache.py,sha256=8K7kASEhvpnp-l1hjzovjyboUzKctDq7PiGXRcNv6Lg,12125
 snowflake/snowpark_connect/utils/udf_helper.py,sha256=g-TxTs4ARyJWYgADrosfQQG-ykBBQdm1g5opslxJq_E,12563
-snowflake/snowpark_connect/utils/udf_utils.py,sha256=Ey_clI2d-1m4zc51JWNsl8m0i93cS2L9NboLGGVwII8,14213
-snowflake/snowpark_connect/utils/udtf_helper.py,sha256=nTWbrFMc4hHLkalT4nJIsC_c00T6GFkB55JHOw1D-wg,14571
+snowflake/snowpark_connect/utils/udf_utils.py,sha256=pxERcJKum2M5jHxPqsl1NgHFAqZV4RxoEnSLxJV5ups,12009
+snowflake/snowpark_connect/utils/udtf_helper.py,sha256=9B_1iOckfFXQfVv-UHerIJ32fDd4qucKaHGqxtBEi4w,14969
 snowflake/snowpark_connect/utils/udtf_utils.py,sha256=wHO5V0BXRQOLqAYos1vGt8bbdH7jBvD2gwspWywjTtY,33110
 snowflake/snowpark_connect/utils/udxf_import_utils.py,sha256=pPtcaGsyh0tUdy0aAvNqTj04jqPKlEcGmvaZDP9O8Gc,536
 snowflake/snowpark_connect/utils/xxhash64.py,sha256=ysJRxhBPf25LeNhM1RK_H36MWl6q6C6vBRHa-jIna_A,7477
@@ -465,17 +464,17 @@ snowflake/snowpark_decoder/dp_session.py,sha256=HIr3TfKgYl5zqaGR5xpFU9ZVkcaTB9I8
 snowflake/snowpark_decoder/spark_decoder.py,sha256=EQiCvBiqB736Bc17o3gnYGtcYVcyfGxroO5e1kbe1Co,2885
 snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.py,sha256=2eSDqeyfMvmIJ6_rF663DrEe1dg_anrP4OpVJNTJHaQ,2598
 snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.pyi,sha256=aIH23k52bXdw5vO3RtM5UcOjDPaWsJFx1SRUSk3qOK8,6142
-snowpark_connect-0.26.0.data/scripts/snowpark-connect,sha256=yZ94KqbWACxnwV8mpg8NjILvvRNjnF8B3cs3ZFNuIM4,1546
-snowpark_connect-0.26.0.data/scripts/snowpark-session,sha256=NMAHSonTo-nmOZSkQNlszUC0jLJ8QWEDUsUmMe2UAOw,190
-snowpark_connect-0.26.0.data/scripts/snowpark-submit,sha256=Zd98H9W_d0dIqMSkQLdHyW5G3myxF0t4c3vNBt2nD6A,12056
-snowpark_connect-0.26.0.dist-info/licenses/LICENSE-binary,sha256=fmBlX39HwTlBUyiKEznaLZGuxQy-7ndLLG_rTXjF02Y,22916
-snowpark_connect-0.26.0.dist-info/licenses/LICENSE.txt,sha256=Ff9cPv4xu0z7bnMTHzo4vDncOShsy33w4oJMA2xjn6c,11365
-snowpark_connect-0.26.0.dist-info/licenses/NOTICE-binary,sha256=elMF8brgGNJwOz8YdorzBF6-U8ZhR8F-77FfGkZng7U,57843
+snowpark_connect-0.28.0.data/scripts/snowpark-connect,sha256=yZ94KqbWACxnwV8mpg8NjILvvRNjnF8B3cs3ZFNuIM4,1546
+snowpark_connect-0.28.0.data/scripts/snowpark-session,sha256=NMAHSonTo-nmOZSkQNlszUC0jLJ8QWEDUsUmMe2UAOw,190
+snowpark_connect-0.28.0.data/scripts/snowpark-submit,sha256=Zd98H9W_d0dIqMSkQLdHyW5G3myxF0t4c3vNBt2nD6A,12056
+snowpark_connect-0.28.0.dist-info/licenses/LICENSE-binary,sha256=fmBlX39HwTlBUyiKEznaLZGuxQy-7ndLLG_rTXjF02Y,22916
+snowpark_connect-0.28.0.dist-info/licenses/LICENSE.txt,sha256=Ff9cPv4xu0z7bnMTHzo4vDncOShsy33w4oJMA2xjn6c,11365
+snowpark_connect-0.28.0.dist-info/licenses/NOTICE-binary,sha256=elMF8brgGNJwOz8YdorzBF6-U8ZhR8F-77FfGkZng7U,57843
 spark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 spark/connect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 spark/connect/envelope_pb2.py,sha256=7Gc6OUA3vaCuTCIKamb_Iiw7W9jPTcWNEv1im20eWHM,2726
 spark/connect/envelope_pb2.pyi,sha256=VXTJSPpcxzB_dWqVdvPY4KkPhJfh0WmkX7SNHWoLhx0,3358
-snowpark_connect-0.26.0.dist-info/METADATA,sha256=IQ7OoHx9dFxYtBVJaEW2lsjfTj8-K-uyLr3Po1q2SBM,1594
-snowpark_connect-0.26.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-snowpark_connect-0.26.0.dist-info/top_level.txt,sha256=ExnWqVpoTHRG99fu_AxXZVOz8c-De7nNu0yFCGylM8I,16
-snowpark_connect-0.26.0.dist-info/RECORD,,
+snowpark_connect-0.28.0.dist-info/METADATA,sha256=ZYd6i8wRgv9zajEKwzZ_v8jMcLYvPwN4yOGeLF6pj7g,1594
+snowpark_connect-0.28.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+snowpark_connect-0.28.0.dist-info/top_level.txt,sha256=ExnWqVpoTHRG99fu_AxXZVOz8c-De7nNu0yFCGylM8I,16
+snowpark_connect-0.28.0.dist-info/RECORD,,
@@ -1,39 +0,0 @@
-#
-# Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
-#
-
-
-class HiddenColumn:
-    """
-    Represents a hidden column in a Snowflake table.
-
-    Hidden columns are not visible in standard queries but can be accessed
-    directly if needed. This class provides a way to reference such columns
-    in Snowpark operations
-    """
-
-    def __init__(
-        self,
-        hidden_snowpark_name: str,
-        spark_name: str,
-        visible_snowpark_name: str,
-        qualifiers: list[str] | None = None,
-        original_position: int | None = None,
-    ) -> None:
-        """
-        Initializes a HiddenColumn instance.
-
-        Args:
-            name (str): The name of the hidden column.
-        """
-
-        # The Snowpark internal name for the hidden column
-        self.hidden_snowpark_name = hidden_snowpark_name
-        # The Spark name for the hidden column
-        self.spark_name = spark_name
-        # The left side visible Snowpark name for the dropped right side column
-        self.visible_snowpark_name = visible_snowpark_name
-        # Qualifiers for the hidden column (e.g., table or schema names)
-        self.qualifiers = qualifiers if qualifiers is not None else []
-        # The position of the hidden column in the original schema
-        self.original_position = original_position