snowpark-connect 0.33.0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (39)
  1. snowflake/snowpark_connect/column_name_handler.py +42 -56
  2. snowflake/snowpark_connect/config.py +9 -0
  3. snowflake/snowpark_connect/expression/literal.py +12 -12
  4. snowflake/snowpark_connect/expression/map_sql_expression.py +6 -0
  5. snowflake/snowpark_connect/expression/map_unresolved_attribute.py +147 -63
  6. snowflake/snowpark_connect/expression/map_unresolved_function.py +31 -28
  7. snowflake/snowpark_connect/relation/map_aggregate.py +156 -255
  8. snowflake/snowpark_connect/relation/map_column_ops.py +14 -0
  9. snowflake/snowpark_connect/relation/map_join.py +364 -234
  10. snowflake/snowpark_connect/relation/map_sql.py +309 -150
  11. snowflake/snowpark_connect/relation/read/map_read.py +9 -1
  12. snowflake/snowpark_connect/relation/read/map_read_csv.py +19 -2
  13. snowflake/snowpark_connect/relation/read/map_read_json.py +3 -0
  14. snowflake/snowpark_connect/relation/read/map_read_parquet.py +3 -0
  15. snowflake/snowpark_connect/relation/read/map_read_text.py +4 -0
  16. snowflake/snowpark_connect/relation/read/reader_config.py +10 -0
  17. snowflake/snowpark_connect/relation/read/utils.py +41 -0
  18. snowflake/snowpark_connect/relation/utils.py +4 -2
  19. snowflake/snowpark_connect/relation/write/map_write.py +65 -17
  20. snowflake/snowpark_connect/utils/context.py +0 -14
  21. snowflake/snowpark_connect/utils/expression_transformer.py +163 -0
  22. snowflake/snowpark_connect/utils/session.py +0 -4
  23. snowflake/snowpark_connect/utils/udf_helper.py +1 -0
  24. snowflake/snowpark_connect/utils/udtf_helper.py +3 -0
  25. snowflake/snowpark_connect/version.py +1 -1
  26. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/METADATA +2 -2
  27. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/RECORD +35 -38
  28. snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/__init__.py +0 -16
  29. snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/accessors.py +0 -1281
  30. snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/functions.py +0 -203
  31. snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/utils.py +0 -202
  32. {snowpark_connect-0.33.0.data → snowpark_connect-1.0.0.data}/scripts/snowpark-connect +0 -0
  33. {snowpark_connect-0.33.0.data → snowpark_connect-1.0.0.data}/scripts/snowpark-session +0 -0
  34. {snowpark_connect-0.33.0.data → snowpark_connect-1.0.0.data}/scripts/snowpark-submit +0 -0
  35. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/WHEEL +0 -0
  36. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/licenses/LICENSE-binary +0 -0
  37. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/licenses/LICENSE.txt +0 -0
  38. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/licenses/NOTICE-binary +0 -0
  39. {snowpark_connect-0.33.0.dist-info → snowpark_connect-1.0.0.dist-info}/top_level.txt +0 -0
@@ -40,6 +40,47 @@ DATA_SOURCE_SQL_COMMENT = (
 INDEXED_COLUMN_NAME_PATTERN = re.compile(r"(^\"c)(\d+)(\"$)")
 
 
+def apply_metadata_exclusion_pattern(options: dict) -> None:
+    """
+    Exclude metadata and hidden files from reads, matching Spark's behavior.
+
+    Automatically filters out internal metadata files that should never be read as data:
+    - _SUCCESS, _metadata, _common_metadata (Spark/Parquet metadata)
+    - .crc (Hadoop checksum files)
+    - .DS_Store (macOS system files)
+    - Any file starting with _ or .
+
+    Pattern used: ".*/[^_.][^/]*$|^[^_.][^/]*$"
+    - Matches files whose names do NOT start with _ or .
+    - Works at any directory depth (flat or partitioned data)
+    - Allows files with or without extensions
+
+    Examples of excluded files:
+    ❌ _SUCCESS, _metadata, _common_metadata (Spark/Parquet metadata)
+    ❌ .crc, .DS_Store, .hidden (system/hidden files)
+    ❌ year=2024/_SUCCESS (metadata in partitioned directories)
+
+    Examples of allowed files:
+    ✅ part-00000.parquet, data.csv, output.json (data files)
+    ✅ success, myfile (files without extensions that don't start with _ or .)
+    ✅ year=2024/month=01/part-00000.parquet (partitioned data)
+
+    User pattern handling:
+    - No pattern, "*", or ".*" → apply the metadata exclusion pattern
+    - Custom pattern → keep the user-provided pattern
+
+    Leak cases (the user explicitly requests metadata files, so matching them is intentional):
+    ⚠️ "_*" → matches _SUCCESS, _metadata (explicit underscore prefix)
+    ⚠️ "*SUCCESS*" → matches _SUCCESS (broad wildcard side effect)
+    ⚠️ "[_.].*" → matches _SUCCESS, .crc (character class includes _)
+
+    Args:
+        options: Dictionary of Snowpark read options (modified in place)
+    """
+    if "PATTERN" not in options or options["PATTERN"] in ("*", ".*"):
+        options["PATTERN"] = ".*/[^_.][^/]*$|^[^_.][^/]*$"
+
+
 def subtract_one(match: re.Match[str]) -> str:
     """Spark column names are 0 indexed, Snowpark is 1 indexed."""
     return f"_c{str(int(match.group(2)) - 1)}"
@@ -174,6 +174,7 @@ def generate_spark_compatible_filename(
     attempt_number: int = 0,
     compression: str = None,
     format_ext: str = "parquet",
+    shared_uuid: str = None,
 ) -> str:
     """Generate a Spark-compatible filename following the convention:
     part-<task-id>-<uuid>-c<attempt-number>.<compression>.<format>
@@ -183,12 +184,13 @@ def generate_spark_compatible_filename(
         attempt_number: Attempt number (usually 0)
         compression: Compression type (e.g., 'snappy', 'gzip', 'none')
         format_ext: File format extension (e.g., 'parquet', 'csv', 'json')
+        shared_uuid: Shared UUID reused across all part files of a single write
 
     Returns:
         A filename string following Spark's naming convention
     """
-    # Generate a UUID for uniqueness
-    file_uuid = str(uuid.uuid4())
+    # Use the shared UUID if provided, otherwise generate a new one for uniqueness
+    file_uuid = shared_uuid or str(uuid.uuid4())
 
     # Format task ID with leading zeros (5 digits)
     formatted_task_id = f"{task_id:05d}"
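
With a shared UUID, every part file of a single write differs only in its zero-padded task id. A minimal sketch of the documented naming convention (this standalone helper is illustrative, not the package's implementation):

    import uuid

    def spark_style_filename(task_id: int, shared_uuid: str, attempt_number: int = 0,
                             compression: str = "snappy", format_ext: str = "parquet") -> str:
        # part-<task-id>-<uuid>-c<attempt-number>.<compression>.<format>
        name = f"part-{task_id:05d}-{shared_uuid}-c{attempt_number}"
        if compression and compression.lower() != "none":
            name += f".{compression}"
        return f"{name}.{format_ext}"

    run_uuid = str(uuid.uuid4())
    print(spark_style_filename(0, run_uuid))  # e.g. part-00000-<uuid>-c0.snappy.parquet
    print(spark_style_filename(1, run_uuid))  # same UUID, next task id
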
@@ -2,8 +2,10 @@
 # Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
 #
 
+import copy
 import os
 import shutil
+import uuid
 from pathlib import Path
 
 import pyspark.sql.connect.proto.base_pb2 as proto_base
@@ -29,6 +31,7 @@ from snowflake.snowpark.types import (
 )
 from snowflake.snowpark_connect.config import (
     auto_uppercase_column_identifiers,
+    get_success_file_generation_enabled,
     global_config,
     sessions_config,
     str_to_bool,
@@ -249,20 +252,9 @@ def map_write(request: proto_base.ExecutePlanRequest):
             )
 
             if overwrite:
-                try:
-                    path_after_stage = (
-                        write_path.split("/", 1)[1] if "/" in write_path else ""
-                    )
-                    if not path_after_stage or path_after_stage == "/":
-                        logger.warning(
-                            f"Skipping REMOVE for root path {write_path} - too broad scope"
-                        )
-                    else:
-                        remove_command = f"REMOVE '{write_path}/'"
-                        session.sql(remove_command).collect()
-                        logger.info(f"Successfully cleared directory: {write_path}")
-                except Exception as e:
-                    logger.warning(f"Could not clear directory {write_path}: {e}")
+                remove_command = f"REMOVE '{write_path}'"
+                session.sql(remove_command).collect()
+                logger.info(f"Successfully cleared directory: {write_path}")
 
             if should_write_to_single_file and partition_hint is None:
                 # Single file: generate complete filename with extension
@@ -350,15 +342,20 @@ def map_write(request: proto_base.ExecutePlanRequest):
             # Execute multiple COPY INTO operations, one per target file.
             # Since we write per-partition with distinct prefixes, download from the base write path.
             download_stage_path = write_path
+
+            # We need to create a new set of parameters with single=True
+            shared_uuid = str(uuid.uuid4())
+            part_params = copy.deepcopy(dict(parameters))
+            part_params["single"] = True
             for part_idx in range(partition_hint):
-                part_params = dict(parameters)
                 # Preserve Spark-like filename prefix per partition so downloaded basenames
                 # match the expected Spark pattern (with possible Snowflake counters appended).
                 per_part_prefix = generate_spark_compatible_filename(
                     task_id=part_idx,
                     attempt_number=0,
-                    compression=None,
-                    format_ext="",  # prefix only; Snowflake appends extension/counters
+                    compression=compression,
+                    format_ext=extension,
+                    shared_uuid=shared_uuid,
                 )
                 part_params["location"] = f"{write_path}/{per_part_prefix}"
                 (
@@ -368,6 +365,9 @@
                 )
             else:
                 rewritten_df.write.copy_into_location(**parameters)
+
+            generate_success = get_success_file_generation_enabled()
+
             if not is_cloud_path(write_op.path):
                 store_files_locally(
                     download_stage_path,
@@ -375,6 +375,13 @@
                     overwrite,
                     session,
                 )
+                if generate_success:
+                    _write_success_file_locally(write_op.path)
+            else:
+                if generate_success:
+                    _write_success_file_to_stage(
+                        download_stage_path, session, parameters
+                    )
         case "jdbc":
             from snowflake.snowpark_connect.relation.write.map_write_jdbc import (
                 map_write_jdbc,
@@ -1044,6 +1051,47 @@ def handle_column_names(
     return df
 
 
+def _write_success_file_locally(directory_path: str) -> None:
+    """
+    Write a _SUCCESS marker file to a local directory.
+    """
+    try:
+        success_file = Path(directory_path) / "_SUCCESS"
+        success_file.touch()
+        logger.debug(f"Created _SUCCESS file at {directory_path}")
+    except Exception as e:
+        logger.warning(f"Failed to create _SUCCESS file at {directory_path}: {e}")
+
+
+def _write_success_file_to_stage(
+    stage_path: str,
+    session: snowpark.Session,
+    parameters: dict,
+) -> None:
+    """
+    Write a _SUCCESS marker file to a stage location.
+    """
+    try:
+        # Create a dummy dataframe with one row containing "SUCCESS"
+        success_df = session.create_dataframe([["SUCCESS"]]).to_df(["STATUS"])
+        success_params = copy.deepcopy(parameters)
+        success_params["location"] = f"{stage_path}/_SUCCESS"
+        success_params["single"] = True
+        success_params["header"] = True
+
+        # Set CSV format with explicit no compression for the _SUCCESS file
+        success_params["file_format_type"] = "csv"
+        success_params["format_type_options"] = {
+            "COMPRESSION": "NONE",
+        }
+
+        success_df.write.copy_into_location(**success_params)
+
+        logger.debug(f"Created _SUCCESS file at {stage_path}")
+    except Exception as e:
+        logger.warning(f"Failed to create _SUCCESS file at {stage_path}: {e}")
+
+
 def store_files_locally(
     stage_path: str, target_path: str, overwrite: bool, session: snowpark.Session
 ) -> None:
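
For local writes the marker is just an empty _SUCCESS file in the output directory, so downstream jobs can gate on it the same way they would after a Spark write. A small illustrative helper (the path and timeout below are made up for the example):

    import time
    from pathlib import Path

    def wait_for_success_marker(output_dir: str, timeout_s: float = 60.0) -> bool:
        # Poll for the _SUCCESS marker that signals a completed write.
        marker = Path(output_dir) / "_SUCCESS"
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            if marker.exists():
                return True
            time.sleep(1.0)
        return False

    # Example: only read the part files once the writer has dropped its marker.
    if wait_for_success_marker("/tmp/output/orders"):
        print("write finished; safe to read the part files")
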
@@ -55,7 +55,6 @@ _resolving_lambda_fun = ContextVar[bool]("_resolving_lambdas", default=False)
 _current_lambda_params = ContextVar[list[str]]("_current_lambda_params", default=[])
 
 _is_window_enabled = ContextVar[bool]("_is_window_enabled", default=False)
-_is_in_pivot = ContextVar[bool]("_is_in_pivot", default=False)
 _is_in_udtf_context = ContextVar[bool]("_is_in_udtf_context", default=False)
 _accessing_temp_object = ContextVar[bool]("_accessing_temp_object", default=False)
 
@@ -467,19 +466,6 @@ def is_window_enabled():
     return _is_window_enabled.get()
 
 
-@contextmanager
-def temporary_pivot_expression(value: bool):
-    token = _is_in_pivot.set(value)
-    try:
-        yield
-    finally:
-        _is_in_pivot.reset(token)
-
-
-def is_in_pivot() -> bool:
-    return _is_in_pivot.get()
-
-
 def get_is_in_udtf_context() -> bool:
     """
     Gets the value of _is_in_udtf_context for the current context, defaults to False.
@@ -0,0 +1,163 @@
+#
+# Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
+#
+
+from snowflake.snowpark import Column, functions as snowpark_fn
+from snowflake.snowpark._internal.analyzer.expression import (
+    CaseWhen,
+    Expression,
+    FunctionExpression,
+    SnowflakeUDF,
+)
+
+_SF_AGGREGATE_FUNCTIONS = [
+    "any_value",
+    "avg",
+    "corr",
+    "count",
+    "count_if",
+    "covar_pop",
+    "covar_samp",
+    "listagg",
+    "max",
+    "max_by",
+    "median",
+    "min",
+    "min_by",
+    "mode",
+    "percentile_cont",
+    "percentile_disc",
+    "stddev",
+    "stddev_samp",
+    "stddev_pop",
+    "sum",
+    "var_pop",
+    "var_samp",
+    "variance_pop",
+    "variance",
+    "variance_samp",
+    "bitand_agg",
+    "bitor_agg",
+    "bitxor_agg",
+    "booland_agg",
+    "boolor_agg",
+    "boolxor_agg",
+    "hash_agg",
+    "array_agg",
+    "object_agg",
+    "regr_avgx",
+    "regr_avgy",
+    "regr_count",
+    "regr_intercept",
+    "regr_r2",
+    "regr_slope",
+    "regr_sxx",
+    "regr_sxy",
+    "regr_syy",
+    "kurtosis",
+    "skew",
+    "array_union_agg",
+    "array_unique_agg",
+    "bitmap_bit_position",
+    "bitmap_bucket_number",
+    "bitmap_count",
+    "bitmap_construct_agg",
+    "bitmap_or_agg",
+    "approx_count_distinct",
+    "datasketches_hll",
+    "datasketches_hll_accumulate",
+    "datasketches_hll_combine",
+    "datasketches_hll_estimate",
+    "hll",
+    "hll_accumulate",
+    "hll_combine",
+    "hll_estimate",
+    "hll_export",
+    "hll_import",
+    "approximate_jaccard_index",
+    "approximate_similarity",
+    "minhash",
+    "minhash_combine",
+    "approx_top_k",
+    "approx_top_k_accumulate",
+    "approx_top_k_combine",
+    "approx_top_k_estimate",
+    "approx_percentile",
+    "approx_percentile_accumulate",
+    "approx_percentile_combine",
+    "approx_percentile_estimate",
+    "grouping",
+    "grouping_id",
+    "ai_agg",
+    "ai_summarize_agg",
+]
+
+
+def _is_agg_function_expression(expression: Expression) -> bool:
+    if (
+        isinstance(expression, FunctionExpression)
+        and expression.pretty_name.lower() in _SF_AGGREGATE_FUNCTIONS
+    ):
+        return True
+
+    # For PySpark aggregate functions that were mapped using a UDAF, e.g. try_sum
+    if isinstance(expression, SnowflakeUDF) and expression.is_aggregate_function:
+        return True
+
+    return False
+
+
+def _get_child_expressions(expression: Expression) -> list[Expression]:
+    if isinstance(expression, CaseWhen):
+        return expression._child_expressions
+
+    return expression.children or []
+
+
+def inject_condition_to_all_agg_functions(
+    expression: Expression, condition: Column
+) -> None:
+    """
+    Recursively traverses an expression tree and wraps all aggregate function arguments with a CASE WHEN condition.
+
+    Args:
+        expression: The Snowpark expression tree to traverse and modify.
+        condition: The Column condition to inject into aggregate function arguments.
+    """
+
+    any_agg_function_found = _inject_condition_to_all_agg_functions(
+        expression, condition
+    )
+
+    if not any_agg_function_found:
+        raise ValueError(f"No aggregate functions found in: {expression.sql}")
+
+
+def _inject_condition_to_all_agg_functions(
+    expression: Expression, condition: Column
+) -> bool:
+    any_agg_function_found = False
+
+    if _is_agg_function_expression(expression):
+        new_children = []
+        for child in _get_child_expressions(expression):
+            case_when = snowpark_fn.when(condition, Column(child))
+
+            new_children.append(case_when._expr1)
+
+        # Swap children
+        expression.children = new_children
+        if len(new_children) > 0:
+            expression.child = new_children[0]
+
+        return True
+
+    for child in _get_child_expressions(expression):
+        is_agg_function_in_child = _inject_condition_to_all_agg_functions(
+            child, condition
+        )
+
+        if is_agg_function_in_child:
+            any_agg_function_found = True
+
+    return any_agg_function_found
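
The effect of injecting a condition into an aggregate is the usual NULL-filtering trick: SUM(x) becomes SUM(CASE WHEN cond THEN x END), so rows failing the condition are ignored by the aggregate. A hedged sketch of the equivalent rewrite using only the public Snowpark DataFrame API (the transformer above performs this on expression trees instead; column and table names here are illustrative):

    from snowflake.snowpark import functions as F

    def sum_where(col, cond):
        # SUM(CASE WHEN cond THEN col END): rows that fail cond contribute NULL,
        # which aggregate functions skip, matching the injected-condition behavior.
        return F.sum(F.when(cond, col))

    # Illustrative usage against a hypothetical DataFrame:
    # df.group_by("region").agg(sum_where(F.col("amount"), F.col("amount") > 0))
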
@@ -96,8 +96,6 @@ def configure_snowpark_session(session: snowpark.Session):
 
     # Scoped temp objects may not be accessible in stored procedure and cause "object does not exist" error. So disable
     # _use_scoped_temp_objects here and use temp table instead.
-    # Note that we also set PYTHON_SNOWPARK_USE_SCOPED_TEMP_OBJECTS to false below to enforce the current Snowpark
-    # session does not use scoped temp objects too.
     session._use_scoped_temp_objects = False
 
     # Configure CTE optimization based on session configuration
@@ -134,8 +132,6 @@ def configure_snowpark_session(session: snowpark.Session):
         "TIMEZONE": f"'{global_config.spark_sql_session_timeZone}'",
         "QUOTED_IDENTIFIERS_IGNORE_CASE": "false",
         "PYTHON_SNOWPARK_ENABLE_THREAD_SAFE_SESSION": "true",
-        # this is required for creating udfs from sproc and avoid "object does not exist" error
-        "PYTHON_SNOWPARK_USE_SCOPED_TEMP_OBJECTS": "false",
         "ENABLE_STRUCTURED_TYPES_IN_SNOWPARK_CONNECT_RESPONSE": "true",
         "QUERY_TAG": f"'{query_tag}'",
     }
@@ -186,6 +186,7 @@ def parse_return_type(return_type_json_str) -> Optional[DataType]:
 
 
 def create(session, called_from, return_type_json_str, input_types_json_str, input_column_names_json_str, udf_name, replace, udf_packages, udf_imports, b64_str, original_return_type):
+    session._use_scoped_temp_objects = False
     import snowflake.snowpark.context as context
     context._use_structured_type_semantics = True
     context._is_snowpark_connect_compatible_mode = True
@@ -153,6 +153,7 @@ def parse_types(types_json_str) -> Optional[list[DataType]]:
     return json.loads(types_json_str)
 
 def create(session, b64_str, expected_types_json_str, output_schema_json_str, packages, imports, is_arrow_enabled, is_spark_compatible_udtf_mode_enabled, called_from):
+    session._use_scoped_temp_objects = False
     import snowflake.snowpark.context as context
     context._use_structured_type_semantics = True
     context._is_snowpark_connect_compatible_mode = True
@@ -257,6 +258,7 @@ from snowflake.snowpark.types import _parse_datatype_json_value
 {inline_udtf_utils_py_code}
 
 def create(session, b64_str, spark_column_names_json_str, input_schema_json_str, return_schema_json_str):
+    session._use_scoped_temp_objects = False
     import snowflake.snowpark.context as context
     context._use_structured_type_semantics = True
     context._is_snowpark_connect_compatible_mode = True
@@ -330,6 +332,7 @@ from snowflake.snowpark.types import _parse_datatype_json_value
 from pyspark.serializers import CloudPickleSerializer
 
 def create(session, func_info_json):
+    session._use_scoped_temp_objects = False
     import snowflake.snowpark.context as context
     context._use_structured_type_semantics = True
     context._is_snowpark_connect_compatible_mode = True
@@ -2,4 +2,4 @@
 #
 # Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
 #
-VERSION = (0,33,0)
+VERSION = (1,0,0)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: snowpark-connect
-Version: 0.33.0
+Version: 1.0.0
 Summary: Snowpark Connect for Spark
 Author: Snowflake, Inc
 License: Apache License, Version 2.0
@@ -18,7 +18,7 @@ Requires-Dist: jpype1
 Requires-Dist: protobuf<6.32.0,>=4.25.3
 Requires-Dist: s3fs>=2025.3.0
 Requires-Dist: snowflake.core<2,>=1.0.5
-Requires-Dist: snowflake-snowpark-python[pandas]<1.41.0,==1.40.0
+Requires-Dist: snowflake-snowpark-python[pandas]<1.43.0,==1.42.0
 Requires-Dist: snowflake-connector-python<4.0.0,>=3.18.0
 Requires-Dist: sqlglot>=26.3.8
 Requires-Dist: jaydebeapi
@@ -1,8 +1,8 @@
 snowflake/snowpark_connect/__init__.py,sha256=dbyVECHix6Z2sa32KJp-IwbsZI0rMlr_yCrIIYQlMrs,679
 snowflake/snowpark_connect/client.py,sha256=K0PK8aIppghBnY0uKlDOi4WZBUhKxJQ_vRuDpB3YsbQ,2045
-snowflake/snowpark_connect/column_name_handler.py,sha256=u-KJfL6DOcGKcUEsOMkOpi3s6Wr8p1hOpzd08CTMvGE,35389
+snowflake/snowpark_connect/column_name_handler.py,sha256=B-3kN2HR4QRRp-Q6D8LMPsaNvti_jhJj3vy6c1oek9I,34758
 snowflake/snowpark_connect/column_qualifier.py,sha256=cRJklMDA1Nz34iwarhIyO_KWx_3leKTDYi0FH_tW0p8,1364
-snowflake/snowpark_connect/config.py,sha256=hIUoGgDeS3zKig75OTBfuwjG-YiiCUyYWxgII9TFJLg,31048
+snowflake/snowpark_connect/config.py,sha256=F9K-rgoepWRXTjdsjA0YGICrJ7mFLQUohGeWuMnaWro,31402
 snowflake/snowpark_connect/constants.py,sha256=FBDxNUxdqWxnf6d5-eanHlYdFFyQqCqvNyZG-uOiO6Q,598
 snowflake/snowpark_connect/control_server.py,sha256=mz3huYo84hgqUB6maZxu3LYyGq7vVL1nv7-7-MjuSYY,1956
 snowflake/snowpark_connect/dataframe_container.py,sha256=5M-JSHWmjIUyMJ8jxthaa34p534i-1AHx6GWsGHRTqQ,8892
@@ -15,7 +15,7 @@ snowflake/snowpark_connect/start_server.py,sha256=udegO0rk2FeSnXsIcCIYQW3VRlGDjB
 snowflake/snowpark_connect/tcm.py,sha256=ftncZFbVO-uyWMhF1_HYKQykB7KobHEYoyQsYbQj1EM,203
 snowflake/snowpark_connect/type_mapping.py,sha256=MtdyFMHDa9ovwXnmjvKd82YeBU2760Vuvq1wqWlUtCc,48593
 snowflake/snowpark_connect/typed_column.py,sha256=fpaW6Vb-J15ObKFtE82iy_k3tJpaRDL39XxUBchH5nU,4006
-snowflake/snowpark_connect/version.py,sha256=4vUgjq2_Fv8jTILQ7RtujyeGY7YQ90E1riGhfMENsMs,118
+snowflake/snowpark_connect/version.py,sha256=kGvXcbEP_-KFd0MR8SWNEC6ia1RmRUPoPexhcnToA08,117
 snowflake/snowpark_connect/analyze_plan/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/analyze_plan/map_tree_string.py,sha256=RFSNdDwOTXOd3gm_rKRJNyDd65zQpBxBpVOOwmKaFVA,1661
 snowflake/snowpark_connect/error/__init__.py,sha256=oQo6k4zztLmNF1c5IvJLcS99J6RWY9KBTN3RJ2pKimg,249
@@ -30,15 +30,15 @@ snowflake/snowpark_connect/execute_plan/utils.py,sha256=tyLwu3jpQL7uaTVyLLNUkeYe
 snowflake/snowpark_connect/expression/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/expression/function_defaults.py,sha256=VUv2XenDdY1e37uvb9Cd3vofZGUw6QidZwGaIIQ9rTg,6971
 snowflake/snowpark_connect/expression/hybrid_column_map.py,sha256=Pkuy4r5_AGNfU1BBS-3b4KxNRIFROqKSAogbMmsuqzw,7650
-snowflake/snowpark_connect/expression/literal.py,sha256=dYjWiCofWqWo8izFxZAx8GVibKbVzJPWOAvi5806F1s,4755
+snowflake/snowpark_connect/expression/literal.py,sha256=vdnIVly2c-nOfnyZnYad3m5-zofCemTIaBGL-NyjWZo,4578
 snowflake/snowpark_connect/expression/map_cast.py,sha256=2_KRY1-Zpx_brjWRqJM7Q3FkynIqSZn0yvZ_D8jfaN0,15468
 snowflake/snowpark_connect/expression/map_expression.py,sha256=8SBN9jbLbbzBqCcL0LYBXQg18zoF1K6AueDhBO10DzM,18005
 snowflake/snowpark_connect/expression/map_extension.py,sha256=iA7ZptVSf227ifqMH7HXA1pVZP7kbdZz3YYiT3MCTy8,18769
-snowflake/snowpark_connect/expression/map_sql_expression.py,sha256=sZvIuI1yC6lUTrcrSDoQXEtc3umy_ngOWT4aYEFOdVM,34848
+snowflake/snowpark_connect/expression/map_sql_expression.py,sha256=YxzwmACPLBgCsGlmcgAK2g3wJ7RJJiPp_NL2pwn9_fI,35051
 snowflake/snowpark_connect/expression/map_udf.py,sha256=_T7T2cXz-bQQwXgHseOCPy-jMcx1Dy24CCBWpDPM90o,8494
-snowflake/snowpark_connect/expression/map_unresolved_attribute.py,sha256=k9IT3rGrzPWQw-ayb3MJHftBgQK8z1Q3rq94b0lGpcc,21900
+snowflake/snowpark_connect/expression/map_unresolved_attribute.py,sha256=HqG0zaq6gKKSp7QjlpaKaS1OQTHkF6qidTiGK4TpJp0,25352
 snowflake/snowpark_connect/expression/map_unresolved_extract_value.py,sha256=A-m-RczZW6xHMjgYR5RV_vzMTpNBRoH3Tk_A1V8z_pk,5382
-snowflake/snowpark_connect/expression/map_unresolved_function.py,sha256=Fb8MRJlwXy5zcVBjneSLQ9OkRfs1KlJzORbyVWAvmSg,550990
+snowflake/snowpark_connect/expression/map_unresolved_function.py,sha256=ZuNZr2kOljNCEfRe-Sjn3VHnYfjs1V0aoqdJtkBZVqs,551154
 snowflake/snowpark_connect/expression/map_unresolved_star.py,sha256=Jy0yizUCoL_N4tsibMJbPUbM-niziRJdj6n6CboiKO0,9271
 snowflake/snowpark_connect/expression/map_update_fields.py,sha256=EyUmPa3mARTYEooB4ziulLt4BTFe1L_Xp-NXbTto5rg,7296
 snowflake/snowpark_connect/expression/map_window_function.py,sha256=N-O7SUEnzR-Ck7l8KhPPk4dj1W3hJ5v6Uvaspb85VUE,13032
@@ -210,10 +210,6 @@ snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/__init__.py,sha25
 snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/core.py,sha256=CffmuLf7kRgH9hOt7hyRlUEnR4DkepDPZH_gXcU3Vfc,44046
 snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/matplotlib.py,sha256=WgBw_N9QAGe_yepp-h8KDA9SN8sJ0hgD-6Q4zcUwMUM,31119
 snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/plotly.py,sha256=SK3bBF1UPIRSNElVhmuwJkUoA7_LSwK5bWZVmqa-lyY,9351
-snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/__init__.py,sha256=c0bKYjIaffoqkm7ZgaR53O64pH33BYzSzFxNFPweSjU,784
-snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/accessors.py,sha256=i9gySPM8VPmbeVL1y21AXLwf-5iLBhkEf8qum0AmfuI,42991
-snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/functions.py,sha256=ML4rGOXa-9_1bU25K7td69TC-4SQXDxOswJlill1mbg,6452
-snowflake/snowpark_connect/includes/python/pyspark/pandas/spark/utils.py,sha256=Daz64KQJW4ejTgbacgMxF0xck_5fJmpX89ukwYL1nLY,6555
 snowflake/snowpark_connect/includes/python/pyspark/pandas/typedef/__init__.py,sha256=upsqRldnUGWbIywKe_tD81yr4wAF3AGuAEJeclwDKL8,856
 snowflake/snowpark_connect/includes/python/pyspark/pandas/typedef/typehints.py,sha256=rN739ELncqheny9CgFjKNW2gOheFy-7Y8yb59BrERsM,32198
 snowflake/snowpark_connect/includes/python/pyspark/pandas/usage_logging/__init__.py,sha256=ywDs8_5pRSt0cIo1aIOF0qfhTR55KV2zqDSLAYag6Ew,4945
@@ -340,53 +336,54 @@ snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.py,sha256=now6RWB80C
 snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.pyi,sha256=4PZkACNf5ulshE8yP2Sl36CXH4UBpIKvZPhzFqeEbsM,7302
 snowflake/snowpark_connect/relation/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/relation/io_utils.py,sha256=Fry5XxJ9RB3uqeCwwqkeOtpUe4Jl9U_bkw1ltxIzoAs,4525
-snowflake/snowpark_connect/relation/map_aggregate.py,sha256=ZkPO7H2_kaC0Q3zGD9sP5mt73C9iCsj3ZiGMgrs6gvU,19313
+snowflake/snowpark_connect/relation/map_aggregate.py,sha256=jHR4sKtvwGwtHvF-uJjLIVT8yF4s33uqG1oJJHnKz6I,13643
 snowflake/snowpark_connect/relation/map_catalog.py,sha256=v0BDoPPDCnKPxb-HLllflbfa1SfNEsj9pekB5o8jOy0,6088
-snowflake/snowpark_connect/relation/map_column_ops.py,sha256=ITEl1PRLGLWLUot__CG55wPod-rK9Vf1dZXZIHwkvno,51427
+snowflake/snowpark_connect/relation/map_column_ops.py,sha256=FfF3WiY8g7AhEtDDFONaGX-xSp2wpNK-ed5FbsRlKAI,52184
 snowflake/snowpark_connect/relation/map_crosstab.py,sha256=H_J8-IARK6zMEUFrOjKif1St6M20gvBAnP0EuArFHGg,2422
 snowflake/snowpark_connect/relation/map_extension.py,sha256=C4OVRq6fEy1i16ECAZ-dKSI6IA7rCWZqZ0jkMe40U1w,24564
-snowflake/snowpark_connect/relation/map_join.py,sha256=6oX4Tr3o34a0xCt2ZAtEWBpzj4Qu3aeqz5M1weq5WRs,18475
+snowflake/snowpark_connect/relation/map_join.py,sha256=oZS3Fk-V1d5NmLXQBNRHRsS2V1sc2OXfQtH-3UK8Dac,22325
 snowflake/snowpark_connect/relation/map_local_relation.py,sha256=yypaF15RfHQ9QBaDYTlfAp5LzzgqXmVzUIjcBIxihAo,15477
 snowflake/snowpark_connect/relation/map_map_partitions.py,sha256=y6WhtacyYRmV8zLv3dQtBP2asijLze363htQbgNyoak,4130
 snowflake/snowpark_connect/relation/map_relation.py,sha256=xKdTYdl7i6n3t843nT6hT3Y6aeFWTppVulAo_at1fXE,14022
 snowflake/snowpark_connect/relation/map_row_ops.py,sha256=BlCU0OphGFkqgQtp3r6_IlnSyFMayCjhUxJL7e6XqmE,39295
 snowflake/snowpark_connect/relation/map_sample_by.py,sha256=8ALQbeUsB89sI3uiUFqG3w1A4TtOzOAL4umdKp6-c38,1530
 snowflake/snowpark_connect/relation/map_show_string.py,sha256=-QgOCZ_Kf7qDBN6hd0-eTxXqvVvzoJxw5hGBdk9Upgs,3669
-snowflake/snowpark_connect/relation/map_sql.py,sha256=_RzMSCmCJALZaDqMCmJq7zc4P1qG0jM6ezSBGirrQ88,115363
+snowflake/snowpark_connect/relation/map_sql.py,sha256=Xi7q2LMLy41lutTgWYqPJVL-fX-jH-vlobUaFKbYAMc,121467
 snowflake/snowpark_connect/relation/map_stats.py,sha256=VOwMWX4agHsYVinO8h3Mw8yaY-vM1zeqHCAWv8P1bjc,14762
 snowflake/snowpark_connect/relation/map_subquery_alias.py,sha256=XIm2K4unqlhapugeHOzjjWvJjFJhgP_PTwXfWDvypSI,1564
 snowflake/snowpark_connect/relation/map_udtf.py,sha256=FgBYTtP3ao60juz053XMdPTqweRN7LFj7y5WuDJDWn0,13900
 snowflake/snowpark_connect/relation/stage_locator.py,sha256=njZMfJUSpoLB88z8Y720htxhUMn_6AvZl_8HOT0zp2A,9191
-snowflake/snowpark_connect/relation/utils.py,sha256=RdRRmbYAAVNmZPQ9eKjE2BkpQrXOD8_MJV0beiQOsw8,10620
+snowflake/snowpark_connect/relation/utils.py,sha256=gPGfp4j_sP8JV5QCKsRmP2_7VUun3DzB4YqpZkcyC98,10756
 snowflake/snowpark_connect/relation/catalogs/__init__.py,sha256=0yJ5Nfg7SIxudI0P7_U5EWPyiTpkMet8tSq-IwutSZo,265
 snowflake/snowpark_connect/relation/catalogs/abstract_spark_catalog.py,sha256=SJhGhiuHbjeppEvaI6eU6Pnas8ul6BjZDBRESV7CkMM,11984
 snowflake/snowpark_connect/relation/catalogs/snowflake_catalog.py,sha256=4dSTNqMOydnLYtnX0tcH7RrDrCiOm6iCyCJ6sNZADGs,29965
 snowflake/snowpark_connect/relation/catalogs/utils.py,sha256=zexCUmfva3ejfuRi5XR9Z1JdosGkq9PPdX_H5zBt8Vo,2209
 snowflake/snowpark_connect/relation/read/__init__.py,sha256=5J3IOTKu4Qmenouz1Oz_bUu_4c6KpxtaC63mPUGLyeY,132
 snowflake/snowpark_connect/relation/read/jdbc_read_dbapi.py,sha256=aLXx5pOYZ_0aUQR-DsmFuyd1ZdA49OBuCpz6YXREZww,26525
-snowflake/snowpark_connect/relation/read/map_read.py,sha256=JbmL9SXJDxzrncDRYhnniyxOWcAemwBblXGOt0osZxw,16376
-snowflake/snowpark_connect/relation/read/map_read_csv.py,sha256=-_DYletV3neK88eRMQoCnkXcqw4I6piM3qr_mkuzjTw,8762
+snowflake/snowpark_connect/relation/read/map_read.py,sha256=do64q9GKqPTkBKY9KEnv1T6V5__C3EpmJMV7kOX2F0g,16656
+snowflake/snowpark_connect/relation/read/map_read_csv.py,sha256=o1ReOHZr7_lte7YmYmAwCrjQoy9DsxRsY-9sHv5E2WA,9398
 snowflake/snowpark_connect/relation/read/map_read_jdbc.py,sha256=bdDlX8oQfHvHysG8cnqaEhfRrX0rPuEWsUw4CNIPSUM,4607
-snowflake/snowpark_connect/relation/read/map_read_json.py,sha256=i2M2xt-Mpqoj6vr_yGbhZ49Bl66J46YQ3jCd4y5Bcn8,17011
-snowflake/snowpark_connect/relation/read/map_read_parquet.py,sha256=n1xv2DfcHsq3rg6j_NP4V1CFtSSQZbeCedCGMOlZf2k,7944
+snowflake/snowpark_connect/relation/read/map_read_json.py,sha256=uSPwJ4bPcrWd2PnLliV_3uabl7XQ-5QM80bN2LizSMM,17109
+snowflake/snowpark_connect/relation/read/map_read_parquet.py,sha256=B5NhBBJlMA3wq-nPpI6ezv0VcIG9QV5YC-0ODKQM0XY,8038
 snowflake/snowpark_connect/relation/read/map_read_socket.py,sha256=BsTCN3X1zW4OatPHW7Pe-xvicCTZPxVJ76cMfCNka2M,2750
 snowflake/snowpark_connect/relation/read/map_read_table.py,sha256=wYOtPlE-ThRVojUI3gwRrajtNDPdSExQ8u4PqX77n6U,7352
-snowflake/snowpark_connect/relation/read/map_read_text.py,sha256=qpEqi6FfAR_BbpjD7xUqpdyLhiAjj_ZERilhpFOU5_g,3947
+snowflake/snowpark_connect/relation/read/map_read_text.py,sha256=BlErI72zzsP6sQcaWfs52fJmrJvGfWRV7GcbAZotZPM,4058
 snowflake/snowpark_connect/relation/read/metadata_utils.py,sha256=j9Dkg9w7CwcXINW9Q7Am0jO1jtlqg6myx-AIw6o0YnI,5682
-snowflake/snowpark_connect/relation/read/reader_config.py,sha256=PMh1R5IjqqTwiAAqvDRhnTx8Jxnhq8wVCmpZRqqKD3E,16437
-snowflake/snowpark_connect/relation/read/utils.py,sha256=rIIM6d2WXHh7MLGyHNiRc9tS8b0dmyFQr7rHepIYJOU,4111
+snowflake/snowpark_connect/relation/read/reader_config.py,sha256=FYJwp6asJS2y-uT8wkqVv6hod4mnSTKiNhV8p6kEdwg,16934
+snowflake/snowpark_connect/relation/read/utils.py,sha256=kwvaF7lZv_kGDxT3yAEPwngT6X36U1r3JkXzb70tPXg,5952
 snowflake/snowpark_connect/relation/write/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/relation/write/jdbc_write_dbapi.py,sha256=2N4Z9J8ZCxdMXAQcKqEkZ0M4O3ICTih2gcR9iG2WA1E,12796
-snowflake/snowpark_connect/relation/write/map_write.py,sha256=jN9A8b3kIyIXMPzXG715SIYwU3yK9jHwTx8-LkbnrY0,47202
+snowflake/snowpark_connect/relation/write/map_write.py,sha256=tBN3MIRuY2GFI6DcOzMELAJoc-nhpJAZRcFXMyBNy-g,48654
 snowflake/snowpark_connect/relation/write/map_write_jdbc.py,sha256=95fkSAz9QKbk59XF0u_Reru5E6s5gVlNBAMQLcn24DQ,1755
 snowflake/snowpark_connect/resources/java_udfs-1.0-SNAPSHOT.jar,sha256=tVyOp6tXxu9nm6SDufwQiGzfH3pnuh_7PowsMZxOolY,9773
 snowflake/snowpark_connect/utils/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/utils/artifacts.py,sha256=TkHZ2uNfZiphgtG91V1_c_h9yP9dP677BXUMymboCss,2498
 snowflake/snowpark_connect/utils/cache.py,sha256=bAyoNBW6Z1ui9BuppDywbQeG6fdju4L-owFHzySOTnk,3382
 snowflake/snowpark_connect/utils/concurrent.py,sha256=BTbUmvupLzUSRd6L7kKk9yIXFdqlDOkXebVMaECRD-A,3653
-snowflake/snowpark_connect/utils/context.py,sha256=Sz15ktGNCQEr2rBEzxVADMdKk6mncRXVmIGrbgwXP3w,14649
+snowflake/snowpark_connect/utils/context.py,sha256=6U_8zv-m3RBdFXkb9ql4bwF2L93kCuxz9HnZ1f6X8tI,14358
 snowflake/snowpark_connect/utils/describe_query_cache.py,sha256=HM18szi_s-4hWNChtwNyMQ_i1QMB6iOBx2JA6qDDt9I,9434
 snowflake/snowpark_connect/utils/env_utils.py,sha256=VLCaAYDqJRH4aD0UMWKrFW2EHEJuXwFwjxOX6TzaxFg,1738
+snowflake/snowpark_connect/utils/expression_transformer.py,sha256=t4tQZwl6Kk0pUavqHFO74P1KTZiOGhFDgwvoPzmDr9M,3975
 snowflake/snowpark_connect/utils/external_udxf_cache.py,sha256=eSZHMbjTxnkg78IlbG5P1Vno6j5ag_FSI0c4Xi2UyPs,1044
 snowflake/snowpark_connect/utils/identifiers.py,sha256=w5J8nmLWraWVNNu1ztlzylvOlJC1neHSzXmSh8oy1fU,8271
 snowflake/snowpark_connect/utils/interrupt.py,sha256=_awhdrzF1KQO-EQThneEcfMg3Zxed4p3HtMpkcAb6ek,2790
@@ -395,14 +392,14 @@ snowflake/snowpark_connect/utils/pandas_udtf_utils.py,sha256=jXh3jcJPvY31dy9m_WA
 snowflake/snowpark_connect/utils/profiling.py,sha256=3mn3BSpGEGDmbg-3W9Yf6IGN2G5ohIKs9MLh3ZYrRSw,2100
 snowflake/snowpark_connect/utils/scala_udf_utils.py,sha256=pERYK0ah4Blq3MybIgt58kuprez_HwQ6Bu2IVQMmfao,23502
 snowflake/snowpark_connect/utils/sequence.py,sha256=HeoLs8vWFzlCgH-2QMr9IpZiNNYyk4oQCdNYyygWjx4,530
-snowflake/snowpark_connect/utils/session.py,sha256=zsCq2aNwq3hU6v0DVKnIPI6R9DHgJN4AoqhiXOZukcI,8759
+snowflake/snowpark_connect/utils/session.py,sha256=CwIMNVcsf75nDA-XF0tUNdbLgNo9z_pu8TkecXqAT6E,8436
 snowflake/snowpark_connect/utils/snowpark_connect_logging.py,sha256=23bvbALGqixJ3Ap9QWM3OpcKNK-sog2mr9liSmvwqYU,1123
 snowflake/snowpark_connect/utils/telemetry.py,sha256=rjk-ej5yZg7hDva5KRRNOOY6xnjbEGNWMIslhO7QFtA,25549
 snowflake/snowpark_connect/utils/temporary_view_cache.py,sha256=1GdMrsciheyUMY8bI3y_B_DrVJb8n7nAr2mxWEIfmBo,2350
 snowflake/snowpark_connect/utils/udf_cache.py,sha256=04Srm-55QXVpcAY0N3heFs0cSQ-Emkl1vq7EaT-Zy-w,13500
-snowflake/snowpark_connect/utils/udf_helper.py,sha256=y39U7ZimK6q6zSYmulg8BOw1kUJnkx9FDFXsEumasj8,13402
+snowflake/snowpark_connect/utils/udf_helper.py,sha256=rrdttOk40RfiEr3AJVeiSuxjz0QMSMv3dzG7mgjbNPs,13447
 snowflake/snowpark_connect/utils/udf_utils.py,sha256=X6nn12-CILuHRi0mUicicYTfhA7VoQQfPjgfKDt7pF0,12191
-snowflake/snowpark_connect/utils/udtf_helper.py,sha256=dvesL5p3Mh4oBWkkS4kh19dNTTcFJ1MXFnhfbz9p85A,15227
+snowflake/snowpark_connect/utils/udtf_helper.py,sha256=n8dK0QGz81iJXmRB9b2BskyMUKVvNSnUkHGFSHNQpVo,15362
 snowflake/snowpark_connect/utils/udtf_utils.py,sha256=SNzvrvQxKcSpVx3OmB5UMIcpzb5xkmTMNIfeARQpdzs,34179
 snowflake/snowpark_connect/utils/udxf_import_utils.py,sha256=pPtcaGsyh0tUdy0aAvNqTj04jqPKlEcGmvaZDP9O8Gc,536
 snowflake/snowpark_connect/utils/xxhash64.py,sha256=ysJRxhBPf25LeNhM1RK_H36MWl6q6C6vBRHa-jIna_A,7477
@@ -411,17 +408,17 @@ snowflake/snowpark_decoder/dp_session.py,sha256=JFrIL-EiTu72i4TmOUSDSLvFCW8cKCgn
 snowflake/snowpark_decoder/spark_decoder.py,sha256=EQiCvBiqB736Bc17o3gnYGtcYVcyfGxroO5e1kbe1Co,2885
 snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.py,sha256=2eSDqeyfMvmIJ6_rF663DrEe1dg_anrP4OpVJNTJHaQ,2598
 snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.pyi,sha256=aIH23k52bXdw5vO3RtM5UcOjDPaWsJFx1SRUSk3qOK8,6142
-snowpark_connect-0.33.0.data/scripts/snowpark-connect,sha256=yZ94KqbWACxnwV8mpg8NjILvvRNjnF8B3cs3ZFNuIM4,1546
-snowpark_connect-0.33.0.data/scripts/snowpark-session,sha256=NMAHSonTo-nmOZSkQNlszUC0jLJ8QWEDUsUmMe2UAOw,190
-snowpark_connect-0.33.0.data/scripts/snowpark-submit,sha256=Zd98H9W_d0dIqMSkQLdHyW5G3myxF0t4c3vNBt2nD6A,12056
-snowpark_connect-0.33.0.dist-info/licenses/LICENSE-binary,sha256=fmBlX39HwTlBUyiKEznaLZGuxQy-7ndLLG_rTXjF02Y,22916
-snowpark_connect-0.33.0.dist-info/licenses/LICENSE.txt,sha256=Ff9cPv4xu0z7bnMTHzo4vDncOShsy33w4oJMA2xjn6c,11365
-snowpark_connect-0.33.0.dist-info/licenses/NOTICE-binary,sha256=elMF8brgGNJwOz8YdorzBF6-U8ZhR8F-77FfGkZng7U,57843
+snowpark_connect-1.0.0.data/scripts/snowpark-connect,sha256=yZ94KqbWACxnwV8mpg8NjILvvRNjnF8B3cs3ZFNuIM4,1546
+snowpark_connect-1.0.0.data/scripts/snowpark-session,sha256=NMAHSonTo-nmOZSkQNlszUC0jLJ8QWEDUsUmMe2UAOw,190
+snowpark_connect-1.0.0.data/scripts/snowpark-submit,sha256=Zd98H9W_d0dIqMSkQLdHyW5G3myxF0t4c3vNBt2nD6A,12056
+snowpark_connect-1.0.0.dist-info/licenses/LICENSE-binary,sha256=fmBlX39HwTlBUyiKEznaLZGuxQy-7ndLLG_rTXjF02Y,22916
+snowpark_connect-1.0.0.dist-info/licenses/LICENSE.txt,sha256=Ff9cPv4xu0z7bnMTHzo4vDncOShsy33w4oJMA2xjn6c,11365
+snowpark_connect-1.0.0.dist-info/licenses/NOTICE-binary,sha256=elMF8brgGNJwOz8YdorzBF6-U8ZhR8F-77FfGkZng7U,57843
 spark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 spark/connect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 spark/connect/envelope_pb2.py,sha256=7Gc6OUA3vaCuTCIKamb_Iiw7W9jPTcWNEv1im20eWHM,2726
 spark/connect/envelope_pb2.pyi,sha256=VXTJSPpcxzB_dWqVdvPY4KkPhJfh0WmkX7SNHWoLhx0,3358
-snowpark_connect-0.33.0.dist-info/METADATA,sha256=9d1tfFC2uW2p0_1BgleIiqkTECRF1Y9-UXa5QkrVkLM,1871
-snowpark_connect-0.33.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-snowpark_connect-0.33.0.dist-info/top_level.txt,sha256=ExnWqVpoTHRG99fu_AxXZVOz8c-De7nNu0yFCGylM8I,16
-snowpark_connect-0.33.0.dist-info/RECORD,,
+snowpark_connect-1.0.0.dist-info/METADATA,sha256=CrUOFci5ZBJdJBRD1uRMQ3eTIP0HSK-yfScsT1-ZxKI,1870
+snowpark_connect-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+snowpark_connect-1.0.0.dist-info/top_level.txt,sha256=ExnWqVpoTHRG99fu_AxXZVOz8c-De7nNu0yFCGylM8I,16
+snowpark_connect-1.0.0.dist-info/RECORD,,