snowpark-connect 1.6.0-py3-none-any.whl → 1.7.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. snowflake/snowpark_connect/client/server.py +37 -0
  2. snowflake/snowpark_connect/config.py +72 -3
  3. snowflake/snowpark_connect/expression/error_utils.py +28 -0
  4. snowflake/snowpark_connect/expression/integral_types_support.py +219 -0
  5. snowflake/snowpark_connect/expression/map_cast.py +108 -17
  6. snowflake/snowpark_connect/expression/map_udf.py +1 -0
  7. snowflake/snowpark_connect/expression/map_unresolved_function.py +229 -96
  8. snowflake/snowpark_connect/includes/jars/json4s-ast_2.13-3.7.0-M11.jar +0 -0
  9. snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.12-0.2.0.jar +0 -0
  10. snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.13-0.2.0.jar +0 -0
  11. snowflake/snowpark_connect/includes/jars/scala-reflect-2.13.16.jar +0 -0
  12. snowflake/snowpark_connect/includes/jars/spark-common-utils_2.13-3.5.6.jar +0 -0
  13. snowflake/snowpark_connect/includes/jars/spark-connect-client-jvm_2.13-3.5.6.jar +0 -0
  14. snowflake/snowpark_connect/includes/jars/spark-sql_2.13-3.5.6.jar +0 -0
  15. snowflake/snowpark_connect/relation/map_aggregate.py +43 -1
  16. snowflake/snowpark_connect/relation/read/map_read_csv.py +73 -4
  17. snowflake/snowpark_connect/relation/read/map_read_jdbc.py +4 -1
  18. snowflake/snowpark_connect/relation/read/map_read_json.py +4 -1
  19. snowflake/snowpark_connect/relation/read/map_read_parquet.py +4 -1
  20. snowflake/snowpark_connect/relation/read/map_read_socket.py +4 -0
  21. snowflake/snowpark_connect/relation/read/map_read_table.py +4 -1
  22. snowflake/snowpark_connect/relation/read/map_read_text.py +4 -1
  23. snowflake/snowpark_connect/relation/read/reader_config.py +6 -0
  24. snowflake/snowpark_connect/resources_initializer.py +90 -29
  25. snowflake/snowpark_connect/server.py +6 -41
  26. snowflake/snowpark_connect/server_common/__init__.py +4 -1
  27. snowflake/snowpark_connect/type_support.py +130 -0
  28. snowflake/snowpark_connect/utils/context.py +8 -0
  29. snowflake/snowpark_connect/utils/java_stored_procedure.py +53 -27
  30. snowflake/snowpark_connect/utils/java_udaf_utils.py +46 -28
  31. snowflake/snowpark_connect/utils/java_udtf_utils.py +1 -1
  32. snowflake/snowpark_connect/utils/jvm_udf_utils.py +48 -15
  33. snowflake/snowpark_connect/utils/scala_udf_utils.py +98 -22
  34. snowflake/snowpark_connect/utils/telemetry.py +33 -22
  35. snowflake/snowpark_connect/utils/udxf_import_utils.py +9 -2
  36. snowflake/snowpark_connect/version.py +1 -1
  37. {snowpark_connect-1.6.0.data → snowpark_connect-1.7.0.data}/scripts/snowpark-submit +12 -2
  38. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/METADATA +4 -2
  39. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/RECORD +46 -37
  40. {snowpark_connect-1.6.0.data → snowpark_connect-1.7.0.data}/scripts/snowpark-connect +0 -0
  41. {snowpark_connect-1.6.0.data → snowpark_connect-1.7.0.data}/scripts/snowpark-session +0 -0
  42. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/WHEEL +0 -0
  43. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/licenses/LICENSE-binary +0 -0
  44. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/licenses/LICENSE.txt +0 -0
  45. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/licenses/NOTICE-binary +0 -0
  46. {snowpark_connect-1.6.0.dist-info → snowpark_connect-1.7.0.dist-info}/top_level.txt +0 -0
@@ -20,6 +20,7 @@ from typing import List, Union
 
 import snowflake.snowpark.types as snowpark_type
 import snowflake.snowpark_connect.includes.python.pyspark.sql.connect.proto.types_pb2 as types_proto
+from snowflake.snowpark_connect.config import get_scala_version
 from snowflake.snowpark_connect.error.error_codes import ErrorCodes
 from snowflake.snowpark_connect.error.error_utils import attach_custom_error_code
 from snowflake.snowpark_connect.type_mapping import map_type_to_snowflake_type
@@ -30,6 +31,7 @@ from snowflake.snowpark_connect.utils.jvm_udf_utils import (
     Signature,
     build_jvm_udxf_imports,
 )
+from snowflake.snowpark_connect.utils.session import get_or_create_snowpark_session
 from snowflake.snowpark_connect.utils.snowpark_connect_logging import logger
 from snowflake.snowpark_connect.utils.udf_utils import (
     ProcessCommonInlineUserDefinedFunction,
@@ -104,7 +106,9 @@ class ScalaUDFDef:
         # Convert Array to Seq for Scala compatibility in function signatures.
         # Replace each "Variant" type with "Any" in the function signature since fromVariant returns Any
         udf_func_input_types = ", ".join(
-            "Any" if p.data_type == "Variant" else p.data_type.replace("Array", "Seq")
+            "Any"
+            if p.data_type == "Variant"
+            else p.data_type  # .replace("Array", "Seq")
             for p in self.scala_signature.params
         )
         udf_func_return_type = self.scala_signature.returns.data_type.replace(
@@ -113,30 +117,41 @@
 
         # Create the Scala arguments and input types string: "arg0: Type0, arg1: Type1, ...".
         joined_wrapper_arg_and_input_types_str = ", ".join(
-            f"{p.name}: {p.data_type}" for p in self.scala_signature.params
+            f"{scala_type.name}: { scala_type.data_type if snowflake_type.data_type != 'VARIANT' else 'Variant'}"
+            for (scala_type, snowflake_type) in zip(
+                self.scala_signature.params, self.signature.params
+            )
         )
 
         # All Scala UDFs return Variant to ensure consistency and avoid type conversion issues.
         wrapper_return_type = "Variant"
         wrapped_args = [
-            f"udfPacket.fromVariant({arg}, {i})" if p.data_type == "Variant" else arg
-            for i, (arg, p) in enumerate(
-                zip(self.scala_invocation_args, self.scala_signature.params)
+            f"UdfPacketUtils.fromVariant{f'[{scala_param.data_type}]' if scala_param.data_type != 'Variant' else '' }({arg if scala_param.data_type != 'Variant' else f'udfPacket, {arg}, {i}'})"
+            if param.data_type == "VARIANT"
+            else arg
+            for i, ((arg, param), scala_param) in enumerate(
+                zip(
+                    zip(self.scala_invocation_args, self.signature.params),
+                    self.scala_signature.params,
+                )
             )
         ]
         invocation_args = ", ".join(wrapped_args)
         invoke_udf_func = f"func({invocation_args})"
 
         # Always wrap the result in Utils.toVariant() to ensure all Scala UDFs return Variant
-        invoke_udf_func = f"Utils.toVariant({invoke_udf_func})"
+        invoke_udf_func = f"Utils.toVariant({invoke_udf_func}, udfPacket)"
 
         return f"""
 import org.apache.spark.sql.connect.common.UdfPacket
 import com.snowflake.sas.scala.UdfPacketUtils._
+import com.snowflake.sas.scala.UdfPacketUtils
 import com.snowflake.sas.scala.Utils
 import com.snowflake.snowpark_java.types.Variant
 
 object __RecreatedSparkUdf {{
+  import com.snowflake.sas.scala.FromVariantConverter._
+
   private lazy val udfPacket: UdfPacket = Utils.deserializeUdfPacket("{self.name}.bin")
   private lazy val func: ({udf_func_input_types}) => {udf_func_return_type} = udfPacket.function.asInstanceOf[({udf_func_input_types}) => {udf_func_return_type}]
 
@@ -169,13 +184,15 @@ object __RecreatedSparkUdf {{
         # Handler and imports
         imports_sql = f"IMPORTS = ({', '.join(quote_single(x) for x in self.imports)})"
 
+        scala_version = get_scala_version()
+
         return f"""
 CREATE OR REPLACE TEMPORARY FUNCTION {self.name}({args})
 RETURNS {ret_type}
 LANGUAGE SCALA
 {self.null_handling.value}
-RUNTIME_VERSION = 2.12
-PACKAGES = ('com.snowflake:snowpark:latest')
+RUNTIME_VERSION = {scala_version}
+PACKAGES = ('com.snowflake:snowpark_{scala_version}:latest')
 {imports_sql}
 HANDLER = '__RecreatedSparkUdf.__wrapperFunc'
 AS
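
To make the effect of the template change concrete, here is a small standalone sketch (not code from the package; the function name, argument list, null-handling clause, and stage import are invented placeholders) of the DDL the new template would render once get_scala_version() returns "2.13":

# Illustrative only: the runtime version and Snowpark package artifact now follow the configured Scala version.
scala_version = "2.13"
ddl = f"""
CREATE OR REPLACE TEMPORARY FUNCTION MY_SCALA_UDF(arg0 VARIANT)
RETURNS VARIANT
LANGUAGE SCALA
CALLED ON NULL INPUT
RUNTIME_VERSION = {scala_version}
PACKAGES = ('com.snowflake:snowpark_{scala_version}:latest')
IMPORTS = ('@my_stage/MY_SCALA_UDF.bin')
HANDLER = '__RecreatedSparkUdf.__wrapperFunc'
"""
print(ddl)  # RUNTIME_VERSION = 2.13 and snowpark_2.13 instead of the former hard-coded 2.12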
@@ -229,10 +246,34 @@ def create_scala_udf(pciudf: ProcessCommonInlineUserDefinedFunction) -> ScalaUdf
         pciudf._scala_input_types if pciudf._scala_input_types else pciudf._input_types
     )
 
+    scala_return_type = _map_type_to_scala_type(
+        pciudf._original_return_type, is_input=False
+    )
     scala_input_params: List[Param] = []
     sql_input_params: List[Param] = []
     scala_invocation_args: List[str] = []  # arguments passed into the udf function
-    if input_types:  # input_types can be None when no arguments are provided
+
+    session = get_or_create_snowpark_session()
+    imports = build_jvm_udxf_imports(
+        session,
+        pciudf._payload,
+        udf_name,
+    )
+
+    # If input_types is empty (length 0), it doesn't necessarily mean there are no arguments.
+    # We need to inspect the UdfPacket to determine the actual number of arguments.
+    if (
+        input_types is None or len(input_types) == 0
+    ) and pciudf._called_from == "register_udf":
+        args_scala = _get_input_arg_types_if_udfpacket_input_types_empty(
+            session, imports, udf_name
+        )
+        for i, arg in enumerate(args_scala):
+            param_name = "arg" + str(i)
+            scala_input_params.append(Param(param_name, arg))
+            sql_input_params.append(Param(param_name, "VARIANT"))
+            scala_invocation_args.append(param_name)
+    elif input_types:
         for i, input_type in enumerate(input_types):
             param_name = "arg" + str(i)
             # Create the Scala arguments and input types string: "arg0: Type0, arg1: Type1, ...".
@@ -257,19 +298,6 @@ def create_scala_udf(pciudf: ProcessCommonInlineUserDefinedFunction) -> ScalaUdf
             # In the case of Map input types, we need to cast the argument to the correct type in Scala.
             scala_invocation_args.append(param_name)
 
-    scala_return_type = _map_type_to_scala_type(
-        pciudf._original_return_type, is_input=False
-    )
-    # All Scala UDFs now return VARIANT to ensure consistency and avoid type conversion issues.
-    # The actual type conversion is handled after the UDF is called.
-    from snowflake.snowpark_connect.utils.session import get_or_create_snowpark_session
-
-    session = get_or_create_snowpark_session()
-    imports = build_jvm_udxf_imports(
-        session,
-        pciudf._payload,
-        udf_name,
-    )
     sql_return_type = "VARIANT"
 
     udf_def = ScalaUDFDef(
@@ -289,6 +317,54 @@ def create_scala_udf(pciudf: ProcessCommonInlineUserDefinedFunction) -> ScalaUdf
     return ScalaUdf(udf_name, pciudf._input_types, pciudf._return_type)
 
 
+def _ensure_input_types_udf_created(session, imports: List[str], udf_name: str) -> str:
+    """
+    Create a UDF for getting input types with a unique name based on the UDF name.
+
+    This UDF uses reflection to inspect a serialized UdfPacket
+    and determine the actual input parameter types.
+
+    Returns:
+        The name of the created UDF.
+    """
+
+    def quote_single(s: str) -> str:
+        return "'" + s + "'"
+
+    scala_version = get_scala_version()
+    udf_helper_name = f"__SC_INPUT_ARGS_UDF_{udf_name}"
+    imports_sql = f"IMPORTS = ({', '.join(quote_single(x) for x in imports)})"
+    create_udf_sql = f"""
+CREATE OR REPLACE TEMPORARY FUNCTION {udf_helper_name}(udf_bin_file VARCHAR)
+RETURNS STRING
+LANGUAGE SCALA
+PACKAGES = ('com.snowflake:snowpark_{scala_version}:latest')
+RUNTIME_VERSION = {scala_version}
+{imports_sql}
+HANDLER = 'com.snowflake.sas.scala.handlers.InputTypesUdf.getInputArgTypesWithReflection';"""
+    logger.info(f"Creating UDF for input type inspection: {create_udf_sql}")
+    session.sql(create_udf_sql).collect()
+    return udf_helper_name
+
+
+def _get_input_arg_types_if_udfpacket_input_types_empty(
+    session, imports: List[str], udf_name: str
+) -> list[str]:
+    """
+    Get the number of input arguments from a UdfPacket by calling a Scala UDF.
+
+    This is used when the input_types list is empty (length 0), which doesn't necessarily
+    mean there are no arguments. The UDF uses reflection to inspect the
+    serialized function and determine the actual parameters.
+    """
+    udf_helper_name = _ensure_input_types_udf_created(session, imports, udf_name)
+    result = session.sql(f"SELECT {udf_helper_name}('{udf_name}.bin')").collect()
+    args = str(result[0][0])
+    num_args = len(args.split(", "))
+    logger.info(f"UDF has {num_args} input arguments")
+    return [arg for arg in args.split(", ") if arg]
+
+
 def _map_type_to_scala_type(
     t: Union[snowpark_type.DataType, types_proto.DataType], is_input: bool = False
 ) -> str:
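
As a rough, self-contained illustration (not from the diff: Param is replaced by plain tuples and the reflected type string is invented) of how the reflection fallback above turns the helper UDF's comma-separated result into parameter lists:

# Hypothetical sketch mirroring the new fallback branch in create_scala_udf:
# each argument keeps its reflected Scala type, while the SQL side is always VARIANT.
args = "Int, String"  # e.g. what _get_input_arg_types_if_udfpacket_input_types_empty parses
arg_types = [a for a in args.split(", ") if a]
scala_input_params = [(f"arg{i}", t) for i, t in enumerate(arg_types)]
sql_input_params = [(f"arg{i}", "VARIANT") for i, _ in enumerate(arg_types)]
print(scala_input_params)  # [('arg0', 'Int'), ('arg1', 'String')]
print(sql_input_params)    # [('arg0', 'VARIANT'), ('arg1', 'VARIANT')]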
@@ -62,19 +62,12 @@ class EventType(Enum):
 
 
 # global labels
-SOURCE = "SparkConnectForSnowpark"
+DEFAULT_SOURCE = "SparkConnectForSnowpark"
 SCOS_VERSION = ".".join([str(d) for d in sas_version if d is not None])
 SNOWPARK_VERSION = ".".join([str(d) for d in snowpark_version if d is not None])
 PYTHON_VERSION = get_python_version()
 OS = get_os_name()
 
-STATIC_TELEMETRY_DATA = {
-    TelemetryField.KEY_SOURCE.value: SOURCE,
-    TelemetryField.KEY_VERSION.value: SCOS_VERSION,
-    TelemetryField.KEY_SNOWPARK_VERSION.value: SNOWPARK_VERSION,
-    TelemetryField.KEY_PYTHON_VERSION.value: PYTHON_VERSION,
-    TelemetryField.KEY_OS.value: OS,
-}
 
 # list of config keys for which we record values, other config values are not recorded
 RECORDED_CONFIG_KEYS = {
@@ -117,13 +110,6 @@ class TelemetryMessage:
     is_warning: bool
 
 
-def _basic_telemetry_data() -> dict:
-    return {
-        **STATIC_TELEMETRY_DATA,
-        TelemetryField.KEY_EVENT_ID.value: str(uuid.uuid4()),
-    }
-
-
 def safe(func):
     """
     Decorator to safely execute telemetry functions, catching and logging exceptions
@@ -253,6 +239,7 @@ class Telemetry:
         self._is_enabled = is_enabled
         self._is_initialized = False
         self._lock = threading.Lock()
+        self._source = DEFAULT_SOURCE
 
         # Async processing setup
         self._message_queue = queue.Queue(maxsize=10000)
@@ -261,11 +248,32 @@
     def __del__(self):
         self.shutdown()
 
-    def initialize(self, session: Session):
+    def _get_static_telemetry_data(self) -> dict:
+        """Get static telemetry data with current configuration."""
+        return {
+            TelemetryField.KEY_SOURCE.value: self._source,
+            TelemetryField.KEY_VERSION.value: SCOS_VERSION,
+            TelemetryField.KEY_SNOWPARK_VERSION.value: SNOWPARK_VERSION,
+            TelemetryField.KEY_PYTHON_VERSION.value: PYTHON_VERSION,
+            TelemetryField.KEY_OS.value: OS,
+        }
+
+    def _basic_telemetry_data(self) -> dict:
+        return {
+            **self._get_static_telemetry_data(),
+            TelemetryField.KEY_EVENT_ID.value: str(uuid.uuid4()),
+        }
+
+    def initialize(self, session: Session, source: str = None):
         """
         Must be called after the session is created to initialize telemetry.
         Gets the telemetry client from the session's connection and uses it
         to report telemetry data.
+
+        Args:
+            session: Snowpark Session to use for telemetry
+            source: Optional source identifier for telemetry (e.g., "SparkConnectThinClient").
+                Defaults to "SparkConnectForSnowpark".
         """
         if not self._is_enabled:
             return
@@ -276,12 +284,15 @@
             return
         self._is_initialized = True
 
-        telemetry = getattr(session._conn._conn, "_telemetry", None)
-        if telemetry is None:
+        if source is not None:
+            self._source = source
+
+        telemetry_client = getattr(session._conn._conn, "_telemetry", None)
+        if telemetry_client is None:
             # no telemetry client available, so we export with queries
             self._sink = QueryTelemetrySink(session)
         else:
-            self._sink = ClientTelemetrySink(telemetry)
+            self._sink = ClientTelemetrySink(telemetry_client)
 
         self._start_worker_thread()
         logger.info(f"Telemetry initialized with {type(self._sink)}")
@@ -534,7 +545,7 @@
     @safe
     def send_server_started_telemetry(self):
         message = {
-            **_basic_telemetry_data(),
+            **self._basic_telemetry_data(),
             TelemetryField.KEY_TYPE.value: TelemetryType.TYPE_EVENT.value,
             TelemetryType.EVENT_TYPE.value: EventType.SERVER_STARTED.value,
             TelemetryField.KEY_DATA.value: {
@@ -553,7 +564,7 @@
 
         summary = self._request_summary.get()
         message = {
-            **_basic_telemetry_data(),
+            **self._basic_telemetry_data(),
             TelemetryField.KEY_TYPE.value: TelemetryType.TYPE_REQUEST_SUMMARY.value,
             TelemetryField.KEY_DATA.value: summary,
         }
@@ -589,7 +600,7 @@
             data["spark_operation_id"] = spark_operation_id
 
         message = {
-            **_basic_telemetry_data(),
+            **self._basic_telemetry_data(),
             TelemetryField.KEY_TYPE.value: TelemetryType.TYPE_EVENT.value,
             TelemetryType.EVENT_TYPE.value: EventType.WARNING.value,
             TelemetryField.KEY_DATA.value: data,
@@ -7,8 +7,15 @@ from snowflake.snowpark_connect.config import global_config
 
 
 def get_python_udxf_import_files(session: snowpark.Session) -> str:
-    config_imports = global_config.get("snowpark.connect.udf.imports", "")
-    config_imports = config_imports.strip("[] ").split(",") if config_imports else []
+    config_imports = global_config.get(
+        "snowpark.connect.udf.python.imports",
+        global_config.get("snowpark.connect.udf.imports", ""),
+    )
+    config_imports = (
+        [x.strip() for x in config_imports.strip("[] ").split(",") if x.strip()]
+        if config_imports
+        else []
+    )
     imports = {*session._python_files, *session._import_files, *config_imports}
 
     return ",".join([file for file in imports if file])
@@ -2,4 +2,4 @@
 #
 # Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
 #
-VERSION = (1,6,0)
+VERSION = (1,7,0)
@@ -137,8 +137,7 @@ def init_args(args: list[str] | None = None) -> argparse.Namespace:
         "--files",
         metavar="FILES",
         type=str,
-        nargs="*",
-        help="[DEPRECATED] Comma-separated list of files to be placed in the working directory of each executor.",
+        help="Comma-separated list of files to be placed in the working directory of each executor.",
     )
     options_group.add_argument(
         "--archives",
@@ -222,6 +221,14 @@
         action="store_true",
         help="If given, skip initialize SAS. This is used in server side testing.",
     )
+    spark_connect_group.add_argument(
+        "--scala-version",
+        metavar="SCALA_VERSION",
+        type=str,
+        default="2.12",
+        choices=["2.12", "2.13"],
+        help="Scala binary version for Scala/Java workloads (default: 2.12). Accepts 2.12 or 2.13.",
+    )
     cluster_deploy_group.add_argument(
         "--driver-cores",
         metavar="NUM",
@@ -307,6 +314,7 @@ def generate_spark_submit_cmd(
             "skip_init_sas",
             "deploy_mode",
             "app_arguments",
+            "scala_version",
         ]:
             args_for_spark.append(f"--{k.replace('_', '-')}")
             args_for_spark.append(v)
@@ -315,6 +323,8 @@
         setup_logging(logging.DEBUG)
     else:
         setup_logging(logging.INFO)
+    if getattr(args, "scala_version", "2.12") == "2.13":
+        args_for_spark.extend(["--conf", "snowpark.connect.scala.version=2.13"])
    args_for_spark.append(args.filename)
    args_for_spark.extend(args.app_arguments)
    return args_for_spark
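
Taken together, the snowpark-submit changes above mean that a hypothetical invocation such as `snowpark-submit --scala-version 2.13 my_app.py` (file name invented) would forward `--conf snowpark.connect.scala.version=2.13` to the server while keeping `scala_version` out of the pass-through argument list; omitting the flag keeps the default Scala 2.12 behavior.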
@@ -1,9 +1,10 @@
 Metadata-Version: 2.4
 Name: snowpark-connect
-Version: 1.6.0
+Version: 1.7.0
 Summary: Snowpark Connect for Spark
 Author: Snowflake, Inc
 License: Apache License, Version 2.0
+Keywords: snowflake,snowpark,connect,spark
 Requires-Python: >=3.10,<3.13
 Description-Content-Type: text/markdown
 License-File: LICENSE.txt
@@ -18,7 +19,7 @@ Requires-Dist: jpype1
 Requires-Dist: protobuf<6.32.0,>=4.25.3
 Requires-Dist: s3fs>=2025.3.0
 Requires-Dist: snowflake.core<2,>=1.0.5
-Requires-Dist: snowflake-snowpark-python[pandas]<1.43.0,>=1.42.0
+Requires-Dist: snowflake-snowpark-python[pandas]<1.45.0,>=1.44.0
 Requires-Dist: snowflake-connector-python<4.2.0,>=3.18.0
 Requires-Dist: sqlglot>=26.3.8
 Requires-Dist: jaydebeapi
@@ -36,6 +37,7 @@ Requires-Dist: jdk4py==17.0.9.2; extra == "jdk"
 Dynamic: author
 Dynamic: description
 Dynamic: description-content-type
+Dynamic: keywords
 Dynamic: license
 Dynamic: license-file
 Dynamic: provides-extra
@@ -1,28 +1,29 @@
 snowflake/snowpark_connect/__init__.py,sha256=dbyVECHix6Z2sa32KJp-IwbsZI0rMlr_yCrIIYQlMrs,679
 snowflake/snowpark_connect/column_name_handler.py,sha256=VfvTyisZ1aelMDj4KPJP7w_eeztyQr9zwpvFHwFnpgA,39560
 snowflake/snowpark_connect/column_qualifier.py,sha256=cRJklMDA1Nz34iwarhIyO_KWx_3leKTDYi0FH_tW0p8,1364
-snowflake/snowpark_connect/config.py,sha256=m_6QPDE_Evu1_2rfhZqWLEwPBFqkHD3iUG3DQVpzB6s,33722
+snowflake/snowpark_connect/config.py,sha256=rN8DNmT4l7uboW7hnyuw9V-1AFsp5UkSEEwt3YHDEcM,36810
 snowflake/snowpark_connect/constants.py,sha256=lp7Dc9auDzZoJEgee3iBGHcVY_q8IwbmfJAhqtsYarw,623
 snowflake/snowpark_connect/control_server.py,sha256=mz3huYo84hgqUB6maZxu3LYyGq7vVL1nv7-7-MjuSYY,1956
 snowflake/snowpark_connect/dataframe_container.py,sha256=-vpFhF2P6B9dqWdj0rghrMxa85NZuqnANhwH4UGU2tI,13135
 snowflake/snowpark_connect/dataframe_name_handler.py,sha256=aR-CpdGsN2d6tNW0H_F9P-FLe5hDU68zJwKjsrgeA2g,1682
 snowflake/snowpark_connect/date_time_format_mapping.py,sha256=tDww-6zHTDFThjmvHAt15RzbtmVo6_HM5Vo-QamAZlY,19541
 snowflake/snowpark_connect/empty_dataframe.py,sha256=aKO6JkYnArWCpLGcn9BzvTspw2k_c6eAM0mQImAY0J0,428
-snowflake/snowpark_connect/resources_initializer.py,sha256=bMj_85q__fTdbbZbISKbyXCT3lvOCML-nUC74-VaeFY,7244
-snowflake/snowpark_connect/server.py,sha256=VeqmI1f_iyKY5oCmhS-AylDu69SxC0FGvPPsjJqUQNo,55143
+snowflake/snowpark_connect/resources_initializer.py,sha256=MpTxWcXhbnRPPBzcmiKHWHLd8AH6PUI0opgDqaU2Znk,9525
+snowflake/snowpark_connect/server.py,sha256=KWry_U0pHzvEXBphGPyru19RIhFQ8UYsKkTTggA1c4w,53712
 snowflake/snowpark_connect/snowflake_session.py,sha256=K0PK8aIppghBnY0uKlDOi4WZBUhKxJQ_vRuDpB3YsbQ,2045
 snowflake/snowpark_connect/start_server.py,sha256=GH6f5vj8UJq6A6q-6eBNUdUguqaDVsaui2NGPE2e8VM,2782
 snowflake/snowpark_connect/tcm.py,sha256=ftncZFbVO-uyWMhF1_HYKQykB7KobHEYoyQsYbQj1EM,203
 snowflake/snowpark_connect/type_mapping.py,sha256=i27yPysZHAsJMgsncdaNRtrm-J4d1sTAF0UkNnxvBDo,55444
+snowflake/snowpark_connect/type_support.py,sha256=v37xM2_heH-S3TiDSURyt7B92c6CPu5-62fYZb2XoL4,3633
 snowflake/snowpark_connect/typed_column.py,sha256=mD7YoBxZX2Jnd0YcO7DrMLMi_MLK9K2MmfuMyMJ0SeI,4001
-snowflake/snowpark_connect/version.py,sha256=SWqnurCK0nElB5RvbGiXTRCOkG7wtOIqZd3D9iaMk8c,117
+snowflake/snowpark_connect/version.py,sha256=0bdKHy18jLdFKAmDHycXYij3BHyxQ8usas3hVdhK0nM,117
 snowflake/snowpark_connect/analyze_plan/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/analyze_plan/map_tree_string.py,sha256=-nmDZ42GnJDR209zAXT0Mv4oZMR1tOiudCaqK6vvU7A,1663
 snowflake/snowpark_connect/client/__init__.py,sha256=3i_7KzXSnTsuu-S8-JvtajqZrS3GGbjB4VG_GJBiH_E,320
 snowflake/snowpark_connect/client/error_utils.py,sha256=DOI376qp9wHHCLpKhSKBIGcKbxJTSEkDzVp49JxWdbo,1094
 snowflake/snowpark_connect/client/exceptions.py,sha256=JafORTSf5F3shqwpJMv8sCHA6FWvUwd08oA2V7-Oc8A,1030
 snowflake/snowpark_connect/client/query_results.py,sha256=8O4ltu-8KqnY35AYLqxNub6ySHm8xhzFIH6ZZ_LgphQ,3008
-snowflake/snowpark_connect/client/server.py,sha256=Aur-WCdHZ-e-CU19u0fMolLL5phO3jaEfJSlTNx10tc,27596
+snowflake/snowpark_connect/client/server.py,sha256=I41MpalNXS9AAZfMZPRFMfrOdvjvePtS_qEiydd66R0,29262
 snowflake/snowpark_connect/client/utils/__init__.py,sha256=gj-HIlOmhn8gtiVMDETXwxjhY1lGygNYnjlbjojSkx0,232
 snowflake/snowpark_connect/client/utils/session.py,sha256=IPLF2p-i8yOsTw4yJURPI3c808jUxRaEqQwWDqjaepI,2775
 snowflake/snowpark_connect/error/__init__.py,sha256=oQo6k4zztLmNF1c5IvJLcS99J6RWY9KBTN3RJ2pKimg,249
@@ -35,23 +36,31 @@ snowflake/snowpark_connect/execute_plan/map_execution_command.py,sha256=mw5O2Imo
 snowflake/snowpark_connect/execute_plan/map_execution_root.py,sha256=wyDE4A8Np53idkhDsux7PEnpcmLaajWQvipANJTDv7A,7921
 snowflake/snowpark_connect/execute_plan/utils.py,sha256=tyLwu3jpQL7uaTVyLLNUkeYePKo3UWF3uxQ0VLl7_SQ,8104
 snowflake/snowpark_connect/expression/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
+snowflake/snowpark_connect/expression/error_utils.py,sha256=qG98Ma-uiwkSQdG34u7M_kJniXlMu-2MDmniR7uHzY0,845
 snowflake/snowpark_connect/expression/function_defaults.py,sha256=VUv2XenDdY1e37uvb9Cd3vofZGUw6QidZwGaIIQ9rTg,6971
 snowflake/snowpark_connect/expression/hybrid_column_map.py,sha256=pdWHmZaZYwuxY3i3FGb8_xYpZHARXiM-S_SIUSJIfAU,9600
+snowflake/snowpark_connect/expression/integral_types_support.py,sha256=NVbl8ByyakF9B-AzRrCW9_ur9I7Drel3ShiO0lXvnAg,7744
 snowflake/snowpark_connect/expression/literal.py,sha256=XpHNcGiSEzyOQgLuBsCr3vncxcgqYgERuqlfOPKNI4E,5219
-snowflake/snowpark_connect/expression/map_cast.py,sha256=JdU_EwwD9IFDCSdNDlNxKaqa_Np-bDQIUjiG_DZdCks,19535
+snowflake/snowpark_connect/expression/map_cast.py,sha256=7roYwIKb-xGsiUPvq-5mRLTZfmN3ukGRr7etU95W6-4,24100
 snowflake/snowpark_connect/expression/map_expression.py,sha256=IB08cAIhHrwFIa4XJ082VmQKiPslHdojnpVyJG7NtJQ,17762
 snowflake/snowpark_connect/expression/map_extension.py,sha256=eQs3VKu2LG3q1ys2b3To_z1IxP6EGWT79tRLEXn7phk,19115
 snowflake/snowpark_connect/expression/map_sql_expression.py,sha256=1ZI07juSsSGPe4ni7pWsOjHcXwB7u6aKDAIg4LaeuP8,37289
-snowflake/snowpark_connect/expression/map_udf.py,sha256=aydf5JpsGzSVKg7XRp64v_S0I2iUV81l9KNzttucHQA,11447
+snowflake/snowpark_connect/expression/map_udf.py,sha256=pDxElhsj4PjZ4gVy6g2PzXuAEMez1FIgqQ1tu--hJ-Y,11448
 snowflake/snowpark_connect/expression/map_unresolved_attribute.py,sha256=i29sZ8xPLzcalNwXloKzMWxiTXZaK3Ws-ed_tiGC6zo,31305
 snowflake/snowpark_connect/expression/map_unresolved_extract_value.py,sha256=A-m-RczZW6xHMjgYR5RV_vzMTpNBRoH3Tk_A1V8z_pk,5382
-snowflake/snowpark_connect/expression/map_unresolved_function.py,sha256=oNcTjLEJWJHbY_2-k6egNLB5YcDninunxH20-Az7D94,586465
+snowflake/snowpark_connect/expression/map_unresolved_function.py,sha256=KmBIiB3f8654YBOVtuDYRrgBBMlmBCgdTgvhJUqn8pM,593654
 snowflake/snowpark_connect/expression/map_unresolved_star.py,sha256=pOQChsf04LN7eipWe1t2kFTAr1bMOVsOrQpPVr71EVk,11618
 snowflake/snowpark_connect/expression/map_update_fields.py,sha256=1mSG9EvrKOPDzrKwAjav43SuuyEXyRRpaU0Hq00MSX4,8927
 snowflake/snowpark_connect/expression/map_window_function.py,sha256=N-O7SUEnzR-Ck7l8KhPPk4dj1W3hJ5v6Uvaspb85VUE,13032
 snowflake/snowpark_connect/expression/typer.py,sha256=aV2dmsqsJWhZzeJtu-_xLqVZ8REsU0hTYAEpCrbO5y8,4582
 snowflake/snowpark_connect/includes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.12-0.2.0.jar,sha256=-uaTBAnVouKcc_RMst1a6g2rxSTji9tuHO0eftFhifc,6642006
+snowflake/snowpark_connect/includes/jars/json4s-ast_2.13-3.7.0-M11.jar,sha256=xpS2DHfdyeM6hJ8Vb_Jax3wFKmTVzOX9GAwsFg16ySM,91558
+snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.12-0.2.0.jar,sha256=vUyQilSs9xJt7rG360Jp2dho7jsOXzzYyCS9SxReCmE,6657850
+snowflake/snowpark_connect/includes/jars/sas-scala-udf_2.13-0.2.0.jar,sha256=1EvaoBVqvIQ90bUacLPzC1y6FlswwDro8XF1wP9TOXU,7173221
+snowflake/snowpark_connect/includes/jars/scala-reflect-2.13.16.jar,sha256=-0nM2crHRkSGq5k82iCjwVadjvJvBS6JdXetKklw-x0,3801119
+snowflake/snowpark_connect/includes/jars/spark-common-utils_2.13-3.5.6.jar,sha256=NqQ6vg6rBXWhAmavgFar6JsDmaMLADluoVOxncUQG7g,246971
+snowflake/snowpark_connect/includes/jars/spark-connect-client-jvm_2.13-3.5.6.jar,sha256=e43NLFfXDxpYpnJ866adtKFPTcCarg_LHZn55auEGhk,21881000
+snowflake/snowpark_connect/includes/jars/spark-sql_2.13-3.5.6.jar,sha256=mUoeyQAapSUmszahIKa9DsO8Xvz20SKZpDaAdyJiRtc,9894547
 snowflake/snowpark_connect/includes/python/__init__.py,sha256=mGM7jEgT9Y5-UxiUSIct6Kz6CcxWm6jKvwxhx1-EuWE,893
 snowflake/snowpark_connect/includes/python/pyspark/__init__.py,sha256=ZHHSmpR1qz5qcFwirEx6l6Smt2XsQo5lfv_pm9LkZt8,5461
 snowflake/snowpark_connect/includes/python/pyspark/_globals.py,sha256=-BMZDRqDlx3Mqdoez6dWwqGeMbFbjtwaxaiR2Bci6r0,2275
@@ -344,7 +353,7 @@ snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.py,sha256=dj2Msqz8-N
 snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.pyi,sha256=_E51cMJWkC2hpxFI-YjFZNDGFyDm49zVck0GqvL9tys,7875
 snowflake/snowpark_connect/relation/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/relation/io_utils.py,sha256=6uwox_Ui9UWsXKRDseiSL-8iTCR2p5N6pbgsndu-Bo4,5423
-snowflake/snowpark_connect/relation/map_aggregate.py,sha256=W8j-qTRw9ygDL0775bop8dRxhZmf3KjCLbDtKYYInGQ,15255
+snowflake/snowpark_connect/relation/map_aggregate.py,sha256=LfZKSaQWnlIlOQ2AXsQUNLhWAFzP-hEF68JuzlbK_0I,17084
 snowflake/snowpark_connect/relation/map_catalog.py,sha256=v0BDoPPDCnKPxb-HLllflbfa1SfNEsj9pekB5o8jOy0,6088
 snowflake/snowpark_connect/relation/map_column_ops.py,sha256=AX4UYaDEpKYy9r8ximJvls0F1P3ld3qJBwncmrXdMHY,57553
 snowflake/snowpark_connect/relation/map_crosstab.py,sha256=H_J8-IARK6zMEUFrOjKif1St6M20gvBAnP0EuArFHGg,2422
@@ -369,28 +378,28 @@ snowflake/snowpark_connect/relation/catalogs/utils.py,sha256=zexCUmfva3ejfuRi5XR
 snowflake/snowpark_connect/relation/read/__init__.py,sha256=5J3IOTKu4Qmenouz1Oz_bUu_4c6KpxtaC63mPUGLyeY,132
 snowflake/snowpark_connect/relation/read/jdbc_read_dbapi.py,sha256=5fWA7UiaPzDC88F4tg0smC03XYpjLT6RP0hXIApTK7M,26761
 snowflake/snowpark_connect/relation/read/map_read.py,sha256=nIh_km3ZSN_e1ATVHH2JRLf4AvKsdvanaryGlkCNCJc,19380
-snowflake/snowpark_connect/relation/read/map_read_csv.py,sha256=iUebwmjnFr0lTjVrYpXvW8jA7ROmCiUAh_IWLSFLlFc,13153
-snowflake/snowpark_connect/relation/read/map_read_jdbc.py,sha256=bdDlX8oQfHvHysG8cnqaEhfRrX0rPuEWsUw4CNIPSUM,4607
-snowflake/snowpark_connect/relation/read/map_read_json.py,sha256=0Bn3B3LUU9VJuRVl9_C-JNVLA-0R5CN-oVLnt3E9rmU,22830
-snowflake/snowpark_connect/relation/read/map_read_parquet.py,sha256=wqxAZqqgH0yOk9aUQodGPh4eSkQ2VyISfqMXmbeggOs,11558
+snowflake/snowpark_connect/relation/read/map_read_csv.py,sha256=gKC2kekYvB1l7lV9LpDm8gkkvTcaVMy7jvzMQgAlSRg,15156
+snowflake/snowpark_connect/relation/read/map_read_jdbc.py,sha256=u3RX7NO6j9H7AewHLStJeT397rbK3iZgEd-IsQpkQ1M,4736
+snowflake/snowpark_connect/relation/read/map_read_json.py,sha256=nw7a-ESSc2Wi81TIg1clADCZ_0puwN1vGio-bFBuSe4,22959
+snowflake/snowpark_connect/relation/read/map_read_parquet.py,sha256=tNlX71B8xrsDVFQmRLINOaEuPTTjRJ6JnLSsrt87wr0,11679
 snowflake/snowpark_connect/relation/read/map_read_partitioned_parquet.py,sha256=LEumV-4j1RXBRVZVd21REdeyNgiB5KhBvFMxqg4anLk,5200
-snowflake/snowpark_connect/relation/read/map_read_socket.py,sha256=BsTCN3X1zW4OatPHW7Pe-xvicCTZPxVJ76cMfCNka2M,2750
-snowflake/snowpark_connect/relation/read/map_read_table.py,sha256=eQHvc-jUycTpcPlQ0ofkRtJJ7tygF0H-9R4W_Jf_fQY,7605
-snowflake/snowpark_connect/relation/read/map_read_text.py,sha256=cSqB-OGG1gRr5HqmgUtqr_Njx0EVlfxmRGOoPVKYSJQ,4158
+snowflake/snowpark_connect/relation/read/map_read_socket.py,sha256=j50gPvKKh3Q1CawlWrz9clNDtbnajYGV_Uhyc_jhd94,2977
+snowflake/snowpark_connect/relation/read/map_read_table.py,sha256=jdCUs9W9ZWG0mSDtTwXwNmKl4VhsbZ2BSRJghvNfK9w,7734
+snowflake/snowpark_connect/relation/read/map_read_text.py,sha256=28h8pDzrS8Fs-QASS6ZiPjR4W2yAcTNw0QMVFYxKJuc,4279
 snowflake/snowpark_connect/relation/read/metadata_utils.py,sha256=D5qE_fMHPPr_HwcknQD6k19kmy7iqPCmcb6IOMU_xAc,6093
-snowflake/snowpark_connect/relation/read/reader_config.py,sha256=vZejoYiInDkSJU0wJG3OzENZuovOvngM6JFWDJGHwM4,18039
+snowflake/snowpark_connect/relation/read/reader_config.py,sha256=APr-gSK1pR-GIdIBDw7SBey4V4MbSjNtyN2j6zsANlc,18386
 snowflake/snowpark_connect/relation/read/utils.py,sha256=8OeaNSjNRAT5rL5tI6hXq4BSUe1ltKHKMoK4bSJLZEU,6155
 snowflake/snowpark_connect/relation/write/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/relation/write/jdbc_write_dbapi.py,sha256=2N4Z9J8ZCxdMXAQcKqEkZ0M4O3ICTih2gcR9iG2WA1E,12796
 snowflake/snowpark_connect/relation/write/map_write.py,sha256=s0uzS1rfiikctqjBHDjrJf0tk22h8OGEfTV4vOqZuNU,63645
 snowflake/snowpark_connect/relation/write/map_write_jdbc.py,sha256=95fkSAz9QKbk59XF0u_Reru5E6s5gVlNBAMQLcn24DQ,1755
 snowflake/snowpark_connect/resources/java_udfs-1.0-SNAPSHOT.jar,sha256=va_iQeJTubqCBgICfwY5UGFbPaFXTFV7o8iK21uR1MQ,25428
-snowflake/snowpark_connect/server_common/__init__.py,sha256=Cr62Hn27Fu4dC_K0e9lVqmxyVWZMV-vEcFo7XOO1HK4,17693
+snowflake/snowpark_connect/server_common/__init__.py,sha256=AEFihloKzMfzrq62uJKjsMh0GZvYURgLTA8G2oIeJ8Q,17779
 snowflake/snowpark_connect/utils/__init__.py,sha256=xsIE96jDASko3F-MeNf4T4Gg5ufthS8CejeiJDfri0M,76
 snowflake/snowpark_connect/utils/artifacts.py,sha256=mpJ4TvJbhpiAl3uBeDKmC0e0nmfUuWEb8Z6CIa67So4,2543
 snowflake/snowpark_connect/utils/cache.py,sha256=pKWUe5uGHenm3iYe4_hs8TGe48lojMzTSuZBAlC2UdY,4721
 snowflake/snowpark_connect/utils/concurrent.py,sha256=GhX9SVdbhFfWHqwi5hyXxMJ1_A3FqZ9Zw40ZWgk6dGs,4548
-snowflake/snowpark_connect/utils/context.py,sha256=d4ylW2j-ksUOT8lVRjjIhXIPFjSpXqxDlvLi3Pmt60E,18008
+snowflake/snowpark_connect/utils/context.py,sha256=ypcbGNwiGtY8UYPggSe6r_oKkgHs9B4Hrtp39xjGIe4,18296
 snowflake/snowpark_connect/utils/describe_query_cache.py,sha256=J43wWN5EbhLSw9gnTeEv529_yPN7tS7rCTzWALfxY2I,9470
 snowflake/snowpark_connect/utils/env_utils.py,sha256=VLCaAYDqJRH4aD0UMWKrFW2EHEJuXwFwjxOX6TzaxFg,1738
 snowflake/snowpark_connect/utils/expression_transformer.py,sha256=XA2a1g6OE6psYGaWn4ySDG1PeG_5ntc5MmVD3-0yJiQ,4213
@@ -398,20 +407,20 @@ snowflake/snowpark_connect/utils/external_udxf_cache.py,sha256=eSZHMbjTxnkg78Ilb
 snowflake/snowpark_connect/utils/identifiers.py,sha256=w5J8nmLWraWVNNu1ztlzylvOlJC1neHSzXmSh8oy1fU,8271
 snowflake/snowpark_connect/utils/interrupt.py,sha256=_awhdrzF1KQO-EQThneEcfMg3Zxed4p3HtMpkcAb6ek,2790
 snowflake/snowpark_connect/utils/io_utils.py,sha256=j8-t_Vvy0qJhA7EeSYlEqqmc2-osS085PdgUkQDqij8,2811
-snowflake/snowpark_connect/utils/java_stored_procedure.py,sha256=bpqeBbwZ65Jrbv_FQUW2Q2mL31SzYqdJ7Ymp3ezIsgs,3986
-snowflake/snowpark_connect/utils/java_udaf_utils.py,sha256=TNgDbXW0Tha_JgNQjt_AT3NXf8q28pnN366gYaqMlpk,10694
-snowflake/snowpark_connect/utils/java_udtf_utils.py,sha256=XK_Uvqais8mclsRwgWjBB--DIA495JZ8Yj0LNnS3Ejg,7923
-snowflake/snowpark_connect/utils/jvm_udf_utils.py,sha256=7WWbPaF8xVaZv5wj7fHpO8yfRz8dBXSv-6CtnqJs488,9053
+snowflake/snowpark_connect/utils/java_stored_procedure.py,sha256=6UqO49NgP_dAWgS4mDzcj64DCTbqPZYcspSVzrSB7z8,5263
+snowflake/snowpark_connect/utils/java_udaf_utils.py,sha256=Cyu6JzyzhebWwC-3vdb1Ap9oqD_ghN4P8yJbU9aU8GQ,11186
+snowflake/snowpark_connect/utils/java_udtf_utils.py,sha256=HOEkfb9Fd-YhSLfm3KtG3E4YTCbCvOfk4gXWmW8Tric,7934
+snowflake/snowpark_connect/utils/jvm_udf_utils.py,sha256=7DQKYq3A2W0QAwMw6lqjoNKY7a5KBs5Da87fxpDTu8c,10333
 snowflake/snowpark_connect/utils/open_telemetry.py,sha256=X1ZOgI8c4QeCxIMjApCCsZLzUCxUJpfgxMdHY2VLByg,15961
 snowflake/snowpark_connect/utils/pandas_udtf_utils.py,sha256=jXh3jcJPvY31dy9m_WAcYzlepQG12Y5Pq2rsfaYrVLk,7815
 snowflake/snowpark_connect/utils/patch_spark_line_number.py,sha256=xM2laj6XxfqaFS47cf1uas1PMwMnEt1_5668PJhrpDE,6288
 snowflake/snowpark_connect/utils/profiling.py,sha256=3mn3BSpGEGDmbg-3W9Yf6IGN2G5ohIKs9MLh3ZYrRSw,2100
-snowflake/snowpark_connect/utils/scala_udf_utils.py,sha256=jbrmvAMGMW8AzuFqraG3E-68RF84hrHmJ016Bdlj4WM,14593
+snowflake/snowpark_connect/utils/scala_udf_utils.py,sha256=FQUXj6kdpSrCL1u8-tZLXyZ6jodiMfULlwNyfVySmH8,17635
 snowflake/snowpark_connect/utils/sequence.py,sha256=1rxDgySTpByJFlZkIksJg4TwnkvsJm5Jt0qkSdnuMr8,542
 snowflake/snowpark_connect/utils/session.py,sha256=D7NiSJsTKeljsY0MOO_CSjCWR-mh9l576CZzOPSwjIY,9200
 snowflake/snowpark_connect/utils/snowpark_connect_logging.py,sha256=MVO-tKqqBGTcM_7_kXSma1Q7n8DQzJVCJ7d64faAZ6A,2651
 snowflake/snowpark_connect/utils/spcs_logger.py,sha256=iPOKUzU_pH_v45ffvp-PcpF2l5NpbMoJeUZGzw2nr7A,9269
-snowflake/snowpark_connect/utils/telemetry.py,sha256=H459bpBLeAK7a0902iTsYX2IVFNFDGr1LtwbJD6va24,25516
+snowflake/snowpark_connect/utils/telemetry.py,sha256=Vgl3AMmIO6dBJoddOERrvcn137JI1de4L5jWIWUIzFw,26119
 snowflake/snowpark_connect/utils/temporary_view_cache.py,sha256=qZYNNwIfcy6-Tm_bFTC06pOeQ2Eba5v5bn4J5FtSDEo,2396
 snowflake/snowpark_connect/utils/temporary_view_helper.py,sha256=DwCQ-7S49bE1Nkc1mI9ghAtwYG6T9V6KTdxc6IlbJ9U,12214
 snowflake/snowpark_connect/utils/udf_cache.py,sha256=rbe_CKzi6i-B6dzEba4SgBnULnzHUKz_y6ZR0URI26k,14450
@@ -419,7 +428,7 @@ snowflake/snowpark_connect/utils/udf_helper.py,sha256=If9Sn9Wxon570dIUTUHuJlA8Br
 snowflake/snowpark_connect/utils/udf_utils.py,sha256=wS1_CF0P4Rj9QeTK5KVs_wb4pVTgAXv7YLukBurq3ho,17357
 snowflake/snowpark_connect/utils/udtf_helper.py,sha256=n8dK0QGz81iJXmRB9b2BskyMUKVvNSnUkHGFSHNQpVo,15362
 snowflake/snowpark_connect/utils/udtf_utils.py,sha256=JOZepy94nRdiakj1ROrhVM_ywYV0u-k9ljZbsZKnOF4,34396
-snowflake/snowpark_connect/utils/udxf_import_utils.py,sha256=pPtcaGsyh0tUdy0aAvNqTj04jqPKlEcGmvaZDP9O8Gc,536
+snowflake/snowpark_connect/utils/udxf_import_utils.py,sha256=yjL9U-9bzINrs6G6aTTU2VmowHDY7LWv7tf-IojN4d4,683
 snowflake/snowpark_connect/utils/upload_java_jar.py,sha256=xCd4OkArLKQQYptfhqPizawWFisw_-qC61wazouyhIM,2071
 snowflake/snowpark_connect/utils/xxhash64.py,sha256=ysJRxhBPf25LeNhM1RK_H36MWl6q6C6vBRHa-jIna_A,7477
 snowflake/snowpark_decoder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -427,17 +436,17 @@ snowflake/snowpark_decoder/dp_session.py,sha256=ol2Zau-AdgLZeSHajc40rOaidcKcSqQG
 snowflake/snowpark_decoder/spark_decoder.py,sha256=vyVwOsBbZ26C1jJ0ORFPnzT58bamHBAq2lcYf22-F3c,3419
 snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.py,sha256=2eSDqeyfMvmIJ6_rF663DrEe1dg_anrP4OpVJNTJHaQ,2598
 snowflake/snowpark_decoder/_internal/proto/generated/DataframeProcessorMsg_pb2.pyi,sha256=aIH23k52bXdw5vO3RtM5UcOjDPaWsJFx1SRUSk3qOK8,6142
-snowpark_connect-1.6.0.data/scripts/snowpark-connect,sha256=yZ94KqbWACxnwV8mpg8NjILvvRNjnF8B3cs3ZFNuIM4,1546
-snowpark_connect-1.6.0.data/scripts/snowpark-session,sha256=NMAHSonTo-nmOZSkQNlszUC0jLJ8QWEDUsUmMe2UAOw,190
-snowpark_connect-1.6.0.data/scripts/snowpark-submit,sha256=MaTt66d8O61yxSsJE_N5U8lNbnVwj0d-1u_707Ipkzo,12056
-snowpark_connect-1.6.0.dist-info/licenses/LICENSE-binary,sha256=fmBlX39HwTlBUyiKEznaLZGuxQy-7ndLLG_rTXjF02Y,22916
-snowpark_connect-1.6.0.dist-info/licenses/LICENSE.txt,sha256=Ff9cPv4xu0z7bnMTHzo4vDncOShsy33w4oJMA2xjn6c,11365
-snowpark_connect-1.6.0.dist-info/licenses/NOTICE-binary,sha256=elMF8brgGNJwOz8YdorzBF6-U8ZhR8F-77FfGkZng7U,57843
+snowpark_connect-1.7.0.data/scripts/snowpark-connect,sha256=yZ94KqbWACxnwV8mpg8NjILvvRNjnF8B3cs3ZFNuIM4,1546
+snowpark_connect-1.7.0.data/scripts/snowpark-session,sha256=NMAHSonTo-nmOZSkQNlszUC0jLJ8QWEDUsUmMe2UAOw,190
+snowpark_connect-1.7.0.data/scripts/snowpark-submit,sha256=n5SyESQMEEfWe9hPjYfsUukraqOfFmO9_c9SdEXx194,12472
+snowpark_connect-1.7.0.dist-info/licenses/LICENSE-binary,sha256=fmBlX39HwTlBUyiKEznaLZGuxQy-7ndLLG_rTXjF02Y,22916
+snowpark_connect-1.7.0.dist-info/licenses/LICENSE.txt,sha256=Ff9cPv4xu0z7bnMTHzo4vDncOShsy33w4oJMA2xjn6c,11365
+snowpark_connect-1.7.0.dist-info/licenses/NOTICE-binary,sha256=elMF8brgGNJwOz8YdorzBF6-U8ZhR8F-77FfGkZng7U,57843
 spark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 spark/connect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 spark/connect/envelope_pb2.py,sha256=7Gc6OUA3vaCuTCIKamb_Iiw7W9jPTcWNEv1im20eWHM,2726
 spark/connect/envelope_pb2.pyi,sha256=VXTJSPpcxzB_dWqVdvPY4KkPhJfh0WmkX7SNHWoLhx0,3358
-snowpark_connect-1.6.0.dist-info/METADATA,sha256=8-qHHb1kVAaAC1Ipe2FnnTekj1tjUi1KBSo5CtxTzUo,1879
-snowpark_connect-1.6.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-snowpark_connect-1.6.0.dist-info/top_level.txt,sha256=ExnWqVpoTHRG99fu_AxXZVOz8c-De7nNu0yFCGylM8I,16
-snowpark_connect-1.6.0.dist-info/RECORD,,
+snowpark_connect-1.7.0.dist-info/METADATA,sha256=Bk8luLbRlwf5oRfWMRSYfh54NY1XLiS7CU3cmIs6Cmo,1940
+snowpark_connect-1.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+snowpark_connect-1.7.0.dist-info/top_level.txt,sha256=ExnWqVpoTHRG99fu_AxXZVOz8c-De7nNu0yFCGylM8I,16
+snowpark_connect-1.7.0.dist-info/RECORD,,