snowflake-ml-python 1.13.0__py3-none-any.whl → 1.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
snowflake/ml/jobs/_utils/constants.py CHANGED
@@ -25,7 +25,7 @@ RESULT_PATH_DEFAULT_VALUE = f"{OUTPUT_STAGE_SUBPATH}/mljob_result.pkl"
  DEFAULT_IMAGE_REPO = "/snowflake/images/snowflake_images"
  DEFAULT_IMAGE_CPU = "st_plat/runtime/x86/runtime_image/snowbooks"
  DEFAULT_IMAGE_GPU = "st_plat/runtime/x86/generic_gpu/runtime_image/snowbooks"
- DEFAULT_IMAGE_TAG = "1.6.2"
+ DEFAULT_IMAGE_TAG = "1.8.0"
  DEFAULT_ENTRYPOINT_PATH = "func.py"

  # Percent of container memory to allocate for /dev/shm volume
snowflake/ml/jobs/_utils/scripts/mljob_launcher.py CHANGED
@@ -234,12 +234,6 @@ def run_script(script_path: str, *script_args: Any, main_func: Optional[str] = N
      if payload_dir and payload_dir not in sys.path:
          sys.path.insert(0, payload_dir)

-     # Create a Snowpark session before running the script
-     # Session can be retrieved from using snowflake.snowpark.context.get_active_session()
-     config = SnowflakeLoginOptions()
-     config["client_session_keep_alive"] = "True"
-     session = Session.builder.configs(config).create()  # noqa: F841
-
      try:

          if main_func:
@@ -266,7 +260,6 @@ def run_script(script_path: str, *script_args: Any, main_func: Optional[str] = N
      finally:
          # Restore original sys.argv
          sys.argv = original_argv
-         session.close()


  def main(script_path: str, *script_args: Any, script_main_func: Optional[str] = None) -> ExecutionResult:
@@ -297,6 +290,12 @@ def main(script_path: str, *script_args: Any, script_main_func: Optional[str] =
      except ModuleNotFoundError:
          warnings.warn("Ray is not installed, skipping Ray initialization", ImportWarning, stacklevel=1)

+     # Create a Snowpark session before starting
+     # Session can be retrieved from using snowflake.snowpark.context.get_active_session()
+     config = SnowflakeLoginOptions()
+     config["client_session_keep_alive"] = "True"
+     session = Session.builder.configs(config).create()  # noqa: F841
+
      try:
          # Wait for minimum required instances if specified
          min_instances_str = os.environ.get(MIN_INSTANCES_ENV_VAR) or "1"
@@ -352,6 +351,9 @@ def main(script_path: str, *script_args: Any, script_main_func: Optional[str] =
              f"Failed to serialize JSON result to {result_json_path}: {json_exc}", RuntimeWarning, stacklevel=1
          )

+     # Close the session after serializing the result
+     session.close()
+

  if __name__ == "__main__":
      # Parse command line arguments
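The session is now created once in main() instead of inside run_script(), so a script launched by the runner still sees an active session. A minimal sketch of a job script under that assumption (the script itself is hypothetical; get_active_session is the standard Snowpark accessor named in the comment above):

    # Hypothetical job script run by mljob_launcher: main() has already
    # created a Snowpark session, so the script just retrieves it.
    from snowflake.snowpark.context import get_active_session

    session = get_active_session()
    print(session.get_current_warehouse())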
snowflake/ml/jobs/job.py CHANGED
@@ -83,6 +83,8 @@ class MLJob(Generic[T], SerializableSessionMixin):
      def _container_spec(self) -> dict[str, Any]:
          """Get the job's main container spec."""
          containers = self._service_spec["spec"]["containers"]
+         if len(containers) == 1:
+             return cast(dict[str, Any], containers[0])
          try:
              container_spec = next(c for c in containers if c["name"] == constants.DEFAULT_CONTAINER_NAME)
          except StopIteration:
@@ -163,7 +165,7 @@ class MLJob(Generic[T], SerializableSessionMixin):
          Returns:
              The job's execution logs.
          """
-         logs = _get_logs(self._session, self.id, limit, instance_id, verbose)
+         logs = _get_logs(self._session, self.id, limit, instance_id, self._container_spec["name"], verbose)
          assert isinstance(logs, str)  # mypy
          if as_list:
              return logs.splitlines()
@@ -281,7 +283,12 @@ def _get_service_spec(session: snowpark.Session, job_id: str) -> dict[str, Any]:

  @telemetry.send_api_usage_telemetry(project=_PROJECT, func_params_to_log=["job_id", "limit", "instance_id"])
  def _get_logs(
-     session: snowpark.Session, job_id: str, limit: int = -1, instance_id: Optional[int] = None, verbose: bool = True
+     session: snowpark.Session,
+     job_id: str,
+     limit: int = -1,
+     instance_id: Optional[int] = None,
+     container_name: str = constants.DEFAULT_CONTAINER_NAME,
+     verbose: bool = True,
  ) -> str:
      """
      Retrieve the job's execution logs.
@@ -291,6 +298,7 @@ def _get_logs(
          limit: The maximum number of lines to return. Negative values are treated as no limit.
          session: The Snowpark session to use. If none specified, uses active session.
          instance_id: Optional instance ID to get logs from a specific instance.
+         container_name: The container name to get logs from a specific container.
          verbose: Whether to return the full log or just the portion between START and END messages.

      Returns:
@@ -311,7 +319,7 @@ def _get_logs(
      params: list[Any] = [
          job_id,
          0 if instance_id is None else instance_id,
-         constants.DEFAULT_CONTAINER_NAME,
+         container_name,
      ]
      if limit > 0:
          params.append(limit)
@@ -337,7 +345,7 @@ def _get_logs(
          job_id,
          limit=limit,
          instance_id=instance_id if instance_id else 0,
-         container_name=constants.DEFAULT_CONTAINER_NAME,
+         container_name=container_name,
      )
      full_log = os.linesep.join(row[0] for row in logs)
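With the container name threaded through _get_logs, log retrieval now follows the job's own container spec. A rough usage sketch, assuming the public get_logs wrapper keeps the limit/instance_id/as_list parameters visible above (the job handle is hypothetical):

    # Sketch: last 50 log lines from instance 0 of a submitted job.
    # Internally this resolves the name via self._container_spec["name"]
    # rather than always using constants.DEFAULT_CONTAINER_NAME.
    job = ...  # an MLJob handle returned by snowflake.ml.jobs.submit_file(...)
    for line in job.get_logs(limit=50, instance_id=0, as_list=True):
        print(line)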
snowflake/ml/jobs/manager.py CHANGED
@@ -232,6 +232,7 @@ def submit_file(
          enable_metrics (bool): Whether to enable metrics publishing for the job.
          query_warehouse (str): The query warehouse to use. Defaults to session warehouse.
          spec_overrides (dict): A dictionary of overrides for the service spec.
+         imports (list[Union[tuple[str, str], tuple[str]]]): A list of additional payloads used in the job.

      Returns:
          An object representing the submitted job.
@@ -286,6 +287,7 @@ def submit_directory(
          enable_metrics (bool): Whether to enable metrics publishing for the job.
          query_warehouse (str): The query warehouse to use. Defaults to session warehouse.
          spec_overrides (dict): A dictionary of overrides for the service spec.
+         imports (list[Union[tuple[str, str], tuple[str]]]): A list of additional payloads used in the job.

      Returns:
          An object representing the submitted job.
@@ -341,6 +343,7 @@ def submit_from_stage(
          enable_metrics (bool): Whether to enable metrics publishing for the job.
          query_warehouse (str): The query warehouse to use. Defaults to session warehouse.
          spec_overrides (dict): A dictionary of overrides for the service spec.
+         imports (list[Union[tuple[str, str], tuple[str]]]): A list of additional payloads used in the job.

      Returns:
          An object representing the submitted job.
@@ -404,6 +407,8 @@ def _submit_job(
          "num_instances",  # deprecated
          "target_instances",
          "min_instances",
+         "enable_metrics",
+         "query_warehouse",
      ],
  )
  def _submit_job(
@@ -447,6 +452,13 @@ def _submit_job(
          )
          target_instances = max(target_instances, kwargs.pop("num_instances"))

+     imports = None
+     if "additional_payloads" in kwargs:
+         logger.warning(
+             "'additional_payloads' is deprecated and will be removed in a future release. Use 'imports' instead."
+         )
+         imports = kwargs.pop("additional_payloads")
+
      # Use kwargs for less common optional parameters
      database = kwargs.pop("database", None)
      schema = kwargs.pop("schema", None)
@@ -457,10 +469,7 @@
      spec_overrides = kwargs.pop("spec_overrides", None)
      enable_metrics = kwargs.pop("enable_metrics", True)
      query_warehouse = kwargs.pop("query_warehouse", session.get_current_warehouse())
-     additional_payloads = kwargs.pop("additional_payloads", None)
-
-     if additional_payloads:
-         logger.warning("'additional_payloads' is in private preview since 1.9.1. Do not use it in production.")
+     imports = kwargs.pop("imports", None) or imports

      # Warn if there are unknown kwargs
      if kwargs:
@@ -492,7 +501,7 @@
      try:
          # Upload payload
          uploaded_payload = payload_utils.JobPayload(
-             source, entrypoint=entrypoint, pip_requirements=pip_requirements, additional_payloads=additional_payloads
+             source, entrypoint=entrypoint, pip_requirements=pip_requirements, additional_payloads=imports
          ).upload(session, stage_path)
      except snowpark.exceptions.SnowparkSQLException as e:
          if e.sql_error_code == 90106:
@@ -501,6 +510,22 @@
              )
          raise

+     # FIXME: Temporary patches, remove this after v1 is deprecated
+     if target_instances > 1:
+         default_spec_overrides = {
+             "spec": {
+                 "endpoints": [
+                     {"name": "ray-dashboard-endpoint", "port": 12003, "protocol": "TCP"},
+                 ]
+             },
+         }
+         if spec_overrides:
+             spec_overrides = spec_utils.merge_patch(
+                 default_spec_overrides, spec_overrides, display_name="spec_overrides"
+             )
+         else:
+             spec_overrides = default_spec_overrides
+
      if feature_flags.FeatureFlags.USE_SUBMIT_JOB_V2.is_enabled():
          # Add default env vars (extracted from spec_utils.generate_service_spec)
          combined_env_vars = {**uploaded_payload.env_vars, **(env_vars or {})}
@@ -668,8 +693,10 @@ def _ensure_session(session: Optional[snowpark.Session]) -> snowpark.Session:
          session = session or get_active_session()
      except snowpark.exceptions.SnowparkSessionException as e:
          if "More than one active session" in e.message:
-             raise RuntimeError("Please specify the session as a parameter in API call")
+             raise RuntimeError(
+                 "More than one active session is found. Please specify the session explicitly as a parameter"
+             ) from None
          if "No default Session is found" in e.message:
-             raise RuntimeError("Please create a session before API call")
+             raise RuntimeError("No active session is found. Please create a session") from None
          raise
      return session
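For the additional_payloads to imports rename, a hedged before/after sketch (compute pool, stage, and file names are hypothetical):

    from snowflake.ml import jobs

    # Old spelling, still accepted but now emits a deprecation warning:
    #   jobs.submit_file(..., additional_payloads=[("src/utils.py", "utils.py")])
    # New spelling:
    job = jobs.submit_file(
        "train.py",                  # hypothetical entrypoint script
        "MY_COMPUTE_POOL",           # hypothetical compute pool
        stage_name="payload_stage",  # hypothetical payload stage
        imports=[("src/utils.py", "utils.py")],  # (source, destination) pairs
    )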
snowflake/ml/lineage/lineage_node.py CHANGED
@@ -83,7 +83,6 @@ class LineageNode(mixins.SerializableSessionMixin):
          raise NotImplementedError()

      @telemetry.send_api_usage_telemetry(project=_PROJECT)
-     @snowpark._internal.utils.private_preview(version="1.5.3")
      def lineage(
          self,
          direction: Literal["upstream", "downstream"] = "downstream",
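Removing the private_preview decorator makes lineage() a plain public API. A small usage sketch (the model version handle is hypothetical; direction values come from the signature above):

    # Sketch: walk one hop upstream from a registered model version.
    mv = ...  # e.g. a ModelVersion obtained from the model registry
    for node in mv.lineage(direction="upstream"):
        print(type(node).__name__)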
snowflake/ml/model/_client/model/model_version_impl.py CHANGED
@@ -788,7 +788,7 @@ class ModelVersion(lineage_node.LineageNode):
          inference_engine_args: service_ops.InferenceEngineArgs,
          gpu_requests: Optional[Union[str, int]] = None,
      ) -> Optional[service_ops.InferenceEngineArgs]:
-         """Enrich inference engine args with model path and tensor parallelism settings.
+         """Enrich inference engine args with tensor parallelism settings.

          Args:
              inference_engine_args: The original inference engine args
@@ -803,21 +803,6 @@ class ModelVersion(lineage_node.LineageNode):
          if inference_engine_args.inference_engine_args_override is None:
              inference_engine_args.inference_engine_args_override = []

-         # Get model stage path and strip off "snow://" prefix
-         model_stage_path = self._model_ops.get_model_version_stage_path(
-             database_name=None,
-             schema_name=None,
-             model_name=self._model_name,
-             version_name=self._version_name,
-         )
-
-         # Strip "snow://" prefix
-         if model_stage_path.startswith("snow://"):
-             model_stage_path = model_stage_path.replace("snow://", "", 1)
-
-         # Always overwrite the model key by appending
-         inference_engine_args.inference_engine_args_override.append(f"--model={model_stage_path}")
-
          gpu_count = None

          # Set tensor-parallelism if gpu_requests is specified
snowflake/ml/model/_client/ops/model_ops.py CHANGED
@@ -47,7 +47,8 @@ class ServiceInfo(TypedDict):
  class ModelOperator:
      INFERENCE_SERVICE_ENDPOINT_NAME = "inference"
      INGRESS_ENDPOINT_URL_SUFFIX = "snowflakecomputing.app"
-     PRIVATELINK_INGRESS_ENDPOINT_URL_SUBSTRING = "privatelink.snowflakecomputing"
+     # app-service-privatelink might not contain "snowflakecomputing" in the url - using the minimum required substring
+     PRIVATELINK_INGRESS_ENDPOINT_URL_SUBSTRING = "privatelink.snowflake"

      def __init__(
          self,
@@ -631,7 +632,13 @@ class ModelOperator:

      def _extract_and_validate_privatelink_url(self, res_row: "row.Row") -> Optional[str]:
          """Extract and validate privatelink ingress URL from endpoint row."""
-         url_value = res_row[self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_PRIVATELINK_INGRESS_URL_COL_NAME]
+         # Check if the privatelink_ingress_url column exists
+         col_name = self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_PRIVATELINK_INGRESS_URL_COL_NAME
+         if col_name not in res_row:
+             # Column doesn't exist in query result for non-Business Critical accounts
+             return None
+
+         url_value = res_row[col_name]
          if url_value is None:
              return None
          url_str = str(url_value)
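The shortened substring is deliberate: it still matches the classic privatelink.snowflakecomputing hosts while also covering privatelink ingress URLs that drop the "snowflakecomputing" token. A tiny illustration with made-up URLs:

    SUBSTRING = "privatelink.snowflake"  # new minimum required substring

    urls = [
        "org-acct.privatelink.snowflakecomputing.app",  # hypothetical classic form
        "app.privatelink.snowflake.example",            # hypothetical app-service form
    ]
    assert all(SUBSTRING in url for url in urls)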
snowflake/ml/model/_client/ops/service_ops.py CHANGED
@@ -323,17 +323,20 @@ class ServiceOperator:
              statement_params=statement_params,
          )

-         # stream service logs in a thread
-         model_build_service_name = sql_identifier.SqlIdentifier(
-             self._get_service_id_from_deployment_step(query_id, DeploymentStep.MODEL_BUILD)
-         )
-         model_build_service = ServiceLogInfo(
-             database_name=service_database_name,
-             schema_name=service_schema_name,
-             service_name=model_build_service_name,
-             deployment_step=DeploymentStep.MODEL_BUILD,
-             log_color=service_logger.LogColor.GREEN,
-         )
+         model_build_service: Optional[ServiceLogInfo] = None
+         if is_enable_image_build:
+             # stream service logs in a thread
+             model_build_service_name = sql_identifier.SqlIdentifier(
+                 self._get_service_id_from_deployment_step(query_id, DeploymentStep.MODEL_BUILD)
+             )
+             model_build_service = ServiceLogInfo(
+                 database_name=service_database_name,
+                 schema_name=service_schema_name,
+                 service_name=model_build_service_name,
+                 deployment_step=DeploymentStep.MODEL_BUILD,
+                 log_color=service_logger.LogColor.GREEN,
+             )
+
          model_inference_service = ServiceLogInfo(
              database_name=service_database_name,
              schema_name=service_schema_name,
@@ -375,7 +378,7 @@ class ServiceOperator:
              progress_status.increment()

          # Poll for model build to start if not using existing service
-         if not model_inference_service_exists:
+         if not model_inference_service_exists and model_build_service:
              self._wait_for_service_status(
                  model_build_service_name,
                  service_sql.ServiceStatus.RUNNING,
@@ -390,7 +393,7 @@ class ServiceOperator:
              progress_status.increment()

          # Poll for model build completion
-         if not model_inference_service_exists:
+         if not model_inference_service_exists and model_build_service:
              self._wait_for_service_status(
                  model_build_service_name,
                  service_sql.ServiceStatus.DONE,
@@ -454,7 +457,7 @@ class ServiceOperator:
          self,
          async_job: snowpark.AsyncJob,
          model_logger_service: Optional[ServiceLogInfo],
-         model_build_service: ServiceLogInfo,
+         model_build_service: Optional[ServiceLogInfo],
          model_inference_service: ServiceLogInfo,
          model_inference_service_exists: bool,
          force_rebuild: bool,
@@ -483,7 +486,7 @@ class ServiceOperator:
          self,
          force_rebuild: bool,
          service_log_meta: ServiceLogMetadata,
-         model_build_service: ServiceLogInfo,
+         model_build_service: Optional[ServiceLogInfo],
          model_inference_service: ServiceLogInfo,
          operation_id: str,
          statement_params: Optional[dict[str, Any]] = None,
@@ -599,13 +602,24 @@ class ServiceOperator:
          # check if model logger service is done
          # and transition the service log metadata to the model image build service
          if service.deployment_step == DeploymentStep.MODEL_LOGGING:
-             service_log_meta.transition_service_log_metadata(
-                 model_build_service,
-                 f"Model Logger service {service.display_service_name} complete.",
-                 is_model_build_service_done=False,
-                 is_model_logger_service_done=service_log_meta.is_model_logger_service_done,
-                 operation_id=operation_id,
-             )
+             if model_build_service:
+                 # building the inference image, transition to the model build service
+                 service_log_meta.transition_service_log_metadata(
+                     model_build_service,
+                     f"Model Logger service {service.display_service_name} complete.",
+                     is_model_build_service_done=False,
+                     is_model_logger_service_done=service_log_meta.is_model_logger_service_done,
+                     operation_id=operation_id,
+                 )
+             else:
+                 # no model build service, transition to the model inference service
+                 service_log_meta.transition_service_log_metadata(
+                     model_inference_service,
+                     f"Model Logger service {service.display_service_name} complete.",
+                     is_model_build_service_done=True,
+                     is_model_logger_service_done=service_log_meta.is_model_logger_service_done,
+                     operation_id=operation_id,
+                 )
          # check if model build service is done
          # and transition the service log metadata to the model inference service
          elif service.deployment_step == DeploymentStep.MODEL_BUILD:
@@ -616,6 +630,8 @@ class ServiceOperator:
                  is_model_logger_service_done=service_log_meta.is_model_logger_service_done,
                  operation_id=operation_id,
              )
+         elif service.deployment_step == DeploymentStep.MODEL_INFERENCE:
+             module_logger.info(f"Inference service {service.display_service_name} is deployed.")
          else:
              module_logger.warning(f"Service {service.display_service_name} is done, but not transitioning.")

@@ -623,7 +639,7 @@ class ServiceOperator:
          self,
          async_job: snowpark.AsyncJob,
          model_logger_service: Optional[ServiceLogInfo],
-         model_build_service: ServiceLogInfo,
+         model_build_service: Optional[ServiceLogInfo],
          model_inference_service: ServiceLogInfo,
          model_inference_service_exists: bool,
          force_rebuild: bool,
@@ -632,14 +648,23 @@ class ServiceOperator:
      ) -> None:
          """Stream service logs while the async job is running."""

-         model_build_service_logger = service_logger.get_logger(  # BuildJobName
-             model_build_service.display_service_name,
-             model_build_service.log_color,
-             operation_id=operation_id,
-         )
-         if model_logger_service:
-             model_logger_service_logger = service_logger.get_logger(  # ModelLoggerName
-                 model_logger_service.display_service_name,
+         if model_build_service:
+             model_build_service_logger = service_logger.get_logger(
+                 model_build_service.display_service_name,  # BuildJobName
+                 model_build_service.log_color,
+                 operation_id=operation_id,
+             )
+             service_log_meta = ServiceLogMetadata(
+                 service_logger=model_build_service_logger,
+                 service=model_build_service,
+                 service_status=None,
+                 is_model_build_service_done=False,
+                 is_model_logger_service_done=True,
+                 log_offset=0,
+             )
+         elif model_logger_service:
+             model_logger_service_logger = service_logger.get_logger(
+                 model_logger_service.display_service_name,  # ModelLoggerName
                  model_logger_service.log_color,
                  operation_id=operation_id,
              )
@@ -653,12 +678,17 @@ class ServiceOperator:
                  log_offset=0,
              )
          else:
+             model_inference_service_logger = service_logger.get_logger(
+                 model_inference_service.display_service_name,  # ModelInferenceName
+                 model_inference_service.log_color,
+                 operation_id=operation_id,
+             )
              service_log_meta = ServiceLogMetadata(
-                 service_logger=model_build_service_logger,
-                 service=model_build_service,
+                 service_logger=model_inference_service_logger,
+                 service=model_inference_service,
                  service_status=None,
                  is_model_build_service_done=False,
-                 is_model_logger_service_done=True,
+                 is_model_logger_service_done=False,
                  log_offset=0,
              )
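Taken together, these hunks turn the initial log source into a three-way fallback. A condensed restatement of the branch order in the log-streaming helper above (plain values stand in for the ServiceLogInfo objects):

    def pick_initial_log_service(model_build_service, model_logger_service, model_inference_service):
        # Prefer build logs when an image build runs, then model-logger logs,
        # and fall back to the inference service when neither applies.
        if model_build_service:
            return model_build_service
        if model_logger_service:
            return model_logger_service
        return model_inference_service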
snowflake/ml/model/_client/sql/service.py CHANGED
@@ -256,9 +256,6 @@ class ServiceSQLClient(_base._BaseSQLClient):
              )
              .has_column(ServiceSQLClient.MODEL_INFERENCE_SERVICE_ENDPOINT_NAME_COL_NAME, allow_empty=True)
              .has_column(ServiceSQLClient.MODEL_INFERENCE_SERVICE_ENDPOINT_INGRESS_URL_COL_NAME, allow_empty=True)
-             .has_column(
-                 ServiceSQLClient.MODEL_INFERENCE_SERVICE_ENDPOINT_PRIVATELINK_INGRESS_URL_COL_NAME, allow_empty=True
-             )
          )

          return res.validate()
snowflake/ml/monitoring/_client/model_monitor_sql_client.py CHANGED
@@ -30,8 +30,8 @@ class MonitorOperation(Enum):
  _OPERATION_SUPPORTED_PROPS: dict[MonitorOperation, frozenset[str]] = {
      MonitorOperation.SUSPEND: frozenset(),
      MonitorOperation.RESUME: frozenset(),
-     MonitorOperation.ADD: frozenset({"SEGMENT_COLUMN"}),
-     MonitorOperation.DROP: frozenset({"SEGMENT_COLUMN"}),
+     MonitorOperation.ADD: frozenset({"SEGMENT_COLUMN", "CUSTOM_METRIC_COLUMN"}),
+     MonitorOperation.DROP: frozenset({"SEGMENT_COLUMN", "CUSTOM_METRIC_COLUMN"}),
  }


@@ -91,6 +91,7 @@ class ModelMonitorSQLClient:
          baseline_schema: Optional[sql_identifier.SqlIdentifier] = None,
          baseline: Optional[sql_identifier.SqlIdentifier] = None,
          segment_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
+         custom_metric_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
          statement_params: Optional[dict[str, Any]] = None,
      ) -> None:
          baseline_sql = ""
@@ -101,6 +102,10 @@ class ModelMonitorSQLClient:
          if segment_columns:
              segment_columns_sql = f"SEGMENT_COLUMNS={_build_sql_list_from_columns(segment_columns)}"

+         custom_metric_columns_sql = ""
+         if custom_metric_columns:
+             custom_metric_columns_sql = f"CUSTOM_METRIC_COLUMNS={_build_sql_list_from_columns(custom_metric_columns)}"
+
          query_result_checker.SqlResultValidator(
              self._sql_client._session,
              f"""
@@ -120,6 +125,7 @@ class ModelMonitorSQLClient:
              REFRESH_INTERVAL='{refresh_interval}'
              AGGREGATION_WINDOW='{aggregation_window}'
              {segment_columns_sql}
+             {custom_metric_columns_sql}
              {baseline_sql}""",
              statement_params=statement_params,
          ).has_column("status").has_dimensions(1, 1).validate()
@@ -210,6 +216,7 @@ class ModelMonitorSQLClient:
          actual_class_columns: list[sql_identifier.SqlIdentifier],
          id_columns: list[sql_identifier.SqlIdentifier],
          segment_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
+         custom_metric_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
      ) -> None:
          """Ensures all columns exist in the source table.

@@ -222,12 +229,14 @@ class ModelMonitorSQLClient:
              actual_class_columns: List of actual class column names.
              id_columns: List of id column names.
              segment_columns: List of segment column names.
+             custom_metric_columns: List of custom metric column names.

          Raises:
              ValueError: If any of the columns do not exist in the source.
          """

          segment_columns = [] if segment_columns is None else segment_columns
+         custom_metric_columns = [] if custom_metric_columns is None else custom_metric_columns

          if timestamp_column not in source_column_schema:
              raise ValueError(f"Timestamp column {timestamp_column} does not exist in source.")
@@ -248,6 +257,9 @@ class ModelMonitorSQLClient:
          if not all([column_name in source_column_schema for column_name in segment_columns]):
              raise ValueError(f"Segment column(s): {segment_columns} do not exist in source.")

+         if not all([column_name in source_column_schema for column_name in custom_metric_columns]):
+             raise ValueError(f"Custom Metric column(s): {custom_metric_columns} do not exist in source.")
+
      def validate_source(
          self,
          *,
@@ -261,6 +273,7 @@ class ModelMonitorSQLClient:
          actual_class_columns: list[sql_identifier.SqlIdentifier],
          id_columns: list[sql_identifier.SqlIdentifier],
          segment_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
+         custom_metric_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
      ) -> None:

          source_database = source_database or self._database_name
@@ -281,6 +294,7 @@ class ModelMonitorSQLClient:
              actual_class_columns=actual_class_columns,
              id_columns=id_columns,
              segment_columns=segment_columns,
+             custom_metric_columns=custom_metric_columns,
          )

      def _alter_monitor(
@@ -299,7 +313,7 @@ class ModelMonitorSQLClient:

          if target_property not in supported_target_properties:
              raise ValueError(
-                 f"Only {', '.join(supported_target_properties)} supported as target property "
+                 f"Only {', '.join(sorted(supported_target_properties))} supported as target property "
                  f"for {operation.name} operation"
              )

@@ -366,3 +380,33 @@ class ModelMonitorSQLClient:
              target_value=segment_column,
              statement_params=statement_params,
          )
+
+     def add_custom_metric_column(
+         self,
+         monitor_name: sql_identifier.SqlIdentifier,
+         custom_metric_column: sql_identifier.SqlIdentifier,
+         statement_params: Optional[dict[str, Any]] = None,
+     ) -> None:
+         """Add a custom metric column to the Model Monitor"""
+         self._alter_monitor(
+             operation=MonitorOperation.ADD,
+             monitor_name=monitor_name,
+             target_property="CUSTOM_METRIC_COLUMN",
+             target_value=custom_metric_column,
+             statement_params=statement_params,
+         )
+
+     def drop_custom_metric_column(
+         self,
+         monitor_name: sql_identifier.SqlIdentifier,
+         custom_metric_column: sql_identifier.SqlIdentifier,
+         statement_params: Optional[dict[str, Any]] = None,
+     ) -> None:
+         """Drop a custom metric column from the Model Monitor"""
+         self._alter_monitor(
+             operation=MonitorOperation.DROP,
+             monitor_name=monitor_name,
+             target_property="CUSTOM_METRIC_COLUMN",
+             target_value=custom_metric_column,
+             statement_params=statement_params,
+         )
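For orientation, the new optional clause renders into the CREATE MODEL MONITOR DDL next to the segment clause. A hand-written approximation, not captured output (column names are hypothetical, and the exact list formatting depends on _build_sql_list_from_columns):

    # Approximate shape of the rendered clauses:
    segment_columns_sql = "SEGMENT_COLUMNS=('REGION')"
    custom_metric_columns_sql = "CUSTOM_METRIC_COLUMNS=('LATENCY_MS', 'TOKEN_COUNT')"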
snowflake/ml/monitoring/_manager/model_monitor_manager.py CHANGED
@@ -109,6 +109,7 @@ class ModelMonitorManager:
          actual_score_columns = self._build_column_list_from_input(source_config.actual_score_columns)
          actual_class_columns = self._build_column_list_from_input(source_config.actual_class_columns)
          segment_columns = self._build_column_list_from_input(source_config.segment_columns)
+         custom_metric_columns = self._build_column_list_from_input(source_config.custom_metric_columns)

          id_columns = [sql_identifier.SqlIdentifier(column_name) for column_name in source_config.id_columns]
          ts_column = sql_identifier.SqlIdentifier(source_config.timestamp_column)
@@ -125,6 +126,7 @@
              actual_class_columns=actual_class_columns,
              id_columns=id_columns,
              segment_columns=segment_columns,
+             custom_metric_columns=custom_metric_columns,
          )

          self._model_monitor_client.create_model_monitor(
@@ -147,6 +149,7 @@
              actual_score_columns=actual_score_columns,
              actual_class_columns=actual_class_columns,
              segment_columns=segment_columns,
+             custom_metric_columns=custom_metric_columns,
              refresh_interval=model_monitor_config.refresh_interval,
              aggregation_window=model_monitor_config.aggregation_window,
              baseline_database=baseline_database_name_id,
snowflake/ml/monitoring/entities/model_monitor_config.py CHANGED
@@ -36,6 +36,9 @@ class ModelMonitorSourceConfig:
      segment_columns: Optional[list[str]] = None
      """List of columns in the source containing segment information for grouped monitoring."""

+     custom_metric_columns: Optional[list[str]] = None
+     """List of columns in the source containing custom metrics."""
+

  @dataclass
  class ModelMonitorConfig:
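A sketch of declaring custom metric columns on the source config (table and column names are hypothetical; the remaining fields are assumed from the existing dataclass):

    from snowflake.ml.monitoring.entities.model_monitor_config import ModelMonitorSourceConfig

    source_config = ModelMonitorSourceConfig(
        source="INFERENCE_LOG",              # hypothetical monitored table
        timestamp_column="TS",
        id_columns=["REQUEST_ID"],
        prediction_score_columns=["SCORE"],
        actual_score_columns=["LABEL"],
        custom_metric_columns=["LATENCY_MS"],  # new in 1.14.0
    )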
snowflake/ml/monitoring/model_monitor.py CHANGED
@@ -72,3 +72,33 @@ class ModelMonitor:
          )
          segment_column_id = sql_identifier.SqlIdentifier(segment_column)
          self._model_monitor_client.drop_segment_column(self.name, segment_column_id, statement_params=statement_params)
+
+     @telemetry.send_api_usage_telemetry(
+         project=telemetry.TelemetryProject.MLOPS.value,
+         subproject=telemetry.TelemetrySubProject.MONITORING.value,
+     )
+     def add_custom_metric_column(self, custom_metric_column: str) -> None:
+         """Add a custom metric column to the Model Monitor"""
+         statement_params = telemetry.get_statement_params(
+             telemetry.TelemetryProject.MLOPS.value,
+             telemetry.TelemetrySubProject.MONITORING.value,
+         )
+         custom_metric_column_identifier = sql_identifier.SqlIdentifier(custom_metric_column)
+         self._model_monitor_client.add_custom_metric_column(
+             self.name, custom_metric_column_identifier, statement_params=statement_params
+         )
+
+     @telemetry.send_api_usage_telemetry(
+         project=telemetry.TelemetryProject.MLOPS.value,
+         subproject=telemetry.TelemetrySubProject.MONITORING.value,
+     )
+     def drop_custom_metric_column(self, custom_metric_column: str) -> None:
+         """Drop a custom metric column from the Model Monitor"""
+         statement_params = telemetry.get_statement_params(
+             telemetry.TelemetryProject.MLOPS.value,
+             telemetry.TelemetrySubProject.MONITORING.value,
+         )
+         custom_metric_column_identifier = sql_identifier.SqlIdentifier(custom_metric_column)
+         self._model_monitor_client.drop_custom_metric_column(
+             self.name, custom_metric_column_identifier, statement_params=statement_params
+         )
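And the matching handle-level usage, mirroring the existing segment-column methods (the monitor lookup shown is an assumption; the column name is hypothetical):

    monitor = registry.get_monitor(name="MY_MONITOR")  # assuming a Registry handle
    monitor.add_custom_metric_column("LATENCY_MS")
    # ...and later:
    monitor.drop_custom_metric_column("LATENCY_MS")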
snowflake/ml/version.py CHANGED
@@ -1,2 +1,2 @@
  # This is parsed by regex in conda recipe meta file. Make sure not to break it.
- VERSION = "1.13.0"
+ VERSION = "1.14.0"
snowflake_ml_python-1.14.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: snowflake-ml-python
- Version: 1.13.0
+ Version: 1.14.0
  Summary: The machine learning client library that is used for interacting with Snowflake to build machine learning solutions.
  Author-email: "Snowflake, Inc" <support@snowflake.com>
  License:
@@ -410,6 +410,16 @@ NOTE: Version 1.7.0 is used as example here. Please choose the the latest versio

  # Release History

+ ## 1.14.0
+
+ ### Bug Fixes
+
+ ### Behavior Changes
+
+ ### New Features
+
+ * ML Job: The `additional_payloads` argument is now **deprecated** in favor of `imports`.
+
  ## 1.13.0

  ### Bug Fixes
snowflake_ml_python-1.14.0.dist-info/RECORD CHANGED
@@ -10,7 +10,7 @@ snowflake/cortex/_sse_client.py,sha256=sLYgqAfTOPADCnaWH2RWAJi8KbU_7gSRsTUDcDD5T
  snowflake/cortex/_summarize.py,sha256=7GH8zqfIdOiHA5w4b6EvJEKEWhaTrL4YA6iDGbn7BNM,1307
  snowflake/cortex/_translate.py,sha256=9ZGjvAnJFisbzJ_bXnt4pyug5UzhHJRXW8AhGQEersM,1652
  snowflake/cortex/_util.py,sha256=krNTpbkFLXwdFqy1bd0xi7ZmOzOHRnIfHdQCPiLZJxk,3288
- snowflake/ml/version.py,sha256=AGFrDevyvV6eDph0eyz5QYMtk3gQG8WYL19gYzcfRUk,99
+ snowflake/ml/version.py,sha256=wzImQ4oymKajm6ceymlVQpiY0JE0h0m59Li3mBS-xlE,99
  snowflake/ml/_internal/env.py,sha256=EY_2KVe8oR3LgKWdaeRb5rRU-NDNXJppPDsFJmMZUUY,265
  snowflake/ml/_internal/env_utils.py,sha256=x6ID94g6FYoMX3afp0zoUHzBvuvPyiE2F6RDpxx5Cq0,30967
  snowflake/ml/_internal/file_utils.py,sha256=7sA6loOeSfmGP4yx16P4usT9ZtRqG3ycnXu7_Tk7dOs,14206
@@ -110,10 +110,10 @@ snowflake/ml/fileset/snowfs.py,sha256=uF5QluYtiJ-HezGIhF55dONi3t0E6N7ByaVAIAlM3n
  snowflake/ml/fileset/stage_fs.py,sha256=V4pysouSKKDPLzuW3u_extxfvjkQa5OlwIRES9Srpzo,20151
  snowflake/ml/jobs/__init__.py,sha256=v-v9-SA1Vy-M98B31-NlqJgpI6uEg9jEEghJLub1RUY,468
  snowflake/ml/jobs/decorators.py,sha256=mQgdWvvCwD7q79cSFKZHKegXGh2j1u8WM64UD3lCKr4,3428
- snowflake/ml/jobs/job.py,sha256=h88Tj0aQDRywDXk5KbAEVp9q7jZfcGT1xagrkR1tNEM,21981
- snowflake/ml/jobs/manager.py,sha256=Ij1ZTKc2JaCUkOVYLR5N9hBgCj0PcT3fdpIow15QvI8,26132
+ snowflake/ml/jobs/job.py,sha256=VFBogPXXTWa0p-Jl10lSUFyKeqGQOtbzJIgrWTPA0rQ,22222
+ snowflake/ml/jobs/manager.py,sha256=1tOpEE66gQv36BmmYnUIQ2yjS1r-uAHxIQht6ilucO8,27276
  snowflake/ml/jobs/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- snowflake/ml/jobs/_utils/constants.py,sha256=FRsmwGLYLbESf3c65mLEA34DuGajhBRC8BkHJw9BiMU,3838
+ snowflake/ml/jobs/_utils/constants.py,sha256=JB8i85VbI0nIYP_c2nyAADQcmEF8BscVHHKBGhruofY,3838
  snowflake/ml/jobs/_utils/feature_flags.py,sha256=cH_NyeOncL3_tzbk0WvL1siNyodxBgn1ziPk2yBW6wY,404
  snowflake/ml/jobs/_utils/function_payload_utils.py,sha256=4LBaStMdhRxcqwRkwFje-WwiEKRWnBfkaOYouF3N3Kg,1308
  snowflake/ml/jobs/_utils/interop_utils.py,sha256=7mODMTjKCLXkJloACG6_9b2wvmRgjXF0Jx3wpWYyJeA,21413
@@ -125,11 +125,11 @@ snowflake/ml/jobs/_utils/stage_utils.py,sha256=38-LsokaGx0NzlnP8CMRioClRz-3x6xhP
  snowflake/ml/jobs/_utils/types.py,sha256=AGLu0kPTNRUki26rah_KBwWp0bBJEtUP3zcfxkj5kB0,2326
  snowflake/ml/jobs/_utils/scripts/constants.py,sha256=YyIWZqQPYOTtgCY6SfyJjk2A98I5RQVmrOuLtET5Pqg,173
  snowflake/ml/jobs/_utils/scripts/get_instance_ip.py,sha256=N2wJYMPlwg-hidwgHhDhiBWOE6TskqCfWLMRRNnZBQs,5776
- snowflake/ml/jobs/_utils/scripts/mljob_launcher.py,sha256=29_zaKjvcLkwqBqYHObtQIT933UsdJGvb82EGwyvk48,15704
+ snowflake/ml/jobs/_utils/scripts/mljob_launcher.py,sha256=DPRupPuFqJ9TBbMCuomCf3jRqBJ_QQ1nCplg28HYt04,15752
  snowflake/ml/jobs/_utils/scripts/signal_workers.py,sha256=AR1Pylkm4-FGh10WXfrCtcxaV0rI7IQ2ZiO0Li7zZ3U,7433
  snowflake/ml/jobs/_utils/scripts/worker_shutdown_listener.py,sha256=SeJ8v5XDriwHAjIGpcQkwVP-f-lO9QIdVjVD7Fkgafs,7893
  snowflake/ml/lineage/__init__.py,sha256=8p1YGynC-qOxAZ8jZX2z84Reg5bv1NoJMoJmNJCrzI4,65
- snowflake/ml/lineage/lineage_node.py,sha256=vmikk4qaZuVFhQqW-VM6DuW4tDvmQlNbACvIVZEamcU,5830
+ snowflake/ml/lineage/lineage_node.py,sha256=SA0rbbI67rMr1qTMs4bAVkvqVtuKNI4lIaO5w0S-IXE,5767
  snowflake/ml/model/__init__.py,sha256=S9Q77g_uxiSVkPd8fbMsP7h3y3lp0sj6UJQYH9OdeO4,467
  snowflake/ml/model/custom_model.py,sha256=fDhMObqlyzD_qQG1Bq6HHkBN1w3Qzg9e81JWPiqRfc4,12249
  snowflake/ml/model/event_handler.py,sha256=pojleQVM9TPNeDvliTvon2Sfxqbf2WWxrOebo1SaEHo,7211
@@ -141,16 +141,16 @@ snowflake/ml/model/task.py,sha256=Zp5JaLB-YfX5p_HSaw81P3J7UnycQq5EMa87A35VOaQ,28
  snowflake/ml/model/type_hints.py,sha256=G0kp85-ksnYoAUHRdXxLFQBLq3XURuqYOpu_YeKEaNA,9847
  snowflake/ml/model/_client/model/batch_inference_specs.py,sha256=kzS7YfrBxZ8QTsWE4vx7jMyOjTopPOmGZSqc1t6cCqc,611
  snowflake/ml/model/_client/model/model_impl.py,sha256=Yabrbir5vPMOnsVmQJ23YN7vqhi756Jcm6pfO8Aq92o,17469
- snowflake/ml/model/_client/model/model_version_impl.py,sha256=azf94-xWOIvpa-1F6pvkbe7ErOLmBY2f16nONq80mgw,55002
+ snowflake/ml/model/_client/model/model_version_impl.py,sha256=VxIekIwBCyo4tfZJ0M2DzYzDPHgRTffsMHjJD8giqUw,54380
  snowflake/ml/model/_client/ops/metadata_ops.py,sha256=qpK6PL3OyfuhyOmpvLCpHLy6vCxbZbp1HlEvakFGwv4,4884
- snowflake/ml/model/_client/ops/model_ops.py,sha256=dcyV0PgAUynVWBfhPhzRmKe7S0R484-xng5ajJOtHaI,50048
- snowflake/ml/model/_client/ops/service_ops.py,sha256=pMKBJDza1AAhzBPELcNU6iIjOLhY_6kxrMWYmiB9Kvc,41887
+ snowflake/ml/model/_client/ops/model_ops.py,sha256=XGQB62Nc4ld86I7wJzuJGkmbrI38wIBm4egsZptdrM8,50394
+ snowflake/ml/model/_client/ops/service_ops.py,sha256=qJRXYswDruVuH8e2DjQY8l6hq-x4DipA-uPfZzTDfT0,43655
  snowflake/ml/model/_client/service/model_deployment_spec.py,sha256=LxdhU1m4YGc1MNyApQ0IyUetcH4IXOmwYOY9X3wjznY,19499
  snowflake/ml/model/_client/service/model_deployment_spec_schema.py,sha256=QpDso2bjx2eCRKIG4-ppc3z46B7hgYMZehOTRoR9IJs,2425
  snowflake/ml/model/_client/sql/_base.py,sha256=Qrm8M92g3MHb-QnSLUlbd8iVKCRxLhG_zr5M2qmXwJ8,1473
  snowflake/ml/model/_client/sql/model.py,sha256=nstZ8zR7MkXVEfhqLt7PWMik6dZr06nzq7VsF5NVNow,5840
  snowflake/ml/model/_client/sql/model_version.py,sha256=QwzFlDH5laTqK2qF7SJQSbt28DgspWj3R11l-yD1Da0,23496
- snowflake/ml/model/_client/sql/service.py,sha256=LheEjXaXOKBPl1M9xwcV6EakMZX2pcyjljWPj_8NE5k,10614
+ snowflake/ml/model/_client/sql/service.py,sha256=npggGj24DnguaBEuJ7vzy-gmY5OrygyAcyhHW47icAM,10458
  snowflake/ml/model/_client/sql/stage.py,sha256=2gxYNtmEXricwxeACVUr63OUDCy_iQvCi-kRT4qQtBA,887
  snowflake/ml/model/_client/sql/tag.py,sha256=9sI0VoldKmsfToWSjMQddozPPGCxYUI6n0gPBiqd6x8,4333
  snowflake/ml/model/_model_composer/model_composer.py,sha256=Xqi-sxmkBoZl383LQAXhMQkq9KsAS0A3ythC5bN3EOU,8292
@@ -424,13 +424,13 @@ snowflake/ml/modeling/xgboost/xgb_regressor.py,sha256=-k0ZHkTZkVybwNTbnRkR4O0Mgl
  snowflake/ml/modeling/xgboost/xgbrf_classifier.py,sha256=e7WgTxM3xHVxkOIhgUWnDrvFt1lZIGPJel4LhIGk46I,64253
  snowflake/ml/modeling/xgboost/xgbrf_regressor.py,sha256=tnCllaN_BEgRybZcxyvI3vZYzsEpCIIM7oGEPgF6y60,63778
  snowflake/ml/monitoring/explain_visualize.py,sha256=Vj4x7ClGvXY42HQzFcvVr1CbO_vVfZv6eZn_jV9N9gk,16145
- snowflake/ml/monitoring/model_monitor.py,sha256=1NOub1Cm9mtyawTj0H6pA1KtmEZ_yKVpn76PMdXWh0Y,3216
+ snowflake/ml/monitoring/model_monitor.py,sha256=m-1eeQIhAYAvFQ-8mjMQ-PTzCpnN9XEcWpdHdQuEEus,4707
  snowflake/ml/monitoring/shap.py,sha256=Dp9nYquPEZjxMTW62YYA9g9qUdmCEFxcSk7ejvOP7PE,3597
- snowflake/ml/monitoring/_client/model_monitor_sql_client.py,sha256=DzEEpIshNaPnBZl2EL81J7Eltpv7mitCtVGOVKBFrBM,15913
+ snowflake/ml/monitoring/_client/model_monitor_sql_client.py,sha256=6IVU1aQdiRu0GRhpZfNatJdzd5YgUNFlJ3Ti-mBxzN8,18027
  snowflake/ml/monitoring/_client/queries/record_count.ssql,sha256=Bd1uNMwhPKqPyrDd5ug8iY493t9KamJjrlo82OAfmjY,335
  snowflake/ml/monitoring/_client/queries/rmse.ssql,sha256=OEJiSStRz9-qKoZaFvmubtY_n0xMUjyVU2uiQHCp7KU,822
- snowflake/ml/monitoring/_manager/model_monitor_manager.py,sha256=ZENYZwAQXJEEyamX841qVViJBD0UwS-bdVQnEz7lBKg,10484
- snowflake/ml/monitoring/entities/model_monitor_config.py,sha256=navyOSsDvbJ6RIuFge8ECsD9FxWcRKxd4bopMiHybS0,1931
+ snowflake/ml/monitoring/_manager/model_monitor_manager.py,sha256=Aouj5ojRk28Na7TnCDPke13MEezHdVXC7WG1CBq4LoQ,10702
+ snowflake/ml/monitoring/entities/model_monitor_config.py,sha256=tKjYuzBmnpomIogmXcLyJlHfoCBgguulav8TOdzu0lQ,2053
  snowflake/ml/registry/__init__.py,sha256=XdPQK9ejYkSJVrSQ7HD3jKQO0hKq2mC4bPCB6qrtH3U,76
  snowflake/ml/registry/registry.py,sha256=Ro7flVHv3FnEU9Ly3zWRnDAqWiwRSOA2uw_MSKmCBTI,32936
  snowflake/ml/registry/_manager/model_manager.py,sha256=QsEpIbg3FPEbDOQXb_oo41hBjojrdVibdrNPCyJ0Cb0,17650
@@ -440,8 +440,8 @@ snowflake/ml/utils/connection_params.py,sha256=JuadbzKlgDZLZ5vJ9cnyAiSitvZT9jGSf
  snowflake/ml/utils/html_utils.py,sha256=L4pzpvFd20SIk4rie2kTAtcQjbxBHfjKmxonMAT2OoA,7665
  snowflake/ml/utils/sparse.py,sha256=zLBNh-ynhGpKH5TFtopk0YLkHGvv0yq1q-sV59YQKgg,3819
  snowflake/ml/utils/sql_client.py,sha256=pSe2od6Pkh-8NwG3D-xqN76_uNf-ohOtVbT55HeQg1Y,668
- snowflake_ml_python-1.13.0.dist-info/licenses/LICENSE.txt,sha256=PdEp56Av5m3_kl21iFkVTX_EbHJKFGEdmYeIO1pL_Yk,11365
- snowflake_ml_python-1.13.0.dist-info/METADATA,sha256=2qYBLI9oOcJD_9nibGr83ZmW8fBFPqqLrbvjaPfrhDM,94503
- snowflake_ml_python-1.13.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- snowflake_ml_python-1.13.0.dist-info/top_level.txt,sha256=TY0gFSHKDdZy3THb0FGomyikWQasEGldIR1O0HGOHVw,10
- snowflake_ml_python-1.13.0.dist-info/RECORD,,
+ snowflake_ml_python-1.14.0.dist-info/licenses/LICENSE.txt,sha256=PdEp56Av5m3_kl21iFkVTX_EbHJKFGEdmYeIO1pL_Yk,11365
+ snowflake_ml_python-1.14.0.dist-info/METADATA,sha256=mw74_T9nT9XU_DWlTd6ihFIG41CjQByxElsNo96Xbww,94660
+ snowflake_ml_python-1.14.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ snowflake_ml_python-1.14.0.dist-info/top_level.txt,sha256=TY0gFSHKDdZy3THb0FGomyikWQasEGldIR1O0HGOHVw,10
+ snowflake_ml_python-1.14.0.dist-info/RECORD,,