snowflake-ml-python 1.12.0__py3-none-any.whl → 1.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -680,8 +680,10 @@ def _extract_arg_value(field: str, func_spec: inspect.FullArgSpec, args: Any, kw
     elif func_spec.kwonlydefaults and field in func_spec.kwonlyargs:
         if field in kwargs:
             return True, kwargs[field]
-        else:
+        elif field in func_spec.kwonlydefaults:
            return True, func_spec.kwonlydefaults[field]
+        else:
+            return False, None
     else:
         return False, None

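The new guard matters because `FullArgSpec.kwonlydefaults` only lists keyword-only arguments that actually have defaults. A standalone sketch of the edge case (the function `f` here is illustrative, not from the library):

```python
import inspect

# A keyword-only argument without a default ('a') never appears in
# kwonlydefaults, so the old unconditional `else` branch could raise KeyError
# when 'a' was not passed; the fixed code returns (False, None) instead.
def f(*, a, b=1):
    pass

spec = inspect.getfullargspec(f)
print(spec.kwonlyargs)      # ['a', 'b']
print(spec.kwonlydefaults)  # {'b': 1}
```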
@@ -7,7 +7,7 @@ from urllib.parse import quote
 import snowflake.snowpark._internal.utils as snowpark_utils
 from snowflake.ml import model as ml_model, registry
 from snowflake.ml._internal.human_readable_id import hrid_generator
-from snowflake.ml._internal.utils import sql_identifier
+from snowflake.ml._internal.utils import mixins, sql_identifier
 from snowflake.ml.experiment import (
     _entities as entities,
     _experiment_info as experiment_info,
@@ -23,7 +23,7 @@ from snowflake.snowpark import session
 DEFAULT_EXPERIMENT_NAME = sql_identifier.SqlIdentifier("DEFAULT")


-class ExperimentTracking:
+class ExperimentTracking(mixins.SerializableSessionMixin):
     """
     Class to manage experiments in Snowflake.
     """
@@ -73,12 +73,34 @@ class ExperimentTracking:
             database_name=self._database_name,
             schema_name=self._schema_name,
         )
+        self._session = session

         # The experiment in context
         self._experiment: Optional[entities.Experiment] = None
         # The run in context
         self._run: Optional[entities.Run] = None

+    def __getstate__(self) -> dict[str, Any]:
+        state = super().__getstate__()
+        # Remove unpicklable attributes
+        state["_sql_client"] = None
+        state["_registry"] = None
+        return state
+
+    def __setstate__(self, state: dict[str, Any]) -> None:
+        super().__setstate__(state)
+        # Restore unpicklable attributes
+        self._sql_client = sql_client.ExperimentTrackingSQLClient(
+            session=self._session,
+            database_name=self._database_name,
+            schema_name=self._schema_name,
+        )
+        self._registry = registry.Registry(
+            session=self._session,
+            database_name=self._database_name,
+            schema_name=self._schema_name,
+        )
+
     def set_experiment(
         self,
         experiment_name: str,
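A minimal sketch of what the mixin plus these hooks enable, assuming an existing Snowpark `session` and illustrative database/schema names (the constructor signature is inferred from the diff, not confirmed by it):

```python
import pickle

from snowflake.ml.experiment.experiment_tracking import ExperimentTracking

exp = ExperimentTracking(session, database_name="ML_DB", schema_name="PUBLIC")

# _sql_client and _registry are nulled out in __getstate__ and rebuilt from
# the session in __setstate__, so the tracker survives a pickle round-trip.
restored = pickle.loads(pickle.dumps(exp))
```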
@@ -236,7 +236,9 @@ def run_script(script_path: str, *script_args: Any, main_func: Optional[str] = N

     # Create a Snowpark session before running the script
     # Session can be retrieved from using snowflake.snowpark.context.get_active_session()
-    session = Session.builder.configs(SnowflakeLoginOptions()).create()  # noqa: F841
+    config = SnowflakeLoginOptions()
+    config["client_session_keep_alive"] = "True"
+    session = Session.builder.configs(config).create()  # noqa: F841

     try:

@@ -264,6 +266,7 @@ def run_script(script_path: str, *script_args: Any, main_func: Optional[str] = N
     finally:
         # Restore original sys.argv
         sys.argv = original_argv
+        session.close()


 def main(script_path: str, *script_args: Any, script_main_func: Optional[str] = None) -> ExecutionResult:
@@ -287,6 +290,13 @@ def main(script_path: str, *script_args: Any, script_main_func: Optional[str] =
     output_dir = os.path.dirname(result_abs_path)
     os.makedirs(output_dir, exist_ok=True)

+    try:
+        import ray
+
+        ray.init(address="auto")
+    except ModuleNotFoundError:
+        warnings.warn("Ray is not installed, skipping Ray initialization", ImportWarning, stacklevel=1)
+
     try:
         # Wait for minimum required instances if specified
         min_instances_str = os.environ.get(MIN_INSTANCES_ENV_VAR) or "1"
snowflake/ml/jobs/job.py CHANGED
@@ -50,7 +50,7 @@ class MLJob(Generic[T], SerializableSessionMixin):
     def min_instances(self) -> int:
         try:
             return int(self._container_spec["env"].get(constants.MIN_INSTANCES_ENV_VAR, 1))
-        except TypeError:
+        except (TypeError, ValueError):
             return 1

     @property
@@ -83,7 +83,10 @@ class MLJob(Generic[T], SerializableSessionMixin):
     def _container_spec(self) -> dict[str, Any]:
         """Get the job's main container spec."""
         containers = self._service_spec["spec"]["containers"]
-        container_spec = next(c for c in containers if c["name"] == constants.DEFAULT_CONTAINER_NAME)
+        try:
+            container_spec = next(c for c in containers if c["name"] == constants.DEFAULT_CONTAINER_NAME)
+        except StopIteration:
+            raise ValueError(f"Container '{constants.DEFAULT_CONTAINER_NAME}' not found in job {self.name}")
        return cast(dict[str, Any], container_spec)

     @property
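The same guard could also be expressed with `next()`'s default sentinel; a standalone comparison sketch (container names here are illustrative):

```python
containers = [{"name": "main"}, {"name": "sidecar"}]

# next() with a default avoids try/except, at the cost of a separate None check:
container_spec = next((c for c in containers if c["name"] == "main"), None)
if container_spec is None:
    raise ValueError("Container 'main' not found")
print(container_spec)  # {'name': 'main'}
```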
@@ -58,7 +58,8 @@ def list_jobs(
     >>> from snowflake.ml.jobs import list_jobs
     >>> list_jobs(limit=5)
     """
-    session = session or get_active_session()
+
+    session = _ensure_session(session)
     try:
         df = _get_job_history_spcs(
             session,
@@ -162,7 +163,7 @@ def _get_job_history_spcs(
 @telemetry.send_api_usage_telemetry(project=_PROJECT)
 def get_job(job_id: str, session: Optional[snowpark.Session] = None) -> jb.MLJob[Any]:
     """Retrieve a job service from the backend."""
-    session = session or get_active_session()
+    session = _ensure_session(session)
     try:
         database, schema, job_name = identifier.parse_schema_level_object_identifier(job_id)
         database = identifier.resolve_identifier(cast(str, database or session.get_current_database()))
@@ -434,8 +435,10 @@ def _submit_job(

     Raises:
         ValueError: If database or schema value(s) are invalid
+        RuntimeError: If schema is not specified in session context or job submission
+        snowpark.exceptions.SnowparkSQLException: if failed to upload payload
     """
-    session = session or get_active_session()
+    session = _ensure_session(session)

     # Check for deprecated args
     if "num_instances" in kwargs:
@@ -486,10 +489,17 @@ def _submit_job(
     stage_name = f"@{'.'.join(filter(None, stage_path_parts[:3]))}"
     stage_path = pathlib.PurePosixPath(f"{stage_name}{stage_path_parts[-1].rstrip('/')}/{job_name}")

-    # Upload payload
-    uploaded_payload = payload_utils.JobPayload(
-        source, entrypoint=entrypoint, pip_requirements=pip_requirements, additional_payloads=additional_payloads
-    ).upload(session, stage_path)
+    try:
+        # Upload payload
+        uploaded_payload = payload_utils.JobPayload(
+            source, entrypoint=entrypoint, pip_requirements=pip_requirements, additional_payloads=additional_payloads
+        ).upload(session, stage_path)
+    except snowpark.exceptions.SnowparkSQLException as e:
+        if e.sql_error_code == 90106:
+            raise RuntimeError(
+                "Please specify a schema, either in the session context or as a parameter in the job submission"
+            )
+        raise

     if feature_flags.FeatureFlags.USE_SUBMIT_JOB_V2.is_enabled():
         # Add default env vars (extracted from spec_utils.generate_service_spec)
@@ -651,3 +661,15 @@ def _do_submit_job_v2(
     actual_job_id = query_helper.run_query(session, query_template, params=params)[0][0]

     return get_job(actual_job_id, session=session)
+
+
+def _ensure_session(session: Optional[snowpark.Session]) -> snowpark.Session:
+    try:
+        session = session or get_active_session()
+    except snowpark.exceptions.SnowparkSessionException as e:
+        if "More than one active session" in e.message:
+            raise RuntimeError("Please specify the session as a parameter in API call")
+        if "No default Session is found" in e.message:
+            raise RuntimeError("Please create a session before API call")
+        raise
+    return session
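Illustratively, the user-facing effect of `_ensure_session` (the message text comes from the diff above; the failure scenario is an assumption):

```python
from snowflake.ml.jobs import list_jobs

# In a process with no Snowpark session, the raw SnowparkSessionException is
# now translated into an actionable RuntimeError.
try:
    list_jobs(limit=5)
except RuntimeError as err:
    print(err)  # "Please create a session before API call"
```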
@@ -4,14 +4,11 @@ from pydantic import BaseModel


 class InputSpec(BaseModel):
-    input_stage_location: str
-    input_file_pattern: str = "*"
+    stage_location: str


 class OutputSpec(BaseModel):
-    output_stage_location: str
-    output_file_prefix: Optional[str] = None
-    completion_filename: str = "_SUCCESS"
+    stage_location: str


 class JobSpec(BaseModel):
@@ -25,3 +22,4 @@ class JobSpec(BaseModel):
     warehouse: Optional[str] = None
     cpu_requests: Optional[str] = None
     memory_requests: Optional[str] = None
+    replicas: Optional[int] = None
@@ -590,11 +590,12 @@ class ModelVersion(lineage_node.LineageNode):
            cpu_requests=job_spec.cpu_requests,
            memory_requests=job_spec.memory_requests,
            job_name=job_name,
+            replicas=job_spec.replicas,
            # input and output
-            input_stage_location=input_spec.input_stage_location,
-            input_file_pattern=input_spec.input_file_pattern,
-            output_stage_location=output_spec.output_stage_location,
-            completion_filename=output_spec.completion_filename,
+            input_stage_location=input_spec.stage_location,
+            input_file_pattern="*",
+            output_stage_location=output_spec.stage_location,
+            completion_filename="_SUCCESS",
            # misc
            statement_params=statement_params,
        )
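A sketch of the simplified spec surface, using the import path from this wheel's file layout (stage names are illustrative, and `JobSpec`'s full field set is not shown in this hunk):

```python
from snowflake.ml.model._client.model.batch_inference_specs import InputSpec, OutputSpec

# One stage_location per spec; the file pattern ("*") and completion marker
# ("_SUCCESS") are now fixed internally rather than user-configurable.
input_spec = InputSpec(stage_location="@ML_DB.PUBLIC.MY_STAGE/inputs/")
output_spec = OutputSpec(stage_location="@ML_DB.PUBLIC.MY_STAGE/outputs/")
```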
@@ -47,6 +47,7 @@ class ServiceInfo(TypedDict):
 class ModelOperator:
     INFERENCE_SERVICE_ENDPOINT_NAME = "inference"
     INGRESS_ENDPOINT_URL_SUFFIX = "snowflakecomputing.app"
+    PRIVATELINK_INGRESS_ENDPOINT_URL_SUBSTRING = "privatelink.snowflakecomputing"

     def __init__(
         self,
@@ -612,6 +613,30 @@ class ModelOperator:
             statement_params=statement_params,
         )

+    def _is_privatelink_connection(self) -> bool:
+        """Detect if the current session is using a privatelink connection."""
+        try:
+            host = self._session.connection.host
+            return ModelOperator.PRIVATELINK_INGRESS_ENDPOINT_URL_SUBSTRING in host
+        except AttributeError:
+            return False
+
+    def _extract_and_validate_ingress_url(self, res_row: "row.Row") -> Optional[str]:
+        """Extract and validate ingress URL from endpoint row."""
+        url_value = res_row[self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_INGRESS_URL_COL_NAME]
+        if url_value is None:
+            return None
+        url_str = str(url_value)
+        return url_str if url_str.endswith(ModelOperator.INGRESS_ENDPOINT_URL_SUFFIX) else None
+
+    def _extract_and_validate_privatelink_url(self, res_row: "row.Row") -> Optional[str]:
+        """Extract and validate privatelink ingress URL from endpoint row."""
+        url_value = res_row[self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_PRIVATELINK_INGRESS_URL_COL_NAME]
+        if url_value is None:
+            return None
+        url_str = str(url_value)
+        return url_str if ModelOperator.PRIVATELINK_INGRESS_ENDPOINT_URL_SUBSTRING in url_str else None
+
     def show_services(
         self,
         *,
@@ -644,8 +669,10 @@ class ModelOperator:
         fully_qualified_service_names = [str(service) for service in json_array if "MODEL_BUILD_" not in service]

         result: list[ServiceInfo] = []
+        is_privatelink_connection = self._is_privatelink_connection()
+
         for fully_qualified_service_name in fully_qualified_service_names:
-            ingress_url: Optional[str] = None
+            inference_endpoint: Optional[str] = None
             db, schema, service_name = sql_identifier.parse_fully_qualified_name(fully_qualified_service_name)
             statuses = self._service_client.get_service_container_statuses(
                 database_name=db, schema_name=schema, service_name=service_name, statement_params=statement_params
@@ -659,17 +686,23 @@ class ModelOperator:
            ):
                if (
                    res_row[self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_NAME_COL_NAME]
-                    == self.INFERENCE_SERVICE_ENDPOINT_NAME
-                    and res_row[self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_INGRESS_URL_COL_NAME] is not None
+                    != self.INFERENCE_SERVICE_ENDPOINT_NAME
                ):
-                    ingress_url = str(
-                        res_row[self._service_client.MODEL_INFERENCE_SERVICE_ENDPOINT_INGRESS_URL_COL_NAME]
-                    )
-                    if not ingress_url.endswith(ModelOperator.INGRESS_ENDPOINT_URL_SUFFIX):
-                        ingress_url = None
+                    continue
+
+                ingress_url = self._extract_and_validate_ingress_url(res_row)
+                privatelink_ingress_url = self._extract_and_validate_privatelink_url(res_row)
+
+                if is_privatelink_connection and privatelink_ingress_url is not None:
+                    inference_endpoint = privatelink_ingress_url
+                else:
+                    inference_endpoint = ingress_url
+
            result.append(
                ServiceInfo(
-                    name=fully_qualified_service_name, status=service_status.value, inference_endpoint=ingress_url
+                    name=fully_qualified_service_name,
+                    status=service_status.value,
+                    inference_endpoint=inference_endpoint,
                )
            )

@@ -881,6 +881,7 @@ class ServiceOperator:
         max_batch_rows: Optional[int],
         cpu_requests: Optional[str],
         memory_requests: Optional[str],
+        replicas: Optional[int],
         statement_params: Optional[dict[str, Any]] = None,
     ) -> jobs.MLJob[Any]:
         database_name = self._database_name
@@ -914,6 +915,7 @@ class ServiceOperator:
             warehouse=warehouse,
             cpu=cpu_requests,
             memory=memory_requests,
+            replicas=replicas,
         )

         self._model_deployment_spec.add_image_build_spec(
@@ -207,6 +207,7 @@ class ModelDeploymentSpec:
         gpu: Optional[Union[str, int]] = None,
         num_workers: Optional[int] = None,
         max_batch_rows: Optional[int] = None,
+        replicas: Optional[int] = None,
     ) -> "ModelDeploymentSpec":
         """Add job specification to the deployment spec.

@@ -226,6 +227,7 @@ class ModelDeploymentSpec:
            gpu: GPU requirement.
            num_workers: Number of workers.
            max_batch_rows: Maximum batch rows for inference.
+            replicas: Number of replicas.

        Raises:
            ValueError: If a service spec already exists.
@@ -260,6 +262,7 @@ class ModelDeploymentSpec:
                output_stage_location=output_stage_location,
                completion_filename=completion_filename,
            ),
+            replicas=replicas,
            **self._inference_spec,
        )
        return self
@@ -57,6 +57,7 @@ class Job(BaseModel):
     function_name: str
     input: Input
     output: Output
+    replicas: Optional[int] = None


 class LogModelArgs(BaseModel):
@@ -63,6 +63,7 @@ class ServiceStatusInfo:
 class ServiceSQLClient(_base._BaseSQLClient):
     MODEL_INFERENCE_SERVICE_ENDPOINT_NAME_COL_NAME = "name"
     MODEL_INFERENCE_SERVICE_ENDPOINT_INGRESS_URL_COL_NAME = "ingress_url"
+    MODEL_INFERENCE_SERVICE_ENDPOINT_PRIVATELINK_INGRESS_URL_COL_NAME = "privatelink_ingress_url"
     SERVICE_STATUS = "service_status"
     INSTANCE_ID = "instance_id"
     INSTANCE_STATUS = "instance_status"
@@ -255,6 +256,9 @@ class ServiceSQLClient(_base._BaseSQLClient):
        )
        .has_column(ServiceSQLClient.MODEL_INFERENCE_SERVICE_ENDPOINT_NAME_COL_NAME, allow_empty=True)
        .has_column(ServiceSQLClient.MODEL_INFERENCE_SERVICE_ENDPOINT_INGRESS_URL_COL_NAME, allow_empty=True)
+        .has_column(
+            ServiceSQLClient.MODEL_INFERENCE_SERVICE_ENDPOINT_PRIVATELINK_INGRESS_URL_COL_NAME, allow_empty=True
+        )
    )

    return res.validate()
@@ -1,6 +1,7 @@
 import json
 import logging
 import os
+import shutil
 import time
 import uuid
 import warnings
@@ -88,6 +89,7 @@ class HuggingFacePipelineHandler(
     _HANDLER_MIGRATOR_PLANS: dict[str, type[base_migrator.BaseModelHandlerMigrator]] = {}

     MODEL_BLOB_FILE_OR_DIR = "model"
+    MODEL_PICKLE_FILE = "snowml_huggingface_pipeline.pkl"
     ADDITIONAL_CONFIG_FILE = "pipeline_config.pt"
     DEFAULT_TARGET_METHODS = ["__call__"]
     IS_AUTO_SIGNATURE = True
@@ -199,6 +201,7 @@ class HuggingFacePipelineHandler(
         model_blob_path = os.path.join(model_blobs_dir_path, name)
         os.makedirs(model_blob_path, exist_ok=True)

+        is_repo_downloaded = False
         if type_utils.LazyType("transformers.Pipeline").isinstance(model):
             save_path = os.path.join(model_blob_path, cls.MODEL_BLOB_FILE_OR_DIR)
             model.save_pretrained(  # type:ignore[attr-defined]
@@ -224,11 +227,22 @@ class HuggingFacePipelineHandler(
            ) as f:
                cloudpickle.dump(pipeline_params, f)
        else:
+            model_blob_file_or_dir = os.path.join(model_blob_path, cls.MODEL_BLOB_FILE_OR_DIR)
+            model_blob_pickle_file = os.path.join(model_blob_file_or_dir, cls.MODEL_PICKLE_FILE)
+            os.makedirs(model_blob_file_or_dir, exist_ok=True)
            with open(
-                os.path.join(model_blob_path, cls.MODEL_BLOB_FILE_OR_DIR),
+                model_blob_pickle_file,
                "wb",
            ) as f:
                cloudpickle.dump(model, f)
+            if model.repo_snapshot_dir:
+                logger.info("model's repo_snapshot_dir is available, copying snapshot")
+                shutil.copytree(
+                    model.repo_snapshot_dir,
+                    model_blob_file_or_dir,
+                    dirs_exist_ok=True,
+                )
+                is_repo_downloaded = True

        base_meta = model_blob_meta.ModelBlobMeta(
            name=name,
@@ -236,13 +250,12 @@ class HuggingFacePipelineHandler(
            handler_version=cls.HANDLER_VERSION,
            path=cls.MODEL_BLOB_FILE_OR_DIR,
            options=model_meta_schema.HuggingFacePipelineModelBlobOptions(
-                {
-                    "task": task,
-                    "batch_size": batch_size if batch_size is not None else 1,
-                    "has_tokenizer": has_tokenizer,
-                    "has_feature_extractor": has_feature_extractor,
-                    "has_image_preprocessor": has_image_preprocessor,
-                }
+                task=task,
+                batch_size=batch_size if batch_size is not None else 1,
+                has_tokenizer=has_tokenizer,
+                has_feature_extractor=has_feature_extractor,
+                has_image_preprocessor=has_image_preprocessor,
+                is_repo_downloaded=is_repo_downloaded,
            ),
        )
        model_meta.models[name] = base_meta
@@ -286,6 +299,27 @@ class HuggingFacePipelineHandler(

        return device_config

+    @staticmethod
+    def _load_pickle_model(
+        pickle_file: str,
+        **kwargs: Unpack[model_types.HuggingFaceLoadOptions],
+    ) -> huggingface_pipeline.HuggingFacePipelineModel:
+        with open(pickle_file, "rb") as f:
+            m = cloudpickle.load(f)
+        assert isinstance(m, huggingface_pipeline.HuggingFacePipelineModel)
+        torch_dtype: Optional[str] = None
+        device_config = None
+        if getattr(m, "device", None) is None and getattr(m, "device_map", None) is None:
+            device_config = HuggingFacePipelineHandler._get_device_config(**kwargs)
+            m.__dict__.update(device_config)
+
+        if getattr(m, "torch_dtype", None) is None and kwargs.get("use_gpu", False):
+            torch_dtype = "auto"
+            m.__dict__.update(torch_dtype=torch_dtype)
+        else:
+            m.__dict__.update(torch_dtype=None)
+        return m
+
    @classmethod
    def load_model(
        cls,
@@ -310,7 +344,13 @@ class HuggingFacePipelineHandler(
            raise ValueError("Missing field `batch_size` in model blob metadata for type `huggingface_pipeline`")

        model_blob_file_or_dir_path = os.path.join(model_blob_path, model_blob_filename)
-        if os.path.isdir(model_blob_file_or_dir_path):
+        is_repo_downloaded = model_blob_options.get("is_repo_downloaded", False)
+
+        def _create_pipeline_from_dir(
+            model_blob_file_or_dir_path: str,
+            model_blob_options: model_meta_schema.HuggingFacePipelineModelBlobOptions,
+            **kwargs: Unpack[model_types.HuggingFaceLoadOptions],
+        ) -> "transformers.Pipeline":
            import transformers

            additional_pipeline_params = {}
@@ -330,7 +370,7 @@ class HuggingFacePipelineHandler(
            ) as f:
                pipeline_params = cloudpickle.load(f)

-            device_config = cls._get_device_config(**kwargs)
+            device_config = HuggingFacePipelineHandler._get_device_config(**kwargs)

            m = transformers.pipeline(
                model_blob_options["task"],
@@ -359,18 +399,59 @@ class HuggingFacePipelineHandler(
                m.tokenizer.chat_template = DEFAULT_CHAT_TEMPLATE

            m.__dict__.update(pipeline_params)
+            return m

+        def _create_pipeline_from_model(
+            model_blob_file_or_dir_path: str,
+            m: huggingface_pipeline.HuggingFacePipelineModel,
+            **kwargs: Unpack[model_types.HuggingFaceLoadOptions],
+        ) -> "transformers.Pipeline":
+            import transformers
+
+            return transformers.pipeline(
+                m.task,
+                model=model_blob_file_or_dir_path,
+                trust_remote_code=m.trust_remote_code,
+                torch_dtype=getattr(m, "torch_dtype", None),
+                revision=m.revision,
+                # pass device or device_map when creating the pipeline
+                **HuggingFacePipelineHandler._get_device_config(**kwargs),
+                # pass other model_kwargs to transformers.pipeline.from_pretrained method
+                **m.model_kwargs,
+            )
+
+        if os.path.isdir(model_blob_file_or_dir_path) and not is_repo_downloaded:
+            # the logged model is a transformers.Pipeline object
+            # weights of the model are saved in the directory
+            return _create_pipeline_from_dir(model_blob_file_or_dir_path, model_blob_options, **kwargs)
        else:
-            assert os.path.isfile(model_blob_file_or_dir_path)
-            with open(model_blob_file_or_dir_path, "rb") as f:
-                m = cloudpickle.load(f)
-            assert isinstance(m, huggingface_pipeline.HuggingFacePipelineModel)
-            if getattr(m, "device", None) is None and getattr(m, "device_map", None) is None:
-                m.__dict__.update(cls._get_device_config(**kwargs))
-
-            if getattr(m, "torch_dtype", None) is None and kwargs.get("use_gpu", False):
-                m.__dict__.update(torch_dtype="auto")
-            return m
+            # case 1: LEGACY logging, repo snapshot is not logged
+            if os.path.isfile(model_blob_file_or_dir_path):
+                # LEGACY logging that had model as a pickle file in the model blob directory
+                # the logged model is a huggingface_pipeline.HuggingFacePipelineModel object
+                # the model_blob_file_or_dir_path is the pickle file that holds
+                # the huggingface_pipeline.HuggingFacePipelineModel object
+                # the snapshot of the repo is not logged
+                return cls._load_pickle_model(model_blob_file_or_dir_path)
+            else:
+                assert os.path.isdir(model_blob_file_or_dir_path)
+                # the logged model is a huggingface_pipeline.HuggingFacePipelineModel object
+                # the pickle_file holds the huggingface_pipeline.HuggingFacePipelineModel object
+                pickle_file = os.path.join(model_blob_file_or_dir_path, cls.MODEL_PICKLE_FILE)
+                m = cls._load_pickle_model(pickle_file)
+
+                # case 2: logging without the snapshot of the repo
+                if not is_repo_downloaded:
+                    # we return the huggingface_pipeline.HuggingFacePipelineModel object
+                    return m
+                # case 3: logging with the snapshot of the repo
+                else:
+                    # the model_blob_file_or_dir_path is the directory that holds
+                    # weights of the model from `huggingface_hub.snapshot_download`
+                    # the huggingface_pipeline.HuggingFacePipelineModel object is logged
+                    # with a snapshot of the repo, we create a transformers.Pipeline object
+                    # by reading the snapshot directory
+                    return _create_pipeline_from_model(model_blob_file_or_dir_path, m, **kwargs)

    @classmethod
    def convert_as_custom_model(
@@ -665,7 +746,7 @@ class HuggingFaceOpenAICompatibleModel:
            prompt_text,
            return_tensors="pt",
            padding=True,
-        )
+        ).to(self.model.device)
        prompt_tokens = inputs.input_ids.shape[1]

        from transformers import GenerationConfig
@@ -683,6 +764,7 @@ class HuggingFaceOpenAICompatibleModel:
            num_return_sequences=n,
            num_beams=max(2, n),  # must be >1
            num_beam_groups=max(2, n) if presence_penalty else 1,
+            do_sample=False,
        )

        # Generate text
@@ -229,6 +229,11 @@ class XGBModelHandler(_base.BaseModelHandler[Union["xgboost.Booster", "xgboost.X
        enable_categorical = False
        for col, d_type in X.dtypes.items():
            if pd.api.extensions.ExtensionDtype.is_dtype(d_type):
+                if pd.CategoricalDtype.is_dtype(d_type):
+                    enable_categorical = True
+                elif isinstance(d_type, pd.StringDtype):
+                    X[col] = X[col].astype("category")
+                    enable_categorical = True
                continue
            if not np.issubdtype(d_type, np.number):
                # categorical columns are converted to numpy's str dtype
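The net effect for users, sketched with toy data (illustrative only; the handler performs the dtype promotion internally when preparing inputs):

```python
import pandas as pd
import xgboost as xgb

# Category and pandas-string columns now flip enable_categorical on instead
# of falling through as unsupported extension dtypes.
X = pd.DataFrame(
    {
        "color": pd.Series(["red", "blue", "red", "blue"], dtype="category"),
        "size": [1.0, 2.0, 3.0, 4.0],
    }
)
y = [0, 1, 0, 1]

clf = xgb.XGBClassifier(tree_method="hist", enable_categorical=True, n_estimators=5)
clf.fit(X, y)
print(clf.predict(X))
```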
@@ -51,6 +51,7 @@ class HuggingFacePipelineModelBlobOptions(BaseModelBlobOptions):
    has_tokenizer: NotRequired[bool]
    has_feature_extractor: NotRequired[bool]
    has_image_preprocessor: NotRequired[bool]
+    is_repo_downloaded: NotRequired[Optional[bool]]


 class LightGBMModelBlobOptions(BaseModelBlobOptions):
@@ -28,6 +28,10 @@ class HuggingFacePipelineModel:
        token: Optional[str] = None,
        trust_remote_code: Optional[bool] = None,
        model_kwargs: Optional[dict[str, Any]] = None,
+        download_snapshot: bool = True,
+        # repo snapshot download args
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
        **kwargs: Any,
    ) -> None:
        """
@@ -52,6 +56,9 @@ class HuggingFacePipelineModel:
                Defaults to None.
            model_kwargs: Additional dictionary of keyword arguments passed along to the model's `from_pretrained(...,`.
                Defaults to None.
+            download_snapshot: Whether to download the HuggingFace repository. Defaults to True.
+            allow_patterns: If provided, only files matching at least one pattern are downloaded.
+            ignore_patterns: If provided, files matching any of the patterns are not downloaded.
            kwargs: Additional keyword arguments passed along to the specific pipeline init (see the documentation for
                the corresponding pipeline class for possible values).

@@ -220,6 +227,21 @@ class HuggingFacePipelineModel:
            stacklevel=2,
        )

+        repo_snapshot_dir: Optional[str] = None
+        if download_snapshot:
+            try:
+                from huggingface_hub import snapshot_download
+
+                repo_snapshot_dir = snapshot_download(
+                    repo_id=model,
+                    revision=revision,
+                    token=token,
+                    allow_patterns=allow_patterns,
+                    ignore_patterns=ignore_patterns,
+                )
+            except ImportError:
+                logger.info("huggingface_hub package is not installed, skipping snapshot download")
+
        # ==== End pipeline logic from transformers ====

        self.task = normalized_task
@@ -229,6 +251,7 @@ class HuggingFacePipelineModel:
        self.trust_remote_code = trust_remote_code
        self.model_kwargs = model_kwargs
        self.tokenizer = tokenizer
+        self.repo_snapshot_dir = repo_snapshot_dir
        self.__dict__.update(kwargs)

    @telemetry.send_api_usage_telemetry(
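Putting the new constructor arguments together, a sketch (the `task`/`model` values are illustrative, and the earlier positional parameters are assumptions based on the documented pipeline API, not shown in this hunk):

```python
from snowflake.ml.model.models.huggingface_pipeline import HuggingFacePipelineModel

# Default: eagerly snapshot the repo via huggingface_hub.snapshot_download,
# optionally filtering which files are fetched.
pipe_ref = HuggingFacePipelineModel(
    task="text-classification",
    model="distilbert-base-uncased-finetuned-sst-2-english",
    ignore_patterns=["*.onnx", "*.msgpack"],  # skip weight formats you don't need
)

# Opt out to skip the download entirely (pre-1.13.0 behavior).
lazy_ref = HuggingFacePipelineModel(
    task="text-classification",
    model="distilbert-base-uncased-finetuned-sst-2-english",
    download_snapshot=False,
)
```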
snowflake/ml/version.py CHANGED
@@ -1,2 +1,2 @@
 # This is parsed by regex in conda recipe meta file. Make sure not to break it.
-VERSION = "1.12.0"
+VERSION = "1.13.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: snowflake-ml-python
-Version: 1.12.0
+Version: 1.13.0
 Summary: The machine learning client library that is used for interacting with Snowflake to build machine learning solutions.
 Author-email: "Snowflake, Inc" <support@snowflake.com>
 License:
@@ -410,12 +410,28 @@ NOTE: Version 1.7.0 is used as example here. Please choose the the latest versio

 # Release History

+## 1.13.0
+
+### Bug Fixes
+
+### Behavior Changes
+
+### New Features
+
+* Registry: Log a HuggingFace model without having to load the model in memory using
+  `huggingface_pipeline.HuggingFacePipelineModel`. Requires the `huggingface_hub` package to be installed.
+  To disable downloading the HuggingFace repository, pass `download_snapshot=False` when creating the
+  `huggingface_pipeline.HuggingFacePipelineModel` object.
+* Registry: Added support for XGBoost models to use `enable_categorical=True` with pandas DataFrame inputs.
+* Registry: Added support to display the privatelink inference endpoint in `ModelVersion` list services.
+
 ## 1.12.0

 ### Bug Fixes

 * Registry: Fixed an issue where the string representation of dictionary-type output columns was being incorrectly
   created during structured output deserialization. Now, the original data type is properly preserved.
+* Registry: Fixed the inference server performance issue for wide (500+ features) and JSON inputs.

 ### Behavior Changes

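Tying the release note to the API, a sketch of logging such a reference without materializing the pipeline in memory (registry setup, session, and names are assumptions; `pipe_ref` is the `HuggingFacePipelineModel` from the earlier sketch):

```python
from snowflake.ml.registry import Registry

reg = Registry(session=session, database_name="ML_DB", schema_name="PUBLIC")
mv = reg.log_model(
    pipe_ref,  # a HuggingFacePipelineModel reference; weights come from the snapshot
    model_name="sentiment_clf",
    version_name="v1",
)
```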
@@ -10,7 +10,7 @@ snowflake/cortex/_sse_client.py,sha256=sLYgqAfTOPADCnaWH2RWAJi8KbU_7gSRsTUDcDD5T
 snowflake/cortex/_summarize.py,sha256=7GH8zqfIdOiHA5w4b6EvJEKEWhaTrL4YA6iDGbn7BNM,1307
 snowflake/cortex/_translate.py,sha256=9ZGjvAnJFisbzJ_bXnt4pyug5UzhHJRXW8AhGQEersM,1652
 snowflake/cortex/_util.py,sha256=krNTpbkFLXwdFqy1bd0xi7ZmOzOHRnIfHdQCPiLZJxk,3288
-snowflake/ml/version.py,sha256=5tlizwZvedjvjGdJByUlVBEhnQ4MZljXjYxb1SotXp0,99
+snowflake/ml/version.py,sha256=AGFrDevyvV6eDph0eyz5QYMtk3gQG8WYL19gYzcfRUk,99
 snowflake/ml/_internal/env.py,sha256=EY_2KVe8oR3LgKWdaeRb5rRU-NDNXJppPDsFJmMZUUY,265
 snowflake/ml/_internal/env_utils.py,sha256=x6ID94g6FYoMX3afp0zoUHzBvuvPyiE2F6RDpxx5Cq0,30967
 snowflake/ml/_internal/file_utils.py,sha256=7sA6loOeSfmGP4yx16P4usT9ZtRqG3ycnXu7_Tk7dOs,14206
@@ -18,7 +18,7 @@ snowflake/ml/_internal/init_utils.py,sha256=WhrlvS-xcmKErSpwg6cUk6XDQ5lQcwDqPJnU
 snowflake/ml/_internal/migrator_utils.py,sha256=k3erO8x3YJcX6nkKeyJAUNGg1qjE3RFmD-W6dtLzIH0,161
 snowflake/ml/_internal/platform_capabilities.py,sha256=HkTr2RI5oR54QNmwg3FTrqs0ygOV-fwGkKsWb30WKvQ,7344
 snowflake/ml/_internal/relax_version_strategy.py,sha256=MYEIZrx1HfKNhl9Na3GN50ipX8c0MKIj9nwxjB0IC0Y,484
-snowflake/ml/_internal/telemetry.py,sha256=7obh4QrCvtgVa5QW2_5nMBRv-K-DZrUlDx_Exzn86FI,31788
+snowflake/ml/_internal/telemetry.py,sha256=GCut6xG7SvAV8JRCxuQjvno9t7cLGLByECpMNUY1q30,31867
 snowflake/ml/_internal/type_utils.py,sha256=bNNW0I9rOvwhx-Y274vGd0qWA0fMIPA3SGnaDE09wvc,2198
 snowflake/ml/_internal/exceptions/dataset_error_messages.py,sha256=h7uGJbxBM6se-TW_64LKGGGdBCbwflzbBnmijWKX3Gc,285
 snowflake/ml/_internal/exceptions/dataset_errors.py,sha256=TqESe8cDfWurJdv5X0DOwgzBfHCEqga_F3WQipYbdqg,741
@@ -65,7 +65,7 @@ snowflake/ml/dataset/dataset_metadata.py,sha256=lcNvugBkP8YEkGMQqaV8SlHs5mwUKsUS
 snowflake/ml/dataset/dataset_reader.py,sha256=mZsG9HyWUGgfotrGkLrunyEsOm_659mH-Sn2OyG6A-Q,5036
 snowflake/ml/experiment/__init__.py,sha256=r7qdyPd3jwxzqvksim2ju5j_LrnYQrta0ZI6XpWUqmc,109
 snowflake/ml/experiment/_experiment_info.py,sha256=iaJ65x6nzBYJ5djleSOzBtMpZUJCUDlRpaDw0pu-dcU,2533
-snowflake/ml/experiment/experiment_tracking.py,sha256=TgkBE3NDGPdIcyZTlXcouVw2CtDKtFnPnp-BDnknIdE,14675
+snowflake/ml/experiment/experiment_tracking.py,sha256=fvn3EvkMiE9_Ls-ShiRIuvtfFUc6vVbyKioiwD38A6I,15483
 snowflake/ml/experiment/utils.py,sha256=3bpbkilc5vvFjnti-kcyhhjAd9Ga3LqiKqJDwORiATY,628
 snowflake/ml/experiment/_client/artifact.py,sha256=R2WB4Y_kqv43BWLfXv8SEDINn1Bnevzgb-mH5LyvgGk,3035
 snowflake/ml/experiment/_client/experiment_tracking_sql_client.py,sha256=v1NwaNcBCOiff8mHShelbVl0Rm94BnwqqnG_KTEmI2E,6692
@@ -110,8 +110,8 @@ snowflake/ml/fileset/snowfs.py,sha256=uF5QluYtiJ-HezGIhF55dONi3t0E6N7ByaVAIAlM3n
 snowflake/ml/fileset/stage_fs.py,sha256=V4pysouSKKDPLzuW3u_extxfvjkQa5OlwIRES9Srpzo,20151
 snowflake/ml/jobs/__init__.py,sha256=v-v9-SA1Vy-M98B31-NlqJgpI6uEg9jEEghJLub1RUY,468
 snowflake/ml/jobs/decorators.py,sha256=mQgdWvvCwD7q79cSFKZHKegXGh2j1u8WM64UD3lCKr4,3428
-snowflake/ml/jobs/job.py,sha256=h0hb-37VY9bUIsW0e-UybCtLhylFGsxO-mqIlzPPv40,21811
-snowflake/ml/jobs/manager.py,sha256=cr_z_Q5F2BGL8P9el0Xf0Vs5ad_S947ATn37SzDdjZU,25187
+snowflake/ml/jobs/job.py,sha256=h88Tj0aQDRywDXk5KbAEVp9q7jZfcGT1xagrkR1tNEM,21981
+snowflake/ml/jobs/manager.py,sha256=Ij1ZTKc2JaCUkOVYLR5N9hBgCj0PcT3fdpIow15QvI8,26132
 snowflake/ml/jobs/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 snowflake/ml/jobs/_utils/constants.py,sha256=FRsmwGLYLbESf3c65mLEA34DuGajhBRC8BkHJw9BiMU,3838
 snowflake/ml/jobs/_utils/feature_flags.py,sha256=cH_NyeOncL3_tzbk0WvL1siNyodxBgn1ziPk2yBW6wY,404
@@ -125,7 +125,7 @@ snowflake/ml/jobs/_utils/stage_utils.py,sha256=38-LsokaGx0NzlnP8CMRioClRz-3x6xhP
 snowflake/ml/jobs/_utils/types.py,sha256=AGLu0kPTNRUki26rah_KBwWp0bBJEtUP3zcfxkj5kB0,2326
 snowflake/ml/jobs/_utils/scripts/constants.py,sha256=YyIWZqQPYOTtgCY6SfyJjk2A98I5RQVmrOuLtET5Pqg,173
 snowflake/ml/jobs/_utils/scripts/get_instance_ip.py,sha256=N2wJYMPlwg-hidwgHhDhiBWOE6TskqCfWLMRRNnZBQs,5776
-snowflake/ml/jobs/_utils/scripts/mljob_launcher.py,sha256=wYzNamptbra1M2U85hAPZyA2t88JfaEWB6xRESxCVcQ,15412
+snowflake/ml/jobs/_utils/scripts/mljob_launcher.py,sha256=29_zaKjvcLkwqBqYHObtQIT933UsdJGvb82EGwyvk48,15704
 snowflake/ml/jobs/_utils/scripts/signal_workers.py,sha256=AR1Pylkm4-FGh10WXfrCtcxaV0rI7IQ2ZiO0Li7zZ3U,7433
 snowflake/ml/jobs/_utils/scripts/worker_shutdown_listener.py,sha256=SeJ8v5XDriwHAjIGpcQkwVP-f-lO9QIdVjVD7Fkgafs,7893
 snowflake/ml/lineage/__init__.py,sha256=8p1YGynC-qOxAZ8jZX2z84Reg5bv1NoJMoJmNJCrzI4,65
@@ -139,18 +139,18 @@ snowflake/ml/model/openai_signatures.py,sha256=ZVnHDgaOA6RcvtSP3HIbHVgr3scJH2gG_
 snowflake/ml/model/target_platform.py,sha256=H5d-wtuKQyVlq9x33vPtYZAlR5ka0ytcKRYgwlKl0bQ,390
 snowflake/ml/model/task.py,sha256=Zp5JaLB-YfX5p_HSaw81P3J7UnycQq5EMa87A35VOaQ,286
 snowflake/ml/model/type_hints.py,sha256=G0kp85-ksnYoAUHRdXxLFQBLq3XURuqYOpu_YeKEaNA,9847
-snowflake/ml/model/_client/model/batch_inference_specs.py,sha256=89YLUNtPv3OM1L1Jo_-RD6ot1Dg0KWCLZqd-wcCzivs,710
+snowflake/ml/model/_client/model/batch_inference_specs.py,sha256=kzS7YfrBxZ8QTsWE4vx7jMyOjTopPOmGZSqc1t6cCqc,611
 snowflake/ml/model/_client/model/model_impl.py,sha256=Yabrbir5vPMOnsVmQJ23YN7vqhi756Jcm6pfO8Aq92o,17469
-snowflake/ml/model/_client/model/model_version_impl.py,sha256=qjiRgNItMuN2Gk0zKBcQvNCY28HCbRniDtXQnOrkqnE,55022
+snowflake/ml/model/_client/model/model_version_impl.py,sha256=azf94-xWOIvpa-1F6pvkbe7ErOLmBY2f16nONq80mgw,55002
 snowflake/ml/model/_client/ops/metadata_ops.py,sha256=qpK6PL3OyfuhyOmpvLCpHLy6vCxbZbp1HlEvakFGwv4,4884
-snowflake/ml/model/_client/ops/model_ops.py,sha256=z3T71w9ZNIU5eEA5G59Ous59WzEBs3YBcPO1_zeMI8M,48586
-snowflake/ml/model/_client/ops/service_ops.py,sha256=n53NrVhmCmVBsDrpSer-WKVm6mdIEP8hXEyQji2M-8c,41823
-snowflake/ml/model/_client/service/model_deployment_spec.py,sha256=-JUzVlCLvyifxjHvk5m3ifIWJw6jU5xuTja3og06e0I,19386
-snowflake/ml/model/_client/service/model_deployment_spec_schema.py,sha256=5CMFdgaKN9lZg2m8Np68LiGjxNbQ8bd9MSaZG9mYHGk,2390
+snowflake/ml/model/_client/ops/model_ops.py,sha256=dcyV0PgAUynVWBfhPhzRmKe7S0R484-xng5ajJOtHaI,50048
+snowflake/ml/model/_client/ops/service_ops.py,sha256=pMKBJDza1AAhzBPELcNU6iIjOLhY_6kxrMWYmiB9Kvc,41887
+snowflake/ml/model/_client/service/model_deployment_spec.py,sha256=LxdhU1m4YGc1MNyApQ0IyUetcH4IXOmwYOY9X3wjznY,19499
+snowflake/ml/model/_client/service/model_deployment_spec_schema.py,sha256=QpDso2bjx2eCRKIG4-ppc3z46B7hgYMZehOTRoR9IJs,2425
 snowflake/ml/model/_client/sql/_base.py,sha256=Qrm8M92g3MHb-QnSLUlbd8iVKCRxLhG_zr5M2qmXwJ8,1473
 snowflake/ml/model/_client/sql/model.py,sha256=nstZ8zR7MkXVEfhqLt7PWMik6dZr06nzq7VsF5NVNow,5840
 snowflake/ml/model/_client/sql/model_version.py,sha256=QwzFlDH5laTqK2qF7SJQSbt28DgspWj3R11l-yD1Da0,23496
-snowflake/ml/model/_client/sql/service.py,sha256=j-JdXeWy4oATTH_Yz6OEqhW81t1vM70b5JpAtymar5g,10360
+snowflake/ml/model/_client/sql/service.py,sha256=LheEjXaXOKBPl1M9xwcV6EakMZX2pcyjljWPj_8NE5k,10614
 snowflake/ml/model/_client/sql/stage.py,sha256=2gxYNtmEXricwxeACVUr63OUDCy_iQvCi-kRT4qQtBA,887
 snowflake/ml/model/_client/sql/tag.py,sha256=9sI0VoldKmsfToWSjMQddozPPGCxYUI6n0gPBiqd6x8,4333
 snowflake/ml/model/_model_composer/model_composer.py,sha256=Xqi-sxmkBoZl383LQAXhMQkq9KsAS0A3ythC5bN3EOU,8292
@@ -170,7 +170,7 @@ snowflake/ml/model/_packager/model_handlers/_base.py,sha256=OZhGv7nyej3PqaoBz021
 snowflake/ml/model/_packager/model_handlers/_utils.py,sha256=8y-LfiBfoj2txQD4Yh_GM0eEEOrm1S0R1149J5z31O0,12572
 snowflake/ml/model/_packager/model_handlers/catboost.py,sha256=dbI2QizGZS04l6ehgXb3oy5YSXrlwRHz8YENVefEbms,10676
 snowflake/ml/model/_packager/model_handlers/custom.py,sha256=fM_13N5ejT0Ta0-M_Uzsqr_TwGVk_3jSjsLJiMEfyR4,8514
-snowflake/ml/model/_packager/model_handlers/huggingface_pipeline.py,sha256=zTp3-oWLfH3auq7uUhLZxB_WHoIp2GTyqY1rSog6t-Q,32673
+snowflake/ml/model/_packager/model_handlers/huggingface_pipeline.py,sha256=iIYDJljLRW22XNQn8fLCSHTZfMW5x8m-7hyO40mMSPA,37045
 snowflake/ml/model/_packager/model_handlers/keras.py,sha256=JKBCiJEjc41zaoEhsen7rnlyPo2RBuEqG9Vq6JR_Cq0,8696
 snowflake/ml/model/_packager/model_handlers/lightgbm.py,sha256=DAFMiqpXEUmKqeq5rgn5j6rtuwScNnuiMUBwS4OyC7Q,11074
 snowflake/ml/model/_packager/model_handlers/mlflow.py,sha256=xSpoXO0UOfBUpzx2W1O8P2WF0Xi1vrZ_J-DdgzQG0o8,9177
@@ -180,7 +180,7 @@ snowflake/ml/model/_packager/model_handlers/sklearn.py,sha256=_D1YE7TmEJDsuOUt-m
 snowflake/ml/model/_packager/model_handlers/snowmlmodel.py,sha256=uvz-hosuNbtcQFprnS8GzjnM8fWULBDMRbXq8immW9Q,18352
 snowflake/ml/model/_packager/model_handlers/tensorflow.py,sha256=2J2XWYOC70axWaoNJa9aQLMyjLAKIskrT31t_LgqcIk,11350
 snowflake/ml/model/_packager/model_handlers/torchscript.py,sha256=3IbMoVGlBR-RsQAdYZxjAz1ST-jDMQIyhhdwM5e3NeE,9531
-snowflake/ml/model/_packager/model_handlers/xgboost.py,sha256=Nj80oPwvg1Ng9Nfdtf1nRxyBdStoyz9CVe4jPqksxuk,12190
+snowflake/ml/model/_packager/model_handlers/xgboost.py,sha256=t9xSB4Phv56Ev5CLav_k8UM8ZPZ5zJBLpI46-edXqpY,12511
 snowflake/ml/model/_packager/model_handlers_migrator/base_migrator.py,sha256=BZo14UrywGZM1kTqzN4VFQcYjl7dggDp1U90ZBCMuOg,1409
 snowflake/ml/model/_packager/model_handlers_migrator/pytorch_migrator_2023_12_01.py,sha256=GVpfYllXa3Voxa54PGNsZ3Hea1kOJe3T_AoA9nrs60A,764
 snowflake/ml/model/_packager/model_handlers_migrator/tensorflow_migrator_2023_12_01.py,sha256=dXIisQteU55QMw5OvC_1E_sGqFgE88WRhGCWFqUyauM,2239
@@ -188,7 +188,7 @@ snowflake/ml/model/_packager/model_handlers_migrator/tensorflow_migrator_2025_01
 snowflake/ml/model/_packager/model_handlers_migrator/torchscript_migrator_2023_12_01.py,sha256=MDOAGV6kML9sJh_hnYjnrPH4GtECP5DDCjaRT7NmYpU,768
 snowflake/ml/model/_packager/model_meta/model_blob_meta.py,sha256=CzY_MhiSshKi9dWzXc4lrC9PysU0FCdHG2oRlz1vCb8,1943
 snowflake/ml/model/_packager/model_meta/model_meta.py,sha256=CctjNVwdC7ghVIPqbhb62t43SOFsmk2j2FdoZMZ8KXs,20063
-snowflake/ml/model/_packager/model_meta/model_meta_schema.py,sha256=e4TUbWl998xQOZUzEWvb9CrUyHwGHBGb0TNbtezAeQ0,3707
+snowflake/ml/model/_packager/model_meta/model_meta_schema.py,sha256=UGPTjzxLBUKn8XSAuMvGnJGdjEvlUZjjfvI9rDRAQl4,3759
 snowflake/ml/model/_packager/model_meta_migrator/base_migrator.py,sha256=8zTgq3n6TBXv7Vcwmf7b9wjK3m-9HHMsY0Qy1Rs-sZ4,1305
 snowflake/ml/model/_packager/model_meta_migrator/migrator_plans.py,sha256=5butM-lyaDRhCAO2BaCOIQufpAxAfSAinsNuGqbbjMU,1029
 snowflake/ml/model/_packager/model_meta_migrator/migrator_v1.py,sha256=cyZVvBGM3nF1IVqDKfYstLCchNO-ZhSkPvLM4aU7J5c,2066
@@ -205,7 +205,7 @@ snowflake/ml/model/_signatures/pytorch_handler.py,sha256=Xy-ITCCX_EgHcyIIqeYSDUI
 snowflake/ml/model/_signatures/snowpark_handler.py,sha256=aNGPa2v0kTMuSZ80NBdHeAWYva0Nc1vo17ZjQwIjf2E,7621
 snowflake/ml/model/_signatures/tensorflow_handler.py,sha256=_yrvMg-w_jJoYuyrGXKPX4Dv7Vt8z1e6xIKiWGuZcc4,5660
 snowflake/ml/model/_signatures/utils.py,sha256=RY4ZNWKCQhEJ80N5fb4TdjZLQ7ktYTzUqjpwbtZgtX4,17285
-snowflake/ml/model/models/huggingface_pipeline.py,sha256=VCLhlW_CBJAgU-uKSY5a6BrFjc7ANFWSzNaarR6IBq0,19658
+snowflake/ml/model/models/huggingface_pipeline.py,sha256=jSUihxi6TygN-nsf0wy70fg349buQghlJZVSItJ-TOA,20783
 snowflake/ml/modeling/_internal/estimator_utils.py,sha256=dfPPWO-RHf5C3Tya3VQ4KEqoa32pm-WKwRrjzjDInLk,13956
 snowflake/ml/modeling/_internal/model_specifications.py,sha256=3wFMcKPCSoiEzU7Mx6RVem89BRlBBENpX__-Rd7GwdU,4851
 snowflake/ml/modeling/_internal/model_trainer.py,sha256=5Ck1lbdyzcd-TpzAxEyovIN9fjaaVIqugyMHXt0wzH0,971
@@ -440,8 +440,8 @@ snowflake/ml/utils/connection_params.py,sha256=JuadbzKlgDZLZ5vJ9cnyAiSitvZT9jGSf
 snowflake/ml/utils/html_utils.py,sha256=L4pzpvFd20SIk4rie2kTAtcQjbxBHfjKmxonMAT2OoA,7665
 snowflake/ml/utils/sparse.py,sha256=zLBNh-ynhGpKH5TFtopk0YLkHGvv0yq1q-sV59YQKgg,3819
 snowflake/ml/utils/sql_client.py,sha256=pSe2od6Pkh-8NwG3D-xqN76_uNf-ohOtVbT55HeQg1Y,668
-snowflake_ml_python-1.12.0.dist-info/licenses/LICENSE.txt,sha256=PdEp56Av5m3_kl21iFkVTX_EbHJKFGEdmYeIO1pL_Yk,11365
-snowflake_ml_python-1.12.0.dist-info/METADATA,sha256=iZRYiUmETq4j6Wo5RmDWu6MWH1pMMA2jncP2Nz_c3p4,93792
-snowflake_ml_python-1.12.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-snowflake_ml_python-1.12.0.dist-info/top_level.txt,sha256=TY0gFSHKDdZy3THb0FGomyikWQasEGldIR1O0HGOHVw,10
-snowflake_ml_python-1.12.0.dist-info/RECORD,,
+snowflake_ml_python-1.13.0.dist-info/licenses/LICENSE.txt,sha256=PdEp56Av5m3_kl21iFkVTX_EbHJKFGEdmYeIO1pL_Yk,11365
+snowflake_ml_python-1.13.0.dist-info/METADATA,sha256=2qYBLI9oOcJD_9nibGr83ZmW8fBFPqqLrbvjaPfrhDM,94503
+snowflake_ml_python-1.13.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+snowflake_ml_python-1.13.0.dist-info/top_level.txt,sha256=TY0gFSHKDdZy3THb0FGomyikWQasEGldIR1O0HGOHVw,10
+snowflake_ml_python-1.13.0.dist-info/RECORD,,