snowflake-ml-python 1.13.0__py3-none-any.whl → 1.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. snowflake/ml/_internal/platform_capabilities.py +9 -7
  2. snowflake/ml/_internal/utils/connection_params.py +5 -3
  3. snowflake/ml/_internal/utils/jwt_generator.py +3 -2
  4. snowflake/ml/_internal/utils/temp_file_utils.py +1 -2
  5. snowflake/ml/experiment/_client/experiment_tracking_sql_client.py +16 -3
  6. snowflake/ml/experiment/_entities/__init__.py +2 -1
  7. snowflake/ml/experiment/_entities/run.py +0 -15
  8. snowflake/ml/experiment/_entities/run_metadata.py +3 -51
  9. snowflake/ml/experiment/experiment_tracking.py +8 -8
  10. snowflake/ml/jobs/_utils/constants.py +1 -1
  11. snowflake/ml/jobs/_utils/scripts/mljob_launcher.py +9 -7
  12. snowflake/ml/jobs/job.py +12 -4
  13. snowflake/ml/jobs/manager.py +34 -7
  14. snowflake/ml/lineage/lineage_node.py +0 -1
  15. snowflake/ml/model/__init__.py +2 -6
  16. snowflake/ml/model/_client/model/batch_inference_specs.py +0 -4
  17. snowflake/ml/model/_client/model/inference_engine_utils.py +55 -0
  18. snowflake/ml/model/_client/model/model_version_impl.py +25 -77
  19. snowflake/ml/model/_client/ops/model_ops.py +9 -2
  20. snowflake/ml/model/_client/ops/service_ops.py +82 -36
  21. snowflake/ml/model/_client/sql/service.py +29 -5
  22. snowflake/ml/model/_packager/model_handlers/_utils.py +4 -2
  23. snowflake/ml/model/_packager/model_handlers/huggingface_pipeline.py +7 -5
  24. snowflake/ml/model/_packager/model_packager.py +4 -3
  25. snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py +0 -1
  26. snowflake/ml/model/_signatures/utils.py +0 -21
  27. snowflake/ml/model/models/huggingface_pipeline.py +56 -21
  28. snowflake/ml/monitoring/_client/model_monitor_sql_client.py +47 -3
  29. snowflake/ml/monitoring/_manager/model_monitor_manager.py +3 -0
  30. snowflake/ml/monitoring/entities/model_monitor_config.py +3 -0
  31. snowflake/ml/monitoring/model_monitor.py +30 -0
  32. snowflake/ml/registry/_manager/model_manager.py +1 -1
  33. snowflake/ml/registry/_manager/model_parameter_reconciler.py +2 -2
  34. snowflake/ml/utils/connection_params.py +5 -3
  35. snowflake/ml/version.py +1 -1
  36. {snowflake_ml_python-1.13.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/METADATA +51 -34
  37. {snowflake_ml_python-1.13.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/RECORD +40 -39
  38. {snowflake_ml_python-1.13.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/WHEEL +0 -0
  39. {snowflake_ml_python-1.13.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/licenses/LICENSE.txt +0 -0
  40. {snowflake_ml_python-1.13.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/top_level.txt +0 -0
@@ -8,6 +8,7 @@ from snowflake import snowpark
8
8
  from snowflake.ml._internal import telemetry
9
9
  from snowflake.ml._internal.human_readable_id import hrid_generator
10
10
  from snowflake.ml._internal.utils import sql_identifier
11
+ from snowflake.ml.model._client.model import inference_engine_utils
11
12
  from snowflake.ml.model._client.ops import service_ops
12
13
  from snowflake.snowpark import async_job, session
13
14
 
@@ -77,6 +78,15 @@ class HuggingFacePipelineModel:
77
78
  framework = kwargs.get("framework", None)
78
79
  feature_extractor = kwargs.get("feature_extractor", None)
79
80
 
81
+ _can_download_snapshot = False
82
+ if download_snapshot:
83
+ try:
84
+ import huggingface_hub as hf_hub
85
+
86
+ _can_download_snapshot = True
87
+ except ImportError:
88
+ pass
89
+
80
90
  # ==== Start pipeline logic from transformers ====
81
91
  if model_kwargs is None:
82
92
  model_kwargs = {}
@@ -141,22 +151,23 @@ class HuggingFacePipelineModel:
141
151
  # Instantiate config if needed
142
152
  config_obj = None
143
153
 
144
- if isinstance(config, str):
145
- config_obj = transformers.AutoConfig.from_pretrained(
146
- config, _from_pipeline=task, **hub_kwargs, **model_kwargs
147
- )
148
- hub_kwargs["_commit_hash"] = config_obj._commit_hash
149
- elif config is None and isinstance(model, str):
150
- config_obj = transformers.AutoConfig.from_pretrained(
151
- model, _from_pipeline=task, **hub_kwargs, **model_kwargs
152
- )
153
- hub_kwargs["_commit_hash"] = config_obj._commit_hash
154
- # We only support string as config argument.
155
- elif config is not None and not isinstance(config, str):
156
- raise RuntimeError(
157
- "Impossible to use non-string config as input for HuggingFacePipelineModel. Use transformers.Pipeline"
158
- " object if required."
159
- )
154
+ if not _can_download_snapshot:
155
+ if isinstance(config, str):
156
+ config_obj = transformers.AutoConfig.from_pretrained(
157
+ config, _from_pipeline=task, **hub_kwargs, **model_kwargs
158
+ )
159
+ hub_kwargs["_commit_hash"] = config_obj._commit_hash
160
+ elif config is None and isinstance(model, str):
161
+ config_obj = transformers.AutoConfig.from_pretrained(
162
+ model, _from_pipeline=task, **hub_kwargs, **model_kwargs
163
+ )
164
+ hub_kwargs["_commit_hash"] = config_obj._commit_hash
165
+ # We only support string as config argument.
166
+ elif config is not None and not isinstance(config, str):
167
+ raise RuntimeError(
168
+ "Impossible to use non-string config as input for HuggingFacePipelineModel. "
169
+ "Use transformers.Pipeline object if required."
170
+ )
160
171
 
161
172
  # ==== Start pipeline logic (Task) from transformers ====
162
173
 
@@ -208,7 +219,7 @@ class HuggingFacePipelineModel:
208
219
  "Using a pipeline without specifying a model name and revision in production is not recommended.",
209
220
  stacklevel=2,
210
221
  )
211
- if config is None and isinstance(model, str):
222
+ if not _can_download_snapshot and config is None and isinstance(model, str):
212
223
  config_obj = transformers.AutoConfig.from_pretrained(
213
224
  model, _from_pipeline=task, **hub_kwargs, **model_kwargs
214
225
  )
@@ -228,11 +239,10 @@ class HuggingFacePipelineModel:
228
239
  )
229
240
 
230
241
  repo_snapshot_dir: Optional[str] = None
231
- if download_snapshot:
242
+ if _can_download_snapshot:
232
243
  try:
233
- from huggingface_hub import snapshot_download
234
244
 
235
- repo_snapshot_dir = snapshot_download(
245
+ repo_snapshot_dir = hf_hub.snapshot_download(
236
246
  repo_id=model,
237
247
  revision=revision,
238
248
  token=token,
@@ -268,7 +278,7 @@ class HuggingFacePipelineModel:
268
278
  ],
269
279
  )
270
280
  @snowpark._internal.utils.private_preview(version="1.9.1")
271
- def create_service(
281
+ def log_model_and_create_service(
272
282
  self,
273
283
  *,
274
284
  session: session.Session,
@@ -293,6 +303,7 @@ class HuggingFacePipelineModel:
293
303
  force_rebuild: bool = False,
294
304
  build_external_access_integrations: Optional[list[str]] = None,
295
305
  block: bool = True,
306
+ experimental_options: Optional[dict[str, Any]] = None,
296
307
  ) -> Union[str, async_job.AsyncJob]:
297
308
  """Logs a Hugging Face model and creates a service in Snowflake.
298
309
 
@@ -319,6 +330,10 @@ class HuggingFacePipelineModel:
319
330
  force_rebuild: Whether to force rebuild the image. Defaults to False.
320
331
  build_external_access_integrations: External access integrations for building the image. Defaults to None.
321
332
  block: Whether to block the operation. Defaults to True.
333
+ experimental_options: Experimental options for the service creation with custom inference engine.
334
+ Currently, only `inference_engine` and `inference_engine_args_override` are supported.
335
+ `inference_engine` is the name of the inference engine to use.
336
+ `inference_engine_args_override` is a list of string arguments to pass to the inference engine.
322
337
 
323
338
  Raises:
324
339
  ValueError: if database and schema name is not provided and session doesn't have a
@@ -360,6 +375,24 @@ class HuggingFacePipelineModel:
360
375
  )
361
376
  logger.info(f"A service job is going to register the hf model as: {model_name}.{version_name}")
362
377
 
378
+ # Check if model is HuggingFace text-generation before doing inference engine checks
379
+ inference_engine_args = None
380
+ if experimental_options:
381
+ if self.task != "text-generation":
382
+ raise ValueError(
383
+ "Currently, InferenceEngine using experimental_options is only supported for "
384
+ "HuggingFace text-generation models."
385
+ )
386
+
387
+ inference_engine_args = inference_engine_utils._get_inference_engine_args(experimental_options)
388
+
389
+ # Enrich inference engine args if inference engine is specified
390
+ if inference_engine_args is not None:
391
+ inference_engine_args = inference_engine_utils._enrich_inference_engine_args(
392
+ inference_engine_args,
393
+ gpu_requests,
394
+ )
395
+
363
396
  from snowflake.ml.model import event_handler
364
397
  from snowflake.snowpark import exceptions
365
398
 
@@ -412,6 +445,8 @@ class HuggingFacePipelineModel:
412
445
  # TODO: remove warehouse in the next release
413
446
  warehouse=session.get_current_warehouse(),
414
447
  ),
448
+ # inference engine
449
+ inference_engine_args=inference_engine_args,
415
450
  )
416
451
  status.update(label="HuggingFace model service created successfully", state="complete", expanded=False)
417
452
  return result
@@ -30,8 +30,8 @@ class MonitorOperation(Enum):
30
30
  _OPERATION_SUPPORTED_PROPS: dict[MonitorOperation, frozenset[str]] = {
31
31
  MonitorOperation.SUSPEND: frozenset(),
32
32
  MonitorOperation.RESUME: frozenset(),
33
- MonitorOperation.ADD: frozenset({"SEGMENT_COLUMN"}),
34
- MonitorOperation.DROP: frozenset({"SEGMENT_COLUMN"}),
33
+ MonitorOperation.ADD: frozenset({"SEGMENT_COLUMN", "CUSTOM_METRIC_COLUMN"}),
34
+ MonitorOperation.DROP: frozenset({"SEGMENT_COLUMN", "CUSTOM_METRIC_COLUMN"}),
35
35
  }
36
36
 
37
37
 
@@ -91,6 +91,7 @@ class ModelMonitorSQLClient:
91
91
  baseline_schema: Optional[sql_identifier.SqlIdentifier] = None,
92
92
  baseline: Optional[sql_identifier.SqlIdentifier] = None,
93
93
  segment_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
94
+ custom_metric_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
94
95
  statement_params: Optional[dict[str, Any]] = None,
95
96
  ) -> None:
96
97
  baseline_sql = ""
@@ -101,6 +102,10 @@ class ModelMonitorSQLClient:
101
102
  if segment_columns:
102
103
  segment_columns_sql = f"SEGMENT_COLUMNS={_build_sql_list_from_columns(segment_columns)}"
103
104
 
105
+ custom_metric_columns_sql = ""
106
+ if custom_metric_columns:
107
+ custom_metric_columns_sql = f"CUSTOM_METRIC_COLUMNS={_build_sql_list_from_columns(custom_metric_columns)}"
108
+
104
109
  query_result_checker.SqlResultValidator(
105
110
  self._sql_client._session,
106
111
  f"""
@@ -120,6 +125,7 @@ class ModelMonitorSQLClient:
120
125
  REFRESH_INTERVAL='{refresh_interval}'
121
126
  AGGREGATION_WINDOW='{aggregation_window}'
122
127
  {segment_columns_sql}
128
+ {custom_metric_columns_sql}
123
129
  {baseline_sql}""",
124
130
  statement_params=statement_params,
125
131
  ).has_column("status").has_dimensions(1, 1).validate()
@@ -210,6 +216,7 @@ class ModelMonitorSQLClient:
210
216
  actual_class_columns: list[sql_identifier.SqlIdentifier],
211
217
  id_columns: list[sql_identifier.SqlIdentifier],
212
218
  segment_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
219
+ custom_metric_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
213
220
  ) -> None:
214
221
  """Ensures all columns exist in the source table.
215
222
 
@@ -222,12 +229,14 @@ class ModelMonitorSQLClient:
222
229
  actual_class_columns: List of actual class column names.
223
230
  id_columns: List of id column names.
224
231
  segment_columns: List of segment column names.
232
+ custom_metric_columns: List of custom metric column names.
225
233
 
226
234
  Raises:
227
235
  ValueError: If any of the columns do not exist in the source.
228
236
  """
229
237
 
230
238
  segment_columns = [] if segment_columns is None else segment_columns
239
+ custom_metric_columns = [] if custom_metric_columns is None else custom_metric_columns
231
240
 
232
241
  if timestamp_column not in source_column_schema:
233
242
  raise ValueError(f"Timestamp column {timestamp_column} does not exist in source.")
@@ -248,6 +257,9 @@ class ModelMonitorSQLClient:
248
257
  if not all([column_name in source_column_schema for column_name in segment_columns]):
249
258
  raise ValueError(f"Segment column(s): {segment_columns} do not exist in source.")
250
259
 
260
+ if not all([column_name in source_column_schema for column_name in custom_metric_columns]):
261
+ raise ValueError(f"Custom Metric column(s): {custom_metric_columns} do not exist in source.")
262
+
251
263
  def validate_source(
252
264
  self,
253
265
  *,
@@ -261,6 +273,7 @@ class ModelMonitorSQLClient:
261
273
  actual_class_columns: list[sql_identifier.SqlIdentifier],
262
274
  id_columns: list[sql_identifier.SqlIdentifier],
263
275
  segment_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
276
+ custom_metric_columns: Optional[list[sql_identifier.SqlIdentifier]] = None,
264
277
  ) -> None:
265
278
 
266
279
  source_database = source_database or self._database_name
@@ -281,6 +294,7 @@ class ModelMonitorSQLClient:
281
294
  actual_class_columns=actual_class_columns,
282
295
  id_columns=id_columns,
283
296
  segment_columns=segment_columns,
297
+ custom_metric_columns=custom_metric_columns,
284
298
  )
285
299
 
286
300
  def _alter_monitor(
@@ -299,7 +313,7 @@ class ModelMonitorSQLClient:
299
313
 
300
314
  if target_property not in supported_target_properties:
301
315
  raise ValueError(
302
- f"Only {', '.join(supported_target_properties)} supported as target property "
316
+ f"Only {', '.join(sorted(supported_target_properties))} supported as target property "
303
317
  f"for {operation.name} operation"
304
318
  )
305
319
 
@@ -366,3 +380,33 @@ class ModelMonitorSQLClient:
366
380
  target_value=segment_column,
367
381
  statement_params=statement_params,
368
382
  )
383
+
384
+ def add_custom_metric_column(
385
+ self,
386
+ monitor_name: sql_identifier.SqlIdentifier,
387
+ custom_metric_column: sql_identifier.SqlIdentifier,
388
+ statement_params: Optional[dict[str, Any]] = None,
389
+ ) -> None:
390
+ """Add a custom metric column to the Model Monitor"""
391
+ self._alter_monitor(
392
+ operation=MonitorOperation.ADD,
393
+ monitor_name=monitor_name,
394
+ target_property="CUSTOM_METRIC_COLUMN",
395
+ target_value=custom_metric_column,
396
+ statement_params=statement_params,
397
+ )
398
+
399
+ def drop_custom_metric_column(
400
+ self,
401
+ monitor_name: sql_identifier.SqlIdentifier,
402
+ custom_metric_column: sql_identifier.SqlIdentifier,
403
+ statement_params: Optional[dict[str, Any]] = None,
404
+ ) -> None:
405
+ """Drop a custom metric column from the Model Monitor"""
406
+ self._alter_monitor(
407
+ operation=MonitorOperation.DROP,
408
+ monitor_name=monitor_name,
409
+ target_property="CUSTOM_METRIC_COLUMN",
410
+ target_value=custom_metric_column,
411
+ statement_params=statement_params,
412
+ )
@@ -109,6 +109,7 @@ class ModelMonitorManager:
109
109
  actual_score_columns = self._build_column_list_from_input(source_config.actual_score_columns)
110
110
  actual_class_columns = self._build_column_list_from_input(source_config.actual_class_columns)
111
111
  segment_columns = self._build_column_list_from_input(source_config.segment_columns)
112
+ custom_metric_columns = self._build_column_list_from_input(source_config.custom_metric_columns)
112
113
 
113
114
  id_columns = [sql_identifier.SqlIdentifier(column_name) for column_name in source_config.id_columns]
114
115
  ts_column = sql_identifier.SqlIdentifier(source_config.timestamp_column)
@@ -125,6 +126,7 @@ class ModelMonitorManager:
125
126
  actual_class_columns=actual_class_columns,
126
127
  id_columns=id_columns,
127
128
  segment_columns=segment_columns,
129
+ custom_metric_columns=custom_metric_columns,
128
130
  )
129
131
 
130
132
  self._model_monitor_client.create_model_monitor(
@@ -147,6 +149,7 @@ class ModelMonitorManager:
147
149
  actual_score_columns=actual_score_columns,
148
150
  actual_class_columns=actual_class_columns,
149
151
  segment_columns=segment_columns,
152
+ custom_metric_columns=custom_metric_columns,
150
153
  refresh_interval=model_monitor_config.refresh_interval,
151
154
  aggregation_window=model_monitor_config.aggregation_window,
152
155
  baseline_database=baseline_database_name_id,
@@ -36,6 +36,9 @@ class ModelMonitorSourceConfig:
36
36
  segment_columns: Optional[list[str]] = None
37
37
  """List of columns in the source containing segment information for grouped monitoring."""
38
38
 
39
+ custom_metric_columns: Optional[list[str]] = None
40
+ """List of columns in the source containing custom metrics."""
41
+
39
42
 
40
43
  @dataclass
41
44
  class ModelMonitorConfig:
@@ -72,3 +72,33 @@ class ModelMonitor:
72
72
  )
73
73
  segment_column_id = sql_identifier.SqlIdentifier(segment_column)
74
74
  self._model_monitor_client.drop_segment_column(self.name, segment_column_id, statement_params=statement_params)
75
+
76
+ @telemetry.send_api_usage_telemetry(
77
+ project=telemetry.TelemetryProject.MLOPS.value,
78
+ subproject=telemetry.TelemetrySubProject.MONITORING.value,
79
+ )
80
+ def add_custom_metric_column(self, custom_metric_column: str) -> None:
81
+ """Add a custom metric column to the Model Monitor"""
82
+ statement_params = telemetry.get_statement_params(
83
+ telemetry.TelemetryProject.MLOPS.value,
84
+ telemetry.TelemetrySubProject.MONITORING.value,
85
+ )
86
+ custom_metric_column_identifier = sql_identifier.SqlIdentifier(custom_metric_column)
87
+ self._model_monitor_client.add_custom_metric_column(
88
+ self.name, custom_metric_column_identifier, statement_params=statement_params
89
+ )
90
+
91
+ @telemetry.send_api_usage_telemetry(
92
+ project=telemetry.TelemetryProject.MLOPS.value,
93
+ subproject=telemetry.TelemetrySubProject.MONITORING.value,
94
+ )
95
+ def drop_custom_metric_column(self, custom_metric_column: str) -> None:
96
+ """Drop a custom metric column from the Model Monitor"""
97
+ statement_params = telemetry.get_statement_params(
98
+ telemetry.TelemetryProject.MLOPS.value,
99
+ telemetry.TelemetrySubProject.MONITORING.value,
100
+ )
101
+ custom_metric_column_identifier = sql_identifier.SqlIdentifier(custom_metric_column)
102
+ self._model_monitor_client.drop_custom_metric_column(
103
+ self.name, custom_metric_column_identifier, statement_params=statement_params
104
+ )
@@ -1,8 +1,8 @@
1
+ import logging
1
2
  from types import ModuleType
2
3
  from typing import TYPE_CHECKING, Any, Optional, Union
3
4
 
4
5
  import pandas as pd
5
- from absl.logging import logging
6
6
 
7
7
  from snowflake.ml._internal import platform_capabilities, telemetry
8
8
  from snowflake.ml._internal.exceptions import error_codes, exceptions
@@ -1,8 +1,8 @@
1
+ import logging
1
2
  import warnings
2
3
  from dataclasses import dataclass
3
4
  from typing import Any, Optional
4
5
 
5
- from absl.logging import logging
6
6
  from packaging import requirements
7
7
 
8
8
  from snowflake.ml import version as snowml_version
@@ -221,7 +221,7 @@ class ModelParameterReconciler:
221
221
  ).get(env_utils.SNOWPARK_ML_PKG_NAME, [])
222
222
 
223
223
  if len(snowml_matched_versions) < 1 and not options.get("embed_local_ml_library", False):
224
- logging.info(
224
+ logger.info(
225
225
  f"Local snowflake-ml-python library has version {snowml_version.VERSION},"
226
226
  " which is not available in the Snowflake server, embedding local ML library automatically."
227
227
  )
@@ -1,13 +1,15 @@
1
1
  import configparser
2
+ import logging
2
3
  import os
3
4
  from typing import Optional, Union
4
5
 
5
- from absl import logging
6
6
  from cryptography.hazmat import backends
7
7
  from cryptography.hazmat.primitives import serialization
8
8
 
9
9
  from snowflake import snowpark
10
10
 
11
+ logger = logging.getLogger(__name__)
12
+
11
13
  _DEFAULT_CONNECTION_FILE = "~/.snowsql/config"
12
14
 
13
15
 
@@ -108,7 +110,7 @@ def _load_from_snowsql_config_file(connection_name: str, login_file: str = "") -
108
110
  """Loads the dictionary from snowsql config file."""
109
111
  snowsql_config_file = login_file if login_file else os.path.expanduser(_DEFAULT_CONNECTION_FILE)
110
112
  if not os.path.exists(snowsql_config_file):
111
- logging.error(f"Connection name given but snowsql config file is not found at: {snowsql_config_file}")
113
+ logger.error(f"Connection name given but snowsql config file is not found at: {snowsql_config_file}")
112
114
  raise Exception("Snowflake SnowSQL config not found.")
113
115
 
114
116
  config = configparser.ConfigParser(inline_comment_prefixes="#")
@@ -124,7 +126,7 @@ def _load_from_snowsql_config_file(connection_name: str, login_file: str = "") -
124
126
  # See https://docs.snowflake.com/en/user-guide/snowsql-start.html#configuring-default-connection-settings
125
127
  connection_name = "connections"
126
128
 
127
- logging.info(f"Reading {snowsql_config_file} for connection parameters defined as {connection_name}")
129
+ logger.info(f"Reading {snowsql_config_file} for connection parameters defined as {connection_name}")
128
130
  config.read(snowsql_config_file)
129
131
  conn_params = dict(config[connection_name])
130
132
  # Remap names to appropriate args in Python Connector API
snowflake/ml/version.py CHANGED
@@ -1,2 +1,2 @@
1
1
  # This is parsed by regex in conda recipe meta file. Make sure not to break it.
2
- VERSION = "1.13.0"
2
+ VERSION = "1.15.0"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: snowflake-ml-python
3
- Version: 1.13.0
3
+ Version: 1.15.0
4
4
  Summary: The machine learning client library that is used for interacting with Snowflake to build machine learning solutions.
5
5
  Author-email: "Snowflake, Inc" <support@snowflake.com>
6
6
  License:
@@ -233,7 +233,6 @@ Classifier: Topic :: Scientific/Engineering :: Information Analysis
233
233
  Requires-Python: <3.13,>=3.9
234
234
  Description-Content-Type: text/markdown
235
235
  License-File: LICENSE.txt
236
- Requires-Dist: absl-py<2,>=0.15
237
236
  Requires-Dist: anyio<5,>=3.5.0
238
237
  Requires-Dist: cachetools<6,>=3.1.1
239
238
  Requires-Dist: cloudpickle>=2.0.0
@@ -302,21 +301,21 @@ Requires-Dist: torch<3,>=2.0.1; extra == "transformers"
302
301
  Requires-Dist: transformers!=4.51.3,<5,>=4.39.3; extra == "transformers"
303
302
  Dynamic: license-file
304
303
 
305
- # Snowpark ML
304
+ # Snowflake ML Python
306
305
 
307
- Snowpark ML is a set of tools including SDKs and underlying infrastructure to build and deploy machine learning models.
308
- With Snowpark ML, you can pre-process data, train, manage and deploy ML models all within Snowflake, using a single SDK,
306
+ Snowflake ML Python is a set of tools including SDKs and underlying infrastructure to build and deploy machine learning models.
307
+ With Snowflake ML Python, you can pre-process data, train, manage and deploy ML models all within Snowflake,
309
308
  and benefit from Snowflake’s proven performance, scalability, stability and governance at every stage of the Machine
310
309
  Learning workflow.
311
310
 
312
- ## Key Components of Snowpark ML
311
+ ## Key Components of Snowflake ML Python
313
312
 
314
- The Snowpark ML Python SDK provides a number of APIs to support each stage of an end-to-end Machine Learning development
315
- and deployment process, and includes two key components.
313
+ The Snowflake ML Python SDK provides a number of APIs to support each stage of an end-to-end Machine Learning development
314
+ and deployment process.
316
315
 
317
- ### Snowpark ML Development
316
+ ### Snowflake ML Model Development
318
317
 
319
- [Snowpark ML Development](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index#ml-modeling)
318
+ [Snowflake ML Model Development](https://docs.snowflake.com/developer-guide/snowflake-ml/overview#ml-modeling)
320
319
  provides a collection of python APIs enabling efficient ML model development directly in Snowflake:
321
320
 
322
321
  1. Modeling API (`snowflake.ml.modeling`) for data preprocessing, feature engineering and model training in Snowflake.
@@ -327,19 +326,16 @@ model development classes based on sklearn, xgboost, and lightgbm.
327
326
  1. Framework Connectors: Optimized, secure and performant data provisioning for Pytorch and Tensorflow frameworks in
328
327
  their native data loader formats.
329
328
 
330
- 1. FileSet API: FileSet provides a Python fsspec-compliant API for materializing data into a Snowflake internal stage
331
- from a query or Snowpark Dataframe along with a number of convenience APIs.
329
+ ### Snowflake ML Ops
332
330
 
333
- ### Snowflake MLOps
331
+ Snowflake ML Python contains a suite of MLOps tools. It complements
332
 + the Snowflake Modeling API, and provides end-to-end development-to-deployment within Snowflake.
333
+ The Snowflake ML Ops suite consists of:
334
334
 
335
- Snowflake MLOps contains suit of tools and objects to make ML development cycle. It complements
336
- the Snowpark ML Development API, and provides end to end development to deployment within Snowflake.
337
- Currently, the API consists of:
338
-
339
- 1. [Registry](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index#snowflake-model-registry): A python API
335
+ 1. [Registry](https://docs.snowflake.com/developer-guide/snowflake-ml/overview#snowflake-model-registry): A python API
340
336
  allows secure deployment and management of models in Snowflake, supporting models trained both inside and outside of
341
337
  Snowflake.
342
- 2. [Feature Store](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index#snowflake-feature-store): A fully
338
+ 2. [Feature Store](https://docs.snowflake.com/developer-guide/snowflake-ml/overview#snowflake-feature-store): A fully
343
339
  integrated solution for defining, managing, storing and discovering ML features derived from your data. The
344
340
  Snowflake Feature Store supports automated, incremental refresh from batch and streaming data sources, so that
345
341
  feature pipelines need be defined only once to be continuously updated with new data.
@@ -348,12 +344,19 @@ Currently, the API consists of:
348
344
 
349
345
  ## Getting started
350
346
 
347
+ Learn about all Snowflake ML feature offerings in the [Developer Guide](https://docs.snowflake.com/developer-guide/snowflake-ml/overview).
348
+
351
349
  ### Have your Snowflake account ready
352
350
 
353
351
  If you don't have a Snowflake account yet, you can [sign up for a 30-day free trial account](https://signup.snowflake.com/).
354
352
 
355
353
  ### Installation
356
354
 
355
+ Snowflake ML Python is pre-installed in Container Runtime notebook environments.
356
+ [Learn more](https://docs.snowflake.com/en/developer-guide/snowflake-ml/notebooks-on-spcs).
357
+
358
+ In Snowflake Warehouse notebook environments, snowflake-ml-python can be installed using the "Packages" drop-down menu.
359
+
357
360
  Follow the [installation instructions](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index#installing-snowpark-ml)
358
361
  in the Snowflake documentation.
359
362
 
@@ -363,8 +366,8 @@ or [virtualenv](https://docs.python.org/3/tutorial/venv.html) to create a virtua
363
366
 
364
367
  ### Conda channels
365
368
 
366
- The [Snowflake Conda Channel](https://repo.anaconda.com/pkgs/snowflake/) contains the official snowpark ML package releases.
367
- The recommended approach is to install `snowflake-ml-python` this conda channel:
369
+ The [Snowflake Anaconda Channel](https://repo.anaconda.com/pkgs/snowflake/) contains the official snowflake-ml-python package
370
+ releases. To install `snowflake-ml-python` from this conda channel:
368
371
 
369
372
  ```sh
370
373
  conda install \
@@ -373,25 +376,18 @@ conda install \
373
376
  snowflake-ml-python
374
377
  ```
375
378
 
376
- See [the developer guide](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index) for installation instructions.
379
+ See [the developer guide](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index) for detailed installation instructions.
377
380
 
378
- The latest version of the `snowpark-ml-python` package is also published in a conda channel in this repository. Package versions
379
- in this channel may not yet be present in the official Snowflake conda channel.
381
+ The snowflake-ml-python package is also published in [conda-forge](https://anaconda.org/conda-forge/snowflake-ml-python).
382
+ To install `snowflake-ml-python` from conda forge:
380
383
 
381
- Install `snowflake-ml-python` from this channel with the following (being sure to replace `<version_specifier>` with the
382
- desired version, e.g. `1.0.10`):
383
-
384
- ```bash
384
+ ```sh
385
385
  conda install \
386
- -c https://raw.githubusercontent.com/snowflakedb/snowflake-ml-python/conda/releases/ \
387
- -c https://repo.anaconda.com/pkgs/snowflake \
386
+ -c https://conda.anaconda.org/conda-forge/ \
388
387
  --override-channels \
389
- snowflake-ml-python==<version_specifier>
388
+ snowflake-ml-python
390
389
  ```
391
390
 
392
- Note that until a `snowflake-ml-python` package version is available in the official Snowflake conda channel, there may
393
- be compatibility issues. Server-side functionality that `snowflake-ml-python` depends on may not yet be released.
394
-
395
391
  ### Verifying the package
396
392
 
397
393
  1. Install cosign.
@@ -410,6 +406,27 @@ NOTE: Version 1.7.0 is used as example here. Please choose the latest version.
410
406
 
411
407
  # Release History
412
408
 
409
+ ## 1.15.0
410
+
411
+ ### Bug Fixes
412
+
413
+ ### Behavior Changes
414
+
415
+ * Registry: Dropping support for deprecated `conversational` task type for Huggingface models.
416
+ To read more <https://github.com/huggingface/transformers/pull/31165>
417
+
418
+ ### New Features
419
+
420
+ ## 1.14.0 (09-18-2025)
421
+
422
+ ### Bug Fixes
423
+
424
+ ### Behavior Changes
425
+
426
+ ### New Features
427
+
428
+ * ML Job: The `additional_payloads` argument is now **deprecated** in favor of `imports`.
429
+
413
430
  ## 1.13.0
414
431
 
415
432
  ### Bug Fixes