snowflake-ml-python 1.14.0__py3-none-any.whl → 1.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/ml/_internal/platform_capabilities.py +9 -7
- snowflake/ml/_internal/utils/connection_params.py +5 -3
- snowflake/ml/_internal/utils/jwt_generator.py +3 -2
- snowflake/ml/_internal/utils/temp_file_utils.py +1 -2
- snowflake/ml/experiment/_client/experiment_tracking_sql_client.py +16 -3
- snowflake/ml/experiment/_entities/__init__.py +2 -1
- snowflake/ml/experiment/_entities/run.py +0 -15
- snowflake/ml/experiment/_entities/run_metadata.py +3 -51
- snowflake/ml/experiment/experiment_tracking.py +8 -8
- snowflake/ml/model/__init__.py +2 -6
- snowflake/ml/model/_client/model/batch_inference_specs.py +0 -4
- snowflake/ml/model/_client/model/inference_engine_utils.py +55 -0
- snowflake/ml/model/_client/model/model_version_impl.py +25 -62
- snowflake/ml/model/_client/ops/service_ops.py +18 -2
- snowflake/ml/model/_client/sql/service.py +29 -2
- snowflake/ml/model/_packager/model_handlers/_utils.py +4 -2
- snowflake/ml/model/_packager/model_handlers/huggingface_pipeline.py +7 -5
- snowflake/ml/model/_packager/model_packager.py +4 -3
- snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py +0 -1
- snowflake/ml/model/_signatures/utils.py +0 -21
- snowflake/ml/model/models/huggingface_pipeline.py +56 -21
- snowflake/ml/registry/_manager/model_manager.py +1 -1
- snowflake/ml/registry/_manager/model_parameter_reconciler.py +2 -2
- snowflake/ml/utils/connection_params.py +5 -3
- snowflake/ml/version.py +1 -1
- {snowflake_ml_python-1.14.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/METADATA +42 -35
- {snowflake_ml_python-1.14.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/RECORD +30 -29
- {snowflake_ml_python-1.14.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/WHEEL +0 -0
- {snowflake_ml_python-1.14.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/licenses/LICENSE.txt +0 -0
- {snowflake_ml_python-1.14.0.dist-info → snowflake_ml_python-1.15.0.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,6 @@
 import importlib
 import json
+import logging
 import os
 import pathlib
 import warnings
@@ -8,7 +9,6 @@ from typing import Any, Callable, Iterable, Optional, Sequence, cast
 import numpy as np
 import numpy.typing as npt
 import pandas as pd
-from absl import logging

 import snowflake.snowpark.dataframe as sp_df
 from snowflake.ml._internal import env
@@ -23,6 +23,8 @@ from snowflake.ml.model._signatures import (
 )
 from snowflake.snowpark import DataFrame as SnowparkDataFrame

+logger = logging.getLogger(__name__)
+
 EXPLAIN_BACKGROUND_DATA_ROWS_COUNT_LIMIT = 1000


@@ -257,7 +259,7 @@ def validate_model_task(passed_model_task: model_types.Task, inferred_model_task
         )
         return inferred_model_task
     elif inferred_model_task != model_types.Task.UNKNOWN:
-        logging.info(f"Inferred Task: {inferred_model_task.name} is used as task for this model " f"version")
+        logger.info(f"Inferred Task: {inferred_model_task.name} is used as task for this model " f"version")
         return inferred_model_task
     return passed_model_task

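This release swaps `absl` logging for the standard library throughout (the hunks above, and several below, follow the same `logging.getLogger(__name__)` pattern). A consequence is that applications embedding the library can tune its verbosity with ordinary logging configuration. A minimal sketch, assuming the loggers live under the `snowflake.ml` namespace implied by the module paths in this diff:

```python
import logging

# Route the library's log records through a basic handler and surface INFO-level
# messages such as the inferred-task notice added above. The "snowflake.ml" logger
# name is an assumption based on getLogger(__name__) and the package layout.
logging.basicConfig(format="%(asctime)s %(name)s %(levelname)s: %(message)s")
logging.getLogger("snowflake.ml").setLevel(logging.INFO)
```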
@@ -43,7 +43,6 @@ DEFAULT_CHAT_TEMPLATE = "{% for message in messages %}{{'<|im_start|>' + message
 def get_requirements_from_task(task: str, spcs_only: bool = False) -> list[model_env.ModelDependency]:
     # Text
     if task in [
-        "conversational",
         "fill-mask",
         "ner",
         "token-classification",
@@ -521,6 +520,7 @@ class HuggingFacePipelineHandler(
             input_data = X[signature.inputs[0].name].to_list()
             temp_res = getattr(raw_model, target_method)(input_data)
         else:
+            # TODO: remove conversational pipeline code
             # For others, we could offer the whole dataframe as a list.
             # Some of them may need some conversion
             if hasattr(transformers, "ConversationalPipeline") and isinstance(
@@ -759,11 +759,13 @@ class HuggingFaceOpenAICompatibleModel:
                 eos_token_id=self.tokenizer.eos_token_id,
                 stop_strings=stop_strings,
                 stream=stream,
-                repetition_penalty=frequency_penalty,
-                diversity_penalty=presence_penalty if n > 1 else None,
                 num_return_sequences=n,
-                num_beams=max(
+                num_beams=max(1, n),  # must be >1
+                repetition_penalty=frequency_penalty,
+                # TODO: Handle diversity_penalty and num_beam_groups
+                # not all models support them making it hard to support any huggingface model
+                # diversity_penalty=presence_penalty if n > 1 else None,
+                # num_beam_groups=max(2, n) if presence_penalty else 1,
                 do_sample=False,
             )

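The reordered generation arguments above reflect a constraint in `transformers`: with `do_sample=False`, returning `n` sequences requires beam search with `num_beams >= n`, while `diversity_penalty` only takes effect with group beam search (`num_beam_groups > 1`), which not every architecture supports, hence the commented-out lines. A small illustrative sketch; the values are placeholders, not taken from this diff:

```python
from transformers import GenerationConfig

# With greedy/beam decoding (do_sample=False), num_return_sequences must not exceed
# num_beams; diversity_penalty would additionally require num_beam_groups > 1.
n = 3
generation_config = GenerationConfig(
    num_return_sequences=n,
    num_beams=max(1, n),
    repetition_penalty=1.1,  # stand-in for the frequency_penalty mapping above
    do_sample=False,
)
```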
@@ -1,9 +1,8 @@
+import logging
 import os
 from types import ModuleType
 from typing import Optional

-from absl import logging
-
 from snowflake.ml._internal.exceptions import (
     error_codes,
     exceptions as snowml_exceptions,
@@ -12,6 +11,8 @@ from snowflake.ml.model import custom_model, model_signature, type_hints as mode
 from snowflake.ml.model._packager import model_handler
 from snowflake.ml.model._packager.model_meta import model_meta

+logger = logging.getLogger(__name__)
+

 class ModelPackager:
     """Top-level class to save/load and manage a Snowflake Native formatted model.
@@ -96,7 +97,7 @@ class ModelPackager:
                 **options,
             )
             if signatures is None:
-                logging.info(f"Model signatures are auto inferred as:\n\n{meta.signatures}")
+                logger.info(f"Model signatures are auto inferred as:\n\n{meta.signatures}")

         self.model = model
         self.meta = meta

@@ -110,27 +110,6 @@ def huggingface_pipeline_signature_auto_infer(
 ) -> Optional[core.ModelSignature]:
     # Text

-    # https://huggingface.co/docs/transformers/en/main_classes/pipelines#transformers.ConversationalPipeline
-    # Needs to convert to conversation object.
-    if task == "conversational":
-        warnings.warn(
-            (
-                "Conversational pipeline is removed from transformers since 4.42.0. "
-                "Support will be removed from snowflake-ml-python soon."
-            ),
-            category=DeprecationWarning,
-            stacklevel=1,
-        )
-        return core.ModelSignature(
-            inputs=[
-                core.FeatureSpec(name="user_inputs", dtype=core.DataType.STRING, shape=(-1,)),
-                core.FeatureSpec(name="generated_responses", dtype=core.DataType.STRING, shape=(-1,)),
-            ],
-            outputs=[
-                core.FeatureSpec(name="generated_responses", dtype=core.DataType.STRING, shape=(-1,)),
-            ],
-        )
-
     # https://huggingface.co/docs/transformers/en/main_classes/pipelines#transformers.TokenClassificationPipeline
     if task == "fill-mask":
         return core.ModelSignature(
@@ -8,6 +8,7 @@ from snowflake import snowpark
 from snowflake.ml._internal import telemetry
 from snowflake.ml._internal.human_readable_id import hrid_generator
 from snowflake.ml._internal.utils import sql_identifier
+from snowflake.ml.model._client.model import inference_engine_utils
 from snowflake.ml.model._client.ops import service_ops
 from snowflake.snowpark import async_job, session

@@ -77,6 +78,15 @@ class HuggingFacePipelineModel:
         framework = kwargs.get("framework", None)
         feature_extractor = kwargs.get("feature_extractor", None)

+        _can_download_snapshot = False
+        if download_snapshot:
+            try:
+                import huggingface_hub as hf_hub
+
+                _can_download_snapshot = True
+            except ImportError:
+                pass
+
         # ==== Start pipeline logic from transformers ====
         if model_kwargs is None:
             model_kwargs = {}
@@ -141,22 +151,23 @@ class HuggingFacePipelineModel:
         # Instantiate config if needed
         config_obj = None

-        if
+        if not _can_download_snapshot:
+            if isinstance(config, str):
+                config_obj = transformers.AutoConfig.from_pretrained(
+                    config, _from_pipeline=task, **hub_kwargs, **model_kwargs
+                )
+                hub_kwargs["_commit_hash"] = config_obj._commit_hash
+            elif config is None and isinstance(model, str):
+                config_obj = transformers.AutoConfig.from_pretrained(
+                    model, _from_pipeline=task, **hub_kwargs, **model_kwargs
+                )
+                hub_kwargs["_commit_hash"] = config_obj._commit_hash
+            # We only support string as config argument.
+            elif config is not None and not isinstance(config, str):
+                raise RuntimeError(
+                    "Impossible to use non-string config as input for HuggingFacePipelineModel. "
+                    "Use transformers.Pipeline object if required."
+                )

         # ==== Start pipeline logic (Task) from transformers ====

@@ -208,7 +219,7 @@ class HuggingFacePipelineModel:
                 "Using a pipeline without specifying a model name and revision in production is not recommended.",
                 stacklevel=2,
             )
-        if config is None and isinstance(model, str):
+        if not _can_download_snapshot and config is None and isinstance(model, str):
             config_obj = transformers.AutoConfig.from_pretrained(
                 model, _from_pipeline=task, **hub_kwargs, **model_kwargs
             )
@@ -228,11 +239,10 @@ class HuggingFacePipelineModel:
         )

         repo_snapshot_dir: Optional[str] = None
-        if download_snapshot:
+        if _can_download_snapshot:
             try:
-                from huggingface_hub import snapshot_download

-                repo_snapshot_dir = snapshot_download(
+                repo_snapshot_dir = hf_hub.snapshot_download(
                     repo_id=model,
                     revision=revision,
                     token=token,
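The hunks above make `huggingface_hub` an optional dependency that is probed once up front and then reused as `hf_hub`. A standalone sketch of the same guarded-import pattern, with placeholder repository coordinates rather than values from this diff:

```python
from typing import Optional

# Probe for the optional dependency once; fall back gracefully if it is absent.
try:
    import huggingface_hub as hf_hub
    _can_download_snapshot = True
except ImportError:
    _can_download_snapshot = False

repo_snapshot_dir: Optional[str] = None
if _can_download_snapshot:
    # Downloads the repository snapshot to the local Hugging Face cache and returns its path.
    repo_snapshot_dir = hf_hub.snapshot_download(
        repo_id="<namespace>/<model>",  # placeholder
        revision="main",                # placeholder
        token=None,                     # set for gated or private repositories
    )
```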
@@ -268,7 +278,7 @@ class HuggingFacePipelineModel:
         ],
     )
     @snowpark._internal.utils.private_preview(version="1.9.1")
-    def
+    def log_model_and_create_service(
         self,
         *,
         session: session.Session,
@@ -293,6 +303,7 @@ class HuggingFacePipelineModel:
         force_rebuild: bool = False,
         build_external_access_integrations: Optional[list[str]] = None,
         block: bool = True,
+        experimental_options: Optional[dict[str, Any]] = None,
     ) -> Union[str, async_job.AsyncJob]:
         """Logs a Hugging Face model and creates a service in Snowflake.

@@ -319,6 +330,10 @@ class HuggingFacePipelineModel:
             force_rebuild: Whether to force rebuild the image. Defaults to False.
             build_external_access_integrations: External access integrations for building the image. Defaults to None.
             block: Whether to block the operation. Defaults to True.
+            experimental_options: Experimental options for the service creation with custom inference engine.
+                Currently, only `inference_engine` and `inference_engine_args_override` are supported.
+                `inference_engine` is the name of the inference engine to use.
+                `inference_engine_args_override` is a list of string arguments to pass to the inference engine.

         Raises:
             ValueError: if database and schema name is not provided and session doesn't have a
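Taken together with the new `experimental_options` parameter documented above, a call might look like the following sketch. The engine name and flags are hypothetical (this diff does not enumerate accepted values), the import path follows the file layout listed at the top, and the required model/service identifiers accepted by the full keyword-only signature are elided:

```python
from snowflake.ml.model.models.huggingface_pipeline import HuggingFacePipelineModel

# Hypothetical engine selection; per the validation added further down in this diff,
# only HuggingFace text-generation models are accepted with experimental_options.
experimental_options = {
    "inference_engine": "<engine-name>",
    "inference_engine_args_override": ["--flag", "value"],
}

hf_model = HuggingFacePipelineModel(task="text-generation", model="<namespace>/<model>")
result = hf_model.log_model_and_create_service(
    session=session,  # an existing snowflake.snowpark.Session
    gpu_requests="1",
    block=True,
    experimental_options=experimental_options,
    # remaining keyword-only arguments (model/version/service identifiers, compute pool, ...)
    # are omitted in this sketch; see the full signature in the file.
)
```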
@@ -360,6 +375,24 @@ class HuggingFacePipelineModel:
         )
         logger.info(f"A service job is going to register the hf model as: {model_name}.{version_name}")

+        # Check if model is HuggingFace text-generation before doing inference engine checks
+        inference_engine_args = None
+        if experimental_options:
+            if self.task != "text-generation":
+                raise ValueError(
+                    "Currently, InferenceEngine using experimental_options is only supported for "
+                    "HuggingFace text-generation models."
+                )
+
+            inference_engine_args = inference_engine_utils._get_inference_engine_args(experimental_options)
+
+        # Enrich inference engine args if inference engine is specified
+        if inference_engine_args is not None:
+            inference_engine_args = inference_engine_utils._enrich_inference_engine_args(
+                inference_engine_args,
+                gpu_requests,
+            )
+
         from snowflake.ml.model import event_handler
         from snowflake.snowpark import exceptions

@@ -412,6 +445,8 @@ class HuggingFacePipelineModel:
                 # TODO: remove warehouse in the next release
                 warehouse=session.get_current_warehouse(),
             ),
+            # inference engine
+            inference_engine_args=inference_engine_args,
         )
         status.update(label="HuggingFace model service created successfully", state="complete", expanded=False)
         return result
@@ -1,8 +1,8 @@
+import logging
 from types import ModuleType
 from typing import TYPE_CHECKING, Any, Optional, Union

 import pandas as pd
-from absl.logging import logging

 from snowflake.ml._internal import platform_capabilities, telemetry
 from snowflake.ml._internal.exceptions import error_codes, exceptions

@@ -1,8 +1,8 @@
+import logging
 import warnings
 from dataclasses import dataclass
 from typing import Any, Optional

-from absl.logging import logging
 from packaging import requirements

 from snowflake.ml import version as snowml_version
@@ -221,7 +221,7 @@ class ModelParameterReconciler:
         ).get(env_utils.SNOWPARK_ML_PKG_NAME, [])

         if len(snowml_matched_versions) < 1 and not options.get("embed_local_ml_library", False):
-            logging.info(
+            logger.info(
                 f"Local snowflake-ml-python library has version {snowml_version.VERSION},"
                 " which is not available in the Snowflake server, embedding local ML library automatically."
             )

@@ -1,13 +1,15 @@
 import configparser
+import logging
 import os
 from typing import Optional, Union

-from absl import logging
 from cryptography.hazmat import backends
 from cryptography.hazmat.primitives import serialization

 from snowflake import snowpark

+logger = logging.getLogger(__name__)
+
 _DEFAULT_CONNECTION_FILE = "~/.snowsql/config"


@@ -108,7 +110,7 @@ def _load_from_snowsql_config_file(connection_name: str, login_file: str = "") -
     """Loads the dictionary from snowsql config file."""
     snowsql_config_file = login_file if login_file else os.path.expanduser(_DEFAULT_CONNECTION_FILE)
     if not os.path.exists(snowsql_config_file):
-        logging.error(f"Connection name given but snowsql config file is not found at: {snowsql_config_file}")
+        logger.error(f"Connection name given but snowsql config file is not found at: {snowsql_config_file}")
         raise Exception("Snowflake SnowSQL config not found.")

     config = configparser.ConfigParser(inline_comment_prefixes="#")
@@ -124,7 +126,7 @@ def _load_from_snowsql_config_file(connection_name: str, login_file: str = "") -
     # See https://docs.snowflake.com/en/user-guide/snowsql-start.html#configuring-default-connection-settings
     connection_name = "connections"

-    logging.info(f"Reading {snowsql_config_file} for connection parameters defined as {connection_name}")
+    logger.info(f"Reading {snowsql_config_file} for connection parameters defined as {connection_name}")
     config.read(snowsql_config_file)
     conn_params = dict(config[connection_name])
     # Remap names to appropriate args in Python Connector API
snowflake/ml/version.py CHANGED

@@ -1,2 +1,2 @@
 # This is parsed by regex in conda recipe meta file. Make sure not to break it.
-VERSION = "1.14.0"
+VERSION = "1.15.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: snowflake-ml-python
-Version: 1.14.0
+Version: 1.15.0
 Summary: The machine learning client library that is used for interacting with Snowflake to build machine learning solutions.
 Author-email: "Snowflake, Inc" <support@snowflake.com>
 License:
@@ -233,7 +233,6 @@ Classifier: Topic :: Scientific/Engineering :: Information Analysis
 Requires-Python: <3.13,>=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE.txt
-Requires-Dist: absl-py<2,>=0.15
 Requires-Dist: anyio<5,>=3.5.0
 Requires-Dist: cachetools<6,>=3.1.1
 Requires-Dist: cloudpickle>=2.0.0
@@ -302,21 +301,21 @@ Requires-Dist: torch<3,>=2.0.1; extra == "transformers"
 Requires-Dist: transformers!=4.51.3,<5,>=4.39.3; extra == "transformers"
 Dynamic: license-file

-#
+# Snowflake ML Python

-With
+Snowflake ML Python is a set of tools including SDKs and underlying infrastructure to build and deploy machine learning models.
+With Snowflake ML Python, you can pre-process data, train, manage and deploy ML models all within Snowflake,
 and benefit from Snowflake’s proven performance, scalability, stability and governance at every stage of the Machine
 Learning workflow.

-## Key Components of
+## Key Components of Snowflake ML Python

-The
-and deployment process
+The Snowflake ML Python SDK provides a number of APIs to support each stage of an end-to-end Machine Learning development
+and deployment process.

-###
+### Snowflake ML Model Development

-[
+[Snowflake ML Model Development](https://docs.snowflake.com/developer-guide/snowflake-ml/overview#ml-modeling)
 provides a collection of python APIs enabling efficient ML model development directly in Snowflake:

 1. Modeling API (`snowflake.ml.modeling`) for data preprocessing, feature engineering and model training in Snowflake.
@@ -327,19 +326,16 @@ model development classes based on sklearn, xgboost, and lightgbm.
 1. Framework Connectors: Optimized, secure and performant data provisioning for Pytorch and Tensorflow frameworks in
    their native data loader formats.

-from a query or Snowpark Dataframe along with a number of convenience APIs.
+### Snowflake ML Ops

+Snowflake ML Python contains a suite of MLOps tools. It complements
+the Snowflake Modeling API, and provides end to end development to deployment within Snowflake.
+The Snowflake ML Ops suite consists of:

-the Snowpark ML Development API, and provides end to end development to deployment within Snowflake.
-Currently, the API consists of:
-
-1. [Registry](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index#snowflake-model-registry): A python API
+1. [Registry](https://docs.snowflake.com/developer-guide/snowflake-ml/overview#snowflake-model-registry): A python API
    allows secure deployment and management of models in Snowflake, supporting models trained both inside and outside of
    Snowflake.
-2. [Feature Store](https://docs.snowflake.com/
+2. [Feature Store](https://docs.snowflake.com/developer-guide/snowflake-ml/overview#snowflake-feature-store): A fully
    integrated solution for defining, managing, storing and discovering ML features derived from your data. The
    Snowflake Feature Store supports automated, incremental refresh from batch and streaming data sources, so that
    feature pipelines need be defined only once to be continuously updated with new data.
@@ -348,12 +344,19 @@ Currently, the API consists of:

 ## Getting started

+Learn about all Snowflake ML feature offerings in the [Developer Guide](https://docs.snowflake.com/developer-guide/snowflake-ml/overview).
+
 ### Have your Snowflake account ready

 If you don't have a Snowflake account yet, you can [sign up for a 30-day free trial account](https://signup.snowflake.com/).

 ### Installation

+Snowflake ML Python is pre-installed in Container Runtime notebook environments.
+[Learn more](https://docs.snowflake.com/en/developer-guide/snowflake-ml/notebooks-on-spcs).
+
+In Snowflake Warehouse notebook environments, snowflake-ml-python can be installed using the "Packages" drop-down menu.
+
 Follow the [installation instructions](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index#installing-snowpark-ml)
 in the Snowflake documentation.

@@ -363,8 +366,8 @@ or [virtualenv](https://docs.python.org/3/tutorial/venv.html) to create a virtua

 ### Conda channels

-The [Snowflake
+The [Snowflake Anaconda Channel](https://repo.anaconda.com/pkgs/snowflake/) contains the official snowflake-ml-python package
+releases. To install `snowflake-ml-python` from this conda channel:

 ```sh
 conda install \
@@ -373,25 +376,18 @@ conda install \
   snowflake-ml-python
 ```

-See [the developer guide](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index) for installation instructions.
-
-The latest version of the `snowpark-ml-python` package is also published in a conda channel in this repository. Package versions
-in this channel may not yet be present in the official Snowflake conda channel.
+See [the developer guide](https://docs.snowflake.com/en/developer-guide/snowpark-ml/index) for detailed installation instructions.

+The snowflake-ml-python package is also published in [conda-forge](https://anaconda.org/conda-forge/snowflake-ml-python).
+To install `snowflake-ml-python` from conda forge:

-```
+```sh
 conda install \
-  -c https://
-  -c https://repo.anaconda.com/pkgs/snowflake \
+  -c https://conda.anaconda.org/conda-forge/ \
   --override-channels \
-  snowflake-ml-python
+  snowflake-ml-python
 ```

-Note that until a `snowflake-ml-python` package version is available in the official Snowflake conda channel, there may
-be compatibility issues. Server-side functionality that `snowflake-ml-python` depends on may not yet be released.
-
 ### Verifying the package

 1. Install cosign.
|
|
|
410
406
|
|
|
411
407
|
# Release History
|
|
412
408
|
|
|
413
|
-
## 1.
|
|
409
|
+
## 1.15.0
|
|
410
|
+
|
|
411
|
+
### Bug Fixes
|
|
412
|
+
|
|
413
|
+
### Behavior Changes
|
|
414
|
+
|
|
415
|
+
* Registry: Dropping support for deprecated `conversational` task type for Huggingface models.
|
|
416
|
+
To read more <https://github.com/huggingface/transformers/pull/31165>
|
|
417
|
+
|
|
418
|
+
### New Features
|
|
419
|
+
|
|
420
|
+
## 1.14.0 (09-18-2025)
|
|
414
421
|
|
|
415
422
|
### Bug Fixes
|
|
416
423
|
|