snowflake-ml-python 1.8.1__py3-none-any.whl → 1.8.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. snowflake/cortex/_complete.py +44 -10
  2. snowflake/ml/_internal/platform_capabilities.py +39 -3
  3. snowflake/ml/data/data_connector.py +25 -0
  4. snowflake/ml/dataset/dataset_reader.py +5 -1
  5. snowflake/ml/jobs/_utils/constants.py +2 -4
  6. snowflake/ml/jobs/_utils/interop_utils.py +442 -0
  7. snowflake/ml/jobs/_utils/payload_utils.py +81 -47
  8. snowflake/ml/jobs/_utils/scripts/constants.py +4 -0
  9. snowflake/ml/jobs/_utils/scripts/get_instance_ip.py +136 -0
  10. snowflake/ml/jobs/_utils/scripts/mljob_launcher.py +178 -0
  11. snowflake/ml/jobs/_utils/scripts/signal_workers.py +203 -0
  12. snowflake/ml/jobs/_utils/scripts/worker_shutdown_listener.py +242 -0
  13. snowflake/ml/jobs/_utils/spec_utils.py +5 -8
  14. snowflake/ml/jobs/_utils/types.py +6 -0
  15. snowflake/ml/jobs/decorators.py +3 -3
  16. snowflake/ml/jobs/job.py +145 -23
  17. snowflake/ml/jobs/manager.py +62 -10
  18. snowflake/ml/model/_client/ops/service_ops.py +42 -35
  19. snowflake/ml/model/_client/service/model_deployment_spec.py +7 -4
  20. snowflake/ml/model/_client/sql/service.py +9 -5
  21. snowflake/ml/model/_model_composer/model_composer.py +29 -11
  22. snowflake/ml/model/_packager/model_env/model_env.py +8 -2
  23. snowflake/ml/model/_packager/model_meta/model_meta.py +6 -1
  24. snowflake/ml/model/_packager/model_packager.py +2 -0
  25. snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py +1 -1
  26. snowflake/ml/model/type_hints.py +2 -0
  27. snowflake/ml/registry/_manager/model_manager.py +20 -1
  28. snowflake/ml/registry/registry.py +5 -1
  29. snowflake/ml/version.py +1 -1
  30. {snowflake_ml_python-1.8.1.dist-info → snowflake_ml_python-1.8.2.dist-info}/METADATA +35 -4
  31. {snowflake_ml_python-1.8.1.dist-info → snowflake_ml_python-1.8.2.dist-info}/RECORD +34 -28
  32. {snowflake_ml_python-1.8.1.dist-info → snowflake_ml_python-1.8.2.dist-info}/WHEEL +0 -0
  33. {snowflake_ml_python-1.8.1.dist-info → snowflake_ml_python-1.8.2.dist-info}/licenses/LICENSE.txt +0 -0
  34. {snowflake_ml_python-1.8.1.dist-info → snowflake_ml_python-1.8.2.dist-info}/top_level.txt +0 -0
@@ -9,7 +9,7 @@ import time
9
9
  from typing import Any, Dict, List, Optional, Tuple, Union, cast
10
10
 
11
11
  from snowflake import snowpark
12
- from snowflake.ml._internal import file_utils
12
+ from snowflake.ml._internal import file_utils, platform_capabilities as pc
13
13
  from snowflake.ml._internal.utils import service_logger, sql_identifier
14
14
  from snowflake.ml.model._client.service import model_deployment_spec
15
15
  from snowflake.ml.model._client.sql import service as service_sql, stage as stage_sql
@@ -57,30 +57,30 @@ class ServiceOperator:
57
57
  self._session = session
58
58
  self._database_name = database_name
59
59
  self._schema_name = schema_name
60
- self._workspace = tempfile.TemporaryDirectory()
61
60
  self._service_client = service_sql.ServiceSQLClient(
62
61
  session,
63
62
  database_name=database_name,
64
63
  schema_name=schema_name,
65
64
  )
66
- self._stage_client = stage_sql.StageSQLClient(
67
- session,
68
- database_name=database_name,
69
- schema_name=schema_name,
70
- )
71
- self._model_deployment_spec = model_deployment_spec.ModelDeploymentSpec(
72
- workspace_path=pathlib.Path(self._workspace.name)
73
- )
65
+ if pc.PlatformCapabilities.get_instance().is_inlined_deployment_spec_enabled():
66
+ self._workspace = None
67
+ self._model_deployment_spec = model_deployment_spec.ModelDeploymentSpec()
68
+ else:
69
+ self._workspace = tempfile.TemporaryDirectory()
70
+ self._stage_client = stage_sql.StageSQLClient(
71
+ session,
72
+ database_name=database_name,
73
+ schema_name=schema_name,
74
+ )
75
+ self._model_deployment_spec = model_deployment_spec.ModelDeploymentSpec(
76
+ workspace_path=pathlib.Path(self._workspace.name)
77
+ )
74
78
 
75
79
  def __eq__(self, __value: object) -> bool:
76
80
  if not isinstance(__value, ServiceOperator):
77
81
  return False
78
82
  return self._service_client == __value._service_client
79
83
 
80
- @property
81
- def workspace_path(self) -> pathlib.Path:
82
- return pathlib.Path(self._workspace.name)
83
-
84
84
  def create_service(
85
85
  self,
86
86
  *,
@@ -119,19 +119,21 @@ class ServiceOperator:
119
119
 
120
120
  image_repo_database_name = image_repo_database_name or database_name or self._database_name
121
121
  image_repo_schema_name = image_repo_schema_name or schema_name or self._schema_name
122
- # create a temp stage
123
- stage_name = sql_identifier.SqlIdentifier(
124
- snowpark_utils.random_name_for_temp_object(snowpark_utils.TempObjectType.STAGE)
125
- )
126
- self._stage_client.create_tmp_stage(
127
- database_name=database_name,
128
- schema_name=schema_name,
129
- stage_name=stage_name,
130
- statement_params=statement_params,
131
- )
132
- stage_path = self._stage_client.fully_qualified_object_name(database_name, schema_name, stage_name)
133
-
134
- self._model_deployment_spec.save(
122
+ if self._workspace:
123
+ # create a temp stage
124
+ stage_name = sql_identifier.SqlIdentifier(
125
+ snowpark_utils.random_name_for_temp_object(snowpark_utils.TempObjectType.STAGE)
126
+ )
127
+ self._stage_client.create_tmp_stage(
128
+ database_name=database_name,
129
+ schema_name=schema_name,
130
+ stage_name=stage_name,
131
+ statement_params=statement_params,
132
+ )
133
+ stage_path = self._stage_client.fully_qualified_object_name(database_name, schema_name, stage_name)
134
+ else:
135
+ stage_path = None
136
+ spec_yaml_str_or_path = self._model_deployment_spec.save(
135
137
  database_name=database_name,
136
138
  schema_name=schema_name,
137
139
  model_name=model_name,
@@ -154,12 +156,14 @@ class ServiceOperator:
154
156
  force_rebuild=force_rebuild,
155
157
  external_access_integrations=build_external_access_integrations,
156
158
  )
157
- file_utils.upload_directory_to_stage(
158
- self._session,
159
- local_path=self.workspace_path,
160
- stage_path=pathlib.PurePosixPath(stage_path),
161
- statement_params=statement_params,
162
- )
159
+ if self._workspace:
160
+ assert stage_path is not None
161
+ file_utils.upload_directory_to_stage(
162
+ self._session,
163
+ local_path=pathlib.Path(self._workspace.name),
164
+ stage_path=pathlib.PurePosixPath(stage_path),
165
+ statement_params=statement_params,
166
+ )
163
167
 
164
168
  # check if the inference service is already running/suspended
165
169
  model_inference_service_exists = self._check_if_service_exists(
@@ -176,8 +180,11 @@ class ServiceOperator:
176
180
 
177
181
  # deploy the model service
178
182
  query_id, async_job = self._service_client.deploy_model(
179
- stage_path=stage_path,
180
- model_deployment_spec_file_rel_path=model_deployment_spec.ModelDeploymentSpec.DEPLOY_SPEC_FILE_REL_PATH,
183
+ stage_path=stage_path if self._workspace else None,
184
+ model_deployment_spec_file_rel_path=(
185
+ model_deployment_spec.ModelDeploymentSpec.DEPLOY_SPEC_FILE_REL_PATH if self._workspace else None
186
+ ),
187
+ model_deployment_spec_yaml_str=None if self._workspace else spec_yaml_str_or_path,
181
188
  statement_params=statement_params,
182
189
  )
183
190
 
@@ -16,7 +16,7 @@ class ModelDeploymentSpec:
16
16
 
17
17
  DEPLOY_SPEC_FILE_REL_PATH = "deploy.yml"
18
18
 
19
- def __init__(self, workspace_path: pathlib.Path) -> None:
19
+ def __init__(self, workspace_path: Optional[pathlib.Path] = None) -> None:
20
20
  self.workspace_path = workspace_path
21
21
 
22
22
  def save(
@@ -43,7 +43,7 @@ class ModelDeploymentSpec:
43
43
  max_batch_rows: Optional[int],
44
44
  force_rebuild: bool,
45
45
  external_access_integrations: Optional[List[sql_identifier.SqlIdentifier]],
46
- ) -> None:
46
+ ) -> str:
47
47
  # create the deployment spec
48
48
  # models spec
49
49
  fq_model_name = identifier.get_schema_level_object_identifier(
@@ -105,9 +105,12 @@ class ModelDeploymentSpec:
105
105
  service=service_dict,
106
106
  )
107
107
 
108
+ # Anchors are not supported in the server, avoid that.
109
+ yaml.SafeDumper.ignore_aliases = lambda *args: True # type: ignore[method-assign]
110
+ if self.workspace_path is None:
111
+ return yaml.safe_dump(model_deployment_spec_dict)
108
112
  # save the yaml
109
113
  file_path = self.workspace_path / self.DEPLOY_SPEC_FILE_REL_PATH
110
114
  with file_path.open("w", encoding="utf-8") as f:
111
- # Anchors are not supported in the server, avoid that.
112
- yaml.SafeDumper.ignore_aliases = lambda *args: True # type: ignore[method-assign]
113
115
  yaml.safe_dump(model_deployment_spec_dict, f)
116
+ return str(file_path.resolve())
@@ -73,13 +73,17 @@ class ServiceSQLClient(_base._BaseSQLClient):
73
73
  def deploy_model(
74
74
  self,
75
75
  *,
76
- stage_path: str,
77
- model_deployment_spec_file_rel_path: str,
76
+ stage_path: Optional[str] = None,
77
+ model_deployment_spec_yaml_str: Optional[str] = None,
78
+ model_deployment_spec_file_rel_path: Optional[str] = None,
78
79
  statement_params: Optional[Dict[str, Any]] = None,
79
80
  ) -> Tuple[str, snowpark.AsyncJob]:
80
- async_job = self._session.sql(
81
- f"CALL SYSTEM$DEPLOY_MODEL('@{stage_path}/{model_deployment_spec_file_rel_path}')"
82
- ).collect(block=False, statement_params=statement_params)
81
+ assert model_deployment_spec_yaml_str or model_deployment_spec_file_rel_path
82
+ if model_deployment_spec_yaml_str:
83
+ sql_str = f"CALL SYSTEM$DEPLOY_MODEL('{model_deployment_spec_yaml_str}')"
84
+ else:
85
+ sql_str = f"CALL SYSTEM$DEPLOY_MODEL('@{stage_path}/{model_deployment_spec_file_rel_path}')"
86
+ async_job = self._session.sql(sql_str).collect(block=False, statement_params=statement_params)
83
87
  assert isinstance(async_job, snowpark.AsyncJob)
84
88
  return async_job.query_id, async_job
85
89
 
@@ -44,6 +44,7 @@ class ModelComposer:
44
44
  stage_path: str,
45
45
  *,
46
46
  statement_params: Optional[Dict[str, Any]] = None,
47
+ save_location: Optional[str] = None,
47
48
  ) -> None:
48
49
  self.session = session
49
50
  self.stage_path: Union[pathlib.PurePosixPath, parse.ParseResult] = None # type: ignore[assignment]
@@ -54,10 +55,29 @@ class ModelComposer:
54
55
  # The stage path is a user stage path
55
56
  self.stage_path = pathlib.PurePosixPath(stage_path)
56
57
 
57
- self._workspace = tempfile.TemporaryDirectory()
58
- self._packager_workspace = tempfile.TemporaryDirectory()
58
+ # Set up workspace based on save_location if provided, otherwise use temporary directory
59
+ self.save_location = save_location
60
+ if save_location:
61
+ # Use the save_location directory directly
62
+ self._workspace_path = pathlib.Path(save_location)
63
+ self._workspace_path.mkdir(exist_ok=True)
64
+ # ensure that the directory is empty
65
+ if any(self._workspace_path.iterdir()):
66
+ raise ValueError(f"The directory {self._workspace_path} is not empty.")
67
+ self._workspace = None
68
+
69
+ self._packager_workspace_path = self._workspace_path / ModelComposer.MODEL_DIR_REL_PATH
70
+ self._packager_workspace_path.mkdir(exist_ok=True)
71
+ self._packager_workspace = None
72
+ else:
73
+ # Use a temporary directory
74
+ self._workspace = tempfile.TemporaryDirectory()
75
+ self._workspace_path = pathlib.Path(self._workspace.name)
76
+
77
+ self._packager_workspace_path = self._workspace_path / ModelComposer.MODEL_DIR_REL_PATH
78
+ self._packager_workspace_path.mkdir(exist_ok=True)
59
79
 
60
- self.packager = model_packager.ModelPackager(local_dir_path=str(self._packager_workspace_path))
80
+ self.packager = model_packager.ModelPackager(local_dir_path=str(self.packager_workspace_path))
61
81
  self.manifest = model_manifest.ModelManifest(workspace_path=self.workspace_path)
62
82
 
63
83
  self.model_file_rel_path = f"model-{uuid.uuid4().hex}.zip"
@@ -65,16 +85,16 @@ class ModelComposer:
65
85
  self._statement_params = statement_params
66
86
 
67
87
  def __del__(self) -> None:
68
- self._workspace.cleanup()
69
- self._packager_workspace.cleanup()
88
+ if self._workspace:
89
+ self._workspace.cleanup()
70
90
 
71
91
  @property
72
92
  def workspace_path(self) -> pathlib.Path:
73
- return pathlib.Path(self._workspace.name)
93
+ return self._workspace_path
74
94
 
75
95
  @property
76
- def _packager_workspace_path(self) -> pathlib.Path:
77
- return pathlib.Path(self._packager_workspace.name)
96
+ def packager_workspace_path(self) -> pathlib.Path:
97
+ return self._packager_workspace_path
78
98
 
79
99
  @property
80
100
  def model_stage_path(self) -> str:
@@ -167,6 +187,7 @@ class ModelComposer:
167
187
  conda_dependencies=conda_dependencies,
168
188
  pip_requirements=pip_requirements,
169
189
  artifact_repository_map=artifact_repository_map,
190
+ target_platforms=target_platforms,
170
191
  python_version=python_version,
171
192
  ext_modules=ext_modules,
172
193
  code_paths=code_paths,
@@ -175,9 +196,6 @@ class ModelComposer:
175
196
  )
176
197
  assert self.packager.meta is not None
177
198
 
178
- file_utils.copytree(
179
- str(self._packager_workspace_path), str(self.workspace_path / ModelComposer.MODEL_DIR_REL_PATH)
180
- )
181
199
  self.manifest.save(
182
200
  model_meta=self.packager.meta,
183
201
  model_rel_path=pathlib.PurePosixPath(ModelComposer.MODEL_DIR_REL_PATH),
@@ -29,11 +29,13 @@ class ModelEnv:
29
29
  self,
30
30
  conda_env_rel_path: Optional[str] = None,
31
31
  pip_requirements_rel_path: Optional[str] = None,
32
+ prefer_pip: bool = False,
32
33
  ) -> None:
33
34
  if conda_env_rel_path is None:
34
35
  conda_env_rel_path = os.path.join(_DEFAULT_ENV_DIR, _DEFAULT_CONDA_ENV_FILENAME)
35
36
  if pip_requirements_rel_path is None:
36
37
  pip_requirements_rel_path = os.path.join(_DEFAULT_ENV_DIR, _DEFAULT_PIP_REQUIREMENTS_FILENAME)
38
+ self.prefer_pip: bool = prefer_pip
37
39
  self.conda_env_rel_path = pathlib.PurePosixPath(pathlib.Path(conda_env_rel_path).as_posix())
38
40
  self.pip_requirements_rel_path = pathlib.PurePosixPath(pathlib.Path(pip_requirements_rel_path).as_posix())
39
41
  self.artifact_repository_map: Optional[Dict[str, str]] = None
@@ -113,7 +115,11 @@ class ModelEnv:
113
115
  if snowpark_ml_version:
114
116
  self._snowpark_ml_version = version.parse(snowpark_ml_version)
115
117
 
116
- def include_if_absent(self, pkgs: List[ModelDependency], check_local_version: bool = False) -> None:
118
+ def include_if_absent(
119
+ self,
120
+ pkgs: List[ModelDependency],
121
+ check_local_version: bool = False,
122
+ ) -> None:
117
123
  """Append requirements into model env if absent. Depending on the environment, requirements may be added
118
124
  to either the pip requirements or conda dependencies.
119
125
 
@@ -121,7 +127,7 @@ class ModelEnv:
121
127
  pkgs: A list of ModelDependency namedtuple to be appended.
122
128
  check_local_version: Flag to indicate if it is required to pin to local version. Defaults to False.
123
129
  """
124
- if self.pip_requirements and not self.conda_dependencies and pkgs:
130
+ if (self.pip_requirements or self.prefer_pip) and not self.conda_dependencies and pkgs:
125
131
  pip_pkg_reqs: List[str] = []
126
132
  warnings.warn(
127
133
  (
@@ -49,6 +49,7 @@ def create_model_metadata(
49
49
  conda_dependencies: Optional[List[str]] = None,
50
50
  pip_requirements: Optional[List[str]] = None,
51
51
  artifact_repository_map: Optional[Dict[str, str]] = None,
52
+ target_platforms: Optional[List[model_types.TargetPlatform]] = None,
52
53
  python_version: Optional[str] = None,
53
54
  task: model_types.Task = model_types.Task.UNKNOWN,
54
55
  **kwargs: Any,
@@ -69,6 +70,7 @@ def create_model_metadata(
69
70
  conda_dependencies: List of conda requirements for running the model. Defaults to None.
70
71
  pip_requirements: List of pip Python packages requirements for running the model. Defaults to None.
71
72
  artifact_repository_map: A dict mapping from package channel to artifact repository name.
73
+ target_platforms: List of target platforms to run the model.
72
74
  python_version: A string of python version where model is run. Used for user override. If specified as None,
73
75
  current version would be captured. Defaults to None.
74
76
  task: The task of the Model Version. It is an enum class Task with values TABULAR_REGRESSION,
@@ -101,12 +103,14 @@ def create_model_metadata(
101
103
  else:
102
104
  raise ValueError("`snowflake.ml` is imported via a way that embedding local ML library is not supported.")
103
105
 
106
+ prefer_pip = target_platforms == [model_types.TargetPlatform.SNOWPARK_CONTAINER_SERVICES]
104
107
  env = _create_env_for_model_metadata(
105
108
  conda_dependencies=conda_dependencies,
106
109
  pip_requirements=pip_requirements,
107
110
  artifact_repository_map=artifact_repository_map,
108
111
  python_version=python_version,
109
112
  embed_local_ml_library=embed_local_ml_library,
113
+ prefer_pip=prefer_pip,
110
114
  )
111
115
 
112
116
  if embed_local_ml_library:
@@ -157,8 +161,9 @@ def _create_env_for_model_metadata(
157
161
  artifact_repository_map: Optional[Dict[str, str]] = None,
158
162
  python_version: Optional[str] = None,
159
163
  embed_local_ml_library: bool = False,
164
+ prefer_pip: bool = False,
160
165
  ) -> model_env.ModelEnv:
161
- env = model_env.ModelEnv()
166
+ env = model_env.ModelEnv(prefer_pip=prefer_pip)
162
167
 
163
168
  # Mypy doesn't like getter and setter have different types. See python/mypy #3004
164
169
  env.conda_dependencies = conda_dependencies # type: ignore[assignment]
@@ -44,6 +44,7 @@ class ModelPackager:
44
44
  conda_dependencies: Optional[List[str]] = None,
45
45
  pip_requirements: Optional[List[str]] = None,
46
46
  artifact_repository_map: Optional[Dict[str, str]] = None,
47
+ target_platforms: Optional[List[model_types.TargetPlatform]] = None,
47
48
  python_version: Optional[str] = None,
48
49
  ext_modules: Optional[List[ModuleType]] = None,
49
50
  code_paths: Optional[List[str]] = None,
@@ -77,6 +78,7 @@ class ModelPackager:
77
78
  artifact_repository_map=artifact_repository_map,
78
79
  python_version=python_version,
79
80
  task=task,
81
+ target_platforms=target_platforms,
80
82
  **options,
81
83
  ) as meta:
82
84
  model_blobs_path = os.path.join(self.local_dir_path, ModelPackager.MODEL_BLOBS_DIR)
@@ -1 +1 @@
1
- REQUIREMENTS = ['absl-py>=0.15,<2', 'aiohttp!=4.0.0a0, !=4.0.0a1', 'anyio>=3.5.0,<5', 'cachetools>=3.1.1,<6', 'cloudpickle>=2.0.0,<3', 'cryptography', 'fsspec>=2024.6.1,<2026', 'importlib_resources>=6.1.1, <7', 'numpy>=1.23,<2', 'packaging>=20.9,<25', 'pandas>=1.0.0,<3', 'pyarrow', 'pyjwt>=2.0.0, <3', 'pytimeparse>=1.1.8,<2', 'pyyaml>=6.0,<7', 'requests', 'retrying>=1.3.3,<2', 's3fs>=2024.6.1,<2026', 'scikit-learn>=1.4,<1.6', 'scipy>=1.9,<2', 'snowflake-connector-python>=3.12.0,<4', 'snowflake-snowpark-python>=1.17.0,<2,!=1.26.0', 'sqlparse>=0.4,<1', 'typing-extensions>=4.1.0,<5', 'xgboost>=1.7.3,<3']
1
+ REQUIREMENTS = ['absl-py>=0.15,<2', 'aiohttp!=4.0.0a0, !=4.0.0a1', 'anyio>=3.5.0,<5', 'cachetools>=3.1.1,<6', 'cloudpickle>=2.0.0,<3', 'cryptography', 'fsspec>=2024.6.1,<2026', 'importlib_resources>=6.1.1, <7', 'numpy>=1.23,<2', 'packaging>=20.9,<25', 'pandas>=1.0.0,<3', 'pyarrow', 'pyjwt>=2.0.0, <3', 'pytimeparse>=1.1.8,<2', 'pyyaml>=6.0,<7', 'requests', 'retrying>=1.3.3,<2', 's3fs>=2024.6.1,<2026', 'scikit-learn>=1.4,<1.6', 'scipy>=1.9,<2', 'snowflake-connector-python>=3.12.0,<4', 'snowflake-snowpark-python>=1.17.0,<2,!=1.26.0', 'snowflake.core>=1.0.2,<2', 'sqlparse>=0.4,<1', 'typing-extensions>=4.1.0,<5', 'xgboost>=1.7.3,<3']
@@ -147,6 +147,7 @@ class BaseModelSaveOption(TypedDict):
147
147
  embed_local_ml_library: Embedding local SnowML into the code directory of the folder.
148
148
  relax_version: Whether or not relax the version constraints of the dependencies if unresolvable in Warehouse.
149
149
  It detects any ==x.y.z in specifiers and replaced with >=x.y, <(x+1). Defaults to True.
150
+ save_location: Local directory path to save the model and metadata.
150
151
  """
151
152
 
152
153
  embed_local_ml_library: NotRequired[bool]
@@ -154,6 +155,7 @@ class BaseModelSaveOption(TypedDict):
154
155
  function_type: NotRequired[Literal["FUNCTION", "TABLE_FUNCTION"]]
155
156
  method_options: NotRequired[Dict[str, ModelMethodSaveOptions]]
156
157
  enable_explainability: NotRequired[bool]
158
+ save_location: NotRequired[str]
157
159
 
158
160
 
159
161
  class CatBoostModelSaveOptions(BaseModelSaveOption):
@@ -1,3 +1,4 @@
1
+ import os
1
2
  from types import ModuleType
2
3
  from typing import Any, Dict, List, Optional, Tuple, Union
3
4
 
@@ -13,6 +14,7 @@ from snowflake.ml.model._client.model import model_impl, model_version_impl
13
14
  from snowflake.ml.model._client.ops import metadata_ops, model_ops, service_ops
14
15
  from snowflake.ml.model._model_composer import model_composer
15
16
  from snowflake.ml.model._packager.model_meta import model_meta
17
+ from snowflake.ml.modeling._internal import constants
16
18
  from snowflake.snowpark import exceptions as snowpark_exceptions, session
17
19
 
18
20
  logger = logging.getLogger(__name__)
@@ -208,6 +210,14 @@ class ModelManager:
208
210
  if target_platforms:
209
211
  # Convert any string target platforms to TargetPlatform objects
210
212
  platforms = [model_types.TargetPlatform(platform) for platform in target_platforms]
213
+ else:
214
+ # Default the target platform to SPCS if not specified when running in ML runtime
215
+ if os.getenv(constants.IN_ML_RUNTIME_ENV_VAR):
216
+ logger.info(
217
+ "Logging the model on Container Runtime for ML without specifying `target_platforms`. "
218
+ 'Default to `target_platforms=["SNOWPARK_CONTAINER_SERVICES"]`.'
219
+ )
220
+ platforms = [model_types.TargetPlatform.SNOWPARK_CONTAINER_SERVICES]
211
221
 
212
222
  if artifact_repository_map:
213
223
  for channel, artifact_repository_name in artifact_repository_map.items():
@@ -223,8 +233,17 @@ class ModelManager:
223
233
 
224
234
  logger.info("Start packaging and uploading your model. It might take some time based on the size of the model.")
225
235
 
236
+ # Extract save_location from options if present
237
+ save_location = None
238
+ if options and "save_location" in options:
239
+ save_location = options.get("save_location")
240
+ logger.info(f"Model will be saved to local directory: {save_location}")
241
+
226
242
  mc = model_composer.ModelComposer(
227
- self._model_ops._session, stage_path=stage_path, statement_params=statement_params
243
+ self._model_ops._session,
244
+ stage_path=stage_path,
245
+ statement_params=statement_params,
246
+ save_location=save_location,
228
247
  )
229
248
  model_metadata: model_meta.ModelMetadata = mc.save(
230
249
  name=model_name_id.resolved(),
@@ -75,7 +75,9 @@ class Registry:
75
75
  )
76
76
 
77
77
  self._model_manager = model_manager.ModelManager(
78
- session, database_name=self._database_name, schema_name=self._schema_name
78
+ session,
79
+ database_name=self._database_name,
80
+ schema_name=self._schema_name,
79
81
  )
80
82
 
81
83
  self.enable_monitoring = options.get("enable_monitoring", True) if options else True
@@ -181,6 +183,7 @@ class Registry:
181
183
  - target_methods: List of target methods to register when logging the model.
182
184
  This option is not used in MLFlow models. Defaults to None, in which case the model handler's
183
185
  default target methods will be used.
186
+ - save_location: Location to save the model and metadata.
184
187
  - method_options: Per-method saving options. This dictionary has method names as keys and dictionary
185
188
  values with the desired options.
186
189
 
@@ -317,6 +320,7 @@ class Registry:
317
320
  - target_methods: List of target methods to register when logging the model.
318
321
  This option is not used in MLFlow models. Defaults to None, in which case the model handler's
319
322
  default target methods will be used.
323
+ - save_location: Location to save the model and metadata.
320
324
  - method_options: Per-method saving options. This dictionary has method names as keys and dictionary
321
325
  values with the desired options. See the example below.
322
326
 
snowflake/ml/version.py CHANGED
@@ -1 +1 @@
1
- VERSION="1.8.1"
1
+ VERSION="1.8.2"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: snowflake-ml-python
3
- Version: 1.8.1
3
+ Version: 1.8.2
4
4
  Summary: The machine learning client library that is used for interacting with Snowflake to build machine learning solutions.
5
5
  Author-email: "Snowflake, Inc" <support@snowflake.com>
6
6
  License:
@@ -253,6 +253,7 @@ Requires-Dist: scikit-learn<1.6,>=1.4
253
253
  Requires-Dist: scipy<2,>=1.9
254
254
  Requires-Dist: snowflake-connector-python[pandas]<4,>=3.12.0
255
255
  Requires-Dist: snowflake-snowpark-python!=1.26.0,<2,>=1.17.0
256
+ Requires-Dist: snowflake.core<2,>=1.0.2
256
257
  Requires-Dist: sqlparse<1,>=0.4
257
258
  Requires-Dist: typing-extensions<5,>=4.1.0
258
259
  Requires-Dist: xgboost<3,>=1.7.3
@@ -402,7 +403,38 @@ NOTE: Version 1.7.0 is used as example here. Please choose the latest versio
402
403
 
403
404
  # Release History
404
405
 
405
- ## 1.8.1
406
+ ## 1.8.2
407
+
408
+ ### Bug Fixes
409
+
410
+ ### Behavior Change
411
+
412
+ ### New Features
413
+
414
+ - ML Job now available as a PuPr feature
415
+ - ML Job: Add ability to retrieve results for `@remote` decorated functions using
416
+ new `MLJobWithResult.result()` API, which will return the unpickled result
417
+ or raise an exception if the job execution failed.
418
+ - ML Job: Pre-created Snowpark Session is now available inside job payloads using
419
+ `snowflake.snowpark.context.get_active_session()`
420
+ - Registry: Introducing `save_location` to `log_model` using the `options` argument.
421
+ Users can provide the path to write the model version's files that get stored in Snowflake's stage.
422
+ - Registry: Include model dependencies in pip requirements by default when logging in Container Runtime.
423
+
424
+ ```python
425
+ reg.log_model(
426
+ model=...,
427
+ model_name=...,
428
+ version_name=...,
429
+ ...,
430
+ options={"save_location": "./model_directory"},
431
+ )
432
+ ```
433
+
434
+ - ML Job (PrPr): Add `instance_id` argument to `get_logs` and `show_logs` method to support multi node log retrieval
435
+ - ML Job (PrPr): Add `job.get_instance_status(instance_id=...)` API to support multi node status retrieval
436
+
437
+ ## 1.8.1 (03-26-2025)
406
438
 
407
439
  ### Bug Fixes
408
440
 
@@ -422,7 +454,7 @@ NOTE: Version 1.7.0 is used as example here. Please choose the latest versio
422
454
  for cost implications.
423
455
  - Registry: When creating a copy of a `ModelVersion` with `log_model`, raise an exception if unsupported arguments are provided.
424
456
 
425
- ## 1.8.0
457
+ ## 1.8.0 (03-20-2025)
426
458
 
427
459
  ### Bug Fixes
428
460
 
@@ -723,7 +755,6 @@ NOTE: Version 1.7.0 is used as example here. Please choose the the latest versio
723
755
 
724
756
  - Registry: Added support for handling Hugging Face model configurations with auto-mapping functionality.
725
757
  - Registry: Added support for `keras` 3.x model with `tensorflow` and `pytorch` backend
726
- - ML Job (PrPr): Support any serializable (pickleable) argument for `@remote` decorated functions
727
758
 
728
759
  ## 1.7.4 (01-28-2025)
729
760