snowflake-ml-python 1.8.0__py3-none-any.whl → 1.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/ml/jobs/_utils/constants.py +1 -1
- snowflake/ml/jobs/_utils/spec_utils.py +22 -0
- snowflake/ml/jobs/decorators.py +7 -3
- snowflake/ml/jobs/manager.py +17 -2
- snowflake/ml/model/_client/ops/model_ops.py +6 -3
- snowflake/ml/model/_client/ops/service_ops.py +15 -4
- snowflake/ml/model/_client/sql/service.py +2 -0
- snowflake/ml/model/_model_composer/model_manifest/model_manifest.py +1 -2
- snowflake/ml/model/_packager/model_handlers/sklearn.py +1 -4
- snowflake/ml/model/_packager/model_meta/_packaging_requirements.py +1 -1
- snowflake/ml/model/_packager/model_meta/model_meta_schema.py +1 -0
- snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py +1 -1
- snowflake/ml/modeling/_internal/estimator_utils.py +5 -1
- snowflake/ml/registry/registry.py +41 -1
- snowflake/ml/version.py +1 -1
- {snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/METADATA +22 -2
- {snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/RECORD +20 -20
- {snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/WHEEL +1 -1
- {snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/licenses/LICENSE.txt +0 -0
- {snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/top_level.txt +0 -0
snowflake/ml/jobs/_utils/constants.py
CHANGED
@@ -12,7 +12,7 @@ STAGE_VOLUME_MOUNT_PATH = "/mnt/app"
 DEFAULT_IMAGE_REPO = "/snowflake/images/snowflake_images"
 DEFAULT_IMAGE_CPU = "st_plat/runtime/x86/runtime_image/snowbooks"
 DEFAULT_IMAGE_GPU = "st_plat/runtime/x86/generic_gpu/runtime_image/snowbooks"
-DEFAULT_IMAGE_TAG = "0.
+DEFAULT_IMAGE_TAG = "1.0.1"
 DEFAULT_ENTRYPOINT_PATH = "func.py"
 
 # Percent of container memory to allocate for /dev/shm volume
snowflake/ml/jobs/_utils/spec_utils.py
CHANGED
@@ -97,6 +97,7 @@ def generate_service_spec(
     payload: types.UploadedPayload,
     args: Optional[List[str]] = None,
     num_instances: Optional[int] = None,
+    enable_metrics: bool = False,
 ) -> Dict[str, Any]:
     """
     Generate a service specification for a job.
@@ -107,6 +108,7 @@ def generate_service_spec(
         payload: Uploaded job payload
         args: Arguments to pass to entrypoint script
         num_instances: Number of instances for multi-node job
+        enable_metrics: Enable platform metrics for the job
 
     Returns:
         Job service specification
@@ -211,6 +213,16 @@ def generate_service_spec(
         ]
         endpoints.extend(ray_endpoints)
 
+    metrics = []
+    if enable_metrics:
+        # https://docs.snowflake.com/en/developer-guide/snowpark-container-services/monitoring-services#label-spcs-available-platform-metrics
+        metrics = [
+            "system",
+            "status",
+            "network",
+            "storage",
+        ]
+
     spec_dict = {
         "containers": [
             {
@@ -233,6 +245,16 @@ def generate_service_spec(
     }
     if endpoints:
         spec_dict["endpoints"] = endpoints
+    if metrics:
+        spec_dict.update(
+            {
+                "platformMonitor": {
+                    "metricConfig": {
+                        "groups": metrics,
+                    },
+                },
+            }
+        )
 
     # Assemble into service specification dict
     spec = {"spec": spec_dict}
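For orientation, the fragment that `enable_metrics=True` adds to the generated service spec looks roughly like the sketch below. This is a minimal sketch, not the library's own code: `add_platform_metrics` and the container entry are illustrative; only the `platformMonitor`/`metricConfig`/`groups` structure and the four metric groups come from the hunk above.

```python
from typing import Any, Dict, List


def add_platform_metrics(spec_dict: Dict[str, Any], metric_groups: List[str]) -> Dict[str, Any]:
    """Attach an SPCS platformMonitor section listing the metric groups to publish."""
    if metric_groups:
        spec_dict["platformMonitor"] = {"metricConfig": {"groups": metric_groups}}
    return spec_dict


spec = {"containers": [{"name": "main"}]}  # placeholder container entry
add_platform_metrics(spec, ["system", "status", "network", "storage"])
# spec now also carries {"platformMonitor": {"metricConfig": {"groups": [...]}}}
```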
snowflake/ml/jobs/decorators.py
CHANGED
@@ -19,13 +19,15 @@ _ReturnValue = TypeVar("_ReturnValue")
 @telemetry.send_api_usage_telemetry(project=_PROJECT)
 def remote(
     compute_pool: str,
+    *,
     stage_name: str,
     pip_requirements: Optional[List[str]] = None,
     external_access_integrations: Optional[List[str]] = None,
     query_warehouse: Optional[str] = None,
     env_vars: Optional[Dict[str, str]] = None,
-    session: Optional[snowpark.Session] = None,
     num_instances: Optional[int] = None,
+    enable_metrics: bool = False,
+    session: Optional[snowpark.Session] = None,
 ) -> Callable[[Callable[_Args, _ReturnValue]], Callable[_Args, jb.MLJob]]:
     """
     Submit a job to the compute pool.
@@ -37,8 +39,9 @@ def remote(
         external_access_integrations: A list of external access integrations.
         query_warehouse: The query warehouse to use. Defaults to session warehouse.
         env_vars: Environment variables to set in container
-        session: The Snowpark session to use. If none specified, uses active session.
         num_instances: The number of nodes in the job. If none specified, create a single node job.
+        enable_metrics: Whether to enable metrics publishing for the job.
+        session: The Snowpark session to use. If none specified, uses active session.
 
     Returns:
         Decorator that dispatches invocations of the decorated function as remote jobs.
@@ -63,8 +66,9 @@ def remote(
                 external_access_integrations=external_access_integrations,
                 query_warehouse=query_warehouse,
                 env_vars=env_vars,
-                session=session,
                 num_instances=num_instances,
+                enable_metrics=enable_metrics,
+                session=session,
             )
             assert isinstance(job, jb.MLJob), f"Unexpected job type: {type(job)}"
             return job
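A hedged usage sketch of the updated signature: `compute_pool` stays positional while everything after the new `*` must be passed by keyword, and `enable_metrics` opts the job's service into platform metrics. The pool, stage, and function body are placeholders, and the sketch assumes `remote` is re-exported from `snowflake.ml.jobs`.

```python
from snowflake.ml.jobs import remote  # assumes the decorator is re-exported here


@remote(
    "MY_COMPUTE_POOL",            # compute_pool: the only positional argument
    stage_name="payload_stage",   # everything from here on is keyword-only (placeholder stage)
    num_instances=2,              # multi-node job; omit for a single node
    enable_metrics=True,          # publish platform metrics for the job service
)
def train() -> float:
    # placeholder training body
    return 0.0


job = train()  # dispatches the call as an ML Job and returns an MLJob handle
```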
snowflake/ml/jobs/manager.py
CHANGED
@@ -106,6 +106,8 @@ def submit_file(
     external_access_integrations: Optional[List[str]] = None,
     query_warehouse: Optional[str] = None,
     spec_overrides: Optional[Dict[str, Any]] = None,
+    num_instances: Optional[int] = None,
+    enable_metrics: bool = False,
     session: Optional[snowpark.Session] = None,
 ) -> jb.MLJob:
     """
@@ -121,6 +123,8 @@ def submit_file(
         external_access_integrations: A list of external access integrations.
         query_warehouse: The query warehouse to use. Defaults to session warehouse.
         spec_overrides: Custom service specification overrides to apply.
+        num_instances: The number of instances to use for the job. If none specified, single node job is created.
+        enable_metrics: Whether to enable metrics publishing for the job.
         session: The Snowpark session to use. If none specified, uses active session.
 
     Returns:
@@ -136,6 +140,8 @@ def submit_file(
         external_access_integrations=external_access_integrations,
         query_warehouse=query_warehouse,
        spec_overrides=spec_overrides,
+        num_instances=num_instances,
+        enable_metrics=enable_metrics,
         session=session,
     )
 
@@ -154,6 +160,8 @@ def submit_directory(
     external_access_integrations: Optional[List[str]] = None,
     query_warehouse: Optional[str] = None,
     spec_overrides: Optional[Dict[str, Any]] = None,
+    num_instances: Optional[int] = None,
+    enable_metrics: bool = False,
     session: Optional[snowpark.Session] = None,
 ) -> jb.MLJob:
     """
@@ -170,6 +178,8 @@ def submit_directory(
         external_access_integrations: A list of external access integrations.
         query_warehouse: The query warehouse to use. Defaults to session warehouse.
         spec_overrides: Custom service specification overrides to apply.
+        num_instances: The number of instances to use for the job. If none specified, single node job is created.
+        enable_metrics: Whether to enable metrics publishing for the job.
         session: The Snowpark session to use. If none specified, uses active session.
 
     Returns:
@@ -186,6 +196,8 @@ def submit_directory(
         external_access_integrations=external_access_integrations,
         query_warehouse=query_warehouse,
         spec_overrides=spec_overrides,
+        num_instances=num_instances,
+        enable_metrics=enable_metrics,
         session=session,
     )
 
@@ -212,8 +224,9 @@ def _submit_job(
     external_access_integrations: Optional[List[str]] = None,
     query_warehouse: Optional[str] = None,
     spec_overrides: Optional[Dict[str, Any]] = None,
-    session: Optional[snowpark.Session] = None,
     num_instances: Optional[int] = None,
+    enable_metrics: bool = False,
+    session: Optional[snowpark.Session] = None,
 ) -> jb.MLJob:
     """
     Submit a job to the compute pool.
@@ -229,8 +242,9 @@ def _submit_job(
         external_access_integrations: A list of external access integrations.
         query_warehouse: The query warehouse to use. Defaults to session warehouse.
         spec_overrides: Custom service specification overrides to apply.
-        session: The Snowpark session to use. If none specified, uses active session.
         num_instances: The number of instances to use for the job. If none specified, single node job is created.
+        enable_metrics: Whether to enable metrics publishing for the job.
+        session: The Snowpark session to use. If none specified, uses active session.
 
     Returns:
         An object representing the submitted job.
@@ -257,6 +271,7 @@ def _submit_job(
         payload=uploaded_payload,
         args=args,
         num_instances=num_instances,
+        enable_metrics=enable_metrics,
     )
     spec_overrides = spec_utils.generate_spec_overrides(
         environment_vars=env_vars,
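A hedged sketch of the corresponding change to the file/directory submission APIs. The entrypoint path, pool, and stage are placeholders, the import assumes `submit_file` is exported from `snowflake.ml.jobs`, and the leading positional parameters are assumed to be the source path and compute pool; `num_instances` and `enable_metrics` are the parameters added in this release.

```python
from snowflake.ml import jobs  # assumes submit_file is exported from snowflake.ml.jobs

job = jobs.submit_file(
    "train.py",                  # local entrypoint script (placeholder)
    "MY_COMPUTE_POOL",           # compute pool (placeholder)
    stage_name="payload_stage",
    num_instances=2,             # new in 1.8.1: multi-node job
    enable_metrics=True,         # new in 1.8.1: publish service metrics to the Event Table
)
# `job` is an MLJob handle, as with the decorator above.
```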
snowflake/ml/model/_client/ops/model_ops.py
CHANGED
@@ -789,14 +789,17 @@ class ModelOperator:
         version_name: sql_identifier.SqlIdentifier,
         statement_params: Optional[Dict[str, Any]] = None,
     ) -> type_hints.Task:
-
+        model_version = self._model_client.show_versions(
             database_name=database_name,
             schema_name=schema_name,
             model_name=model_name,
             version_name=version_name,
+            validate_result=True,
             statement_params=statement_params,
-        )
-
+        )[0]
+
+        model_attributes = json.loads(model_version.model_attributes)
+        task_val = model_attributes.get("task", type_hints.Task.UNKNOWN.value)
         return type_hints.Task(task_val)
 
     def get_functions(
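A standalone sketch of the lookup logic above, for readers skimming the hunk: the version row's `model_attributes` column holds JSON, and its `"task"` key (when present) is mapped onto the `Task` enum, falling back to `UNKNOWN`. The import path is an assumption based on the module layout in this wheel; the helper name is illustrative.

```python
import json

from snowflake.ml.model import type_hints  # assumed import path for the Task enum


def task_from_model_attributes(model_attributes_json: str) -> type_hints.Task:
    """Mirror of the hunk above: read the "task" key, defaulting to UNKNOWN."""
    attrs = json.loads(model_attributes_json)
    return type_hints.Task(attrs.get("task", type_hints.Task.UNKNOWN.value))


task_from_model_attributes("{}")  # no "task" key -> type_hints.Task.UNKNOWN
```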
snowflake/ml/model/_client/ops/service_ops.py
CHANGED
@@ -161,12 +161,16 @@ class ServiceOperator:
             statement_params=statement_params,
         )
 
-        # check if the inference service is already running
+        # check if the inference service is already running/suspended
         model_inference_service_exists = self._check_if_service_exists(
             database_name=service_database_name,
             schema_name=service_schema_name,
             service_name=service_name,
-            service_status_list_if_exists=[
+            service_status_list_if_exists=[
+                service_sql.ServiceStatus.READY,
+                service_sql.ServiceStatus.SUSPENDING,
+                service_sql.ServiceStatus.SUSPENDED,
+            ],
             statement_params=statement_params,
         )
 
@@ -309,7 +313,10 @@ class ServiceOperator:
                     set_service_log_metadata_to_model_inference(
                         service_log_meta,
                         model_inference_service,
-
+                        (
+                            "Model Inference image build is not rebuilding the image, but using a previously built "
+                            "image."
+                        ),
                     )
                     continue
 
@@ -366,7 +373,9 @@ class ServiceOperator:
             time.sleep(5)
 
         if model_inference_service_exists:
-            module_logger.info(
+            module_logger.info(
+                f"Inference service {model_inference_service.display_service_name} has already been deployed."
+            )
         else:
             self._finalize_logs(
                 service_log_meta.service_logger, service_log_meta.service, service_log_meta.log_offset, statement_params
@@ -416,6 +425,8 @@ class ServiceOperator:
         service_status_list_if_exists = [
            service_sql.ServiceStatus.PENDING,
            service_sql.ServiceStatus.READY,
+            service_sql.ServiceStatus.SUSPENDING,
+            service_sql.ServiceStatus.SUSPENDED,
            service_sql.ServiceStatus.DONE,
            service_sql.ServiceStatus.FAILED,
         ]
snowflake/ml/model/_client/sql/service.py
CHANGED
@@ -20,6 +20,8 @@ class ServiceStatus(enum.Enum):
     UNKNOWN = "UNKNOWN"  # status is unknown because we have not received enough data from K8s yet.
     PENDING = "PENDING"  # resource set is being created, can't be used yet
     READY = "READY"  # resource set has been deployed.
+    SUSPENDING = "SUSPENDING"  # the service is set to suspended but the resource set is still in deleting state
+    SUSPENDED = "SUSPENDED"  # the service is suspended and the resource set is deleted
     DELETING = "DELETING"  # resource set is being deleted
     FAILED = "FAILED"  # resource set has failed and cannot be used anymore
     DONE = "DONE"  # resource set has finished running
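A rough sketch of how the expanded enum is used by the service_ops changes above: a pre-existing inference service now counts as reusable when it is READY or in either suspension state, so deployment resumes a suspended service instead of failing. The helper and set names are illustrative; the import path is the private module shown in this hunk.

```python
from snowflake.ml.model._client.sql.service import ServiceStatus  # private module shown above

# Statuses that the deployment path now treats as "service already exists".
REUSABLE_STATUSES = {
    ServiceStatus.READY,
    ServiceStatus.SUSPENDING,
    ServiceStatus.SUSPENDED,
}


def service_reusable(status: ServiceStatus) -> bool:
    """Illustrative helper: True when an existing service should be reused/resumed."""
    return status in REUSABLE_STATUSES
```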
snowflake/ml/model/_model_composer/model_manifest/model_manifest.py
CHANGED
@@ -36,7 +36,6 @@ class ModelManifest:
     """
 
     MANIFEST_FILE_REL_PATH = "MANIFEST.yml"
-    _ENABLE_USER_FILES = False
     _DEFAULT_RUNTIME_NAME = "python_runtime"
 
     def __init__(self, workspace_path: pathlib.Path) -> None:
@@ -149,7 +148,7 @@ class ModelManifest:
             ],
         )
 
-        if self.
+        if self.user_files:
             manifest_dict["user_files"] = [user_file.save(self.workspace_path) for user_file in self.user_files]
 
         lineage_sources = self._extract_lineage_info(data_sources)
snowflake/ml/model/_packager/model_handlers/sklearn.py
CHANGED
@@ -57,6 +57,7 @@ class SKLModelHandler(_base.BaseModelHandler[Union["sklearn.base.BaseEstimator",
         "predict_proba",
         "predict_log_proba",
         "decision_function",
+        "score_samples",
     ]
     EXPLAIN_TARGET_METHODS = ["predict", "predict_proba", "predict_log_proba"]
 
@@ -74,10 +75,6 @@ class SKLModelHandler(_base.BaseModelHandler[Union["sklearn.base.BaseEstimator",
             and (
                 not type_utils.LazyType("lightgbm.LGBMModel").isinstance(model)
             )  # LGBMModel is actually a BaseEstimator
-            and any(
-                (hasattr(model, method) and callable(getattr(model, method, None)))
-                for method in cls.DEFAULT_TARGET_METHODS
-            )
         )
 
     @classmethod
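A hedged end-to-end sketch of what this unblocks, per the 1.8.1 release notes: estimators whose only inference method is `score_samples` (for example `sklearn.neighbors.KernelDensity`) can now be logged without hitting the "unsupported model type" path. The registry/session setup and the model and version names are placeholders.

```python
import numpy as np
from sklearn.neighbors import KernelDensity

from snowflake.ml.registry import Registry

X = np.random.RandomState(0).normal(size=(100, 4))
kde = KernelDensity().fit(X)  # exposes score_samples but no predict/transform

reg = Registry(session=session)  # `session` is an existing Snowpark session (placeholder)
mv = reg.log_model(
    kde,
    model_name="kde_model",
    version_name="v1",
    sample_input_data=X[:10],  # used to infer the score_samples signature
)
```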
snowflake/ml/model/_packager/model_meta/_packaging_requirements.py
CHANGED
@@ -1 +1 @@
-REQUIREMENTS = ['cloudpickle>=2.0.0']
+REQUIREMENTS = ['cloudpickle>=2.0.0,<3']
snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py
CHANGED
@@ -1 +1 @@
-REQUIREMENTS = ['absl-py>=0.15,<2', 'aiohttp!=4.0.0a0, !=4.0.0a1', 'anyio>=3.5.0,<5', 'cachetools>=3.1.1,<6', 'cloudpickle>=2.0.0', 'cryptography', 'fsspec>=2024.6.1,<2026', 'importlib_resources>=6.1.1, <7', 'numpy>=1.23,<2', 'packaging>=20.9,<25', 'pandas>=1.0.0,<3', 'pyarrow', 'pyjwt>=2.0.0, <3', 'pytimeparse>=1.1.8,<2', 'pyyaml>=6.0,<7', 'requests', 'retrying>=1.3.3,<2', 's3fs>=2024.6.1,<2026', 'scikit-learn>=1.4,<1.6', 'scipy>=1.9,<2', 'snowflake-connector-python>=3.12.0,<4', 'snowflake-snowpark-python>=1.17.0,<2,!=1.26.0', 'sqlparse>=0.4,<1', 'typing-extensions>=4.1.0,<5', 'xgboost>=1.7.3,<3']
+REQUIREMENTS = ['absl-py>=0.15,<2', 'aiohttp!=4.0.0a0, !=4.0.0a1', 'anyio>=3.5.0,<5', 'cachetools>=3.1.1,<6', 'cloudpickle>=2.0.0,<3', 'cryptography', 'fsspec>=2024.6.1,<2026', 'importlib_resources>=6.1.1, <7', 'numpy>=1.23,<2', 'packaging>=20.9,<25', 'pandas>=1.0.0,<3', 'pyarrow', 'pyjwt>=2.0.0, <3', 'pytimeparse>=1.1.8,<2', 'pyyaml>=6.0,<7', 'requests', 'retrying>=1.3.3,<2', 's3fs>=2024.6.1,<2026', 'scikit-learn>=1.4,<1.6', 'scipy>=1.9,<2', 'snowflake-connector-python>=3.12.0,<4', 'snowflake-snowpark-python>=1.17.0,<2,!=1.26.0', 'sqlparse>=0.4,<1', 'typing-extensions>=4.1.0,<5', 'xgboost>=1.7.3,<3']
snowflake/ml/modeling/_internal/estimator_utils.py
CHANGED
@@ -130,7 +130,11 @@ def is_single_node(session: Session) -> bool:
     warehouse_name = session.get_current_warehouse()
     if warehouse_name:
         warehouse_name = warehouse_name.replace('"', "")
-
+        df_list = session.sql(f"SHOW WAREHOUSES like '{warehouse_name}';")['"type"', '"size"'].collect()
+        # If no warehouse data is found, default to True (single node)
+        if not df_list:
+            return True
+        df = df_list[0]
         # filter out the conditions when it is single node
         single_node: bool = (df[0] == "SNOWPARK-OPTIMIZED" and df[1] == "Medium") or (
             df[0] == "STANDARD" and df[1] == "X-Small"
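The decision logic itself is unchanged; only the empty-result guard is new. Below is a simplified, standalone restatement of that guard (the function name is illustrative; the type/size conditions are copied from the hunk above).

```python
from typing import Sequence, Tuple


def is_single_node_from_rows(rows: Sequence[Tuple[str, str]]) -> bool:
    """rows: (type, size) pairs from SHOW WAREHOUSES for the current warehouse."""
    if not rows:
        return True  # no warehouse data found -> default to single node
    wh_type, wh_size = rows[0]
    return (wh_type == "SNOWPARK-OPTIMIZED" and wh_size == "Medium") or (
        wh_type == "STANDARD" and wh_size == "X-Small"
    )
```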
snowflake/ml/registry/registry.py
CHANGED
@@ -113,8 +113,10 @@ class Registry:
         python_version: Optional[str] = None,
         signatures: Optional[Dict[str, model_signature.ModelSignature]] = None,
         sample_input_data: Optional[model_types.SupportedDataType] = None,
+        user_files: Optional[Dict[str, List[str]]] = None,
         code_paths: Optional[List[str]] = None,
         ext_modules: Optional[List[ModuleType]] = None,
+        task: model_types.Task = model_types.Task.UNKNOWN,
         options: Optional[model_types.ModelSaveOption] = None,
     ) -> ModelVersion:
         """
@@ -156,10 +158,15 @@ class Registry:
                 infer the signature. If not None, sample_input_data should not be specified. Defaults to None.
             sample_input_data: Sample input data to infer model signatures from.
                 It would also be used as background data in explanation and to capture data lineage. Defaults to None.
+            user_files: Dictionary where the keys are subdirectories, and values are lists of local file name
+                strings. The local file name strings can include wildcards (? or *) for matching multiple files.
             code_paths: List of directories containing code to import. Defaults to None.
             ext_modules: List of external modules to pickle with the model object.
                 Only supported when logging the following types of model:
                 Scikit-learn, Snowpark ML, PyTorch, TorchScript and Custom Model. Defaults to None.
+            task: The task of the Model Version. It is an enum class Task with values TABULAR_REGRESSION,
+                TABULAR_BINARY_CLASSIFICATION, TABULAR_MULTI_CLASSIFICATION, TABULAR_RANKING, or UNKNOWN. By default,
+                it is set to Task.UNKNOWN and may be overridden by inferring from the Model Object.
             options (Dict[str, Any], optional): Additional model saving options.
 
                 Model Saving Options include:
@@ -171,6 +178,9 @@ class Registry:
                     Warehouse. It detects any ==x.y.z in specifiers and replaced with >=x.y, <(x+1). Defaults to True.
                 - function_type: Set the method function type globally. To set method function types individually see
                     function_type in model_options.
+                - target_methods: List of target methods to register when logging the model.
+                    This option is not used in MLFlow models. Defaults to None, in which case the model handler's
+                    default target methods will be used.
                 - method_options: Per-method saving options. This dictionary has method names as keys and dictionary
                     values with the desired options.
 
@@ -304,6 +314,9 @@ class Registry:
                     Warehouse. It detects any ==x.y.z in specifiers and replaced with >=x.y, <(x+1). Defaults to True.
                 - function_type: Set the method function type globally. To set method function types individually see
                     function_type in model_options.
+                - target_methods: List of target methods to register when logging the model.
+                    This option is not used in MLFlow models. Defaults to None, in which case the model handler's
+                    default target methods will be used.
                 - method_options: Per-method saving options. This dictionary has method names as keys and dictionary
                     values with the desired options. See the example below.
 
@@ -317,6 +330,9 @@ class Registry:
                         Defaults to None, determined automatically by Snowflake.
                     - function_type: One of supported model method function types (FUNCTION or TABLE_FUNCTION).
 
+        Raises:
+            ValueError: If extra arguments are specified ModelVersion is provided.
+
         Returns:
             ModelVersion: ModelVersion object corresponding to the model just logged.
 
@@ -339,13 +355,37 @@ class Registry:
             registry.log_model(
                 model=model,
                 model_name="my_model",
-
+                options={"method_options": method_options},
             )
         """
         statement_params = telemetry.get_statement_params(
             project=_TELEMETRY_PROJECT,
             subproject=_MODEL_TELEMETRY_SUBPROJECT,
         )
+        if isinstance(model, ModelVersion):
+            # check that no arguments are provided other than the ones for copy model.
+            invalid_args = [
+                comment,
+                conda_dependencies,
+                pip_requirements,
+                artifact_repository_map,
+                target_platforms,
+                python_version,
+                signatures,
+                sample_input_data,
+                user_files,
+                code_paths,
+                ext_modules,
+                options,
+            ]
+            for arg in invalid_args:
+                if arg is not None:
+                    raise ValueError(
+                        "When calling log_model with a ModelVersion, only model_name and version_name may be specified."
+                    )
+            if task is not model_types.Task.UNKNOWN:
+                raise ValueError("`task` cannot be specified when calling log_model with a ModelVersion.")
+
         if pip_requirements:
             warnings.warn(
                 "Models logged specifying `pip_requirements` can not be executed "
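A hedged usage sketch of the new guard: when `log_model` is given an existing `ModelVersion` to copy, only the target `model_name` and `version_name` are accepted, and anything else (including a non-default `task`) raises `ValueError`. `reg`, the model names, and the `get_model(...).version(...)` lookup are placeholders/assumptions.

```python
source_mv = reg.get_model("MY_MODEL").version("V1")  # existing ModelVersion (lookup is an assumption)

# Copying a version: only the target name and version may be specified.
copy_mv = reg.log_model(source_mv, model_name="MY_MODEL_COPY", version_name="V1")

# Any extra argument now fails fast instead of being silently dropped.
try:
    reg.log_model(source_mv, model_name="MY_MODEL_COPY", version_name="V2", comment="not allowed")
except ValueError as exc:
    print(exc)  # only model_name and version_name may be specified
```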
snowflake/ml/version.py
CHANGED
@@ -1 +1 @@
-VERSION="1.8.0"
+VERSION="1.8.1"
{snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: snowflake-ml-python
-Version: 1.8.0
+Version: 1.8.1
 Summary: The machine learning client library that is used for interacting with Snowflake to build machine learning solutions.
 Author-email: "Snowflake, Inc" <support@snowflake.com>
 License:
@@ -236,7 +236,7 @@ License-File: LICENSE.txt
 Requires-Dist: absl-py<2,>=0.15
 Requires-Dist: anyio<5,>=3.5.0
 Requires-Dist: cachetools<6,>=3.1.1
-Requires-Dist: cloudpickle
+Requires-Dist: cloudpickle<3,>=2.0.0
 Requires-Dist: cryptography
 Requires-Dist: fsspec[http]<2026,>=2024.6.1
 Requires-Dist: importlib_resources<7,>=6.1.1
@@ -402,6 +402,26 @@ NOTE: Version 1.7.0 is used as example here. Please choose the the latest versio
 
 # Release History
 
+## 1.8.1
+
+### Bug Fixes
+
+- Registry: Fix a bug that caused `unsupported model type` error while logging a sklearn model with `score_samples`
+  inference method.
+- Registry: Fix a bug that model inference service creation fails on an existing and suspended service.
+
+### Behavior Change
+
+### New Features
+
+- ML Job (PrPr): Update Container Runtime image version to `1.0.1`
+- ML Job (PrPr): Add `enable_metrics` argument to job submission APIs to enable publishing service metrics to Event Table.
+  See [Accessing Event Table service metrics](https://docs.snowflake.com/en/developer-guide/snowpark-container-services/monitoring-services#accessing-event-table-service-metrics)
+  for retrieving published metrics
+  and [Costs of telemetry data collection](https://docs.snowflake.com/en/developer-guide/logging-tracing/logging-tracing-billing)
+  for cost implications.
+- Registry: When creating a copy of a `ModelVersion` with `log_model`, raise an exception if unsupported arguments are provided.
+
 ## 1.8.0
 
 ### Bug Fixes
{snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/RECORD
RENAMED
@@ -10,7 +10,7 @@ snowflake/cortex/_sse_client.py,sha256=sLYgqAfTOPADCnaWH2RWAJi8KbU_7gSRsTUDcDD5T
 snowflake/cortex/_summarize.py,sha256=7GH8zqfIdOiHA5w4b6EvJEKEWhaTrL4YA6iDGbn7BNM,1307
 snowflake/cortex/_translate.py,sha256=9ZGjvAnJFisbzJ_bXnt4pyug5UzhHJRXW8AhGQEersM,1652
 snowflake/cortex/_util.py,sha256=cwRGgrcUo3E05ZaIDT9436vXLQ7GfuBVAjR0QeQ2bDE,3320
-snowflake/ml/version.py,sha256=
+snowflake/ml/version.py,sha256=C2u-bHpRGJyrAk-5y4pGUw5ooUq1x-CZOlRM4RSzGbQ,16
 snowflake/ml/_internal/env.py,sha256=kCrJTRnqQ97VGUVI1cWUPD8HuBWeL5vOOtwUR0NB9Mg,161
 snowflake/ml/_internal/env_utils.py,sha256=5ps0v7c655lXsFVfnASxIyEwiVSGxZXke-VjeAWDs0A,27866
 snowflake/ml/_internal/file_utils.py,sha256=R3GRaKzJPLOa1yq9E55rhxUr59DztZlIqAnaqcZ1HfE,14275
@@ -94,12 +94,12 @@ snowflake/ml/fileset/sfcfs.py,sha256=uPn8v5jlC3h_FrNqb4UMRAZjRZLn0I3tzu0sfi5RHik
 snowflake/ml/fileset/snowfs.py,sha256=uF5QluYtiJ-HezGIhF55dONi3t0E6N7ByaVAIAlM3nk,5133
 snowflake/ml/fileset/stage_fs.py,sha256=IEVZ6imH77JiSOIRlRHNWalwafoACRgHFr8RAaICSP8,20170
 snowflake/ml/jobs/__init__.py,sha256=ORX_0blPSpl9u5442R-i4e8cqWYfO_vVjFFtX3as184,420
-snowflake/ml/jobs/decorators.py,sha256=
+snowflake/ml/jobs/decorators.py,sha256=mSlzA6n9Xb1uMJrXNtOz9SiZStWp0_TQLVLNbGisdrs,3134
 snowflake/ml/jobs/job.py,sha256=dWILWTNaumgdqs6P58xl4PdyoARU9Sk89OMfRU39SQE,4239
-snowflake/ml/jobs/manager.py,sha256=
-snowflake/ml/jobs/_utils/constants.py,sha256=
+snowflake/ml/jobs/manager.py,sha256=4E0LoFdWSWktNr6K63sfBCDvM0M_U9M2Kkk22i0gO7w,12136
+snowflake/ml/jobs/_utils/constants.py,sha256=2GK7EMTYQgXMvD7KAxLKRibfsRVgOVmZclkFnRBai80,3242
 snowflake/ml/jobs/_utils/payload_utils.py,sha256=RC9vJt2CKmrkxzdWD7vEw0D7xWhXAC6RnuvS-0Ly_yY,20421
-snowflake/ml/jobs/_utils/spec_utils.py,sha256=
+snowflake/ml/jobs/_utils/spec_utils.py,sha256=6hiwM5sve1f1xg2gDZdOEnXlHgGU_FQXqMsTZLzpAIY,12893
 snowflake/ml/jobs/_utils/types.py,sha256=sEV-jCA-0Bb8KdKMBZGrIdqJhSWGb6QI8pYRL56dIDA,874
 snowflake/ml/lineage/__init__.py,sha256=8p1YGynC-qOxAZ8jZX2z84Reg5bv1NoJMoJmNJCrzI4,65
 snowflake/ml/lineage/lineage_node.py,sha256=e6L4bdYDSVgTv0BEfqgPQWNoDiTiuI7HmfJ6n-WmNLE,5812
@@ -110,18 +110,18 @@ snowflake/ml/model/type_hints.py,sha256=fvHjtvnt_CAsXAn3q4DDQlxCn6wqIEHypHh93Ce3
 snowflake/ml/model/_client/model/model_impl.py,sha256=pqjK8mSZIQJ_30tRWWFPIo8X35InSVoAunXlQNtSJEM,15369
 snowflake/ml/model/_client/model/model_version_impl.py,sha256=kslv-oOyh5OvoG8BjNBl_t4mqRjMn-kLpla6CdJRBaA,40272
 snowflake/ml/model/_client/ops/metadata_ops.py,sha256=7cGx8zYzye2_cvZnyGxoukPtT6Q-Kexd-s4yeZmpmj8,4890
-snowflake/ml/model/_client/ops/model_ops.py,sha256=
-snowflake/ml/model/_client/ops/service_ops.py,sha256=
+snowflake/ml/model/_client/ops/model_ops.py,sha256=N6xuikPmuK7jpag3CluKyz4bu00oJbMC3_266T2nBLQ,47990
+snowflake/ml/model/_client/ops/service_ops.py,sha256=d3wxFLe4qqPpzIpO-3bYIOZi88wjFxjhvWWw-_u5vv0,19194
 snowflake/ml/model/_client/service/model_deployment_spec.py,sha256=K1MkVFgATk_OHCV68QR0jNF1lkY7brDxv7yVhQhK7eY,4599
 snowflake/ml/model/_client/service/model_deployment_spec_schema.py,sha256=eaulF6OFNuDfQz3oPYlDjP26Ww2jWWatm81dCbg602E,825
 snowflake/ml/model/_client/sql/_base.py,sha256=Qrm8M92g3MHb-QnSLUlbd8iVKCRxLhG_zr5M2qmXwJ8,1473
 snowflake/ml/model/_client/sql/model.py,sha256=o36oPq4aU9TwahqY2uODYvICxmj1orLztijJ0yMbWnM,5852
 snowflake/ml/model/_client/sql/model_version.py,sha256=R0TnsRta7tSmd7RBphERzBKXpYBxOhec1CefW6VWrVE,23543
-snowflake/ml/model/_client/sql/service.py,sha256=
+snowflake/ml/model/_client/sql/service.py,sha256=RIMo8SFyhm3XLNJ3-gGKQo67nP8KZV_dQnPt--aLdqU,11425
 snowflake/ml/model/_client/sql/stage.py,sha256=165vyAtrScSQWJB8wLXKRUO1QvHTWDmPykeWOyxrDRg,826
 snowflake/ml/model/_client/sql/tag.py,sha256=pwwrcyPtSnkUfDzL3M8kqM0KSx7CaTtgty3HDhVC9vg,4345
 snowflake/ml/model/_model_composer/model_composer.py,sha256=g96CQx3sh75VlPdhKQfA6Hzr6CzyZ-s0T3XaiFfr3A4,9325
-snowflake/ml/model/_model_composer/model_manifest/model_manifest.py,sha256=
+snowflake/ml/model/_model_composer/model_manifest/model_manifest.py,sha256=d3Ej_OzQkfSugx15m5zrheBktKVod5ZDtjrd3O5bTU8,8984
 snowflake/ml/model/_model_composer/model_manifest/model_manifest_schema.py,sha256=s71r-JGI-9aPpA7dDiic5bF2y-7m18UgHipqNhn9aS4,2836
 snowflake/ml/model/_model_composer/model_method/constants.py,sha256=hoJwIopSdZiYn0fGq15_NiirC0l02d5LEs2D-4J_tPk,35
 snowflake/ml/model/_model_composer/model_method/function_generator.py,sha256=nnUJki3bJVCTF3gZ-usZW3xQ6wwlJ08EfNsPAgsnI3s,2625
@@ -143,7 +143,7 @@ snowflake/ml/model/_packager/model_handlers/lightgbm.py,sha256=qm_Q7FxD19r1fFnSt
 snowflake/ml/model/_packager/model_handlers/mlflow.py,sha256=A3HnCa065jtHsRM40ZxfLv5alk0RYhVmsU4Jt2klRwQ,9189
 snowflake/ml/model/_packager/model_handlers/pytorch.py,sha256=FamqiwFhtIwlPeb6CoQD2Xkout7f5wKVugvWFX98DV0,9790
 snowflake/ml/model/_packager/model_handlers/sentence_transformers.py,sha256=EKgpN6e4c8bi0znnV-pWzAR3cwDvORcsL72x6o-JPqA,11381
-snowflake/ml/model/_packager/model_handlers/sklearn.py,sha256=
+snowflake/ml/model/_packager/model_handlers/sklearn.py,sha256=v2ZSQ_MWrORiKvUpdjY_SwzhpY4vxOyARJQJuQnxKdw,15443
 snowflake/ml/model/_packager/model_handlers/snowmlmodel.py,sha256=Jx6MnlfNGdPcBUcLcSs-E1yaWCB4hM3OroeGZb2kE2I,17185
 snowflake/ml/model/_packager/model_handlers/tensorflow.py,sha256=Fr_iqjJf69_az7uUSagt9qB0ipkq4f1AkjeEGw7PcI4,11205
 snowflake/ml/model/_packager/model_handlers/torchscript.py,sha256=Tmkpj_4RAlz3r8YvEQvtTTcB_q30UpeLymPGkHmQ390,9536
@@ -153,14 +153,14 @@ snowflake/ml/model/_packager/model_handlers_migrator/pytorch_migrator_2023_12_01
 snowflake/ml/model/_packager/model_handlers_migrator/tensorflow_migrator_2023_12_01.py,sha256=dXIisQteU55QMw5OvC_1E_sGqFgE88WRhGCWFqUyauM,2239
 snowflake/ml/model/_packager/model_handlers_migrator/tensorflow_migrator_2025_01_01.py,sha256=0DxwZtXFgXpxb5LQEAfTUfEFV7zgbG4j3F-oNHLkTgE,769
 snowflake/ml/model/_packager/model_handlers_migrator/torchscript_migrator_2023_12_01.py,sha256=MDOAGV6kML9sJh_hnYjnrPH4GtECP5DDCjaRT7NmYpU,768
-snowflake/ml/model/_packager/model_meta/_packaging_requirements.py,sha256=
+snowflake/ml/model/_packager/model_meta/_packaging_requirements.py,sha256=E8LiAHewa-lsm6_SL6d9AcpO0m23fYdsKPXOevmHjB8,41
 snowflake/ml/model/_packager/model_meta/model_blob_meta.py,sha256=GmiqqI-XVjrOX7cSa5GKerKhfHptlsg74MKqTGwJ5Jk,1949
 snowflake/ml/model/_packager/model_meta/model_meta.py,sha256=kYfCyX8Q7tlpoxrXNsryKJ_XZDrMFHZ8fBbZX3XkMhA,19332
-snowflake/ml/model/_packager/model_meta/model_meta_schema.py,sha256=
+snowflake/ml/model/_packager/model_meta/model_meta_schema.py,sha256=xWMbdWcDBzC-ZwFgA_hPK60o91AserkR-DpXEEKYK_w,3551
 snowflake/ml/model/_packager/model_meta_migrator/base_migrator.py,sha256=SORlqpPbOeBg6dvJ3DidHeLVi0w9YF0Zv4tC0Kbc20g,1311
 snowflake/ml/model/_packager/model_meta_migrator/migrator_plans.py,sha256=nf6PWDH_gvX_OiS4A-G6BzyCLFEG4dASU0t5JTsijM4,1041
 snowflake/ml/model/_packager/model_meta_migrator/migrator_v1.py,sha256=qEPzdCw_FzExMbPuyFHupeWlYD88yejLdcmkPwjJzDk,2070
-snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py,sha256=
+snowflake/ml/model/_packager/model_runtime/_snowml_inference_alternative_requirements.py,sha256=O5c8oSNb_2nwjNZ_t9YrY-R-33Gsa_vlOpW4ub6SGsc,609
 snowflake/ml/model/_packager/model_runtime/model_runtime.py,sha256=0l8mgrfmpvTn516Id6xgIG4jIqxSy9nN2PFlnqOljiI,5365
 snowflake/ml/model/_packager/model_task/model_task_utils.py,sha256=_nm3Irl5W6Oa8_OnJyp3bLeA9QAbV9ygGCsgHI70GX4,6641
 snowflake/ml/model/_signatures/base_handler.py,sha256=4CTZKKbg4WIz_CmXjyVy8tKZW-5OFcz0J8XVPHm2dfQ,1269
@@ -175,7 +175,7 @@ snowflake/ml/model/_signatures/tensorflow_handler.py,sha256=_yrvMg-w_jJoYuyrGXKP
 snowflake/ml/model/_signatures/utils.py,sha256=gHEU2u8VCil0wvmd9C61ZNB-KiNz4QazpC7-0XQPHd8,17176
 snowflake/ml/model/models/huggingface_pipeline.py,sha256=62GpPZxBheqCnFNxNOggiDE1y9Dhst-v6D4IkGLuDeQ,10221
 snowflake/ml/modeling/_internal/constants.py,sha256=aJGngY599w3KqN8cDZCYrjbWe6UwYIbgv0gx0Ukdtc0,105
-snowflake/ml/modeling/_internal/estimator_utils.py,sha256=
+snowflake/ml/modeling/_internal/estimator_utils.py,sha256=jpiq6h6mJfPa1yZbEjrP1tEFdw-1f_XBxDRHgdH3hps,12017
 snowflake/ml/modeling/_internal/model_specifications.py,sha256=P9duVMP9-X7us_RZFPyXvWxOrm5K30sWDVYwSMEzG1M,4876
 snowflake/ml/modeling/_internal/model_trainer.py,sha256=RxpZ5ARy_3sfRMCvArkdK-KmsdbNXxEZTbXoaJ4c1ag,984
 snowflake/ml/modeling/_internal/model_trainer_builder.py,sha256=n1l9i9LFLcdbMFRvxkWNIs7kYnNNlUJnaToRvFBEjls,8062
@@ -401,14 +401,14 @@ snowflake/ml/monitoring/_client/queries/rmse.ssql,sha256=OEJiSStRz9-qKoZaFvmubtY
 snowflake/ml/monitoring/_manager/model_monitor_manager.py,sha256=_-vxqnHqohTHTrwfURjPXijyAeh1mTRdHCG436GaBik,10314
 snowflake/ml/monitoring/entities/model_monitor_config.py,sha256=IxEiee1HfBXCQGzJOZbrDrvoV8J1tDNk43ygNuN00Io,1793
 snowflake/ml/registry/__init__.py,sha256=XdPQK9ejYkSJVrSQ7HD3jKQO0hKq2mC4bPCB6qrtH3U,76
-snowflake/ml/registry/registry.py,sha256=
+snowflake/ml/registry/registry.py,sha256=eAV9AreGwO4JSyRn3Pftvu-JfeH4KipNG4ViQ16Pe5I,30052
 snowflake/ml/registry/_manager/model_manager.py,sha256=MjS0AnHQ8g2yn3svvhSPgmPB0j0bpw4gH-p5LjtYKds,16083
 snowflake/ml/utils/authentication.py,sha256=Wx1kVBZ9XBDuKkRHpPEB2pBxpiJepVLFAirDMx4m5Gk,2612
 snowflake/ml/utils/connection_params.py,sha256=JRpQppuWRk6bhdLzVDhMfz3Y6yInobFNLHmIBaXD7po,8005
 snowflake/ml/utils/sparse.py,sha256=XqDQkw39Ml6YIknswdkvFIwUwBk_GBXAbP8IACfPENg,3817
 snowflake/ml/utils/sql_client.py,sha256=z4Rhi7pQz3s9cyu_Uzfr3deCnrkCdFh9IYIvicsuwdc,692
-snowflake_ml_python-1.8.
-snowflake_ml_python-1.8.
-snowflake_ml_python-1.8.
-snowflake_ml_python-1.8.
-snowflake_ml_python-1.8.
+snowflake_ml_python-1.8.1.dist-info/licenses/LICENSE.txt,sha256=PdEp56Av5m3_kl21iFkVTX_EbHJKFGEdmYeIO1pL_Yk,11365
+snowflake_ml_python-1.8.1.dist-info/METADATA,sha256=f7TRoR2YP8Ub6UAq-QZrPANrUlNRA7AT_5kliunN6Zk,81143
+snowflake_ml_python-1.8.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+snowflake_ml_python-1.8.1.dist-info/top_level.txt,sha256=TY0gFSHKDdZy3THb0FGomyikWQasEGldIR1O0HGOHVw,10
+snowflake_ml_python-1.8.1.dist-info/RECORD,,
{snowflake_ml_python-1.8.0.dist-info → snowflake_ml_python-1.8.1.dist-info}/licenses/LICENSE.txt
RENAMED
File without changes