snowflake-ml-python 1.3.1__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. snowflake/ml/_internal/human_readable_id/adjectives.txt +128 -0
  2. snowflake/ml/_internal/human_readable_id/animals.txt +128 -0
  3. snowflake/ml/_internal/human_readable_id/hrid_generator.py +40 -0
  4. snowflake/ml/_internal/human_readable_id/hrid_generator_base.py +135 -0
  5. snowflake/ml/_internal/utils/formatting.py +1 -1
  6. snowflake/ml/feature_store/feature_store.py +15 -106
  7. snowflake/ml/model/_client/model/model_version_impl.py +20 -15
  8. snowflake/ml/model/_deploy_client/image_builds/server_image_builder.py +1 -3
  9. snowflake/ml/model/_deploy_client/snowservice/deploy.py +5 -2
  10. snowflake/ml/model/_model_composer/model_composer.py +7 -5
  11. snowflake/ml/model/_model_composer/model_method/infer_table_function.py_template +1 -1
  12. snowflake/ml/model/_packager/model_handlers/snowmlmodel.py +13 -1
  13. snowflake/ml/model/_packager/model_handlers/xgboost.py +1 -1
  14. snowflake/ml/model/custom_model.py +3 -1
  15. snowflake/ml/modeling/_internal/model_specifications.py +3 -1
  16. snowflake/ml/modeling/_internal/snowpark_implementations/distributed_hpo_trainer.py +546 -0
  17. snowflake/ml/modeling/_internal/snowpark_implementations/snowpark_handlers.py +3 -0
  18. snowflake/ml/modeling/framework/base.py +15 -5
  19. snowflake/ml/modeling/impute/simple_imputer.py +4 -15
  20. snowflake/ml/modeling/lightgbm/lgbm_classifier.py +3 -2
  21. snowflake/ml/modeling/lightgbm/lgbm_regressor.py +3 -2
  22. snowflake/ml/registry/_manager/model_manager.py +5 -1
  23. snowflake/ml/registry/model_registry.py +99 -26
  24. snowflake/ml/registry/registry.py +2 -1
  25. snowflake/ml/version.py +1 -1
  26. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.0.dist-info}/METADATA +31 -3
  27. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.0.dist-info}/RECORD +30 -26
  28. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.0.dist-info}/LICENSE.txt +0 -0
  29. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.0.dist-info}/WHEEL +0 -0
  30. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.0.dist-info}/top_level.txt +0 -0
@@ -239,14 +239,17 @@ def _validate_compute_pool(session: Session, *, options: deploy_options.SnowServ
239
239
  ),
240
240
  )
241
241
 
242
- elif state not in ["ACTIVE", "IDLE"]:
242
+ elif state not in ["STARTING", "ACTIVE", "IDLE"]:
243
243
  raise snowml_exceptions.SnowflakeMLException(
244
244
  error_code=error_codes.INVALID_SNOWPARK_COMPUTE_POOL,
245
245
  original_exception=RuntimeError(
246
- "The compute pool you are requesting to use is not in the ACTIVE/IDLE status."
246
+ "The compute pool you are requesting to use is not in the ACTIVE/IDLE/STARTING status."
247
247
  ),
248
248
  )
249
249
 
250
+ if state in ["SUSPENDED", "STARTING"]:
251
+ logger.warning(f"The compute pool you are requesting is in {state} state. We are waiting it to be ready.")
252
+
250
253
  if options.use_gpu:
251
254
  assert options.num_gpus is not None
252
255
  request_gpus = options.num_gpus
@@ -1,6 +1,7 @@
1
1
  import glob
2
2
  import pathlib
3
3
  import tempfile
4
+ import uuid
4
5
  import zipfile
5
6
  from types import ModuleType
6
7
  from typing import Any, Dict, List, Optional
@@ -31,7 +32,6 @@ class ModelComposer:
31
32
  will zip it. This would not be required if we make directory import work.
32
33
  """
33
34
 
34
- MODEL_FILE_REL_PATH = "model.zip"
35
35
  MODEL_DIR_REL_PATH = "model"
36
36
 
37
37
  def __init__(
@@ -50,6 +50,8 @@ class ModelComposer:
50
50
  self.packager = model_packager.ModelPackager(local_dir_path=str(self._packager_workspace_path))
51
51
  self.manifest = model_manifest.ModelManifest(workspace_path=self.workspace_path)
52
52
 
53
+ self.model_file_rel_path = f"model-{uuid.uuid4().hex}.zip"
54
+
53
55
  self._statement_params = statement_params
54
56
 
55
57
  def __del__(self) -> None:
@@ -66,11 +68,11 @@ class ModelComposer:
66
68
 
67
69
  @property
68
70
  def model_stage_path(self) -> str:
69
- return (self.stage_path / ModelComposer.MODEL_FILE_REL_PATH).as_posix()
71
+ return (self.stage_path / self.model_file_rel_path).as_posix()
70
72
 
71
73
  @property
72
74
  def model_local_path(self) -> str:
73
- return str(self.workspace_path / ModelComposer.MODEL_FILE_REL_PATH)
75
+ return str(self.workspace_path / self.model_file_rel_path)
74
76
 
75
77
  def save(
76
78
  self,
@@ -130,7 +132,7 @@ class ModelComposer:
130
132
  self.manifest.save(
131
133
  session=self.session,
132
134
  model_meta=self.packager.meta,
133
- model_file_rel_path=pathlib.PurePosixPath(ModelComposer.MODEL_FILE_REL_PATH),
135
+ model_file_rel_path=pathlib.PurePosixPath(self.model_file_rel_path),
134
136
  options=options,
135
137
  )
136
138
 
@@ -156,7 +158,7 @@ class ModelComposer:
156
158
 
157
159
  # TODO (Server-side Model Rollout): Remove this section.
158
160
  model_zip_path = pathlib.Path(glob.glob(str(self.workspace_path / "*.zip"))[0])
159
- ModelComposer.MODEL_FILE_REL_PATH = str(model_zip_path.relative_to(self.workspace_path))
161
+ self.model_file_rel_path = str(model_zip_path.relative_to(self.workspace_path))
160
162
 
161
163
  with zipfile.ZipFile(self.model_local_path, mode="r", compression=zipfile.ZIP_DEFLATED) as zf:
162
164
  zf.extractall(path=self._packager_workspace_path)
@@ -73,5 +73,5 @@ dtype_map = {{feature.name: feature.as_dtype() for feature in features}}
73
73
  # Actual table function
74
74
  class {function_name}:
75
75
  @vectorized(input=pd.DataFrame)
76
- def end_partition(df: pd.DataFrame) -> pd.DataFrame:
76
+ def end_partition(self, df: pd.DataFrame) -> pd.DataFrame:
77
77
  return runner(df)
@@ -87,7 +87,19 @@ class SnowMLModelHandler(_base.BaseModelHandler["BaseEstimator"]):
87
87
  stacklevel=2,
88
88
  )
89
89
  assert hasattr(model, "model_signatures"), "Model does not have model signatures as expected."
90
- model_meta.signatures = getattr(model, "model_signatures", {})
90
+ model_signature_dict = getattr(model, "model_signatures", {})
91
+ target_methods = kwargs.pop("target_methods", None)
92
+ if not target_methods:
93
+ model_meta.signatures = model_signature_dict
94
+ else:
95
+ temp_model_signature_dict = {}
96
+ for method_name in target_methods:
97
+ method_model_signature = model_signature_dict.get(method_name, None)
98
+ if method_model_signature is not None:
99
+ temp_model_signature_dict[method_name] = method_model_signature
100
+ else:
101
+ raise ValueError(f"Target method {method_name} does not exist in the model.")
102
+ model_meta.signatures = temp_model_signature_dict
91
103
 
92
104
  model_blob_path = os.path.join(model_blobs_dir_path, name)
93
105
  os.makedirs(model_blob_path, exist_ok=True)
@@ -45,7 +45,7 @@ class XGBModelHandler(_base.BaseModelHandler[Union["xgboost.Booster", "xgboost.X
45
45
  _HANDLER_MIGRATOR_PLANS: Dict[str, Type[base_migrator.BaseModelHandlerMigrator]] = {}
46
46
 
47
47
  MODELE_BLOB_FILE_OR_DIR = "model.ubj"
48
- DEFAULT_TARGET_METHODS = ["apply", "predict", "predict_proba"]
48
+ DEFAULT_TARGET_METHODS = ["predict", "predict_proba"]
49
49
 
50
50
  @classmethod
51
51
  def can_handle(
@@ -149,7 +149,9 @@ class CustomModel:
149
149
  context: A ModelContext object showing sub-models and artifacts related to this model.
150
150
  """
151
151
 
152
- def __init__(self, context: ModelContext) -> None:
152
+ def __init__(self, context: Optional[ModelContext] = None) -> None:
153
+ if context is None:
154
+ context = ModelContext()
153
155
  self.context = context
154
156
  for method in self._get_infer_methods():
155
157
  _validate_predict_function(method)
@@ -29,7 +29,7 @@ class SKLearnModelSpecifications(ModelSpecifications):
29
29
  ]
30
30
 
31
31
  # A change from previous implementation.
32
- # When reusing the Sprocs for all the fit() call in the session, the static dpendencies list should include
32
+ # When reusing the Sprocs for all the fit() call in the session, the static dependencies list should include
33
33
  # all the possible dependencies required during the lifetime.
34
34
 
35
35
  # Include XGBoost in the dependencies if it is installed.
@@ -67,10 +67,12 @@ class XGBoostModelSpecifications(ModelSpecifications):
67
67
  class LightGBMModelSpecifications(ModelSpecifications):
68
68
  def __init__(self) -> None:
69
69
  import lightgbm
70
+ import sklearn
70
71
 
71
72
  imports: List[str] = ["lightgbm"]
72
73
  pkgDependencies: List[str] = [
73
74
  f"numpy=={np.__version__}",
75
+ f"scikit-learn=={sklearn.__version__}",
74
76
  f"lightgbm=={lightgbm.__version__}",
75
77
  f"cloudpickle=={cp.__version__}",
76
78
  ]