snowflake-ml-python 1.3.1__py3-none-any.whl → 1.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (219)
  1. snowflake/ml/_internal/env_utils.py +11 -1
  2. snowflake/ml/_internal/human_readable_id/adjectives.txt +128 -0
  3. snowflake/ml/_internal/human_readable_id/animals.txt +128 -0
  4. snowflake/ml/_internal/human_readable_id/hrid_generator.py +40 -0
  5. snowflake/ml/_internal/human_readable_id/hrid_generator_base.py +135 -0
  6. snowflake/ml/_internal/utils/formatting.py +1 -1
  7. snowflake/ml/_internal/utils/identifier.py +3 -1
  8. snowflake/ml/_internal/utils/sql_identifier.py +2 -6
  9. snowflake/ml/feature_store/feature_store.py +166 -184
  10. snowflake/ml/feature_store/feature_view.py +12 -24
  11. snowflake/ml/fileset/sfcfs.py +56 -50
  12. snowflake/ml/fileset/stage_fs.py +48 -13
  13. snowflake/ml/model/_client/model/model_version_impl.py +6 -49
  14. snowflake/ml/model/_client/ops/model_ops.py +78 -29
  15. snowflake/ml/model/_client/sql/model.py +23 -2
  16. snowflake/ml/model/_client/sql/model_version.py +22 -1
  17. snowflake/ml/model/_deploy_client/image_builds/server_image_builder.py +1 -3
  18. snowflake/ml/model/_deploy_client/snowservice/deploy.py +5 -2
  19. snowflake/ml/model/_model_composer/model_composer.py +7 -5
  20. snowflake/ml/model/_model_composer/model_manifest/model_manifest.py +19 -54
  21. snowflake/ml/model/_model_composer/model_manifest/model_manifest_schema.py +8 -1
  22. snowflake/ml/model/_model_composer/model_method/infer_table_function.py_template +1 -1
  23. snowflake/ml/model/_model_composer/model_method/model_method.py +6 -10
  24. snowflake/ml/model/_packager/model_handlers/catboost.py +206 -0
  25. snowflake/ml/model/_packager/model_handlers/lightgbm.py +218 -0
  26. snowflake/ml/model/_packager/model_handlers/sklearn.py +3 -0
  27. snowflake/ml/model/_packager/model_handlers/snowmlmodel.py +13 -1
  28. snowflake/ml/model/_packager/model_handlers/xgboost.py +1 -1
  29. snowflake/ml/model/_packager/model_meta/_core_requirements.py +1 -1
  30. snowflake/ml/model/_packager/model_meta/model_meta.py +36 -6
  31. snowflake/ml/model/_packager/model_meta/model_meta_schema.py +20 -1
  32. snowflake/ml/model/_packager/model_meta_migrator/migrator_plans.py +3 -1
  33. snowflake/ml/model/_packager/model_packager.py +2 -2
  34. snowflake/ml/model/{_model_composer/model_runtime/_runtime_requirements.py → _packager/model_runtime/_snowml_inference_alternative_requirements.py} +1 -1
  35. snowflake/ml/model/_packager/model_runtime/model_runtime.py +137 -0
  36. snowflake/ml/model/custom_model.py +3 -1
  37. snowflake/ml/model/type_hints.py +21 -2
  38. snowflake/ml/modeling/_internal/estimator_utils.py +16 -11
  39. snowflake/ml/modeling/_internal/local_implementations/pandas_handlers.py +4 -1
  40. snowflake/ml/modeling/_internal/model_specifications.py +3 -1
  41. snowflake/ml/modeling/_internal/snowpark_implementations/distributed_hpo_trainer.py +545 -0
  42. snowflake/ml/modeling/_internal/snowpark_implementations/snowpark_handlers.py +8 -5
  43. snowflake/ml/modeling/calibration/calibrated_classifier_cv.py +195 -123
  44. snowflake/ml/modeling/cluster/affinity_propagation.py +195 -123
  45. snowflake/ml/modeling/cluster/agglomerative_clustering.py +195 -123
  46. snowflake/ml/modeling/cluster/birch.py +195 -123
  47. snowflake/ml/modeling/cluster/bisecting_k_means.py +195 -123
  48. snowflake/ml/modeling/cluster/dbscan.py +195 -123
  49. snowflake/ml/modeling/cluster/feature_agglomeration.py +195 -123
  50. snowflake/ml/modeling/cluster/k_means.py +195 -123
  51. snowflake/ml/modeling/cluster/mean_shift.py +195 -123
  52. snowflake/ml/modeling/cluster/mini_batch_k_means.py +195 -123
  53. snowflake/ml/modeling/cluster/optics.py +195 -123
  54. snowflake/ml/modeling/cluster/spectral_biclustering.py +195 -123
  55. snowflake/ml/modeling/cluster/spectral_clustering.py +195 -123
  56. snowflake/ml/modeling/cluster/spectral_coclustering.py +195 -123
  57. snowflake/ml/modeling/compose/column_transformer.py +195 -123
  58. snowflake/ml/modeling/compose/transformed_target_regressor.py +195 -123
  59. snowflake/ml/modeling/covariance/elliptic_envelope.py +195 -123
  60. snowflake/ml/modeling/covariance/empirical_covariance.py +195 -123
  61. snowflake/ml/modeling/covariance/graphical_lasso.py +195 -123
  62. snowflake/ml/modeling/covariance/graphical_lasso_cv.py +195 -123
  63. snowflake/ml/modeling/covariance/ledoit_wolf.py +195 -123
  64. snowflake/ml/modeling/covariance/min_cov_det.py +195 -123
  65. snowflake/ml/modeling/covariance/oas.py +195 -123
  66. snowflake/ml/modeling/covariance/shrunk_covariance.py +195 -123
  67. snowflake/ml/modeling/decomposition/dictionary_learning.py +195 -123
  68. snowflake/ml/modeling/decomposition/factor_analysis.py +195 -123
  69. snowflake/ml/modeling/decomposition/fast_ica.py +195 -123
  70. snowflake/ml/modeling/decomposition/incremental_pca.py +195 -123
  71. snowflake/ml/modeling/decomposition/kernel_pca.py +195 -123
  72. snowflake/ml/modeling/decomposition/mini_batch_dictionary_learning.py +195 -123
  73. snowflake/ml/modeling/decomposition/mini_batch_sparse_pca.py +195 -123
  74. snowflake/ml/modeling/decomposition/pca.py +195 -123
  75. snowflake/ml/modeling/decomposition/sparse_pca.py +195 -123
  76. snowflake/ml/modeling/decomposition/truncated_svd.py +195 -123
  77. snowflake/ml/modeling/discriminant_analysis/linear_discriminant_analysis.py +195 -123
  78. snowflake/ml/modeling/discriminant_analysis/quadratic_discriminant_analysis.py +195 -123
  79. snowflake/ml/modeling/ensemble/ada_boost_classifier.py +195 -123
  80. snowflake/ml/modeling/ensemble/ada_boost_regressor.py +195 -123
  81. snowflake/ml/modeling/ensemble/bagging_classifier.py +195 -123
  82. snowflake/ml/modeling/ensemble/bagging_regressor.py +195 -123
  83. snowflake/ml/modeling/ensemble/extra_trees_classifier.py +195 -123
  84. snowflake/ml/modeling/ensemble/extra_trees_regressor.py +195 -123
  85. snowflake/ml/modeling/ensemble/gradient_boosting_classifier.py +195 -123
  86. snowflake/ml/modeling/ensemble/gradient_boosting_regressor.py +195 -123
  87. snowflake/ml/modeling/ensemble/hist_gradient_boosting_classifier.py +195 -123
  88. snowflake/ml/modeling/ensemble/hist_gradient_boosting_regressor.py +195 -123
  89. snowflake/ml/modeling/ensemble/isolation_forest.py +195 -123
  90. snowflake/ml/modeling/ensemble/random_forest_classifier.py +195 -123
  91. snowflake/ml/modeling/ensemble/random_forest_regressor.py +195 -123
  92. snowflake/ml/modeling/ensemble/stacking_regressor.py +195 -123
  93. snowflake/ml/modeling/ensemble/voting_classifier.py +195 -123
  94. snowflake/ml/modeling/ensemble/voting_regressor.py +195 -123
  95. snowflake/ml/modeling/feature_selection/generic_univariate_select.py +195 -123
  96. snowflake/ml/modeling/feature_selection/select_fdr.py +195 -123
  97. snowflake/ml/modeling/feature_selection/select_fpr.py +195 -123
  98. snowflake/ml/modeling/feature_selection/select_fwe.py +195 -123
  99. snowflake/ml/modeling/feature_selection/select_k_best.py +195 -123
  100. snowflake/ml/modeling/feature_selection/select_percentile.py +195 -123
  101. snowflake/ml/modeling/feature_selection/sequential_feature_selector.py +195 -123
  102. snowflake/ml/modeling/feature_selection/variance_threshold.py +195 -123
  103. snowflake/ml/modeling/framework/_utils.py +8 -1
  104. snowflake/ml/modeling/framework/base.py +24 -6
  105. snowflake/ml/modeling/gaussian_process/gaussian_process_classifier.py +195 -123
  106. snowflake/ml/modeling/gaussian_process/gaussian_process_regressor.py +195 -123
  107. snowflake/ml/modeling/impute/iterative_imputer.py +195 -123
  108. snowflake/ml/modeling/impute/knn_imputer.py +195 -123
  109. snowflake/ml/modeling/impute/missing_indicator.py +195 -123
  110. snowflake/ml/modeling/impute/simple_imputer.py +4 -15
  111. snowflake/ml/modeling/kernel_approximation/additive_chi2_sampler.py +195 -123
  112. snowflake/ml/modeling/kernel_approximation/nystroem.py +195 -123
  113. snowflake/ml/modeling/kernel_approximation/polynomial_count_sketch.py +195 -123
  114. snowflake/ml/modeling/kernel_approximation/rbf_sampler.py +195 -123
  115. snowflake/ml/modeling/kernel_approximation/skewed_chi2_sampler.py +195 -123
  116. snowflake/ml/modeling/kernel_ridge/kernel_ridge.py +195 -123
  117. snowflake/ml/modeling/lightgbm/lgbm_classifier.py +198 -125
  118. snowflake/ml/modeling/lightgbm/lgbm_regressor.py +198 -125
  119. snowflake/ml/modeling/linear_model/ard_regression.py +195 -123
  120. snowflake/ml/modeling/linear_model/bayesian_ridge.py +195 -123
  121. snowflake/ml/modeling/linear_model/elastic_net.py +195 -123
  122. snowflake/ml/modeling/linear_model/elastic_net_cv.py +195 -123
  123. snowflake/ml/modeling/linear_model/gamma_regressor.py +195 -123
  124. snowflake/ml/modeling/linear_model/huber_regressor.py +195 -123
  125. snowflake/ml/modeling/linear_model/lars.py +195 -123
  126. snowflake/ml/modeling/linear_model/lars_cv.py +195 -123
  127. snowflake/ml/modeling/linear_model/lasso.py +195 -123
  128. snowflake/ml/modeling/linear_model/lasso_cv.py +195 -123
  129. snowflake/ml/modeling/linear_model/lasso_lars.py +195 -123
  130. snowflake/ml/modeling/linear_model/lasso_lars_cv.py +195 -123
  131. snowflake/ml/modeling/linear_model/lasso_lars_ic.py +195 -123
  132. snowflake/ml/modeling/linear_model/linear_regression.py +195 -123
  133. snowflake/ml/modeling/linear_model/logistic_regression.py +195 -123
  134. snowflake/ml/modeling/linear_model/logistic_regression_cv.py +195 -123
  135. snowflake/ml/modeling/linear_model/multi_task_elastic_net.py +195 -123
  136. snowflake/ml/modeling/linear_model/multi_task_elastic_net_cv.py +195 -123
  137. snowflake/ml/modeling/linear_model/multi_task_lasso.py +195 -123
  138. snowflake/ml/modeling/linear_model/multi_task_lasso_cv.py +195 -123
  139. snowflake/ml/modeling/linear_model/orthogonal_matching_pursuit.py +195 -123
  140. snowflake/ml/modeling/linear_model/passive_aggressive_classifier.py +195 -123
  141. snowflake/ml/modeling/linear_model/passive_aggressive_regressor.py +195 -123
  142. snowflake/ml/modeling/linear_model/perceptron.py +195 -123
  143. snowflake/ml/modeling/linear_model/poisson_regressor.py +195 -123
  144. snowflake/ml/modeling/linear_model/ransac_regressor.py +195 -123
  145. snowflake/ml/modeling/linear_model/ridge.py +195 -123
  146. snowflake/ml/modeling/linear_model/ridge_classifier.py +195 -123
  147. snowflake/ml/modeling/linear_model/ridge_classifier_cv.py +195 -123
  148. snowflake/ml/modeling/linear_model/ridge_cv.py +195 -123
  149. snowflake/ml/modeling/linear_model/sgd_classifier.py +195 -123
  150. snowflake/ml/modeling/linear_model/sgd_one_class_svm.py +195 -123
  151. snowflake/ml/modeling/linear_model/sgd_regressor.py +195 -123
  152. snowflake/ml/modeling/linear_model/theil_sen_regressor.py +195 -123
  153. snowflake/ml/modeling/linear_model/tweedie_regressor.py +195 -123
  154. snowflake/ml/modeling/manifold/isomap.py +195 -123
  155. snowflake/ml/modeling/manifold/mds.py +195 -123
  156. snowflake/ml/modeling/manifold/spectral_embedding.py +195 -123
  157. snowflake/ml/modeling/manifold/tsne.py +195 -123
  158. snowflake/ml/modeling/mixture/bayesian_gaussian_mixture.py +195 -123
  159. snowflake/ml/modeling/mixture/gaussian_mixture.py +195 -123
  160. snowflake/ml/modeling/model_selection/grid_search_cv.py +42 -18
  161. snowflake/ml/modeling/model_selection/randomized_search_cv.py +42 -18
  162. snowflake/ml/modeling/multiclass/one_vs_one_classifier.py +195 -123
  163. snowflake/ml/modeling/multiclass/one_vs_rest_classifier.py +195 -123
  164. snowflake/ml/modeling/multiclass/output_code_classifier.py +195 -123
  165. snowflake/ml/modeling/naive_bayes/bernoulli_nb.py +195 -123
  166. snowflake/ml/modeling/naive_bayes/categorical_nb.py +195 -123
  167. snowflake/ml/modeling/naive_bayes/complement_nb.py +195 -123
  168. snowflake/ml/modeling/naive_bayes/gaussian_nb.py +195 -123
  169. snowflake/ml/modeling/naive_bayes/multinomial_nb.py +195 -123
  170. snowflake/ml/modeling/neighbors/k_neighbors_classifier.py +195 -123
  171. snowflake/ml/modeling/neighbors/k_neighbors_regressor.py +195 -123
  172. snowflake/ml/modeling/neighbors/kernel_density.py +195 -123
  173. snowflake/ml/modeling/neighbors/local_outlier_factor.py +195 -123
  174. snowflake/ml/modeling/neighbors/nearest_centroid.py +195 -123
  175. snowflake/ml/modeling/neighbors/nearest_neighbors.py +195 -123
  176. snowflake/ml/modeling/neighbors/neighborhood_components_analysis.py +195 -123
  177. snowflake/ml/modeling/neighbors/radius_neighbors_classifier.py +195 -123
  178. snowflake/ml/modeling/neighbors/radius_neighbors_regressor.py +195 -123
  179. snowflake/ml/modeling/neural_network/bernoulli_rbm.py +195 -123
  180. snowflake/ml/modeling/neural_network/mlp_classifier.py +195 -123
  181. snowflake/ml/modeling/neural_network/mlp_regressor.py +195 -123
  182. snowflake/ml/modeling/pipeline/pipeline.py +4 -4
  183. snowflake/ml/modeling/preprocessing/binarizer.py +1 -5
  184. snowflake/ml/modeling/preprocessing/k_bins_discretizer.py +1 -5
  185. snowflake/ml/modeling/preprocessing/label_encoder.py +1 -5
  186. snowflake/ml/modeling/preprocessing/max_abs_scaler.py +1 -5
  187. snowflake/ml/modeling/preprocessing/min_max_scaler.py +10 -12
  188. snowflake/ml/modeling/preprocessing/normalizer.py +1 -5
  189. snowflake/ml/modeling/preprocessing/one_hot_encoder.py +1 -5
  190. snowflake/ml/modeling/preprocessing/ordinal_encoder.py +1 -5
  191. snowflake/ml/modeling/preprocessing/polynomial_features.py +195 -123
  192. snowflake/ml/modeling/preprocessing/robust_scaler.py +1 -5
  193. snowflake/ml/modeling/preprocessing/standard_scaler.py +11 -11
  194. snowflake/ml/modeling/semi_supervised/label_propagation.py +195 -123
  195. snowflake/ml/modeling/semi_supervised/label_spreading.py +195 -123
  196. snowflake/ml/modeling/svm/linear_svc.py +195 -123
  197. snowflake/ml/modeling/svm/linear_svr.py +195 -123
  198. snowflake/ml/modeling/svm/nu_svc.py +195 -123
  199. snowflake/ml/modeling/svm/nu_svr.py +195 -123
  200. snowflake/ml/modeling/svm/svc.py +195 -123
  201. snowflake/ml/modeling/svm/svr.py +195 -123
  202. snowflake/ml/modeling/tree/decision_tree_classifier.py +195 -123
  203. snowflake/ml/modeling/tree/decision_tree_regressor.py +195 -123
  204. snowflake/ml/modeling/tree/extra_tree_classifier.py +195 -123
  205. snowflake/ml/modeling/tree/extra_tree_regressor.py +195 -123
  206. snowflake/ml/modeling/xgboost/xgb_classifier.py +195 -123
  207. snowflake/ml/modeling/xgboost/xgb_regressor.py +195 -123
  208. snowflake/ml/modeling/xgboost/xgbrf_classifier.py +195 -123
  209. snowflake/ml/modeling/xgboost/xgbrf_regressor.py +195 -123
  210. snowflake/ml/registry/_manager/model_manager.py +5 -1
  211. snowflake/ml/registry/model_registry.py +99 -26
  212. snowflake/ml/registry/registry.py +3 -2
  213. snowflake/ml/version.py +1 -1
  214. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.1.dist-info}/METADATA +94 -55
  215. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.1.dist-info}/RECORD +218 -212
  216. snowflake/ml/model/_model_composer/model_runtime/model_runtime.py +0 -97
  217. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.1.dist-info}/LICENSE.txt +0 -0
  218. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.1.dist-info}/WHEEL +0 -0
  219. {snowflake_ml_python-1.3.1.dist-info → snowflake_ml_python-1.4.1.dist-info}/top_level.txt +0 -0
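The hunks below come from snowflake/ml/modeling/linear_model/multi_task_elastic_net.py (the MultiTaskElasticNet class named in each hunk header); the same +195 -123 pattern repeats across the other autogenerated modeling wrappers listed above. As a minimal sketch of how a comparison like this can be reproduced locally with Python's standard library (the local wheel filenames are assumptions; download both wheels first, e.g. with `pip download snowflake-ml-python==1.3.1 --no-deps` and likewise for 1.4.1):

import difflib
import zipfile

# Assumed local copies of the two published wheels (not part of this page).
OLD_WHEEL = "snowflake_ml_python-1.3.1-py3-none-any.whl"
NEW_WHEEL = "snowflake_ml_python-1.4.1-py3-none-any.whl"
TARGET = "snowflake/ml/modeling/linear_model/multi_task_elastic_net.py"


def read_member(wheel_path: str, member: str) -> list:
    # A wheel is a plain zip archive, so a packaged source file can be read directly.
    with zipfile.ZipFile(wheel_path) as zf:
        return zf.read(member).decode("utf-8").splitlines(keepends=True)


old_lines = read_member(OLD_WHEEL, TARGET)
new_lines = read_member(NEW_WHEEL, TARGET)

# Emit a unified diff comparable to the hunks shown below.
for line in difflib.unified_diff(old_lines, new_lines, fromfile=f"1.3.1/{TARGET}", tofile=f"1.4.1/{TARGET}"):
    print(line, end="")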
@@ -33,6 +33,15 @@ from snowflake.ml.modeling._internal.transformer_protocols import (
     BatchInferenceKwargsTypedDict,
     ScoreKwargsTypedDict
 )
+from snowflake.ml.model._signatures import utils as model_signature_utils
+from snowflake.ml.model.model_signature import (
+    BaseFeatureSpec,
+    DataType,
+    FeatureSpec,
+    ModelSignature,
+    _infer_signature,
+    _rename_signature_with_snowflake_identifiers,
+)
 
 from snowflake.ml.modeling._internal.model_transformer_builder import ModelTransformerBuilder
 
@@ -43,16 +52,6 @@ from snowflake.ml.modeling._internal.estimator_utils import (
     validate_sklearn_args,
 )
 
-from snowflake.ml.model.model_signature import (
-    DataType,
-    FeatureSpec,
-    ModelSignature,
-    _infer_signature,
-    _rename_signature_with_snowflake_identifiers,
-    BaseFeatureSpec,
-)
-from snowflake.ml.model._signatures import utils as model_signature_utils
-
 _PROJECT = "ModelDevelopment"
 # Derive subproject from module name by removing "sklearn"
 # and converting module name from underscore to CamelCase
@@ -252,12 +251,7 @@ class MultiTaskElasticNet(BaseTransformer):
         )
         return selected_cols
 
-    @telemetry.send_api_usage_telemetry(
-        project=_PROJECT,
-        subproject=_SUBPROJECT,
-        custom_tags=dict([("autogen", True)]),
-    )
-    def fit(self, dataset: Union[DataFrame, pd.DataFrame]) -> "MultiTaskElasticNet":
+    def _fit(self, dataset: Union[DataFrame, pd.DataFrame]) -> "MultiTaskElasticNet":
         """Fit MultiTaskElasticNet model with coordinate descent
         For more details on this function, see [sklearn.linear_model.MultiTaskElasticNet.fit]
         (https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.MultiTaskElasticNet.html#sklearn.linear_model.MultiTaskElasticNet.fit)
@@ -284,12 +278,14 @@ class MultiTaskElasticNet(BaseTransformer):
 
         self._snowpark_cols = dataset.select(self.input_cols).columns
 
-        # If we are already in a stored procedure, no need to kick off another one.
+        # If we are already in a stored procedure, no need to kick off another one.
         if SNOWML_SPROC_ENV in os.environ:
             statement_params = telemetry.get_function_usage_statement_params(
                 project=_PROJECT,
                 subproject=_SUBPROJECT,
-                function_name=telemetry.get_statement_params_full_func_name(inspect.currentframe(), MultiTaskElasticNet.__class__.__name__),
+                function_name=telemetry.get_statement_params_full_func_name(
+                    inspect.currentframe(), MultiTaskElasticNet.__class__.__name__
+                ),
                 api_calls=[Session.call],
                 custom_tags=dict([("autogen", True)]) if self._autogenerated else None,
             )
@@ -310,7 +306,7 @@ class MultiTaskElasticNet(BaseTransformer):
         )
         self._sklearn_object = model_trainer.train()
         self._is_fitted = True
-        self._get_model_signatures(dataset)
+        self._generate_model_signatures(dataset)
         return self
 
     def _batch_inference_validate_snowpark(
@@ -386,7 +382,9 @@ class MultiTaskElasticNet(BaseTransformer):
             # when it is classifier, infer the datatype from label columns
             if expected_type_inferred == "" and 'predict' in self.model_signatures:
                 # Batch inference takes a single expected output column type. Use the first columns type for now.
-                label_cols_signatures = [row for row in self.model_signatures['predict'].outputs if row.name in self.output_cols]
+                label_cols_signatures = [
+                    row for row in self.model_signatures['predict'].outputs if row.name in self.output_cols
+                ]
                 if len(label_cols_signatures) == 0:
                     error_str = f"Output columns {self.output_cols} do not match model signatures {self.model_signatures['predict'].outputs}."
                     raise exceptions.SnowflakeMLException(
@@ -394,25 +392,22 @@ class MultiTaskElasticNet(BaseTransformer):
                         original_exception=ValueError(error_str),
                     )
 
-                expected_type_inferred = convert_sp_to_sf_type(
-                    label_cols_signatures[0].as_snowpark_type()
-                )
+                expected_type_inferred = convert_sp_to_sf_type(label_cols_signatures[0].as_snowpark_type())
 
             self._deps = self._batch_inference_validate_snowpark(dataset=dataset, inference_method=inference_method)
-            assert isinstance(dataset._session, Session) # mypy does not recognize the check in _batch_inference_validate_snowpark()
+            assert isinstance(
+                dataset._session, Session
+            ) # mypy does not recognize the check in _batch_inference_validate_snowpark()
 
             transform_kwargs = dict(
-                session = dataset._session,
-                dependencies = self._deps,
-                drop_input_cols = self._drop_input_cols,
-                expected_output_cols_type = expected_type_inferred,
+                session=dataset._session,
+                dependencies=self._deps,
+                drop_input_cols=self._drop_input_cols,
+                expected_output_cols_type=expected_type_inferred,
             )
 
         elif isinstance(dataset, pd.DataFrame):
-            transform_kwargs = dict(
-                snowpark_input_cols = self._snowpark_cols,
-                drop_input_cols = self._drop_input_cols
-            )
+            transform_kwargs = dict(snowpark_input_cols=self._snowpark_cols, drop_input_cols=self._drop_input_cols)
 
         transform_handlers = ModelTransformerBuilder.build(
             dataset=dataset,
@@ -452,7 +447,7 @@ class MultiTaskElasticNet(BaseTransformer):
             Transformed dataset.
         """
         super()._check_dataset_type(dataset)
-        inference_method="transform"
+        inference_method = "transform"
 
         # This dictionary contains optional kwargs for batch inference. These kwargs
         # are specific to the type of dataset used.
@@ -489,17 +484,14 @@ class MultiTaskElasticNet(BaseTransformer):
             assert isinstance(dataset._session, Session) # mypy does not recognize the check in _batch_inference_validate_snowpark()
 
             transform_kwargs = dict(
-                session = dataset._session,
-                dependencies = self._deps,
-                drop_input_cols = self._drop_input_cols,
-                expected_output_cols_type = expected_dtype,
+                session=dataset._session,
+                dependencies=self._deps,
+                drop_input_cols=self._drop_input_cols,
+                expected_output_cols_type=expected_dtype,
             )
 
         elif isinstance(dataset, pd.DataFrame):
-            transform_kwargs = dict(
-                snowpark_input_cols = self._snowpark_cols,
-                drop_input_cols = self._drop_input_cols
-            )
+            transform_kwargs = dict(snowpark_input_cols=self._snowpark_cols, drop_input_cols=self._drop_input_cols)
 
         transform_handlers = ModelTransformerBuilder.build(
             dataset=dataset,
@@ -518,7 +510,11 @@ class MultiTaskElasticNet(BaseTransformer):
         return output_df
 
     @available_if(original_estimator_has_callable("fit_predict")) # type: ignore[misc]
-    def fit_predict(self, dataset: Union[DataFrame, pd.DataFrame], output_cols_prefix: str = "fit_predict_",) -> Union[DataFrame, pd.DataFrame]:
+    def fit_predict(
+        self,
+        dataset: Union[DataFrame, pd.DataFrame],
+        output_cols_prefix: str = "fit_predict_",
+    ) -> Union[DataFrame, pd.DataFrame]:
         """ Method not supported for this class.
 
 
@@ -543,7 +539,9 @@ class MultiTaskElasticNet(BaseTransformer):
         )
         output_result, fitted_estimator = model_trainer.train_fit_predict(
             drop_input_cols=self._drop_input_cols,
-            expected_output_cols_list=self.output_cols if self.output_cols else self._get_output_column_names(output_cols_prefix),
+            expected_output_cols_list=(
+                self.output_cols if self.output_cols else self._get_output_column_names(output_cols_prefix)
+            ),
         )
         self._sklearn_object = fitted_estimator
         self._is_fitted = True
@@ -560,6 +558,62 @@ class MultiTaskElasticNet(BaseTransformer):
         assert self._sklearn_object is not None
         return self._sklearn_object.embedding_
 
+
+    def _get_output_column_names(self, output_cols_prefix: str, output_cols: Optional[List[str]] = None) -> List[str]:
+        """ Returns the list of output columns for predict_proba(), decision_function(), etc.. functions.
+        Returns a list with output_cols_prefix as the only element if the estimator is not a classifier.
+        """
+        output_cols_prefix = identifier.resolve_identifier(output_cols_prefix)
+        # The following condition is introduced for kneighbors methods, and not used in other methods
+        if output_cols:
+            output_cols = [
+                identifier.concat_names([output_cols_prefix, identifier.resolve_identifier(c)])
+                for c in output_cols
+            ]
+        elif getattr(self._sklearn_object, "classes_", None) is None:
+            output_cols = [output_cols_prefix]
+        elif self._sklearn_object is not None:
+            classes = self._sklearn_object.classes_
+            if isinstance(classes, numpy.ndarray):
+                output_cols = [f'{output_cols_prefix}{str(c)}' for c in classes.tolist()]
+            elif isinstance(classes, list) and len(classes) > 0 and isinstance(classes[0], numpy.ndarray):
+                # If the estimator is a multioutput estimator, classes_ will be a list of ndarrays.
+                output_cols = []
+                for i, cl in enumerate(classes):
+                    # For binary classification, there is only one output column for each class
+                    # ndarray as the two classes are complementary.
+                    if len(cl) == 2:
+                        output_cols.append(f'{output_cols_prefix}{i}_{cl[0]}')
+                    else:
+                        output_cols.extend([
+                            f'{output_cols_prefix}{i}_{c}' for c in cl.tolist()
+                        ])
+        else:
+            output_cols = []
+
+        # Make sure column names are valid snowflake identifiers.
+        assert output_cols is not None # Make MyPy happy
+        rv = [identifier.rename_to_valid_snowflake_identifier(c) for c in output_cols]
+
+        return rv
+
+    def _align_expected_output_names(
+        self, method: str, dataset: DataFrame, expected_output_cols_list: List[str], output_cols_prefix: str
+    ) -> List[str]:
+        # in case the inferred output column names dimension is different
+        # we use one line of snowpark dataframe and put it into sklearn estimator using pandas
+        output_df_pd = getattr(self, method)(dataset.limit(1).to_pandas(), output_cols_prefix)
+        output_df_columns = list(output_df_pd.columns)
+        output_df_columns_set: Set[str] = set(output_df_columns) - set(dataset.columns)
+        if self.sample_weight_col:
+            output_df_columns_set -= set(self.sample_weight_col)
+        # if the dimension of inferred output column names is correct; use it
+        if len(expected_output_cols_list) == len(output_df_columns_set):
+            return expected_output_cols_list
+        # otherwise, use the sklearn estimator's output
+        else:
+            return sorted(list(output_df_columns_set), key=lambda x: output_df_columns.index(x))
+
     @available_if(original_estimator_has_callable("predict_proba")) # type: ignore[misc]
     @telemetry.send_api_usage_telemetry(
         project=_PROJECT,
@@ -590,24 +644,28 @@ class MultiTaskElasticNet(BaseTransformer):
         # are specific to the type of dataset used.
         transform_kwargs: BatchInferenceKwargsTypedDict = dict()
 
+        expected_output_cols = self._get_output_column_names(output_cols_prefix)
+
         if isinstance(dataset, DataFrame):
             self._deps = self._batch_inference_validate_snowpark(
                 dataset=dataset,
                 inference_method=inference_method,
             )
-            assert isinstance(dataset._session, Session) # mypy does not recognize the check in _batch_inference_validate_snowpark()
+            assert isinstance(
+                dataset._session, Session
+            ) # mypy does not recognize the check in _batch_inference_validate_snowpark()
             transform_kwargs = dict(
                 session=dataset._session,
                 dependencies=self._deps,
-                drop_input_cols = self._drop_input_cols,
+                drop_input_cols=self._drop_input_cols,
                 expected_output_cols_type="float",
             )
+            expected_output_cols = self._align_expected_output_names(
+                inference_method, dataset, expected_output_cols, output_cols_prefix
+            )
 
         elif isinstance(dataset, pd.DataFrame):
-            transform_kwargs = dict(
-                snowpark_input_cols = self._snowpark_cols,
-                drop_input_cols = self._drop_input_cols
-            )
+            transform_kwargs = dict(snowpark_input_cols=self._snowpark_cols, drop_input_cols=self._drop_input_cols)
 
         transform_handlers = ModelTransformerBuilder.build(
             dataset=dataset,
@@ -619,7 +677,7 @@ class MultiTaskElasticNet(BaseTransformer):
         output_df: DATAFRAME_TYPE = transform_handlers.batch_inference(
             inference_method=inference_method,
             input_cols=self.input_cols,
-            expected_output_cols=self._get_output_column_names(output_cols_prefix),
+            expected_output_cols=expected_output_cols,
             **transform_kwargs
         )
         return output_df
@@ -649,7 +707,8 @@ class MultiTaskElasticNet(BaseTransformer):
             Output dataset with log probability of the sample for each class in the model.
         """
         super()._check_dataset_type(dataset)
-        inference_method="predict_log_proba"
+        inference_method = "predict_log_proba"
+        expected_output_cols = self._get_output_column_names(output_cols_prefix)
 
         # This dictionary contains optional kwargs for batch inference. These kwargs
         # are specific to the type of dataset used.
@@ -660,18 +719,20 @@ class MultiTaskElasticNet(BaseTransformer):
                 dataset=dataset,
                 inference_method=inference_method,
             )
-            assert isinstance(dataset._session, Session) # mypy does not recognize the check in _batch_inference_validate_snowpark()
+            assert isinstance(
+                dataset._session, Session
+            ) # mypy does not recognize the check in _batch_inference_validate_snowpark()
             transform_kwargs = dict(
                 session=dataset._session,
                 dependencies=self._deps,
-                drop_input_cols = self._drop_input_cols,
+                drop_input_cols=self._drop_input_cols,
                 expected_output_cols_type="float",
             )
+            expected_output_cols = self._align_expected_output_names(
+                inference_method, dataset, expected_output_cols, output_cols_prefix
+            )
         elif isinstance(dataset, pd.DataFrame):
-            transform_kwargs = dict(
-                snowpark_input_cols = self._snowpark_cols,
-                drop_input_cols = self._drop_input_cols
-            )
+            transform_kwargs = dict(snowpark_input_cols=self._snowpark_cols, drop_input_cols=self._drop_input_cols)
 
         transform_handlers = ModelTransformerBuilder.build(
             dataset=dataset,
@@ -684,7 +745,7 @@ class MultiTaskElasticNet(BaseTransformer):
         output_df: DATAFRAME_TYPE = transform_handlers.batch_inference(
             inference_method=inference_method,
             input_cols=self.input_cols,
-            expected_output_cols=self._get_output_column_names(output_cols_prefix),
+            expected_output_cols=expected_output_cols,
             **transform_kwargs
         )
         return output_df
@@ -710,30 +771,34 @@ class MultiTaskElasticNet(BaseTransformer):
             Output dataset with results of the decision function for the samples in input dataset.
         """
         super()._check_dataset_type(dataset)
-        inference_method="decision_function"
+        inference_method = "decision_function"
 
         # This dictionary contains optional kwargs for batch inference. These kwargs
         # are specific to the type of dataset used.
         transform_kwargs: BatchInferenceKwargsTypedDict = dict()
 
+        expected_output_cols = self._get_output_column_names(output_cols_prefix)
+
         if isinstance(dataset, DataFrame):
             self._deps = self._batch_inference_validate_snowpark(
                 dataset=dataset,
                 inference_method=inference_method,
             )
-            assert isinstance(dataset._session, Session) # mypy does not recognize the check in _batch_inference_validate_snowpark()
+            assert isinstance(
+                dataset._session, Session
+            ) # mypy does not recognize the check in _batch_inference_validate_snowpark()
             transform_kwargs = dict(
                 session=dataset._session,
                 dependencies=self._deps,
-                drop_input_cols = self._drop_input_cols,
+                drop_input_cols=self._drop_input_cols,
                 expected_output_cols_type="float",
             )
+            expected_output_cols = self._align_expected_output_names(
+                inference_method, dataset, expected_output_cols, output_cols_prefix
+            )
 
         elif isinstance(dataset, pd.DataFrame):
-            transform_kwargs = dict(
-                snowpark_input_cols = self._snowpark_cols,
-                drop_input_cols = self._drop_input_cols
-            )
+            transform_kwargs = dict(snowpark_input_cols=self._snowpark_cols, drop_input_cols=self._drop_input_cols)
 
         transform_handlers = ModelTransformerBuilder.build(
             dataset=dataset,
@@ -746,7 +811,7 @@ class MultiTaskElasticNet(BaseTransformer):
         output_df: DATAFRAME_TYPE = transform_handlers.batch_inference(
            inference_method=inference_method,
            input_cols=self.input_cols,
-           expected_output_cols=self._get_output_column_names(output_cols_prefix),
+           expected_output_cols=expected_output_cols,
            **transform_kwargs
         )
         return output_df
@@ -775,12 +840,14 @@ class MultiTaskElasticNet(BaseTransformer):
             Output dataset with probability of the sample for each class in the model.
         """
         super()._check_dataset_type(dataset)
-        inference_method="score_samples"
+        inference_method = "score_samples"
 
         # This dictionary contains optional kwargs for batch inference. These kwargs
         # are specific to the type of dataset used.
         transform_kwargs: BatchInferenceKwargsTypedDict = dict()
 
+        expected_output_cols = self._get_output_column_names(output_cols_prefix)
+
         if isinstance(dataset, DataFrame):
             self._deps = self._batch_inference_validate_snowpark(
                 dataset=dataset,
@@ -793,6 +860,9 @@ class MultiTaskElasticNet(BaseTransformer):
                 drop_input_cols = self._drop_input_cols,
                 expected_output_cols_type="float",
             )
+            expected_output_cols = self._align_expected_output_names(
+                inference_method, dataset, expected_output_cols, output_cols_prefix
+            )
 
         elif isinstance(dataset, pd.DataFrame):
             transform_kwargs = dict(
@@ -811,7 +881,7 @@ class MultiTaskElasticNet(BaseTransformer):
         output_df: DATAFRAME_TYPE = transform_handlers.batch_inference(
             inference_method=inference_method,
             input_cols=self.input_cols,
-            expected_output_cols=self._get_output_column_names(output_cols_prefix),
+            expected_output_cols=expected_output_cols,
             **transform_kwargs
         )
         return output_df
@@ -958,50 +1028,84 @@ class MultiTaskElasticNet(BaseTransformer):
         )
         return output_df
 
+
+
+    def to_sklearn(self) -> Any:
+        """Get sklearn.linear_model.MultiTaskElasticNet object.
+        """
+        if self._sklearn_object is None:
+            self._sklearn_object = self._create_sklearn_object()
+        return self._sklearn_object
+
+    def to_xgboost(self) -> Any:
+        raise exceptions.SnowflakeMLException(
+            error_code=error_codes.METHOD_NOT_ALLOWED,
+            original_exception=AttributeError(
+                modeling_error_messages.UNSUPPORTED_MODEL_CONVERSION.format(
+                    "to_xgboost()",
+                    "to_sklearn()"
+                )
+            ),
+        )
+
+    def to_lightgbm(self) -> Any:
+        raise exceptions.SnowflakeMLException(
+            error_code=error_codes.METHOD_NOT_ALLOWED,
+            original_exception=AttributeError(
+                modeling_error_messages.UNSUPPORTED_MODEL_CONVERSION.format(
+                    "to_lightgbm()",
+                    "to_sklearn()"
+                )
+            ),
+        )
 
-    def _get_model_signatures(self, dataset: Union[DataFrame, pd.DataFrame]) -> None:
+    def _get_dependencies(self) -> List[str]:
+        return self._deps
+
+
+    def _generate_model_signatures(self, dataset: Union[DataFrame, pd.DataFrame]) -> None:
         self._model_signature_dict = dict()
 
         PROB_FUNCTIONS = ["predict_log_proba", "predict_proba", "decision_function"]
 
-        inputs = list(_infer_signature(dataset[self.input_cols], "input"))
+        inputs = list(_infer_signature(dataset[self.input_cols], "input", use_snowflake_identifiers=True))
         outputs: List[BaseFeatureSpec] = []
         if hasattr(self, "predict"):
             # keep mypy happy
-            assert self._sklearn_object is not None and hasattr(self._sklearn_object, "_estimator_type")
+            assert self._sklearn_object is not None and hasattr(self._sklearn_object, "_estimator_type")
             # For classifier, the type of predict is the same as the type of label
-            if self._sklearn_object._estimator_type == 'classifier':
-                # label columns is the desired type for output
+            if self._sklearn_object._estimator_type == "classifier":
+                # label columns is the desired type for output
                 outputs = list(_infer_signature(dataset[self.label_cols], "output", use_snowflake_identifiers=True))
                 # rename the output columns
                 outputs = list(model_signature_utils.rename_features(outputs, self.output_cols))
-                self._model_signature_dict["predict"] = ModelSignature(inputs,
-                                                                       ([] if self._drop_input_cols else inputs)
-                                                                       + outputs)
+                self._model_signature_dict["predict"] = ModelSignature(
+                    inputs, ([] if self._drop_input_cols else inputs) + outputs
+                )
             # For mixture models that use the density mixin, `predict` returns the argmax of the log prob.
             # For outlier models, returns -1 for outliers and 1 for inliers.
-            # Clusterer returns int64 cluster labels.
+            # Clusterer returns int64 cluster labels.
             elif self._sklearn_object._estimator_type in ["DensityEstimator", "clusterer", "outlier_detector"]:
                 outputs = [FeatureSpec(dtype=DataType.INT64, name=c) for c in self.output_cols]
-                self._model_signature_dict["predict"] = ModelSignature(inputs,
-                                                                       ([] if self._drop_input_cols else inputs)
-                                                                       + outputs)
-
+                self._model_signature_dict["predict"] = ModelSignature(
+                    inputs, ([] if self._drop_input_cols else inputs) + outputs
+                )
+
             # For regressor, the type of predict is float64
-            elif self._sklearn_object._estimator_type == 'regressor':
+            elif self._sklearn_object._estimator_type == "regressor":
                 outputs = [FeatureSpec(dtype=DataType.DOUBLE, name=c) for c in self.output_cols]
-                self._model_signature_dict["predict"] = ModelSignature(inputs,
-                                                                       ([] if self._drop_input_cols else inputs)
-                                                                       + outputs)
-
+                self._model_signature_dict["predict"] = ModelSignature(
+                    inputs, ([] if self._drop_input_cols else inputs) + outputs
+                )
+
         for prob_func in PROB_FUNCTIONS:
             if hasattr(self, prob_func):
                 output_cols_prefix: str = f"{prob_func}_"
                 output_column_names = self._get_output_column_names(output_cols_prefix)
                 outputs = [FeatureSpec(dtype=DataType.DOUBLE, name=c) for c in output_column_names]
-                self._model_signature_dict[prob_func] = ModelSignature(inputs,
-                                                                       ([] if self._drop_input_cols else inputs)
-                                                                       + outputs)
+                self._model_signature_dict[prob_func] = ModelSignature(
+                    inputs, ([] if self._drop_input_cols else inputs) + outputs
+                )
 
         # Output signature names may still need to be renamed, since they were not created with `_infer_signature`.
         items = list(self._model_signature_dict.items())
@@ -1014,10 +1118,10 @@ class MultiTaskElasticNet(BaseTransformer):
         """Returns model signature of current class.
 
         Raises:
-            exceptions.SnowflakeMLException: If estimator is not fitted, then model signature cannot be inferred
+            SnowflakeMLException: If estimator is not fitted, then model signature cannot be inferred
 
         Returns:
-            Dict[str, ModelSignature]: each method and its input output signature
+            Dict with each method and its input output signature
         """
         if self._model_signature_dict is None:
             raise exceptions.SnowflakeMLException(
@@ -1025,35 +1129,3 @@ class MultiTaskElasticNet(BaseTransformer):
                 original_exception=RuntimeError("Estimator not fitted before accessing property model_signatures!"),
             )
         return self._model_signature_dict
-
-    def to_sklearn(self) -> Any:
-        """Get sklearn.linear_model.MultiTaskElasticNet object.
-        """
-        if self._sklearn_object is None:
-            self._sklearn_object = self._create_sklearn_object()
-        return self._sklearn_object
-
-    def to_xgboost(self) -> Any:
-        raise exceptions.SnowflakeMLException(
-            error_code=error_codes.METHOD_NOT_ALLOWED,
-            original_exception=AttributeError(
-                modeling_error_messages.UNSUPPORTED_MODEL_CONVERSION.format(
-                    "to_xgboost()",
-                    "to_sklearn()"
-                )
-            ),
-        )
-
-    def to_lightgbm(self) -> Any:
-        raise exceptions.SnowflakeMLException(
-            error_code=error_codes.METHOD_NOT_ALLOWED,
-            original_exception=AttributeError(
-                modeling_error_messages.UNSUPPORTED_MODEL_CONVERSION.format(
-                    "to_lightgbm()",
-                    "to_sklearn()"
-                )
-            ),
-        )
-
-    def _get_dependencies(self) -> List[str]:
-        return self._deps