snowflake-ml-python 1.0.2__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. snowflake/ml/_internal/env_utils.py +2 -1
  2. snowflake/ml/_internal/file_utils.py +29 -7
  3. snowflake/ml/_internal/telemetry.py +5 -8
  4. snowflake/ml/_internal/utils/uri.py +7 -2
  5. snowflake/ml/model/_deploy_client/image_builds/base_image_builder.py +15 -0
  6. snowflake/ml/model/_deploy_client/image_builds/client_image_builder.py +259 -0
  7. snowflake/ml/model/_deploy_client/image_builds/docker_context.py +89 -0
  8. snowflake/ml/model/_deploy_client/image_builds/gunicorn_run.sh +24 -0
  9. snowflake/ml/model/_deploy_client/image_builds/inference_server/main.py +118 -0
  10. snowflake/ml/model/_deploy_client/image_builds/templates/dockerfile_template +40 -0
  11. snowflake/ml/model/_deploy_client/snowservice/deploy.py +199 -0
  12. snowflake/ml/model/_deploy_client/snowservice/deploy_options.py +88 -0
  13. snowflake/ml/model/_deploy_client/snowservice/templates/service_spec_template +24 -0
  14. snowflake/ml/model/_deploy_client/utils/constants.py +47 -0
  15. snowflake/ml/model/_deploy_client/utils/snowservice_client.py +178 -0
  16. snowflake/ml/model/_deploy_client/warehouse/deploy.py +24 -6
  17. snowflake/ml/model/_deploy_client/warehouse/infer_template.py +5 -2
  18. snowflake/ml/model/_deployer.py +14 -27
  19. snowflake/ml/model/_env.py +4 -4
  20. snowflake/ml/model/_handlers/custom.py +14 -2
  21. snowflake/ml/model/_handlers/pytorch.py +186 -0
  22. snowflake/ml/model/_handlers/sklearn.py +14 -9
  23. snowflake/ml/model/_handlers/snowmlmodel.py +14 -9
  24. snowflake/ml/model/_handlers/torchscript.py +180 -0
  25. snowflake/ml/model/_handlers/xgboost.py +19 -9
  26. snowflake/ml/model/_model.py +3 -2
  27. snowflake/ml/model/_model_meta.py +12 -7
  28. snowflake/ml/model/model_signature.py +446 -66
  29. snowflake/ml/model/type_hints.py +23 -4
  30. snowflake/ml/modeling/calibration/calibrated_classifier_cv.py +51 -26
  31. snowflake/ml/modeling/cluster/affinity_propagation.py +51 -26
  32. snowflake/ml/modeling/cluster/agglomerative_clustering.py +51 -26
  33. snowflake/ml/modeling/cluster/birch.py +51 -26
  34. snowflake/ml/modeling/cluster/bisecting_k_means.py +51 -26
  35. snowflake/ml/modeling/cluster/dbscan.py +51 -26
  36. snowflake/ml/modeling/cluster/feature_agglomeration.py +51 -26
  37. snowflake/ml/modeling/cluster/k_means.py +51 -26
  38. snowflake/ml/modeling/cluster/mean_shift.py +51 -26
  39. snowflake/ml/modeling/cluster/mini_batch_k_means.py +51 -26
  40. snowflake/ml/modeling/cluster/optics.py +51 -26
  41. snowflake/ml/modeling/cluster/spectral_biclustering.py +51 -26
  42. snowflake/ml/modeling/cluster/spectral_clustering.py +51 -26
  43. snowflake/ml/modeling/cluster/spectral_coclustering.py +51 -26
  44. snowflake/ml/modeling/compose/column_transformer.py +51 -26
  45. snowflake/ml/modeling/compose/transformed_target_regressor.py +51 -26
  46. snowflake/ml/modeling/covariance/elliptic_envelope.py +51 -26
  47. snowflake/ml/modeling/covariance/empirical_covariance.py +51 -26
  48. snowflake/ml/modeling/covariance/graphical_lasso.py +51 -26
  49. snowflake/ml/modeling/covariance/graphical_lasso_cv.py +51 -26
  50. snowflake/ml/modeling/covariance/ledoit_wolf.py +51 -26
  51. snowflake/ml/modeling/covariance/min_cov_det.py +51 -26
  52. snowflake/ml/modeling/covariance/oas.py +51 -26
  53. snowflake/ml/modeling/covariance/shrunk_covariance.py +51 -26
  54. snowflake/ml/modeling/decomposition/dictionary_learning.py +51 -26
  55. snowflake/ml/modeling/decomposition/factor_analysis.py +51 -26
  56. snowflake/ml/modeling/decomposition/fast_ica.py +51 -26
  57. snowflake/ml/modeling/decomposition/incremental_pca.py +51 -26
  58. snowflake/ml/modeling/decomposition/kernel_pca.py +51 -26
  59. snowflake/ml/modeling/decomposition/mini_batch_dictionary_learning.py +51 -26
  60. snowflake/ml/modeling/decomposition/mini_batch_sparse_pca.py +51 -26
  61. snowflake/ml/modeling/decomposition/pca.py +51 -26
  62. snowflake/ml/modeling/decomposition/sparse_pca.py +51 -26
  63. snowflake/ml/modeling/decomposition/truncated_svd.py +51 -26
  64. snowflake/ml/modeling/discriminant_analysis/linear_discriminant_analysis.py +51 -26
  65. snowflake/ml/modeling/discriminant_analysis/quadratic_discriminant_analysis.py +51 -26
  66. snowflake/ml/modeling/ensemble/ada_boost_classifier.py +51 -26
  67. snowflake/ml/modeling/ensemble/ada_boost_regressor.py +51 -26
  68. snowflake/ml/modeling/ensemble/bagging_classifier.py +51 -26
  69. snowflake/ml/modeling/ensemble/bagging_regressor.py +51 -26
  70. snowflake/ml/modeling/ensemble/extra_trees_classifier.py +51 -26
  71. snowflake/ml/modeling/ensemble/extra_trees_regressor.py +51 -26
  72. snowflake/ml/modeling/ensemble/gradient_boosting_classifier.py +51 -26
  73. snowflake/ml/modeling/ensemble/gradient_boosting_regressor.py +51 -26
  74. snowflake/ml/modeling/ensemble/hist_gradient_boosting_classifier.py +51 -26
  75. snowflake/ml/modeling/ensemble/hist_gradient_boosting_regressor.py +51 -26
  76. snowflake/ml/modeling/ensemble/isolation_forest.py +51 -26
  77. snowflake/ml/modeling/ensemble/random_forest_classifier.py +51 -26
  78. snowflake/ml/modeling/ensemble/random_forest_regressor.py +51 -26
  79. snowflake/ml/modeling/ensemble/stacking_regressor.py +51 -26
  80. snowflake/ml/modeling/ensemble/voting_classifier.py +51 -26
  81. snowflake/ml/modeling/ensemble/voting_regressor.py +51 -26
  82. snowflake/ml/modeling/feature_selection/generic_univariate_select.py +51 -26
  83. snowflake/ml/modeling/feature_selection/select_fdr.py +51 -26
  84. snowflake/ml/modeling/feature_selection/select_fpr.py +51 -26
  85. snowflake/ml/modeling/feature_selection/select_fwe.py +51 -26
  86. snowflake/ml/modeling/feature_selection/select_k_best.py +51 -26
  87. snowflake/ml/modeling/feature_selection/select_percentile.py +51 -26
  88. snowflake/ml/modeling/feature_selection/sequential_feature_selector.py +51 -26
  89. snowflake/ml/modeling/feature_selection/variance_threshold.py +51 -26
  90. snowflake/ml/modeling/gaussian_process/gaussian_process_classifier.py +51 -26
  91. snowflake/ml/modeling/gaussian_process/gaussian_process_regressor.py +51 -26
  92. snowflake/ml/modeling/impute/iterative_imputer.py +51 -26
  93. snowflake/ml/modeling/impute/knn_imputer.py +51 -26
  94. snowflake/ml/modeling/impute/missing_indicator.py +51 -26
  95. snowflake/ml/modeling/kernel_approximation/additive_chi2_sampler.py +51 -26
  96. snowflake/ml/modeling/kernel_approximation/nystroem.py +51 -26
  97. snowflake/ml/modeling/kernel_approximation/polynomial_count_sketch.py +51 -26
  98. snowflake/ml/modeling/kernel_approximation/rbf_sampler.py +51 -26
  99. snowflake/ml/modeling/kernel_approximation/skewed_chi2_sampler.py +51 -26
  100. snowflake/ml/modeling/kernel_ridge/kernel_ridge.py +51 -26
  101. snowflake/ml/modeling/lightgbm/lgbm_classifier.py +51 -26
  102. snowflake/ml/modeling/lightgbm/lgbm_regressor.py +51 -26
  103. snowflake/ml/modeling/linear_model/ard_regression.py +51 -26
  104. snowflake/ml/modeling/linear_model/bayesian_ridge.py +51 -26
  105. snowflake/ml/modeling/linear_model/elastic_net.py +51 -26
  106. snowflake/ml/modeling/linear_model/elastic_net_cv.py +51 -26
  107. snowflake/ml/modeling/linear_model/gamma_regressor.py +51 -26
  108. snowflake/ml/modeling/linear_model/huber_regressor.py +51 -26
  109. snowflake/ml/modeling/linear_model/lars.py +51 -26
  110. snowflake/ml/modeling/linear_model/lars_cv.py +51 -26
  111. snowflake/ml/modeling/linear_model/lasso.py +51 -26
  112. snowflake/ml/modeling/linear_model/lasso_cv.py +51 -26
  113. snowflake/ml/modeling/linear_model/lasso_lars.py +51 -26
  114. snowflake/ml/modeling/linear_model/lasso_lars_cv.py +51 -26
  115. snowflake/ml/modeling/linear_model/lasso_lars_ic.py +51 -26
  116. snowflake/ml/modeling/linear_model/linear_regression.py +51 -26
  117. snowflake/ml/modeling/linear_model/logistic_regression.py +51 -26
  118. snowflake/ml/modeling/linear_model/logistic_regression_cv.py +51 -26
  119. snowflake/ml/modeling/linear_model/multi_task_elastic_net.py +51 -26
  120. snowflake/ml/modeling/linear_model/multi_task_elastic_net_cv.py +51 -26
  121. snowflake/ml/modeling/linear_model/multi_task_lasso.py +51 -26
  122. snowflake/ml/modeling/linear_model/multi_task_lasso_cv.py +51 -26
  123. snowflake/ml/modeling/linear_model/orthogonal_matching_pursuit.py +51 -26
  124. snowflake/ml/modeling/linear_model/passive_aggressive_classifier.py +51 -26
  125. snowflake/ml/modeling/linear_model/passive_aggressive_regressor.py +51 -26
  126. snowflake/ml/modeling/linear_model/perceptron.py +51 -26
  127. snowflake/ml/modeling/linear_model/poisson_regressor.py +51 -26
  128. snowflake/ml/modeling/linear_model/ransac_regressor.py +51 -26
  129. snowflake/ml/modeling/linear_model/ridge.py +51 -26
  130. snowflake/ml/modeling/linear_model/ridge_classifier.py +51 -26
  131. snowflake/ml/modeling/linear_model/ridge_classifier_cv.py +51 -26
  132. snowflake/ml/modeling/linear_model/ridge_cv.py +51 -26
  133. snowflake/ml/modeling/linear_model/sgd_classifier.py +51 -26
  134. snowflake/ml/modeling/linear_model/sgd_one_class_svm.py +51 -26
  135. snowflake/ml/modeling/linear_model/sgd_regressor.py +51 -26
  136. snowflake/ml/modeling/linear_model/theil_sen_regressor.py +51 -26
  137. snowflake/ml/modeling/linear_model/tweedie_regressor.py +51 -26
  138. snowflake/ml/modeling/manifold/isomap.py +51 -26
  139. snowflake/ml/modeling/manifold/mds.py +51 -26
  140. snowflake/ml/modeling/manifold/spectral_embedding.py +51 -26
  141. snowflake/ml/modeling/manifold/tsne.py +51 -26
  142. snowflake/ml/modeling/mixture/bayesian_gaussian_mixture.py +51 -26
  143. snowflake/ml/modeling/mixture/gaussian_mixture.py +51 -26
  144. snowflake/ml/modeling/model_selection/grid_search_cv.py +51 -26
  145. snowflake/ml/modeling/model_selection/randomized_search_cv.py +51 -26
  146. snowflake/ml/modeling/multiclass/one_vs_one_classifier.py +51 -26
  147. snowflake/ml/modeling/multiclass/one_vs_rest_classifier.py +51 -26
  148. snowflake/ml/modeling/multiclass/output_code_classifier.py +51 -26
  149. snowflake/ml/modeling/naive_bayes/bernoulli_nb.py +51 -26
  150. snowflake/ml/modeling/naive_bayes/categorical_nb.py +51 -26
  151. snowflake/ml/modeling/naive_bayes/complement_nb.py +51 -26
  152. snowflake/ml/modeling/naive_bayes/gaussian_nb.py +51 -26
  153. snowflake/ml/modeling/naive_bayes/multinomial_nb.py +51 -26
  154. snowflake/ml/modeling/neighbors/k_neighbors_classifier.py +51 -26
  155. snowflake/ml/modeling/neighbors/k_neighbors_regressor.py +51 -26
  156. snowflake/ml/modeling/neighbors/kernel_density.py +51 -26
  157. snowflake/ml/modeling/neighbors/local_outlier_factor.py +51 -26
  158. snowflake/ml/modeling/neighbors/nearest_centroid.py +51 -26
  159. snowflake/ml/modeling/neighbors/nearest_neighbors.py +51 -26
  160. snowflake/ml/modeling/neighbors/neighborhood_components_analysis.py +51 -26
  161. snowflake/ml/modeling/neighbors/radius_neighbors_classifier.py +51 -26
  162. snowflake/ml/modeling/neighbors/radius_neighbors_regressor.py +51 -26
  163. snowflake/ml/modeling/neural_network/bernoulli_rbm.py +51 -26
  164. snowflake/ml/modeling/neural_network/mlp_classifier.py +51 -26
  165. snowflake/ml/modeling/neural_network/mlp_regressor.py +51 -26
  166. snowflake/ml/modeling/preprocessing/ordinal_encoder.py +2 -0
  167. snowflake/ml/modeling/preprocessing/polynomial_features.py +51 -26
  168. snowflake/ml/modeling/semi_supervised/label_propagation.py +51 -26
  169. snowflake/ml/modeling/semi_supervised/label_spreading.py +51 -26
  170. snowflake/ml/modeling/svm/linear_svc.py +51 -26
  171. snowflake/ml/modeling/svm/linear_svr.py +51 -26
  172. snowflake/ml/modeling/svm/nu_svc.py +51 -26
  173. snowflake/ml/modeling/svm/nu_svr.py +51 -26
  174. snowflake/ml/modeling/svm/svc.py +51 -26
  175. snowflake/ml/modeling/svm/svr.py +51 -26
  176. snowflake/ml/modeling/tree/decision_tree_classifier.py +51 -26
  177. snowflake/ml/modeling/tree/decision_tree_regressor.py +51 -26
  178. snowflake/ml/modeling/tree/extra_tree_classifier.py +51 -26
  179. snowflake/ml/modeling/tree/extra_tree_regressor.py +51 -26
  180. snowflake/ml/modeling/xgboost/xgb_classifier.py +51 -26
  181. snowflake/ml/modeling/xgboost/xgb_regressor.py +51 -26
  182. snowflake/ml/modeling/xgboost/xgbrf_classifier.py +51 -26
  183. snowflake/ml/modeling/xgboost/xgbrf_regressor.py +51 -26
  184. snowflake/ml/registry/model_registry.py +74 -56
  185. snowflake/ml/version.py +1 -1
  186. {snowflake_ml_python-1.0.2.dist-info → snowflake_ml_python-1.0.3.dist-info}/METADATA +27 -8
  187. snowflake_ml_python-1.0.3.dist-info/RECORD +259 -0
  188. snowflake_ml_python-1.0.2.dist-info/RECORD +0 -246
  189. {snowflake_ml_python-1.0.2.dist-info → snowflake_ml_python-1.0.3.dist-info}/WHEEL +0 -0
snowflake/ml/model/_deploy_client/image_builds/templates/dockerfile_template
@@ -0,0 +1,40 @@
+ FROM $base_image as build
+
+ COPY $model_dir/env/conda.yaml conda.yaml
+ COPY $model_dir/env/requirements.txt requirements.txt
+
+ # Set MAMBA_DOCKERFILE_ACTIVATE=1 to activate the conda environment during build time.
+ ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+ # The micromamba image comes with an empty environment named base.
+ RUN --mount=type=cache,target=/opt/conda/pkgs micromamba install -y -n base -f conda.yaml && \
+ python -m pip install "uvicorn[standard]" gunicorn starlette && \
+ python -m pip install -r requirements.txt
+
+ FROM debian:buster-slim AS runtime
+
+ ENV USER nonrootuser
+ ENV UID 1000
+ ENV HOME /home/$USER
+ RUN adduser --disabled-password \
+ --gecos "A non-root user for running inference server" \
+ --uid $UID \
+ --home $HOME \
+ $USER
+
+ COPY $inference_server_dir ./$inference_server_dir
+ COPY $entrypoint_script ./$entrypoint_script
+ RUN chmod +x /$entrypoint_script
+ # Copy Snowflake/ml source code
+ # TODO: not needed as source code is either in model, or pulled from conda
+ COPY snowflake ./snowflake
+
+ # The mamba root prefix by default is set to /opt/conda, in which the base conda environment is built at.
+ COPY --from=build /opt/conda /opt/conda
+
+ # Expose the port on which the Starlette app will run.
+ EXPOSE 5000
+
+ USER nonrootuser
+
+ CMD ["/$entrypoint_script"]
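Note on the template above: it uses Python string.Template-style "$" placeholders (the same mechanism deploy.py below uses for the service spec). The docker_context.py module that renders it is part of this release but not shown in this excerpt; a minimal illustrative sketch of how such a template could be filled is given here. The placeholder values (base image tag, directory and script names) are assumptions for the example, and safe_substitute is used so Docker-time variables such as $USER, $UID and $HOME are left for Docker to resolve.

    # Sketch only: render a $-placeholder Dockerfile template with string.Template.
    import string

    with open("dockerfile_template", encoding="utf-8") as f:
        template_text = f.read()

    dockerfile = string.Template(template_text).safe_substitute(
        {
            "base_image": "mambaorg/micromamba:1.4.3",      # assumed micromamba base image
            "model_dir": "model",                            # assumed extracted model directory
            "inference_server_dir": "inference_server",
            "entrypoint_script": "gunicorn_run.sh",
        }
    )

    with open("Dockerfile", "w", encoding="utf-8") as f:
        f.write(dockerfile)
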
snowflake/ml/model/_deploy_client/snowservice/deploy.py
@@ -0,0 +1,199 @@
+ import logging
+ import os
+ import posixpath
+ import string
+ import tempfile
+ from abc import ABC
+ from typing import Any, Dict, cast
+
+ from typing_extensions import Unpack
+
+ from snowflake.ml.model._deploy_client.image_builds import (
+ base_image_builder,
+ client_image_builder,
+ )
+ from snowflake.ml.model._deploy_client.snowservice import deploy_options
+ from snowflake.ml.model._deploy_client.utils import constants, snowservice_client
+ from snowflake.snowpark import Session
+
+
+ def _deploy(
+ session: Session,
+ *,
+ model_id: str,
+ service_func_name: str,
+ model_zip_stage_path: str,
+ **kwargs: Unpack[deploy_options.SnowServiceDeployOptionsTypedHint],
+ ) -> None:
+ """Entrypoint for model deployment to SnowService. This function will trigger a docker image build followed by
+ workflow deployment to SnowService.
+
+ Args:
+ session: Snowpark session
+ model_id: Unique hex string of length 32, provided by model registry.
+ service_func_name: The service function name in SnowService associated with the created service.
+ model_zip_stage_path: Path to model zip file in stage. Note that this path has a "@" prefix.
+ **kwargs: various SnowService deployment options.
+
+ Raises:
+ ValueError: Raised when model_id is empty.
+ ValueError: Raised when service_func_name is empty.
+ ValueError: Raised when model_stage_file_path is empty.
+ """
+ snowpark_logger = logging.getLogger("snowflake.snowpark")
+ snowflake_connector_logger = logging.getLogger("snowflake.connector")
+ snowpark_log_level = snowpark_logger.level
+ snowflake_connector_log_level = snowflake_connector_logger.level
+ try:
+ # Setting appropriate log level to prevent console from being polluted by vast amount of snowpark and snowflake
+ # connector logging.
+ snowpark_logger.setLevel(logging.WARNING)
+ snowflake_connector_logger.setLevel(logging.WARNING)
+ if not model_id:
+ raise ValueError('Must provide a non-empty string for "model_id" when deploying to SnowService')
+ if not service_func_name:
+ raise ValueError('Must provide a non-empty string for "service_func_name" when deploying to SnowService')
+ if not model_zip_stage_path:
+ raise ValueError(
+ 'Must provide a non-empty string for "model_stage_file_path" when deploying to SnowService'
+ )
+ assert model_zip_stage_path.startswith("@"), f"stage path should start with @, actual: {model_zip_stage_path}"
+ options = deploy_options.SnowServiceDeployOptions.from_dict(cast(Dict[str, Any], kwargs))
+ image_builder = client_image_builder.ClientImageBuilder(
+ id=model_id, image_repo=options.image_repo, model_zip_stage_path=model_zip_stage_path, session=session
+ )
+ ss_deployment = SnowServiceDeployment(
+ session=session,
+ model_id=model_id,
+ service_func_name=service_func_name,
+ model_zip_stage_path=model_zip_stage_path,
+ image_builder=image_builder,
+ options=options,
+ )
+ ss_deployment.deploy()
+ finally:
+ # Preserve the original logging level.
+ snowpark_logger.setLevel(snowpark_log_level)
+ snowflake_connector_logger.setLevel(snowflake_connector_log_level)
+
+
+ class SnowServiceDeployment(ABC):
+ """
+ Class implementation that encapsulates image build and workflow deployment to SnowService
+
+ #TODO[shchen], SNOW-830093 GPU support on model deployment to SnowService
+ """
+
+ def __init__(
+ self,
+ session: Session,
+ model_id: str,
+ service_func_name: str,
+ model_zip_stage_path: str,
+ image_builder: base_image_builder.ImageBuilder,
+ options: deploy_options.SnowServiceDeployOptions,
+ ) -> None:
+ """Initialization
+
+ Args:
+ session: Snowpark session
+ model_id: Unique hex string of length 32, provided by model registry; if not provided, auto-generate one for
+ resource naming.The model_id serves as an idempotent key throughout the deployment workflow.
+ service_func_name: The service function name in SnowService associated with the created service.
+ model_zip_stage_path: Path to model zip file in stage.
+ image_builder: InferenceImageBuilder instance that handles image build and upload to image registry.
+ options: A SnowServiceDeployOptions object containing deployment options.
+ """
+
+ self.session = session
+ self.id = model_id
+ self.service_func_name = service_func_name
+ self.model_zip_stage_path = model_zip_stage_path
+ self.image_builder = image_builder
+ self.options = options
+ self._service_name = f"service_{model_id}"
+ # Spec file and future deployment related artifacts will be stored under {stage}/models/{model_id}
+ self._model_artifact_stage_location = posixpath.join(options.stage, "models", self.id)
+
+ def deploy(self) -> None:
+ """
+ This function triggers image build followed by workflow deployment to SnowService.
+ """
+ if self.options.prebuilt_snowflake_image:
+ image = self.options.prebuilt_snowflake_image
+ logging.info(f"Skipped image build. Use Snowflake prebuilt image: {self.options.prebuilt_snowflake_image}")
+ else:
+ image = self._build_and_upload_image()
+ self._deploy_workflow(image)
+
+ def _build_and_upload_image(self) -> str:
+ """This function handles image build and upload to image registry.
+
+ Returns:
+ Path to the image in the remote image repository.
+ """
+ return self.image_builder.build_and_upload_image()
+
+ def _prepare_and_upload_artifacts_to_stage(self, image: str) -> None:
+ """Constructs and upload service spec to stage.
+
+ Args:
+ image: Name of the image to create SnowService container from.
+ """
+
+ with tempfile.TemporaryDirectory() as tempdir:
+ spec_template_path = os.path.join(os.path.dirname(__file__), "templates/service_spec_template")
+ spec_file_path = os.path.join(tempdir, f"{constants.SERVICE_SPEC}.yaml")
+
+ with open(spec_template_path, encoding="utf-8") as template, open(
+ spec_file_path, "w", encoding="utf-8"
+ ) as spec_file:
+ content = string.Template(template.read()).substitute(
+ {
+ "image": image,
+ "predict_endpoint_name": constants.PREDICT,
+ "stage": self.options.stage,
+ "model_zip_stage_path": self.model_zip_stage_path[1:],  # Remove the @ prefix
+ "inference_server_container_name": constants.INFERENCE_SERVER_CONTAINER,
+ }
+ )
+ spec_file.write(content)
+ logging.info(f"Create service spec: \n {content}")
+
+ self.session.file.put(
+ local_file_name=spec_file_path,
+ stage_location=self._model_artifact_stage_location,
+ auto_compress=False,
+ overwrite=True,
+ )
+ logging.info(
+ f"Uploaded spec file {os.path.basename(spec_file_path)} " f"to {self._model_artifact_stage_location}"
+ )
+
+ def _deploy_workflow(self, image: str) -> None:
+ """This function handles workflow deployment to SnowService with the given image.
+
+ Args:
+ image: Name of the image to create SnowService container from.
+ """
+
+ self._prepare_and_upload_artifacts_to_stage(image)
+ client = snowservice_client.SnowServiceClient(self.session)
+ spec_stage_location = posixpath.join(
+ self._model_artifact_stage_location.rstrip("/"), f"{constants.SERVICE_SPEC}.yaml"
+ )
+ client.create_or_replace_service(
+ service_name=self._service_name,
+ compute_pool=self.options.compute_pool,
+ spec_stage_location=spec_stage_location,
+ min_instances=self.options.min_instances,
+ max_instances=self.options.max_instances,
+ )
+ client.block_until_resource_is_ready(
+ resource_name=self._service_name, resource_type=constants.ResourceType.SERVICE
+ )
+ client.create_or_replace_service_function(
+ service_func_name=self.service_func_name,
+ service_name=self._service_name,
+ endpoint_name=constants.PREDICT,
+ )
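In practice this private entrypoint is invoked through the model registry / _deployer path rather than called directly. A minimal sketch of a direct call is shown below purely to illustrate the signature and the required keyword options; the connection parameters, stage, compute pool, image repo and model id values are placeholders, not values used by the library.

    # Illustrative only: invoking the new SnowService deployment entrypoint.
    from snowflake.ml.model._deploy_client.snowservice import deploy
    from snowflake.snowpark import Session

    connection_parameters = {"account": "<account>", "user": "<user>", "password": "<password>"}
    session = Session.builder.configs(connection_parameters).create()

    deploy._deploy(
        session=session,
        model_id="0123456789abcdef0123456789abcdef",      # 32-char hex id from the model registry
        service_func_name="my_model_predict",
        model_zip_stage_path="@MY_STAGE/models/model.zip",  # must carry the "@" prefix
        stage="MY_STAGE",                                   # required deployment options (see deploy_options.py)
        compute_pool="MY_COMPUTE_POOL",
        image_repo="myorg-myaccount.registry.snowflakecomputing.com/db/schema/repo",
    )
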
snowflake/ml/model/_deploy_client/snowservice/deploy_options.py
@@ -0,0 +1,88 @@
+ from typing import Any, Dict, Optional, TypedDict
+
+ from typing_extensions import NotRequired
+
+ from snowflake.ml.model._deploy_client.utils import constants
+
+
+ class SnowServiceDeployOptionsTypedHint(TypedDict):
+ """Deployment options for deploying to SnowService.
+
+ stage: the name of the stage for uploading artifacts.
+ compute_pool: SnowService compute pool name.
+ image_repo: SnowService image repo path. e.g. "<image_registry>/<db>/<schema>/<repo>"
+ min_instances: Minimum number of service replicas.
+ max_instances: Maximum number of service replicas.
+ endpoint: The specific name of the endpoint that the service function will communicate with. Default to
+ "predict". This option is useful when service has multiple endpoints.
+ overridden_base_image: When provided, it will override the base image.
+ """
+
+ stage: str
+ compute_pool: str
+ image_repo: str
+ min_instances: NotRequired[int]
+ max_instances: NotRequired[int]
+ endpoint: NotRequired[str]
+ overridden_base_image: NotRequired[str]
+
+
+ class SnowServiceDeployOptions:
+ def __init__(
+ self,
+ stage: str,
+ compute_pool: str,
+ image_repo: str,
+ *,
+ min_instances: int = 1,
+ max_instances: int = 1,
+ endpoint: str = constants.PREDICT,
+ overridden_base_image: Optional[str] = None,
+ prebuilt_snowflake_image: Optional[str] = None,
+ ) -> None:
+ """Initialization
+
+ Args:
+ stage: the name of the stage for uploading artifacts.
+ compute_pool: SnowService compute pool name.
+ image_repo: SnowService image repo path. e.g. "<image_registry>/<db>/<schema>/<repo>"
+ min_instances: Minimum number of service replicas.
+ max_instances: Maximum number of service replicas.
+ endpoint: The specific name of the endpoint that the service function will communicate with. Default to
+ "predict". This option is useful when service has multiple endpoints.
+ overridden_base_image: When provided, it will override the base image.
+ prebuilt_snowflake_image: When provided, the image building step is skipped, and the pre-built image from
+ Snowflake is used as is. This option is for users who consistently use the same image for multiple use
+ cases, allowing faster deployment. The snowflake image used for deployment is logged to the console for
+ future use.
+ """
+
+ self.stage = stage
+ self.compute_pool = compute_pool
+ self.image_repo = image_repo
+ self.min_instances = min_instances
+ self.max_instances = max_instances
+ self.endpoint = endpoint
+ self.overridden_base_image = overridden_base_image
+ self.prebuilt_snowflake_image = prebuilt_snowflake_image
+
+ @classmethod
+ def from_dict(cls, options_dict: Dict[str, Any]) -> "SnowServiceDeployOptions":
+ """Construct SnowServiceDeployOptions instance based from an option dictionary.
+
+ Args:
+ options_dict: The dict containing various deployment options.
+
+ Raises:
+ ValueError: When required option is missing.
+
+ Returns:
+ A SnowServiceDeployOptions object
+ """
+ required_options = [constants.STAGE, constants.COMPUTE_POOL, constants.IMAGE_REPO]
+ missing_keys = [key for key in required_options if options_dict.get(key) is None]
+ if missing_keys:
+ raise ValueError(f"Must provide options when deploying to SnowService: {', '.join(missing_keys)}")
+ # SnowService image repo cannot handle upper case repo name.
+ options_dict[constants.IMAGE_REPO] = options_dict[constants.IMAGE_REPO].lower()
+ return cls(**options_dict)
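Given the class above, the option handling can be illustrated as follows (all values are placeholders): from_dict lower-cases the image repo path, applies the default replica counts, and raises a ValueError naming any missing required keys.

    # Illustrative use of SnowServiceDeployOptions.from_dict.
    from snowflake.ml.model._deploy_client.snowservice import deploy_options

    opts = deploy_options.SnowServiceDeployOptions.from_dict(
        {
            "stage": "MY_STAGE",
            "compute_pool": "MY_COMPUTE_POOL",
            "image_repo": "ORG-ACCOUNT.registry.snowflakecomputing.com/DB/SCHEMA/REPO",
        }
    )
    print(opts.image_repo)     # lower-cased: "org-account.registry.snowflakecomputing.com/db/schema/repo"
    print(opts.min_instances)  # defaults to 1

    # Omitting a required key raises, e.g.:
    # deploy_options.SnowServiceDeployOptions.from_dict({"stage": "MY_STAGE"})
    # ValueError: Must provide options when deploying to SnowService: compute_pool, image_repo
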
snowflake/ml/model/_deploy_client/snowservice/templates/service_spec_template
@@ -0,0 +1,24 @@
+ spec:
+ container:
+ - name: ${inference_server_container_name}
+ image: ${image}
+ env:
+ MODEL_ZIP_STAGE_PATH: ${model_zip_stage_path}
+ readinessProbe:
+ port: 5000
+ path: /health
+ volumeMounts:
+ - name: vol1
+ mountPath: /local/user/vol1
+ - name: stage
+ mountPath: ${stage}
+ endpoint:
+ - name: ${predict_endpoint_name}
+ port: 5000
+ volume:
+ - name: vol1
+ source: local  # only local emptyDir volume is supported
+ - name: stage
+ source: "@${stage}"
+ uid: 1000
+ gid: 1000
snowflake/ml/model/_deploy_client/utils/constants.py
@@ -0,0 +1,47 @@
+ from enum import Enum
+
+
+ class ResourceType(Enum):
+ SERVICE = "service"
+ JOB = "job"
+
+
+ """
+ Potential SnowService status based on existing ResourceSetStatus proto:
+
+ github.com/snowflakedb/snowflake/blob/main/GlobalServices/src/main/protobuf/snowservices_resourceset_reconciler.proto
+ """
+
+
+ class ResourceStatus(Enum):
+ UNKNOWN = "UNKNOWN"  # status is unknown because we have not received enough data from K8s yet.
+ PENDING = "PENDING"  # resource set is being created, can't be used yet
+ READY = "READY"  # resource set has been deployed.
+ DELETING = "DELETING"  # resource set is being deleted
+ FAILED = "FAILED"  # resource set has failed and cannot be used anymore
+ DONE = "DONE"  # resource set has finished running
+ NOT_FOUND = "NOT_FOUND"  # not found or deleted
+ INTERNAL_ERROR = "INTERNAL_ERROR"  # there was an internal service error.
+
+
+ RESOURCE_TO_STATUS_FUNCTION_MAPPING = {
+ ResourceType.SERVICE: "SYSTEM$GET_SNOWSERVICE_STATUS",
+ ResourceType.JOB: "SYSTEM$GET_JOB_STATUS",
+ }
+
+ PREDICT = "predict"
+ STAGE = "stage"
+ COMPUTE_POOL = "compute_pool"
+ IMAGE_REPO = "image_repo"
+ MIN_INSTANCES = "min_instances"
+ MAX_INSTANCES = "max_instances"
+ GPU_COUNT = "gpu"
+ OVERRIDDEN_BASE_IMAGE = "image"
+ ENDPOINT = "endpoint"
+ SERVICE_SPEC = "service_spec"
+ INFERENCE_SERVER_CONTAINER = "inference-server"
+
+ """Image build related constants"""
+ MODEL_DIR = "model_dir"
+ INFERENCE_SERVER_DIR = "inference_server"
+ ENTRYPOINT_SCRIPT = "gunicorn_run.sh"
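A short sketch of how these constants are typically combined by the client code that follows: the resource type is mapped to its status system function, and the raw status string returned by Snowflake is parsed back into the ResourceStatus enum.

    # Illustrative use of the constants module.
    from snowflake.ml.model._deploy_client.utils import constants

    status_func = constants.RESOURCE_TO_STATUS_FUNCTION_MAPPING[constants.ResourceType.SERVICE]
    print(status_func)  # SYSTEM$GET_SNOWSERVICE_STATUS

    status = constants.ResourceStatus("READY")  # parse a raw status string
    print(status is constants.ResourceStatus.READY)  # True
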
snowflake/ml/model/_deploy_client/utils/snowservice_client.py
@@ -0,0 +1,178 @@
+ import json
+ import logging
+ import time
+ from typing import Optional
+
+ from snowflake.ml.model._deploy_client.utils import constants
+ from snowflake.snowpark import Session
+
+
+ class SnowServiceClient:
+ """
+ SnowService client implementation: a Python wrapper for SnowService SQL queries.
+ """
+
+ def __init__(self, session: Session) -> None:
+ """Initialization
+
+ Args:
+ session: Snowpark session
+ """
+ self.session = session
+
+ def create_or_replace_service(
+ self,
+ service_name: str,
+ compute_pool: str,
+ spec_stage_location: str,
+ *,
+ min_instances: int = 1,
+ max_instances: int = 1,
+ ) -> None:
+ """Create or replace service. Since SnowService doesn't support the CREATE OR REPLACE service syntax, we will
+ first attempt to drop the service if it exists, and then create the service. Please note that this approach may
+ have side effects due to the lack of transaction support.
+
+ Args:
+ service_name: Name of the service.
+ min_instances: Minimum number of service replicas.
+ max_instances: Maximum number of service replicas.
+ compute_pool: Name of the compute pool.
+ spec_stage_location: Stage path for the service spec.
+ """
+ self._drop_service_if_exists(service_name)
+ sql = f"""
+ CREATE SERVICE {service_name}
+ MIN_INSTANCES={min_instances}
+ MAX_INSTANCES={max_instances}
+ COMPUTE_POOL={compute_pool}
+ SPEC=@{spec_stage_location}
+ """
+ logging.info(f"Create service with SQL: \n {sql}")
+ self.session.sql(sql).collect()
+
+ def _drop_service_if_exists(self, service_name: str) -> None:
+ """Drop service if it already exists.
+
+ Args:
+ service_name: Name of the service.
+ """
+ self.session.sql(f"DROP SERVICE IF EXISTS {service_name}").collect()
+
+ def create_or_replace_service_function(
+ self,
+ service_func_name: str,
+ service_name: str,
+ *,
+ endpoint_name: str = constants.PREDICT,
+ path_at_service_endpoint: str = constants.PREDICT,
+ ) -> None:
+ """Create or replace service function.
+
+ Args:
+ service_func_name: Name of the service function.
+ service_name: Name of the service.
+ endpoint_name: Name the service endpoint, declared in the service spec, indicating the listening port.
+ path_at_service_endpoint: Specify the path/route at the service endpoint. Multiple paths can exist for a
+ given endpoint. For example, an inference server listening on port 5000 may have paths like "/predict"
+ and "/monitoring
+
+ """
+ sql = f"""
+ CREATE OR REPLACE FUNCTION {service_func_name}(input OBJECT)
+ RETURNS OBJECT
+ SERVICE={service_name}
+ ENDPOINT={endpoint_name}
+ AS '/{path_at_service_endpoint}'
+ """
+ logging.info(f"Create service function with SQL: \n {sql}")
+ self.session.sql(sql).collect()
+
+ def block_until_resource_is_ready(
+ self,
+ resource_name: str,
+ resource_type: constants.ResourceType,
+ *,
+ max_retries: int = 60,
+ retry_interval_secs: int = 5,
+ ) -> None:
+ """Blocks execution until the specified resource is ready.
+ Note that this is a best-effort approach because when launching a service, it's possible for it to initially
+ fail due to a system error. However, SnowService may automatically retry and recover the service, leading to
+ potential false-negative information.
+
+ Args:
+ resource_name: Name of the resource.
+ resource_type: Type of the resource.
+ max_retries: The maximum number of retries to check the resource readiness (default: 60).
+ retry_interval_secs: The number of seconds to wait between each retry (default: 5).
+
+ Raises:
+ RuntimeError: If the resource received the following status [failed, not_found, internal_error, deleting]
+ RuntimeError: If the resource does not reach the ready/done state within the specified number of retries.
+ """
+ for _ in range(max_retries):
+ status = self.get_resource_status(resource_name=resource_name, resource_type=resource_type)
+ if status in [constants.ResourceStatus.READY, constants.ResourceStatus.DONE]:
+ return
+ elif status in [
+ constants.ResourceStatus.FAILED,
+ constants.ResourceStatus.NOT_FOUND,
+ constants.ResourceStatus.INTERNAL_ERROR,
+ constants.ResourceStatus.DELETING,
+ ]:
+ error_log = self.get_resource_log(
+ resource_name=resource_name,
+ resource_type=resource_type,
+ container_name=constants.INFERENCE_SERVER_CONTAINER,
+ )
+ raise RuntimeError(f"{resource_type} {resource_name} failed. \n {error_log if error_log else ''}")
+ time.sleep(retry_interval_secs)
+
+ raise RuntimeError("Resource never reached the ready/done state.")
+
+ def get_resource_log(
+ self, resource_name: str, resource_type: constants.ResourceType, container_name: str
+ ) -> Optional[str]:
+ if resource_type != constants.ResourceType.SERVICE:
+ raise NotImplementedError(f"{resource_type.name} is not yet supported in get_resource_log function")
+ try:
+ row = self.session.sql(
+ f"CALL SYSTEM$GET_SNOWSERVICE_LOGS('{resource_name}', '0', '{container_name}')"
+ ).collect()
+ return str(row[0]["SYSTEM$GET_SNOWSERVICE_LOGS"])
+ except Exception:
+ return None
+
+ def get_resource_status(
+ self, resource_name: str, resource_type: constants.ResourceType
+ ) -> Optional[constants.ResourceStatus]:
+ """Get resource status.
+
+ Args:
+ resource_name: Name of the resource.
+ resource_type: Type of the resource.
+
+ Raises:
+ ValueError: If resource type does not have a corresponding system function for querying status.
+ RuntimeError: If corresponding status call failed.
+
+ Returns:
+ Optional[constants.ResourceStatus]: The status of the resource, or None if the resource status is empty.
+ """
+ if resource_type not in constants.RESOURCE_TO_STATUS_FUNCTION_MAPPING:
+ raise ValueError(f"Status querying is not supported for resources of type '{resource_type}'.")
+ status_func = constants.RESOURCE_TO_STATUS_FUNCTION_MAPPING[resource_type]
+ try:
+ row = self.session.sql(f"CALL {status_func}('{resource_name}');").collect()
+ except Exception as e:
+ raise RuntimeError(f"Error while querying the {resource_type} {resource_name} status: {str(e)}")
+ resource_metadata = json.loads(row[0][status_func])[0]
+ logging.info(f"Resource status metadata: {resource_metadata}")
+ if resource_metadata and resource_metadata["status"]:
+ try:
+ status = resource_metadata["status"]
+ return constants.ResourceStatus(status)
+ except ValueError:
+ logging.warning(f"Unknown status returned: {status}")
+ return None
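This client is driven by SnowServiceDeployment._deploy_workflow in deploy.py above. A standalone sketch of the same call sequence is shown here for orientation; the connection parameters, service name, compute pool, stage path and function name are placeholders chosen to mirror the naming conventions in deploy.py, not values produced by the library.

    # Illustrative use of SnowServiceClient, mirroring _deploy_workflow.
    from snowflake.ml.model._deploy_client.utils import constants, snowservice_client
    from snowflake.snowpark import Session

    connection_parameters = {"account": "<account>", "user": "<user>", "password": "<password>"}
    session = Session.builder.configs(connection_parameters).create()

    model_id = "0123456789abcdef0123456789abcdef"
    service_name = f"service_{model_id}"

    client = snowservice_client.SnowServiceClient(session)
    client.create_or_replace_service(
        service_name=service_name,
        compute_pool="MY_COMPUTE_POOL",
        spec_stage_location=f"MY_STAGE/models/{model_id}/service_spec.yaml",
    )
    client.block_until_resource_is_ready(
        resource_name=service_name, resource_type=constants.ResourceType.SERVICE
    )
    client.create_or_replace_service_function(
        service_func_name="my_model_predict",
        service_name=service_name,
        endpoint_name=constants.PREDICT,
    )
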
snowflake/ml/model/_deploy_client/warehouse/deploy.py
@@ -1,4 +1,5 @@
  import os
+ import posixpath
  import tempfile
  import warnings
  from types import ModuleType
@@ -6,7 +7,8 @@ from typing import IO, List, Optional, Tuple, TypedDict, Union

  from typing_extensions import Unpack

- from snowflake.ml._internal import env_utils
+ from snowflake.ml._internal import env_utils, file_utils
+ from snowflake.ml._internal.utils import identifier
  from snowflake.ml.model import (
  _env as model_env,
  _model,
@@ -37,6 +39,7 @@ def _deploy_to_warehouse(
  **kwargs: Options that control some features in generated udf code.

  Raises:
+ ValueError: Raised when model file name is unable to encoded using ASCII.
  ValueError: Raised when incompatible model.
  ValueError: Raised when target method does not exist in model.
  ValueError: Raised when confronting invalid stage location.
@@ -44,14 +47,20 @@ def _deploy_to_warehouse(
  Returns:
  The metadata of the model deployed.
  """
+ # TODO(SNOW-862576): Should remove check on ASCII encoding after SNOW-862576 fixed.
  if model_dir_path:
  model_dir_path = os.path.normpath(model_dir_path)
  model_dir_name = os.path.basename(model_dir_path)
+ if not file_utils._able_ascii_encode(model_dir_name):
+ raise ValueError(f"Model file name {model_dir_name} cannot be encoded using ASCII. Please rename.")
  extract_model_code = infer_template._EXTRACT_LOCAL_MODEL_CODE.format(model_dir_name=model_dir_name)
  meta = _model.load_model(model_dir_path=model_dir_path, meta_only=True)
  else:
  assert model_stage_file_path is not None, "Unreachable assertion error."
- model_stage_file_name = os.path.basename(model_stage_file_path)
+ model_stage_file_name = posixpath.basename(model_stage_file_path)
+ if not file_utils._able_ascii_encode(model_stage_file_name):
+ raise ValueError(f"Model file name {model_stage_file_name} cannot be encoded using ASCII. Please rename.")
+
  extract_model_code = infer_template._EXTRACT_STAGE_MODEL_CODE.format(
  model_stage_file_name=model_stage_file_name
  )
@@ -59,18 +68,22 @@

  relax_version = kwargs.get("relax_version", False)

+ disable_local_conda_resolver = kwargs.get("disable_local_conda_resolver", False)
+
  if target_method not in meta.signatures.keys():
  raise ValueError(f"Target method {target_method} does not exist in model.")

- final_packages = _get_model_final_packages(meta, session, relax_version=relax_version)
+ final_packages = _get_model_final_packages(
+ meta, session, relax_version=relax_version, disable_local_conda_resolver=disable_local_conda_resolver
+ )

  stage_location = kwargs.get("permanent_udf_stage_location", None)
  if stage_location:
- stage_location = stage_location.strip().rstrip("/")
+ stage_location = posixpath.normpath(stage_location.strip())
  if not stage_location.startswith("@"):
  raise ValueError(f"Invalid stage location {stage_location}.")

- with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as f:
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False, encoding="utf-8") as f:
  _write_UDF_py_file(f.file, extract_model_code, target_method, **kwargs)
  print(f"Generated UDF file is persisted at: {f.name}")
  imports = ([model_dir_path] if model_dir_path else []) + (
@@ -89,7 +102,7 @@
  params = _UDFParams(
  file_path=f.name,
  func_name="infer",
- name=f"{udf_name}",
+ name=identifier.get_inferred_name(udf_name),
  return_type=st.PandasSeriesType(st.MapType(st.StringType(), st.VariantType())),
  input_types=[st.PandasDataFrameType([st.MapType()])],
  imports=list(imports),
@@ -139,6 +152,7 @@
  meta: _model_meta.ModelMetadata,
  session: snowpark_session.Session,
  relax_version: Optional[bool] = False,
+ disable_local_conda_resolver: Optional[bool] = False,
  ) -> List[str]:
  """Generate final packages list of dependency of a model to be deployed to warehouse.

@@ -147,6 +161,8 @@
  session: Snowpark connection session.
  relax_version: Whether or not relax the version restriction when fail to resolve dependencies.
  Defaults to False.
+ disable_local_conda_resolver: Set to disable use local conda resolver to do pre-check on environment and rely on
+ the information schema only. Defaults to False.

  Raises:
  RuntimeError: Raised when PIP requirements and dependencies from non-Snowflake anaconda channel found.
@@ -165,6 +181,8 @@
  deps = meta._conda_dependencies[""]

  try:
+ if disable_local_conda_resolver:
+ raise ImportError("Raise to disable local conda resolver. Should be captured.")
  final_packages = env_utils.resolve_conda_environment(
  deps, [model_env._SNOWFLAKE_CONDA_CHANNEL_URL], python_version=meta.python_version
  )
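The new checks call file_utils._able_ascii_encode, which lives in snowflake/ml/_internal/file_utils.py (changed +29 -7 in the file list above) and is not shown in this excerpt. A plausible, purely illustrative sketch of such a helper is given below; it is an assumption about the behavior implied by the call sites, not the actual implementation.

    # Hypothetical sketch of an ASCII-encodability check like _able_ascii_encode.
    def _able_ascii_encode(text: str) -> bool:
        """Return True if `text` can be encoded as ASCII, False otherwise."""
        try:
            text.encode("ascii", errors="strict")
            return True
        except UnicodeEncodeError:
            return False

    print(_able_ascii_encode("model.zip"))   # True
    print(_able_ascii_encode("modèle.zip"))  # False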