snowflake-cli 3.13.1__py3-none-any.whl → 3.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/dev/docs/project_definition_generate_json_schema.py +2 -2
  3. snowflake/cli/_app/printing.py +14 -12
  4. snowflake/cli/_app/snow_connector.py +59 -9
  5. snowflake/cli/_plugins/dbt/commands.py +37 -7
  6. snowflake/cli/_plugins/dbt/manager.py +81 -53
  7. snowflake/cli/_plugins/dcm/commands.py +94 -4
  8. snowflake/cli/_plugins/dcm/manager.py +87 -33
  9. snowflake/cli/_plugins/dcm/reporters.py +462 -0
  10. snowflake/cli/_plugins/dcm/styles.py +26 -0
  11. snowflake/cli/_plugins/dcm/utils.py +88 -0
  12. snowflake/cli/_plugins/git/manager.py +24 -22
  13. snowflake/cli/_plugins/object/command_aliases.py +7 -1
  14. snowflake/cli/_plugins/object/commands.py +12 -2
  15. snowflake/cli/_plugins/object/manager.py +7 -2
  16. snowflake/cli/_plugins/snowpark/commands.py +8 -1
  17. snowflake/cli/_plugins/snowpark/package/commands.py +1 -1
  18. snowflake/cli/_plugins/streamlit/commands.py +23 -4
  19. snowflake/cli/_plugins/streamlit/streamlit_entity.py +89 -46
  20. snowflake/cli/api/commands/decorators.py +1 -1
  21. snowflake/cli/api/commands/flags.py +30 -5
  22. snowflake/cli/api/console/abc.py +7 -3
  23. snowflake/cli/api/console/console.py +14 -2
  24. snowflake/cli/api/exceptions.py +1 -1
  25. snowflake/cli/api/feature_flags.py +1 -3
  26. snowflake/cli/api/output/types.py +6 -0
  27. snowflake/cli/api/utils/types.py +20 -1
  28. {snowflake_cli-3.13.1.dist-info → snowflake_cli-3.15.0.dist-info}/METADATA +10 -5
  29. {snowflake_cli-3.13.1.dist-info → snowflake_cli-3.15.0.dist-info}/RECORD +32 -29
  30. {snowflake_cli-3.13.1.dist-info → snowflake_cli-3.15.0.dist-info}/WHEEL +1 -1
  31. {snowflake_cli-3.13.1.dist-info → snowflake_cli-3.15.0.dist-info}/entry_points.txt +0 -0
  32. {snowflake_cli-3.13.1.dist-info → snowflake_cli-3.15.0.dist-info}/licenses/LICENSE +0 -0
snowflake/cli/_plugins/dcm/utils.py
@@ -0,0 +1,88 @@
+# Copyright (c) 2024 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import os
+from functools import wraps
+from pathlib import Path
+from typing import Any
+
+from snowflake.cli._plugins.dcm.reporters import RefreshReporter, TestReporter
+from snowflake.cli.api.output.types import EmptyResult
+
+
+class FakeCursor:
+    def __init__(self, data: Any):
+        self._data = data
+        self._fetched = False
+
+    def fetchone(self):
+        if self._fetched:
+            return None
+        self._fetched = True
+        return (json.dumps(self._data),)
+
+
+def _get_debug_file_number():
+    dcm_debug = os.environ.get("DCM_DEBUG")
+    if dcm_debug:
+        try:
+            return int(dcm_debug)
+        except ValueError:
+            return None
+    return None
+
+
+def _load_debug_data(command_name: str, file_number: int):
+    results_dir = Path.cwd() / "results"
+
+    debug_file = results_dir / f"{command_name}{file_number}.json"
+
+    if not debug_file.exists():
+        raise FileNotFoundError(f"Debug file not found: {debug_file}")
+
+    with open(debug_file, "r") as f:
+        data = json.load(f)
+
+    if isinstance(data, list) and len(data) > 0:
+        if command_name in ("test", "refresh", "analyze"):
+            data = data[0]
+
+    return data
+
+
+def mock_dcm_response(command_name: str):
+    # testing utility to test different reporting styles on mocked responses without touching the backend
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args: Any, **kwargs: Any):
+            file_number = _get_debug_file_number()
+            if file_number is None:
+                return func(*args, **kwargs)
+
+            actual_command = "plan" if command_name == "deploy" else command_name
+            data = _load_debug_data(actual_command, file_number)
+
+            if data is None:
+                return func(*args, **kwargs)
+
+            cursor = FakeCursor(data)
+            reporter_mapping = {"refresh": RefreshReporter, "test": TestReporter}
+
+            reporter = reporter_mapping[command_name]()
+            reporter.process(cursor)
+            return EmptyResult()
+
+        return wrapper
+
+    return decorator
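Note: the decorator only takes effect when the DCM_DEBUG environment variable is set; otherwise the wrapped command runs normally. A minimal usage sketch, assuming a results/test1.json mock file in the working directory (the wrapped function and its argument below are hypothetical, not part of the package):

    import os
    from snowflake.cli._plugins.dcm.utils import mock_dcm_response

    @mock_dcm_response("test")
    def run_dcm_test(project_name: str):
        # Would normally hit the DCM backend; with DCM_DEBUG set, the wrapper
        # never calls this body and replays results/test<N>.json instead.
        ...

    os.environ["DCM_DEBUG"] = "1"  # load results/test1.json and feed it to TestReporter
    run_dcm_test("my_project")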
snowflake/cli/_plugins/git/manager.py
@@ -14,6 +14,7 @@
 
 from __future__ import annotations
 
+import re
 from pathlib import PurePosixPath
 from textwrap import dedent
 
@@ -69,6 +70,16 @@ class GitStagePathParts(StagePathParts):
 
 
 class GitManager(StageManager):
+    """
+    Git stage manager utilities.
+
+    The `_QUOTED_OR_TOKEN` regex matches either a quoted span (double quotes
+    included) or a run of non-slash characters. We use it to tokenize git stage
+    paths while preserving quoted repo or branch names that may contain slashes.
+    """
+
+    _QUOTED_OR_TOKEN = re.compile(r'"[^"]*"|[^/]+')
+
     @staticmethod
     def build_path(stage_path: str) -> StagePathParts:
         return StagePath.from_git_str(stage_path)
@@ -114,32 +125,23 @@ class GitManager(StageManager):
 
     @staticmethod
     def split_git_path(path: str):
-        # Check if path contains quotes and split it accordingly
-        if '/"' in path and '"/' in path:
-            if path.count('"') > 2:
+        match path.count('"'):
+            case 0:
+                return GitManager._split_path_without_empty_parts(path)
+            case 2 | 4:
+                tokens = GitManager._QUOTED_OR_TOKEN.findall(path)
+            case _:
                 raise UsageError(
-                    f'Invalid string {path}, too much " in path, expected 2.'
+                    f'Invalid path "{path}": expected 0, 2, or 4 double quotes.'
                 )
 
-            path_parts = path.split('"')
-            before_quoted_part = GitManager._split_path_without_empty_parts(
-                path_parts[0]
-            )
-
-            if path_parts[2] == "/":
-                after_quoted_part = []
+        parts = []
+        for token in tokens:
+            if token.startswith('"') and token.endswith('"'):
+                parts.append(token)
             else:
-                after_quoted_part = GitManager._split_path_without_empty_parts(
-                    path_parts[2]
-                )
-
-            return [
-                *before_quoted_part,
-                f'"{path_parts[1]}"',
-                *after_quoted_part,
-            ]
-        else:
-            return GitManager._split_path_without_empty_parts(path)
+                parts.extend(GitManager._split_path_without_empty_parts(token))
+        return parts
 
     @staticmethod
     def _split_path_without_empty_parts(path: str):
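To see what the new tokenizer does with a quoted segment, here is a small standalone sketch of the same pattern; the input path is made up:

    import re

    # Same pattern the diff adds as GitManager._QUOTED_OR_TOKEN: a quoted span
    # (quotes kept) or a run of characters up to the next slash.
    quoted_or_token = re.compile(r'"[^"]*"|[^/]+')

    path = 'repo/branches/"feature/login-fix"/src/app.py'
    print(quoted_or_token.findall(path))
    # ['repo', 'branches', '"feature/login-fix"', 'src', 'app.py']

split_git_path then keeps the quoted tokens intact and re-splits the unquoted ones, and it now accepts four quotes as well, presumably to allow two quoted segments in one path.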
snowflake/cli/_plugins/object/command_aliases.py
@@ -27,6 +27,7 @@ from snowflake.cli._plugins.object.commands import (
     scope_option,  # noqa: F401
     terse_option_,
 )
+from snowflake.cli.api.commands.flags import IfExistsOption
 from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
 from snowflake.cli.api.constants import ObjectType
 from snowflake.cli.api.identifiers import FQN
@@ -90,10 +91,15 @@ def add_object_command_aliases(
     if "drop" not in ommit_commands:
 
         @app.command("drop", requires_connection=True)
-        def drop_cmd(name: FQN = name_argument, **options):
+        def drop_cmd(
+            name: FQN = name_argument,
+            if_exists: bool = IfExistsOption(),
+            **options,
+        ):
             return drop(
                 object_type=object_type.value.cli_name,
                 object_name=name,
+                if_exists=if_exists,
                 **options,
             )
 
snowflake/cli/_plugins/object/commands.py
@@ -21,6 +21,7 @@ from click import ClickException
 from snowflake.cli._plugins.object.manager import ObjectManager
 from snowflake.cli.api.commands.flags import (
     IdentifierType,
+    IfExistsOption,
     IfNotExistsOption,
     ReplaceOption,
     like_option,
@@ -148,8 +149,17 @@ def list_(
     help=f"Drops Snowflake object of given name and type. {SUPPORTED_TYPES_MSG}",
     requires_connection=True,
 )
-def drop(object_type: str = ObjectArgument, object_name: FQN = NameArgument, **options):
+def drop(
+    object_type: str = ObjectArgument,
+    object_name: FQN = NameArgument,
+    if_exists: bool = IfExistsOption(),
+    **options,
+):
+    return QueryResult(
+        ObjectManager().drop(
+            object_type=object_type, fqn=object_name, if_exists=if_exists
+        )
+    )
-    return QueryResult(ObjectManager().drop(object_type=object_type, fqn=object_name))
 
 
 # Image repository is the only supported object that does not have a DESCRIBE command.
snowflake/cli/_plugins/object/manager.py
@@ -65,9 +65,14 @@ class ObjectManager(SqlExecutionMixin):
             query += f" limit {limit}"
         return self.execute_query(query, **kwargs)
 
-    def drop(self, *, object_type: str, fqn: FQN) -> SnowflakeCursor:
+    def drop(
+        self, *, object_type: str, fqn: FQN, if_exists: bool = False
+    ) -> SnowflakeCursor:
         object_name = _get_object_names(object_type).sf_name
-        return self.execute_query(f"drop {object_name} {fqn.sql_identifier}")
+        if_exists_clause = " if exists" if if_exists else ""
+        return self.execute_query(
+            f"drop {object_name}{if_exists_clause} {fqn.sql_identifier}"
+        )
 
     def describe(self, *, object_type: str, fqn: FQN, **kwargs):
         # Image repository is the only supported object that does not have a DESCRIBE command.
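The effect of the new if_exists branch is easiest to see on the generated SQL. A standalone sketch of the same string construction (the object type and identifier below are made up):

    object_name = "table"
    fqn_sql_identifier = "MY_DB.MY_SCHEMA.MY_TABLE"  # stands in for fqn.sql_identifier

    for if_exists in (False, True):
        if_exists_clause = " if exists" if if_exists else ""
        print(f"drop {object_name}{if_exists_clause} {fqn_sql_identifier}")
    # drop table MY_DB.MY_SCHEMA.MY_TABLE
    # drop table if exists MY_DB.MY_SCHEMA.MY_TABLE

The same flag is threaded through the generic object drop command and the per-plugin drop aliases above, so those commands can tolerate an already-missing object when the new option is passed.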
snowflake/cli/_plugins/snowpark/commands.py
@@ -71,6 +71,7 @@ from snowflake.cli.api.commands.decorators import (
 )
 from snowflake.cli.api.commands.flags import (
     ForceReplaceOption,
+    IfExistsOption,
     PruneOption,
     ReplaceOption,
     execution_identifier_argument,
@@ -462,10 +463,16 @@ def list_(
 def drop(
     object_type: SnowparkObject = ObjectTypeArgument,
     identifier: FQN = IdentifierArgument,
+    if_exists: bool = IfExistsOption(),
     **options,
 ):
     """Drop procedure or function."""
-    return object_drop(object_type=object_type.value, object_name=identifier, **options)
+    return object_drop(
+        object_type=object_type.value,
+        object_name=identifier,
+        if_exists=if_exists,
+        **options,
+    )
 
 
 @app.command("describe", requires_connection=True)
snowflake/cli/_plugins/snowpark/package/commands.py
@@ -183,7 +183,7 @@ def package_create(
            f"""
            The package {name} is successfully created, but depends on the following
            Anaconda libraries. They need to be included in project requirements,
-            as their are not included in .zip.
+            as they are not included in the .zip.
            """
        )
        message += "\n".join(
snowflake/cli/_plugins/streamlit/commands.py
@@ -127,9 +127,17 @@ def _default_file_callback(param_name: str):
     return _check_file_exists_if_not_default
 
 
+LegacyOption = typer.Option(
+    False,
+    "--legacy",
+    help="Use legacy ROOT_LOCATION SQL syntax.",
+    is_flag=True,
+)
+
+
 @app.command("deploy", requires_connection=True)
 @with_project_definition()
-@with_experimental_behaviour()
+@with_experimental_behaviour()  # Kept for backward compatibility
 def streamlit_deploy(
     replace: bool = ReplaceOption(
         help="Replaces the Streamlit app if it already exists. It only uploads new and overwrites existing files, "
@@ -138,16 +146,27 @@ def streamlit_deploy(
     prune: bool = PruneOption(),
     entity_id: str = entity_argument("streamlit"),
     open_: bool = OpenOption,
+    legacy: bool = LegacyOption,
     **options,
 ) -> CommandResult:
     """
     Deploys a Streamlit app defined in the project definition file (snowflake.yml). By default, the command uploads
-    environment.yml and any other pages or folders, if present. If you dont specify a stage name, the `streamlit`
+    environment.yml and any other pages or folders, if present. If you don't specify a stage name, the `streamlit`
     stage is used. If the specified stage does not exist, the command creates it. If multiple Streamlits are defined
     in snowflake.yml and no entity_id is provided then command will raise an error.
     """
 
     cli_context = get_cli_context()
+    workspace_ctx = _get_current_workspace_context()
+
+    # Handle deprecated --experimental flag for backward compatibility
+    if options.get("experimental"):
+        workspace_ctx.console.warning(
+            "[Deprecation] The --experimental flag is deprecated. "
+            "Versioned deployment is now the default behavior. "
+            "This flag will be removed in a future version."
+        )
+
     pd = cli_context.project_definition
     if not pd.meets_version_requirement("2"):
         if not pd.streamlit:
@@ -163,7 +182,7 @@
            project_definition=pd,
            entity_type=ObjectType.STREAMLIT.value.cli_name,
        ),
-        workspace_ctx=_get_current_workspace_context(),
+        workspace_ctx=workspace_ctx,
    )
 
    url = streamlit.perform(
@@ -173,7 +192,7 @@
        ),
        _open=open_,
        replace=replace,
-        experimental=options.get("experimental"),
+        legacy=legacy,
        prune=prune,
    )
 
snowflake/cli/_plugins/streamlit/streamlit_entity.py
@@ -15,7 +15,7 @@ from snowflake.cli._plugins.workspace.context import ActionContext
 from snowflake.cli.api.artifacts.bundle_map import BundleMap
 from snowflake.cli.api.entities.common import EntityBase
 from snowflake.cli.api.entities.utils import EntityActions, sync_deploy_root_with_stage
-from snowflake.cli.api.feature_flags import FeatureFlag as GlobalFeatureFlag
+from snowflake.cli.api.exceptions import CliError
 from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.project.project_paths import bundle_root
 from snowflake.cli.api.project.schemas.entities.common import Identifier, PathMapping
@@ -66,15 +66,20 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             self._conn, f"/#/streamlit-apps/{name.url_identifier}"
         )
 
-    def _is_spcs_runtime_v2_mode(self, experimental: bool = False) -> bool:
+    def _is_spcs_runtime_v2_mode(self) -> bool:
         """Check if SPCS runtime v2 mode is enabled."""
         return (
-            experimental
-            and self.model.runtime_name == SPCS_RUNTIME_V2_NAME
-            and self.model.compute_pool
+            self.model.runtime_name == SPCS_RUNTIME_V2_NAME and self.model.compute_pool
         )
 
     def bundle(self, output_dir: Optional[Path] = None) -> BundleMap:
+        artifacts = list(self._entity_model.artifacts or [])
+
+        # Ensure main_file is included in artifacts
+        main_file = self._entity_model.main_file
+        if main_file and not any(artifact.src == main_file for artifact in artifacts):
+            artifacts.insert(0, PathMapping(src=main_file))
+
         return build_bundle(
             self.root,
             output_dir or bundle_root(self.root, "streamlit") / self.entity_id,
@@ -82,7 +87,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
                 PathMapping(
                     src=artifact.src, dest=artifact.dest, processors=artifact.processors
                 )
-                for artifact in self._entity_model.artifacts
+                for artifact in artifacts
             ],
         )
 
@@ -93,7 +98,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         replace: bool,
         prune: bool = False,
         bundle_map: Optional[BundleMap] = None,
-        experimental: bool = False,
+        legacy: bool = False,
         *args,
         **kwargs,
     ):
@@ -104,49 +109,40 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
 
         console = self._workspace_ctx.console
         console.step(f"Checking if object exists")
-        if self._object_exists() and not replace:
+        object_exists = self._object_exists()
+
+        if object_exists and not replace:
             raise ClickException(
                 f"Streamlit {self.model.fqn.sql_identifier} already exists. Use 'replace' option to overwrite."
             )
 
-        if (
-            experimental
-            or GlobalFeatureFlag.ENABLE_STREAMLIT_VERSIONED_STAGE.is_enabled()
-        ):
-            self._deploy_experimental(bundle_map=bundle_map, replace=replace)
-        else:
-            console.step(f"Uploading artifacts to stage {self.model.stage}")
-
-            # We use a static method from StageManager here, but maybe this logic could be implemented elswhere, as we implement entities?
-            name = (
-                self.model.identifier.name
-                if isinstance(self.model.identifier, Identifier)
-                else self.model.identifier or self.entity_id
-            )
-            stage_root = StageManager.get_standard_stage_prefix(
-                f"{FQN.from_string(self.model.stage).using_connection(self._conn)}/{name}"
-            )
-            sync_deploy_root_with_stage(
-                console=self._workspace_ctx.console,
-                deploy_root=bundle_map.deploy_root(),
-                bundle_map=bundle_map,
-                prune=prune,
-                recursive=True,
-                stage_path_parts=StageManager().stage_path_parts_from_str(stage_root),
-                print_diff=True,
+        if legacy and self._is_spcs_runtime_v2_mode():
+            raise CliError(
+                "runtime_name and compute_pool are not compatible with --legacy flag. "
+                "Please remove the --legacy flag to use versioned deployment, or remove "
+                "runtime_name and compute_pool from your snowflake.yml to use legacy deployment."
            )
 
-            console.step(f"Creating Streamlit object {self.model.fqn.sql_identifier}")
-
-            self._execute_query(
-                self.get_deploy_sql(
-                    replace=replace,
-                    from_stage_name=stage_root,
-                    experimental=False,
+        # Warn if replacing with a different deployment style
+        if object_exists and replace:
+            existing_is_legacy = self._is_legacy_deployment()
+            if existing_is_legacy and not legacy:
+                console.warning(
+                    "Replacing legacy ROOT_LOCATION deployment with versioned deployment. "
+                    "Files from the old stage location will not be automatically migrated. "
+                    "The new deployment will use a separate versioned stage location."
+                )
+            elif not existing_is_legacy and legacy:
+                console.warning(
+                    "Deployment style is changing from versioned to legacy. "
+                    "Your existing files will remain in the versioned stage. "
+                    "If needed, manually copy any additional files to the legacy stage after deployment."
                )
-            )
 
-            StreamlitManager(connection=self._conn).grant_privileges(self.model)
+        if legacy:
+            self._deploy_legacy(bundle_map=bundle_map, replace=replace, prune=prune)
+        else:
+            self._deploy_versioned(bundle_map=bundle_map, replace=replace, prune=prune)
 
         return self.perform(EntityActions.GET_URL, action_context, *args, **kwargs)
 
@@ -172,7 +168,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         artifacts_dir: Optional[Path] = None,
         schema: Optional[str] = None,
         database: Optional[str] = None,
-        experimental: bool = False,
+        legacy: bool = False,
         *args,
         **kwargs,
     ) -> str:
@@ -218,7 +214,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
 
         # SPCS runtime fields are only supported for FBE/versioned streamlits (FROM syntax)
        # Never add these fields for stage-based deployments (ROOT_LOCATION syntax)
-        if not from_stage_name and self._is_spcs_runtime_v2_mode(experimental):
+        if not from_stage_name and not legacy and self._is_spcs_runtime_v2_mode():
             query += f"\nRUNTIME_NAME = '{self.model.runtime_name}'"
             query += f"\nCOMPUTE_POOL = '{self.model.compute_pool}'"
 
@@ -249,14 +245,61 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         except ProgrammingError:
             return False
 
-    def _deploy_experimental(
+    def _is_legacy_deployment(self) -> bool:
+        """Check if the existing streamlit uses legacy ROOT_LOCATION deployment."""
+        try:
+            result = self.describe().fetchone()
+            # Versioned deployments have live_version_location_uri, legacy ones don't
+            return result.get("live_version_location_uri") is None
+        except (ProgrammingError, AttributeError, KeyError):
+            # If we can't determine, assume it doesn't exist or is inaccessible
+            return False
+
+    def _deploy_legacy(
+        self, bundle_map: BundleMap, replace: bool = False, prune: bool = False
+    ):
+        console = self._workspace_ctx.console
+        console.step(f"Uploading artifacts to stage {self.model.stage}")
+
+        # We use a static method from StageManager here, but maybe this logic could be implemented elswhere, as we implement entities?
+        name = (
+            self.model.identifier.name
+            if isinstance(self.model.identifier, Identifier)
+            else self.model.identifier or self.entity_id
+        )
+        stage_root = StageManager.get_standard_stage_prefix(
+            f"{FQN.from_string(self.model.stage).using_connection(self._conn)}/{name}"
+        )
+        sync_deploy_root_with_stage(
+            console=self._workspace_ctx.console,
+            deploy_root=bundle_map.deploy_root(),
+            bundle_map=bundle_map,
+            prune=prune,
+            recursive=True,
+            stage_path_parts=StageManager().stage_path_parts_from_str(stage_root),
+            print_diff=True,
+        )
+
+        console.step(f"Creating Streamlit object {self.model.fqn.sql_identifier}")
+
+        self._execute_query(
+            self.get_deploy_sql(
+                replace=replace,
+                from_stage_name=stage_root,
+                legacy=True,
+            )
+        )
+
+        StreamlitManager(connection=self._conn).grant_privileges(self.model)
+
+    def _deploy_versioned(
         self, bundle_map: BundleMap, replace: bool = False, prune: bool = False
    ):
         self._execute_query(
             self.get_deploy_sql(
                 if_not_exists=True,
                 replace=replace,
-                experimental=True,
+                legacy=False,
             )
         )
         try:
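For context, the warning logic above hinges on _is_legacy_deployment, which inspects the existing object. A tiny illustration of that heuristic, with plain dicts standing in for DESCRIBE STREAMLIT rows (the field name comes from the code above; the example rows are made up):

    legacy_row = {"name": "MY_APP", "live_version_location_uri": None}
    versioned_row = {"name": "MY_APP", "live_version_location_uri": "versioned-stage-uri"}

    def is_legacy(row) -> bool:
        # Mirrors the check in _is_legacy_deployment: no live version URI means
        # the app was created with the ROOT_LOCATION (stage-based) syntax.
        return row.get("live_version_location_uri") is None

    print(is_legacy(legacy_row))      # True  -> existing app is a legacy deployment
    print(is_legacy(versioned_row))   # False -> existing app already uses versioned deployment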
snowflake/cli/api/commands/decorators.py
@@ -163,7 +163,7 @@ def _options_decorator_factory(
 ):
     """
     execute_before_command_using_new_options executes before command telemetry has been emitted,
-    but after command line options have been populated.
+    but after command-line options have been populated.
     """
 
     @wraps(func)
snowflake/cli/api/commands/flags.py
@@ -28,13 +28,18 @@ from snowflake.cli.api.cli_global_context import (
 from snowflake.cli.api.commands.common import OnErrorType
 from snowflake.cli.api.commands.overrideable_parameter import OverrideableOption
 from snowflake.cli.api.commands.utils import parse_key_value_variables
-from snowflake.cli.api.config import get_all_connections, get_config_value
+from snowflake.cli.api.config import (
+    get_all_connections,
+    get_config_value,
+    get_env_value,
+)
 from snowflake.cli.api.connections import ConnectionContext
 from snowflake.cli.api.console import cli_console
 from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.output.formats import OutputFormat
 from snowflake.cli.api.secret import SecretType
 from snowflake.cli.api.stage_path import StagePath
+from snowflake.cli.api.utils.types import try_cast_to_int
 from snowflake.connector.auth.workload_identity import ApiFederatedAuthenticationType
 
 DEFAULT_CONTEXT_SETTINGS = {"help_option_names": ["--help", "-h"]}
@@ -97,7 +102,7 @@ TemporaryConnectionOption = typer.Option(
     False,
     "--temporary-connection",
     "-x",
-    help="Uses a connection defined with command line parameters, instead of one defined in config",
+    help="Uses a connection defined with command-line parameters, instead of one defined in config",
     callback=_connection_callback("temporary_connection"),
     is_flag=True,
     rich_help_panel=_CONNECTION_SECTION,
@@ -515,16 +520,36 @@ EnhancedExitCodesOption = typer.Option(
 )
 
 
-def _decimal_precision_callback(value: int | None):
+def _decimal_precision_callback(value: int | str | None):
     """Callback to set decimal precision globally when provided."""
     if value is None:
         try:
-            value = get_config_value(key="decimal_precision", default=None)
+            value = get_config_value("cli", key="decimal_precision", default=None)
         except Exception:
             pass
 
+    # env variable name and it's expected location within config file got inconsistent, so we
+    # need to handle this extra pattern
+    env_variable = get_env_value(key="decimal_precision")
+    if env_variable:
+        value = env_variable
+
     if value is not None:
-        getcontext().prec = value
+        try:
+            int_value = try_cast_to_int(value)
+        except ValueError:
+            raise ClickException(
+                f"Invalid value for decimal_precision: '{value}'. Must be a positive integer."
+            )
+
+        if int_value <= 0:
+            raise ClickException(
+                f"Invalid value for decimal_precision: '{value}'. Must be a positive integer."
+            )
+
+        getcontext().prec = int_value
+        return int_value
+
     return value
 
 
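For context, the precision value that finally reaches getcontext().prec controls how many significant digits Decimal arithmetic keeps when the CLI formats numeric output. A quick illustration with an arbitrary value:

    from decimal import Decimal, getcontext

    getcontext().prec = 4              # what the callback ultimately sets
    print(Decimal(1) / Decimal(3))     # 0.3333 -- four significant digits

The reworked callback also accepts the value as a string, whether it comes from the config file's cli section or from the environment variable handled by get_env_value, and rejects anything that is not a positive integer.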
snowflake/cli/api/console/abc.py
@@ -16,7 +16,7 @@ from __future__ import annotations
 
 from abc import ABC, abstractmethod
 from contextlib import contextmanager
-from typing import Callable, Iterator, Optional
+from typing import Any, Callable, Iterator, Optional
 
 from rich import print as rich_print
 from rich.jupyter import JupyterMixin
@@ -61,10 +61,10 @@ class AbstractConsole(ABC):
         """Indicated whether output should be grouped."""
         return self._in_phase
 
-    def _print(self, text: JupyterMixin):
+    def _print(self, text: JupyterMixin, end: str = "\n"):
         if self.is_silent:
             return
-        rich_print(text)
+        rich_print(text, end=end)
 
     @contextmanager
     @abstractmethod
@@ -109,3 +109,7 @@
         """
         A context manager for indicating a long-running operation.
         """
+
+    @abstractmethod
+    def styled_message(self, message: str, style: Any):
+        """Displays a message with provided style."""
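How a concrete console satisfies the new abstract method is not shown in this excerpt (the real implementation lives in console.py, whose hunks are not included above). The sketch below is only an assumption of the general shape, using rich directly; DemoConsole is a stand-in, not the package's CliConsole:

    from rich import print as rich_print
    from rich.text import Text

    class DemoConsole:
        """Illustrative console; not part of snowflake-cli."""

        def _print(self, text, end: str = "\n"):
            rich_print(text, end=end)

        def styled_message(self, message: str, style):
            # Render the message once, in the requested rich style.
            self._print(Text(message, style=style))

    DemoConsole().styled_message("Deployment complete", style="green")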