snowflake-cli 3.12.0__py3-none-any.whl → 3.13.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Files changed (29)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/cli_app.py +43 -0
  3. snowflake/cli/_app/commands_registration/builtin_plugins.py +1 -1
  4. snowflake/cli/_app/commands_registration/command_plugins_loader.py +14 -1
  5. snowflake/cli/_app/telemetry.py +25 -10
  6. snowflake/cli/_plugins/auth/__init__.py +0 -2
  7. snowflake/cli/_plugins/connection/commands.py +1 -78
  8. snowflake/cli/_plugins/dbt/commands.py +15 -19
  9. snowflake/cli/_plugins/dbt/constants.py +1 -1
  10. snowflake/cli/_plugins/dbt/manager.py +137 -66
  11. snowflake/cli/_plugins/nativeapp/entities/application_package.py +4 -1
  12. snowflake/cli/_plugins/object/manager.py +1 -0
  13. snowflake/cli/_plugins/spcs/services/commands.py +19 -1
  14. snowflake/cli/_plugins/spcs/services/manager.py +12 -0
  15. snowflake/cli/_plugins/spcs/services/service_entity_model.py +5 -0
  16. snowflake/cli/_plugins/streamlit/streamlit_entity.py +28 -2
  17. snowflake/cli/_plugins/streamlit/streamlit_entity_model.py +24 -4
  18. snowflake/cli/api/commands/decorators.py +7 -0
  19. snowflake/cli/api/commands/flags.py +24 -1
  20. snowflake/cli/api/feature_flags.py +3 -3
  21. {snowflake_cli-3.12.0.dist-info → snowflake_cli-3.13.0.dist-info}/METADATA +4 -4
  22. {snowflake_cli-3.12.0.dist-info → snowflake_cli-3.13.0.dist-info}/RECORD +26 -29
  23. snowflake/cli/_plugins/auth/keypair/__init__.py +0 -0
  24. snowflake/cli/_plugins/auth/keypair/commands.py +0 -153
  25. snowflake/cli/_plugins/auth/keypair/manager.py +0 -331
  26. snowflake/cli/_plugins/auth/{keypair/plugin_spec.py → plugin_spec.py} +0 -0
  27. {snowflake_cli-3.12.0.dist-info → snowflake_cli-3.13.0.dist-info}/WHEEL +0 -0
  28. {snowflake_cli-3.12.0.dist-info → snowflake_cli-3.13.0.dist-info}/entry_points.txt +0 -0
  29. {snowflake_cli-3.12.0.dist-info → snowflake_cli-3.13.0.dist-info}/licenses/LICENSE +0 -0
snowflake/cli/_plugins/dbt/manager.py
@@ -17,7 +17,7 @@ from __future__ import annotations
 from collections import defaultdict
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from typing import List, Optional, TypedDict
+from typing import Dict, List, Optional, TypedDict
 
 import yaml
 from snowflake.cli._plugins.dbt.constants import PROFILES_FILENAME
@@ -35,6 +35,7 @@ from snowflake.connector.errors import ProgrammingError
 
 class DBTObjectEditableAttributes(TypedDict):
     default_target: Optional[str]
+    external_access_integrations: Optional[List[str]]
 
 
 class DBTManager(SqlExecutionMixin):
@@ -70,11 +71,25 @@ class DBTManager(SqlExecutionMixin):
         row = rows[0]
         # Convert row to dict using column names
-        columns = [desc[0] for desc in cursor.description]
+        columns = [desc[0].lower() for desc in cursor.description]
         row_dict = dict(zip(columns, row))
 
+        external_access_integrations = row_dict.get("external_access_integrations")
+        if external_access_integrations:
+            if isinstance(external_access_integrations, str):
+                external_access_integrations = [
+                    x.strip()
+                    for x in external_access_integrations.strip("[]").split(",")
+                    if x.strip()
+                ]
+            elif not isinstance(external_access_integrations, list):
+                external_access_integrations = None
+        else:
+            external_access_integrations = None
+
         return DBTObjectEditableAttributes(
-            default_target=row_dict.get("default_target")
+            default_target=row_dict.get("default_target"),
+            external_access_integrations=external_access_integrations,
         )
 
     def deploy(
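The string branch in `get_dbt_object_attributes` above covers DESCRIBE output where the integrations come back as a single bracketed, comma-separated string rather than a list. A minimal sketch of that parsing in isolation (the sample value is hypothetical):

    raw = "[PYPI_ACCESS_INTEGRATION, OTHER_EAI]"  # hypothetical DESCRIBE output
    parsed = [x.strip() for x in raw.strip("[]").split(",") if x.strip()]
    # parsed == ["PYPI_ACCESS_INTEGRATION", "OTHER_EAI"]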
@@ -86,6 +101,7 @@ class DBTManager(SqlExecutionMixin):
         default_target: Optional[str] = None,
         unset_default_target: bool = False,
         external_access_integrations: Optional[List[str]] = None,
+        install_local_deps: bool = False,
     ) -> SnowflakeCursor:
         dbt_project_path = path / "dbt_project.yml"
         if not dbt_project_path.exists():
@@ -125,7 +141,11 @@ class DBTManager(SqlExecutionMixin):
         with cli_console.phase("Creating DBT project"):
             if force is True:
                 return self._deploy_create_or_replace(
-                    fqn, stage_name, default_target, external_access_integrations
+                    fqn,
+                    stage_name,
+                    default_target,
+                    external_access_integrations,
+                    install_local_deps,
                 )
             else:
                 dbt_object_attributes = self.get_dbt_object_attributes(fqn)
@@ -137,10 +157,15 @@ class DBTManager(SqlExecutionMixin):
                         default_target,
                         unset_default_target,
                         external_access_integrations,
+                        install_local_deps,
                     )
                 else:
                     return self._deploy_create(
-                        fqn, stage_name, default_target, external_access_integrations
+                        fqn,
+                        stage_name,
+                        default_target,
+                        external_access_integrations,
+                        install_local_deps,
                     )
 
     def _deploy_alter(
@@ -151,51 +176,100 @@ class DBTManager(SqlExecutionMixin):
         default_target: Optional[str],
         unset_default_target: bool,
         external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
     ) -> SnowflakeCursor:
         query = f"ALTER DBT PROJECT {fqn} ADD VERSION"
         query += f"\nFROM {stage_name}"
-        query = self._handle_external_access_integrations_query(
-            query, external_access_integrations
-        )
         result = self.execute_query(query)
+
+        set_properties = []
+        unset_properties = []
+
         current_default_target = dbt_object_attributes.get("default_target")
         if unset_default_target and current_default_target is not None:
-            unset_query = f"ALTER DBT PROJECT {fqn} UNSET DEFAULT_TARGET"
-            self.execute_query(unset_query)
+            unset_properties.append("DEFAULT_TARGET")
         elif default_target and (
             current_default_target is None
             or current_default_target.lower() != default_target.lower()
         ):
-            set_default_query = (
-                f"ALTER DBT PROJECT {fqn} SET DEFAULT_TARGET='{default_target}'"
-            )
-            self.execute_query(set_default_query)
+            set_properties.append(f"DEFAULT_TARGET='{default_target}'")
+
+        current_external_access_integrations = dbt_object_attributes.get(
+            "external_access_integrations"
+        )
+        if self._should_update_external_access_integrations(
+            current_external_access_integrations,
+            external_access_integrations,
+            install_local_deps,
+        ):
+            if external_access_integrations:
+                integrations_str = ", ".join(sorted(external_access_integrations))
+                set_properties.append(
+                    f"EXTERNAL_ACCESS_INTEGRATIONS=({integrations_str})"
+                )
+            elif install_local_deps:
+                set_properties.append("EXTERNAL_ACCESS_INTEGRATIONS=()")
+
+        if set_properties or unset_properties:
+            self._execute_property_updates(fqn, set_properties, unset_properties)
+
         return result
 
+    @staticmethod
+    def _should_update_external_access_integrations(
+        current: Optional[List[str]],
+        requested: Optional[List[str]],
+        install_local_deps: bool,
+    ) -> bool:
+        if requested is not None:
+            current_set = set(current) if current else set()
+            requested_set = set(requested)
+            return current_set != requested_set
+        elif install_local_deps:
+            current_set = set(current) if current else set()
+            return current_set != set()
+        return False
+
+    def _execute_property_updates(
+        self, fqn: FQN, set_clauses: List[str], unset_properties: List[str]
+    ) -> None:
+        if set_clauses:
+            query = f"ALTER DBT PROJECT {fqn} SET {', '.join(set_clauses)}"
+            self.execute_query(query)
+
+        for property_name in unset_properties:
+            query = f"ALTER DBT PROJECT {fqn} UNSET {property_name}"
+            self.execute_query(query)
+
     def _deploy_create(
         self,
         fqn: FQN,
         stage_name: str,
         default_target: Optional[str],
         external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
     ) -> SnowflakeCursor:
-        # Project doesn't exist - create new one
         query = f"CREATE DBT PROJECT {fqn}"
         query += f"\nFROM {stage_name}"
         if default_target:
             query += f" DEFAULT_TARGET='{default_target}'"
         query = self._handle_external_access_integrations_query(
-            query, external_access_integrations
+            query, external_access_integrations, install_local_deps
         )
         return self.execute_query(query)
 
     @staticmethod
     def _handle_external_access_integrations_query(
-        query: str, external_access_integrations: Optional[List[str]]
+        query: str,
+        external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
    ) -> str:
+        # Providing external access integrations will trigger installation of local deps as well
        if external_access_integrations:
            integrations_str = ", ".join(external_access_integrations)
            query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ({integrations_str})"
+        elif install_local_deps:
+            query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ()"
        return query
 
    def _deploy_create_or_replace(
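The helper pair added above decides by set comparison (order- and duplicate-insensitive) and then batches all SET clauses into a single ALTER. A short sketch of both steps with hypothetical values:

    current = ["EAI_B", "EAI_A"]
    requested = ["EAI_A", "EAI_B"]
    needs_update = set(current) != set(requested)  # False: same integrations, order ignored

    # When an update is needed, the SET clauses are joined into one statement:
    set_clauses = ["DEFAULT_TARGET='dev'", "EXTERNAL_ACCESS_INTEGRATIONS=(EAI_A, EAI_B)"]
    query = f"ALTER DBT PROJECT db.sch.proj SET {', '.join(set_clauses)}"
    # ALTER DBT PROJECT db.sch.proj SET DEFAULT_TARGET='dev', EXTERNAL_ACCESS_INTEGRATIONS=(EAI_A, EAI_B)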
@@ -204,28 +278,27 @@ class DBTManager(SqlExecutionMixin):
         stage_name: str,
         default_target: Optional[str],
         external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
     ) -> SnowflakeCursor:
         query = f"CREATE OR REPLACE DBT PROJECT {fqn}"
         query += f"\nFROM {stage_name}"
         if default_target:
             query += f" DEFAULT_TARGET='{default_target}'"
         query = self._handle_external_access_integrations_query(
-            query, external_access_integrations
+            query, external_access_integrations, install_local_deps
         )
         return self.execute_query(query)
 
-    @staticmethod
     def _validate_profiles(
+        self,
         profiles_path: SecurePath,
-        target_profile: str,
+        profile_name: str,
         default_target: str | None = None,
    ) -> None:
        """
        Validates that:
        * profiles.yml exists
        * contain profile specified in dbt_project.yml
-        * no other profiles are defined there
-        * does not contain any confidential data like passwords
        * default_target (if specified) exists in the profile's outputs
        """
        profiles_file = profiles_path / PROFILES_FILENAME
@@ -236,62 +309,60 @@ class DBTManager(SqlExecutionMixin):
         with profiles_file.open(read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as fd:
             profiles = yaml.safe_load(fd)
 
-        if target_profile not in profiles:
+        if profile_name not in profiles:
             raise CliError(
-                f"profile {target_profile} is not defined in {PROFILES_FILENAME}"
+                f"Profile {profile_name} is not defined in {PROFILES_FILENAME}."
             )
 
         errors = defaultdict(list)
-        if len(profiles.keys()) > 1:
-            for profile_name in profiles.keys():
-                if profile_name.lower() != target_profile.lower():
-                    errors[profile_name].append("Remove unnecessary profiles")
+        profile = profiles[profile_name]
+        target_name = default_target or profile.get("target")
+        available_targets = set(profile["outputs"].keys())
+        if target_name in available_targets:
+            target = profile["outputs"][target_name]
+            target_errors = self._validate_target(target_name, target)
+            if target_errors:
+                errors[profile_name].extend(target_errors)
+        else:
+            available_targets_str = ", ".join(sorted(available_targets))
+            errors[profile_name].append(
+                f"Target '{target_name}' is not defined in profile '{profile_name}'. "
+                f"Available targets: {available_targets_str}"
+            )
+
+        if errors:
+            message = f"Found following errors in {PROFILES_FILENAME}. Please fix them before proceeding:"
+            for target, issues in errors.items():
+                message += f"\n{target}"
+                message += "\n * " + "\n * ".join(issues)
+            raise CliError(message)
 
+    def _validate_target(
+        self, target_name: str, target_details: Dict[str, str]
+    ) -> List[str]:
+        errors = []
         required_fields = {
-            "account",
             "database",
             "role",
             "schema",
             "type",
-            "user",
-            "warehouse",
         }
-        supported_fields = {
-            "threads",
-        }
-        for target_name, target in profiles[target_profile]["outputs"].items():
-            if missing_keys := required_fields - set(target.keys()):
-                errors[target_profile].append(
-                    f"Missing required fields: {', '.join(sorted(missing_keys))} in target {target_name}"
-                )
-            if (
-                unsupported_keys := set(target.keys())
-                - required_fields
-                - supported_fields
-            ):
-                errors[target_profile].append(
-                    f"Unsupported fields found: {', '.join(sorted(unsupported_keys))} in target {target_name}"
-                )
-            if "type" in target and target["type"].lower() != "snowflake":
-                errors[target_profile].append(
-                    f"Value for type field is invalid. Should be set to `snowflake` in target {target_name}"
-                )
-
-        if default_target is not None:
-            available_targets = set(profiles[target_profile]["outputs"].keys())
-            if default_target not in available_targets:
-                available_targets_str = ", ".join(sorted(available_targets))
-                errors["default_target"].append(
-                    f"Default target '{default_target}' is not defined in profile '{target_profile}'. "
-                    f"Available targets: {available_targets_str}"
-                )
+        if missing_keys := required_fields - set(target_details.keys()):
+            errors.append(
+                f"Missing required fields: {', '.join(sorted(missing_keys))} in target {target_name}"
+            )
+        if role := target_details.get("role"):
+            if not self._validate_role(role_name=role):
+                errors.append(f"Role '{role}' does not exist or is not accessible.")
+        return errors
 
-        if errors:
-            message = f"Found following errors in {PROFILES_FILENAME}. Please fix them before proceeding:"
-            for target, issues in errors.items():
-                message += f"\n{target}"
-                message += "\n * " + "\n * ".join(issues)
-            raise CliError(message)
+    def _validate_role(self, role_name: str) -> bool:
+        try:
+            with self.use_role(role_name):
+                self.execute_query("select 1")
+            return True
+        except ProgrammingError:
+            return False
 
     @staticmethod
     def _prepare_profiles_file(profiles_path: Path, tmp_path: Path):
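With this rework only four fields remain required per target, only the effective target is validated (not every output), and the old unsupported-field check is gone. A hypothetical target dict that now passes the field check:

    target_details = {  # a profiles.yml target as parsed by yaml.safe_load; names are hypothetical
        "type": "snowflake",
        "database": "MY_DB",
        "schema": "MY_SCHEMA",
        "role": "MY_ROLE",
        "threads": 4,  # extra keys are no longer rejected
    }
    required_fields = {"database", "role", "schema", "type"}
    missing = required_fields - set(target_details)  # empty set, so no error is recorded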
snowflake/cli/_plugins/nativeapp/entities/application_package.py
@@ -1066,7 +1066,10 @@ class ApplicationPackageEntity(EntityBase[ApplicationPackageEntityModel]):
 
         for version in free_versions:
             last_updated = last_updated_map[version]
-            if not oldest_version or last_updated < oldest_version_last_updated_on:
+            if not oldest_version or (
+                oldest_version_last_updated_on is not None
+                and last_updated < oldest_version_last_updated_on
+            ):
                 oldest_version = version
                 oldest_version_last_updated_on = last_updated
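The added guard ensures the comparison only runs when `oldest_version_last_updated_on` is an actual timestamp: in Python an ordering comparison against None raises rather than returning False. A minimal reproduction (assuming the values are datetimes, as the naming suggests):

    from datetime import datetime
    datetime.now() < None  # TypeError: '<' not supported between instances of 'datetime.datetime' and 'NoneType'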
snowflake/cli/_plugins/object/manager.py
@@ -137,3 +137,4 @@ def _handle_create_error_codes(err: Exception) -> None:
             raise ClickException(f"{err_code} internal server error.")
         case _:
             raise err
+    raise err
snowflake/cli/_plugins/spcs/services/commands.py
@@ -151,6 +151,14 @@ AutoResumeOption = OverrideableOption(
     help=_AUTO_RESUME_HELP,
 )
 
+_AUTO_SUSPEND_SECS_HELP = "Number of seconds of inactivity after which the service will be automatically suspended."
+AutoSuspendSecsOption = OverrideableOption(
+    None,
+    "--auto-suspend-secs",
+    help=_AUTO_SUSPEND_SECS_HELP,
+    min=0,
+)
+
 _COMMENT_HELP = "Comment for the service."
 
 add_object_command_aliases(
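`AutoSuspendSecsOption` is defined once and then re-parameterized per command: the `set_property` hunk below uses it as an integer-valued flag, while `unset_property` overrides it into a boolean reset switch. A standalone sketch of that dual use with plain typer (hypothetical command names; this is not the actual OverrideableOption implementation):

    import typer
    from typing import Optional

    app = typer.Typer()

    @app.command("set")
    def set_property(
        # value-style flag: takes a non-negative integer
        auto_suspend_secs: Optional[int] = typer.Option(None, "--auto-suspend-secs", min=0),
    ):
        ...

    @app.command("unset")
    def unset_property(
        # switch-style flag: presence alone requests resetting the property
        auto_suspend_secs: bool = typer.Option(False, "--auto-suspend-secs"),
    ):
        ...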
@@ -217,7 +225,7 @@ def deploy(
     upgrade: bool = typer.Option(
         False,
         "--upgrade",
-        help="Updates the existing service. Can update min_instances, max_instances, query_warehouse, auto_resume, external_access_integrations and comment.",
+        help="Updates the existing service. Can update min_instances, max_instances, query_warehouse, auto_resume, auto_suspend_secs, external_access_integrations and comment.",
     ),
     **options,
 ) -> CommandResult:
@@ -241,6 +249,7 @@ def deploy(
             min_instances=service.min_instances,
             max_instances=max_instances,
             auto_resume=service.auto_resume,
+            auto_suspend_secs=service.auto_suspend_secs,
             external_access_integrations=service.external_access_integrations,
             query_warehouse=service.query_warehouse,
             tags=service.tags,
@@ -529,6 +538,7 @@ def set_property(
     max_instances: Optional[int] = MaxInstancesOption(show_default=False),
     query_warehouse: Optional[str] = QueryWarehouseOption(show_default=False),
     auto_resume: Optional[bool] = AutoResumeOption(default=None, show_default=False),
+    auto_suspend_secs: Optional[int] = AutoSuspendSecsOption(show_default=False),
     external_access_integrations: Optional[List[str]] = typer.Option(
         None,
         "--eai-name",
@@ -546,6 +556,7 @@ def set_property(
         max_instances=max_instances,
         query_warehouse=query_warehouse,
         auto_resume=auto_resume,
+        auto_suspend_secs=auto_suspend_secs,
         external_access_integrations=external_access_integrations,
         comment=comment,
     )
@@ -576,6 +587,12 @@ def unset_property(
         help=f"Reset the AUTO_RESUME property - {_AUTO_RESUME_HELP}",
         show_default=False,
     ),
+    auto_suspend_secs: bool = AutoSuspendSecsOption(
+        default=False,
+        param_decls=["--auto-suspend-secs"],
+        help=f"Reset the AUTO_SUSPEND_SECS property - {_AUTO_SUSPEND_SECS_HELP}",
+        show_default=False,
+    ),
     comment: bool = CommentOption(
         default=False,
         help=f"Reset the COMMENT property - {_COMMENT_HELP}",
@@ -593,6 +610,7 @@ def unset_property(
         max_instances=max_instances,
         query_warehouse=query_warehouse,
         auto_resume=auto_resume,
+        auto_suspend_secs=auto_suspend_secs,
         comment=comment,
     )
     return SingleQueryResult(cursor)
snowflake/cli/_plugins/spcs/services/manager.py
@@ -114,6 +114,7 @@ class ServiceManager(SqlExecutionMixin):
         min_instances: int,
         max_instances: int,
         auto_resume: bool,
+        auto_suspend_secs: Optional[int],
         external_access_integrations: Optional[List[str]],
         query_warehouse: Optional[str],
         tags: Optional[List[Tag]],
@@ -139,6 +140,7 @@ class ServiceManager(SqlExecutionMixin):
             max_instances=max_instances,
             query_warehouse=query_warehouse,
             auto_resume=auto_resume,
+            auto_suspend_secs=auto_suspend_secs,
             external_access_integrations=external_access_integrations,
             comment=comment,
         )
@@ -163,6 +165,9 @@ class ServiceManager(SqlExecutionMixin):
         if max_instances:
             query.append(f"MAX_INSTANCES = {max_instances}")
 
+        if auto_suspend_secs is not None:
+            query.append(f"AUTO_SUSPEND_SECS = {auto_suspend_secs}")
+
         if query_warehouse:
             query.append(f"QUERY_WAREHOUSE = {query_warehouse}")
 
@@ -532,6 +537,7 @@ class ServiceManager(SqlExecutionMixin):
         max_instances: Optional[int],
         query_warehouse: Optional[str],
         auto_resume: Optional[bool],
+        auto_suspend_secs: Optional[int],
         external_access_integrations: Optional[List[str]],
         comment: Optional[str],
     ):
@@ -540,6 +546,7 @@ class ServiceManager(SqlExecutionMixin):
         ("max_instances", max_instances),
         ("query_warehouse", query_warehouse),
         ("auto_resume", auto_resume),
+        ("auto_suspend_secs", auto_suspend_secs),
         ("external_access_integrations", external_access_integrations),
         ("comment", comment),
     ]
@@ -563,6 +570,9 @@ class ServiceManager(SqlExecutionMixin):
         if auto_resume is not None:
             query.append(f" auto_resume = {auto_resume}")
 
+        if auto_suspend_secs is not None:
+            query.append(f" auto_suspend_secs = {auto_suspend_secs}")
+
         if external_access_integrations is not None:
             external_access_integration_list = ",".join(
                 f"{e}" for e in external_access_integrations
@@ -583,6 +593,7 @@ class ServiceManager(SqlExecutionMixin):
         max_instances: bool,
         query_warehouse: bool,
         auto_resume: bool,
+        auto_suspend_secs: bool,
         comment: bool,
     ):
         property_pairs = [
@@ -590,6 +601,7 @@ class ServiceManager(SqlExecutionMixin):
         ("max_instances", max_instances),
         ("query_warehouse", query_warehouse),
         ("auto_resume", auto_resume),
+        ("auto_suspend_secs", auto_suspend_secs),
         ("comment", comment),
     ]
 
snowflake/cli/_plugins/spcs/services/service_entity_model.py
@@ -30,6 +30,11 @@ class ServiceEntityModel(EntityModelBaseWithArtifacts, ExternalAccessBaseModel):
         title="The service will automatically resume when a service function or ingress is called.",
         default=True,
     )
+    auto_suspend_secs: Optional[int] = Field(
+        title="Number of seconds of inactivity after which the service is automatically suspended.",
+        default=None,
+        ge=0,
+    )
     query_warehouse: Optional[str] = Field(
         title="Warehouse to use if a service container connects to Snowflake to execute a query without explicitly specifying a warehouse to use",
         default=None,
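The `ge=0` bound rejects negative values at model-validation time, before any SQL is generated. A minimal standalone pydantic sketch of the same constraint (not the actual entity model, which mixes in several base classes):

    from typing import Optional
    from pydantic import BaseModel, Field, ValidationError

    class ServiceSketch(BaseModel):
        auto_suspend_secs: Optional[int] = Field(default=None, ge=0)

    ServiceSketch(auto_suspend_secs=600)  # ok
    try:
        ServiceSketch(auto_suspend_secs=-1)
    except ValidationError as err:
        print(err)  # "Input should be greater than or equal to 0"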
snowflake/cli/_plugins/streamlit/streamlit_entity.py
@@ -6,7 +6,9 @@ from click import ClickException
 from snowflake.cli._plugins.connection.util import make_snowsight_url
 from snowflake.cli._plugins.nativeapp.artifacts import build_bundle
 from snowflake.cli._plugins.stage.manager import StageManager
+from snowflake.cli._plugins.streamlit.manager import StreamlitManager
 from snowflake.cli._plugins.streamlit.streamlit_entity_model import (
+    SPCS_RUNTIME_V2_NAME,
     StreamlitEntityModel,
 )
 from snowflake.cli._plugins.workspace.context import ActionContext
@@ -64,6 +66,14 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             self._conn, f"/#/streamlit-apps/{name.url_identifier}"
         )
 
+    def _is_spcs_runtime_v2_mode(self, experimental: bool = False) -> bool:
+        """Check if SPCS runtime v2 mode is enabled."""
+        return (
+            experimental
+            and self.model.runtime_name == SPCS_RUNTIME_V2_NAME
+            and self.model.compute_pool
+        )
+
     def bundle(self, output_dir: Optional[Path] = None) -> BundleMap:
         return build_bundle(
             self.root,
@@ -83,7 +93,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         replace: bool,
         prune: bool = False,
         bundle_map: Optional[BundleMap] = None,
-        experimental: Optional[bool] = False,
+        experimental: bool = False,
         *args,
         **kwargs,
     ):
@@ -129,9 +139,15 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         console.step(f"Creating Streamlit object {self.model.fqn.sql_identifier}")
 
         self._execute_query(
-            self.get_deploy_sql(replace=replace, from_stage_name=stage_root)
+            self.get_deploy_sql(
+                replace=replace,
+                from_stage_name=stage_root,
+                experimental=False,
+            )
         )
 
+        StreamlitManager(connection=self._conn).grant_privileges(self.model)
+
         return self.perform(EntityActions.GET_URL, action_context, *args, **kwargs)
 
     def describe(self) -> SnowflakeCursor:
@@ -156,6 +172,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         artifacts_dir: Optional[Path] = None,
         schema: Optional[str] = None,
         database: Optional[str] = None,
+        experimental: bool = False,
         *args,
         **kwargs,
     ) -> str:
@@ -199,6 +216,12 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         if self.model.secrets:
             query += "\n" + self.model.get_secrets_sql()
 
+        # SPCS runtime fields are only supported for FBE/versioned streamlits (FROM syntax)
+        # Never add these fields for stage-based deployments (ROOT_LOCATION syntax)
+        if not from_stage_name and self._is_spcs_runtime_v2_mode(experimental):
+            query += f"\nRUNTIME_NAME = '{self.model.runtime_name}'"
+            query += f"\nCOMPUTE_POOL = '{self.model.compute_pool}'"
+
         return query + ";"
 
     def get_describe_sql(self) -> str:
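When the runtime-v2 branch is taken, the generated statement simply gains two trailing clauses built from the model fields; with the SPCS_RUNTIME_V2_NAME constant from the model hunk below and a hypothetical pool name, the end of the statement would read:

    RUNTIME_NAME = 'SYSTEM$ST_CONTAINER_RUNTIME_PY3_11'
    COMPUTE_POOL = 'MY_POOL';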
@@ -233,6 +256,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             self.get_deploy_sql(
                 if_not_exists=True,
                 replace=replace,
+                experimental=True,
             )
         )
         try:
@@ -256,3 +280,5 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             print_diff=True,
             force_overwrite=True,  # files copied to streamlit vstage need to be overwritten
         )
+
+        StreamlitManager(connection=self._conn).grant_privileges(self.model)
snowflake/cli/_plugins/streamlit/streamlit_entity_model.py
@@ -15,7 +15,7 @@ from __future__ import annotations
 
 from typing import Literal, Optional
 
-from pydantic import Field
+from pydantic import Field, model_validator
 from snowflake.cli.api.project.schemas.entities.common import (
     Artifacts,
     EntityModelBaseWithArtifacts,
@@ -23,9 +23,10 @@ from snowflake.cli.api.project.schemas.entities.common import (
     GrantBaseModel,
     ImportsBaseModel,
 )
-from snowflake.cli.api.project.schemas.updatable_model import (
-    DiscriminatorField,
-)
+from snowflake.cli.api.project.schemas.updatable_model import DiscriminatorField
+
+# SPCS Runtime v2 constants
+SPCS_RUNTIME_V2_NAME = "SYSTEM$ST_CONTAINER_RUNTIME_PY3_11"
 
 
 class StreamlitEntityModel(
@@ -54,3 +55,22 @@ class StreamlitEntityModel(
         title="List of paths or file source/destination pairs to add to the deploy root",
         default=None,
     )
+    runtime_name: Optional[str] = Field(
+        title="The runtime name to run the streamlit app on", default=None
+    )
+    compute_pool: Optional[str] = Field(
+        title="The compute pool name of the snowservices running the streamlit app",
+        default=None,
+    )
+
+    @model_validator(mode="after")
+    def validate_spcs_runtime_fields(self):
+        """Validate that runtime_name and compute_pool are provided together for SPCS container runtime."""
+        # Only validate for SPCS container runtime, not warehouse runtime
+        if self.compute_pool and not self.runtime_name:
+            raise ValueError("compute_pool is specified without runtime_name")
+        if self.runtime_name == SPCS_RUNTIME_V2_NAME and not self.compute_pool:
+            raise ValueError(
+                f"compute_pool is required when using {SPCS_RUNTIME_V2_NAME}"
+            )
+        return self
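The validator enforces the pairing in both directions: a compute pool always needs a runtime name, and the v2 container runtime always needs a compute pool. A standalone sketch of the resulting behavior (same rules, but not the actual model, which mixes in several base classes; the pool name is hypothetical):

    from typing import Optional
    from pydantic import BaseModel, model_validator

    SPCS_RUNTIME_V2_NAME = "SYSTEM$ST_CONTAINER_RUNTIME_PY3_11"

    class RuntimeSketch(BaseModel):
        runtime_name: Optional[str] = None
        compute_pool: Optional[str] = None

        @model_validator(mode="after")
        def check_pairing(self):
            if self.compute_pool and not self.runtime_name:
                raise ValueError("compute_pool is specified without runtime_name")
            if self.runtime_name == SPCS_RUNTIME_V2_NAME and not self.compute_pool:
                raise ValueError(f"compute_pool is required when using {SPCS_RUNTIME_V2_NAME}")
            return self

    RuntimeSketch(runtime_name=SPCS_RUNTIME_V2_NAME, compute_pool="MY_POOL")  # ok
    RuntimeSketch(runtime_name="OTHER_RUNTIME")  # ok: the rule only binds the v2 runtime
    RuntimeSketch(compute_pool="MY_POOL")        # raises: compute_pool without runtime_name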
snowflake/cli/api/commands/decorators.py
@@ -27,6 +27,7 @@ from snowflake.cli.api.commands.flags import (
     ConnectionOption,
     DatabaseOption,
     DebugOption,
+    DecimalPrecisionOption,
     DiagAllowlistPathOption,
     DiagLogPathOption,
     EnableDiagOption,
@@ -446,6 +447,12 @@ GLOBAL_OPTIONS = [
         annotation=Optional[bool],
         default=EnhancedExitCodesOption,
     ),
+    inspect.Parameter(
+        "decimal_precision",
+        inspect.Parameter.KEYWORD_ONLY,
+        annotation=Optional[int],
+        default=DecimalPrecisionOption,
+    ),
 ]
 
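Each GLOBAL_OPTIONS entry is a synthetic `inspect.Parameter` that the command decorator splices into a command's signature so the flag shows up on every command. A minimal standalone illustration of the mechanism (the function and the None default are hypothetical; the real list uses DecimalPrecisionOption as the default):

    import inspect
    from typing import Optional

    def cmd(name: str):
        ...

    extra = inspect.Parameter(
        "decimal_precision",
        inspect.Parameter.KEYWORD_ONLY,
        annotation=Optional[int],
        default=None,
    )
    sig = inspect.signature(cmd)
    cmd.__signature__ = sig.replace(parameters=[*sig.parameters.values(), extra])
    print(inspect.signature(cmd))  # (name: str, *, decimal_precision: Optional[int] = None)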