snowflake-cli 3.9.1-py3-none-any.whl → 3.10.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/commands_registration/builtin_plugins.py +2 -2
  3. snowflake/cli/_app/printing.py +53 -13
  4. snowflake/cli/_app/snow_connector.py +1 -0
  5. snowflake/cli/_app/telemetry.py +2 -0
  6. snowflake/cli/_app/version_check.py +73 -6
  7. snowflake/cli/_plugins/cortex/commands.py +8 -3
  8. snowflake/cli/_plugins/cortex/manager.py +24 -20
  9. snowflake/cli/_plugins/dbt/commands.py +5 -2
  10. snowflake/cli/_plugins/dbt/manager.py +9 -7
  11. snowflake/cli/_plugins/{project → dcm}/commands.py +95 -48
  12. snowflake/cli/_plugins/{project/project_entity_model.py → dcm/dcm_project_entity_model.py} +5 -5
  13. snowflake/cli/_plugins/{project → dcm}/manager.py +35 -14
  14. snowflake/cli/_plugins/{project → dcm}/plugin_spec.py +1 -1
  15. snowflake/cli/_plugins/git/manager.py +1 -11
  16. snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +4 -0
  17. snowflake/cli/_plugins/nativeapp/commands.py +3 -4
  18. snowflake/cli/_plugins/nativeapp/entities/application_package.py +1 -1
  19. snowflake/cli/_plugins/nativeapp/release_channel/commands.py +1 -2
  20. snowflake/cli/_plugins/nativeapp/version/commands.py +1 -2
  21. snowflake/cli/_plugins/snowpark/common.py +23 -11
  22. snowflake/cli/_plugins/snowpark/snowpark_entity.py +13 -5
  23. snowflake/cli/_plugins/snowpark/snowpark_entity_model.py +10 -2
  24. snowflake/cli/_plugins/sql/commands.py +49 -1
  25. snowflake/cli/_plugins/sql/manager.py +14 -4
  26. snowflake/cli/_plugins/sql/repl.py +4 -0
  27. snowflake/cli/_plugins/stage/commands.py +30 -11
  28. snowflake/cli/_plugins/stage/diff.py +2 -0
  29. snowflake/cli/_plugins/stage/manager.py +79 -55
  30. snowflake/cli/_plugins/streamlit/streamlit_entity.py +17 -30
  31. snowflake/cli/api/artifacts/upload.py +1 -1
  32. snowflake/cli/api/cli_global_context.py +5 -14
  33. snowflake/cli/api/commands/decorators.py +7 -0
  34. snowflake/cli/api/commands/flags.py +12 -0
  35. snowflake/cli/api/commands/snow_typer.py +23 -2
  36. snowflake/cli/api/config.py +9 -5
  37. snowflake/cli/api/connections.py +1 -0
  38. snowflake/cli/api/constants.py +2 -2
  39. snowflake/cli/api/entities/common.py +16 -13
  40. snowflake/cli/api/entities/utils.py +15 -9
  41. snowflake/cli/api/feature_flags.py +2 -5
  42. snowflake/cli/api/output/formats.py +6 -0
  43. snowflake/cli/api/output/types.py +48 -2
  44. snowflake/cli/api/project/schemas/entities/entities.py +6 -6
  45. snowflake/cli/api/rendering/sql_templates.py +67 -11
  46. snowflake/cli/api/rest_api.py +1 -0
  47. snowflake/cli/api/stage_path.py +41 -5
  48. {snowflake_cli-3.9.1.dist-info → snowflake_cli-3.10.1.dist-info}/METADATA +46 -13
  49. {snowflake_cli-3.9.1.dist-info → snowflake_cli-3.10.1.dist-info}/RECORD +53 -54
  50. snowflake/cli/_plugins/project/feature_flags.py +0 -22
  51. /snowflake/cli/_plugins/{project → dcm}/__init__.py +0 -0
  52. {snowflake_cli-3.9.1.dist-info → snowflake_cli-3.10.1.dist-info}/WHEEL +0 -0
  53. {snowflake_cli-3.9.1.dist-info → snowflake_cli-3.10.1.dist-info}/entry_points.txt +0 -0
  54. {snowflake_cli-3.9.1.dist-info → snowflake_cli-3.10.1.dist-info}/licenses/LICENSE +0 -0
snowflake/cli/_plugins/streamlit/streamlit_entity.py

@@ -18,7 +18,7 @@ from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.project.project_paths import bundle_root
 from snowflake.cli.api.project.schemas.entities.common import Identifier, PathMapping
 from snowflake.connector import ProgrammingError
-from snowflake.connector.cursor import SnowflakeCursor
+from snowflake.connector.cursor import DictCursor, SnowflakeCursor
 
 log = logging.getLogger(__name__)
 
@@ -102,7 +102,6 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         if (
             experimental
             or GlobalFeatureFlag.ENABLE_STREAMLIT_VERSIONED_STAGE.is_enabled()
-            or GlobalFeatureFlag.ENABLE_STREAMLIT_EMBEDDED_STAGE.is_enabled()
         ):
             self._deploy_experimental(bundle_map=bundle_map, replace=replace)
         else:
@@ -123,7 +122,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             bundle_map=bundle_map,
             prune=prune,
             recursive=True,
-            stage_path=StageManager().stage_path_parts_from_str(stage_root),
+            stage_path_parts=StageManager().stage_path_parts_from_str(stage_root),
             print_diff=True,
         )
 
@@ -136,7 +135,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         return self.perform(EntityActions.GET_URL, action_context, *args, **kwargs)
 
     def describe(self) -> SnowflakeCursor:
-        return self._execute_query(self.get_describe_sql())
+        return self._execute_query(self.get_describe_sql(), cursor_class=DictCursor)
 
     def action_share(
         self, action_ctx: ActionContext, to_role: str, *args, **kwargs
@@ -146,13 +145,9 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
     def get_add_live_version_sql(
         self, schema: Optional[str] = None, database: Optional[str] = None
     ):
+        # this query unlike most others doesn't accept fqn wrapped in `IDENTIFIER('')`
         return f"ALTER STREAMLIT {self._get_identifier(schema,database)} ADD LIVE VERSION FROM LAST;"
 
-    def get_checkout_sql(
-        self, schema: Optional[str] = None, database: Optional[str] = None
-    ):
-        return f"ALTER STREAMLIT {self._get_identifier(schema,database)} CHECKOUT;"
-
     def get_deploy_sql(
         self,
         if_not_exists: bool = False,
@@ -172,7 +167,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         else:
             query = "CREATE STREAMLIT"
 
-        query += f" {self._get_identifier(schema, database)}"
+        query += f" {self._get_sql_identifier(schema, database)}"
 
         if from_stage_name:
             query += f"\nROOT_LOCATION = '{from_stage_name}'"
@@ -207,13 +202,15 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
         return query + ";"
 
     def get_describe_sql(self) -> str:
-        return f"DESCRIBE STREAMLIT {self._get_identifier()};"
+        return f"DESCRIBE STREAMLIT {self._get_sql_identifier()};"
 
     def get_share_sql(self, to_role: str) -> str:
-        return f"GRANT USAGE ON STREAMLIT {self._get_identifier()} TO ROLE {to_role};"
+        return (
+            f"GRANT USAGE ON STREAMLIT {self._get_sql_identifier()} TO ROLE {to_role};"
+        )
 
     def get_execute_sql(self):
-        return f"EXECUTE STREAMLIT {self._get_identifier()}();"
+        return f"EXECUTE STREAMLIT {self._get_sql_identifier()}();"
 
     def get_usage_grant_sql(self, app_role: str, schema: Optional[str] = None) -> str:
         entity_id = self.entity_id
@@ -239,26 +236,15 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             )
         )
         try:
-            if GlobalFeatureFlag.ENABLE_STREAMLIT_VERSIONED_STAGE.is_enabled():
-                self._execute_query(self.get_add_live_version_sql())
-            elif not GlobalFeatureFlag.ENABLE_STREAMLIT_NO_CHECKOUTS.is_enabled():
-                self._execute_query(self.get_checkout_sql())
+            self._execute_query(self.get_add_live_version_sql())
         except ProgrammingError as e:
-            if "Checkout already exists" in str(
-                e
-            ) or "There is already a live version" in str(e):
-                log.info("Checkout already exists, continuing")
+            if "There is already a live version" in str(e):
+                log.info("Live version already exists, continuing")
             else:
                 raise
 
-        embeded_stage_name = (
-            f"snow://streamlit/{self.model.fqn.using_connection(self._conn).identifier}"
-        )
-
-        if GlobalFeatureFlag.ENABLE_STREAMLIT_VERSIONED_STAGE.is_enabled():
-            stage_root = f"{embeded_stage_name}/versions/live"
-        else:
-            stage_root = f"{embeded_stage_name}/default_checkout"
+        stage_root = self.describe().fetchone()["live_version_location_uri"]
+        stage_path_parts = StageManager().stage_path_parts_from_str(stage_root)
 
         sync_deploy_root_with_stage(
             console=self._workspace_ctx.console,
@@ -266,6 +252,7 @@ class StreamlitEntity(EntityBase[StreamlitEntityModel]):
             bundle_map=bundle_map,
             prune=prune,
             recursive=True,
-            stage_path=StageManager().stage_path_parts_from_str(stage_root),
+            stage_path_parts=stage_path_parts,
             print_diff=True,
+            force_overwrite=True,  # files copied to streamlit vstage need to be overwritten
         )
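The Streamlit hunks above amount to one behavioural change: deploy no longer branches on the removed checkout/embedded-stage flags; it always runs `ADD LIVE VERSION FROM LAST` and then asks Snowflake where the live version lives instead of building the `snow://streamlit/...` URI by hand. A rough sketch of the new resolution step (the `entity` variable is illustrative; the real code runs inside the entity itself):

    # Illustrative only -- mirrors the replaced lines, not a new API.
    cursor = entity.describe()  # a DictCursor now, so rows come back as dicts
    stage_root = cursor.fetchone()["live_version_location_uri"]
    # e.g. "snow://streamlit/MY_DB.PUBLIC.MY_APP/versions/live", the URI the old code built by hand
    stage_path_parts = StageManager().stage_path_parts_from_str(stage_root)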
snowflake/cli/api/artifacts/upload.py

@@ -30,7 +30,7 @@ def sync_artifacts_with_stage(
         bundle_map=bundle_map,
         prune=prune,
         recursive=True,
-        stage_path=stage_path_parts,
+        stage_path_parts=stage_path_parts,
         print_diff=True,
     )
     project_paths.clean_up_output()
snowflake/cli/api/cli_global_context.py

@@ -19,7 +19,7 @@ from contextvars import ContextVar
 from dataclasses import dataclass, field, replace
 from functools import wraps
 from pathlib import Path
-from typing import TYPE_CHECKING, Iterator, Optional
+from typing import TYPE_CHECKING, Iterator
 
 from snowflake.cli.api.connections import ConnectionContext, OpenConnectionCache
 from snowflake.cli.api.exceptions import MissingConfigurationError
@@ -31,7 +31,6 @@ from snowflake.connector import SnowflakeConnection
 if TYPE_CHECKING:
     from snowflake.cli.api.project.definition_manager import DefinitionManager
     from snowflake.cli.api.project.schemas.project_definition import ProjectDefinition
-    from snowflake.core import Root
 
 _CONNECTION_CACHE = OpenConnectionCache()
 
@@ -197,18 +196,10 @@ class _CliGlobalContextAccess:
     @property
     def _should_force_mute_intermediate_output(self) -> bool:
         """Computes whether cli_console output should be muted."""
-        return self._manager.output_format == OutputFormat.JSON
-
-    @property
-    def snow_api_root(
-        self,
-    ) -> Optional[Root]:
-        from snowflake.core import Root
-
-        if self.connection:
-            return Root(self.connection)
-        else:
-            return None
+        return (
+            self._manager.output_format.is_json
+            or self._manager.output_format == OutputFormat.CSV
+        )
 
     @property
     def enhanced_exit_codes(self) -> bool:
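Intermediate console output is now muted for every machine-readable format rather than only `JSON`; the check leans on the `is_json` helper added to `OutputFormat` later in this diff. A quick illustration of the new predicate:

    from snowflake.cli.api.output.formats import OutputFormat

    def should_mute(fmt: OutputFormat) -> bool:
        # Mirrors _should_force_mute_intermediate_output after the change.
        return fmt.is_json or fmt == OutputFormat.CSV

    assert should_mute(OutputFormat.JSON_EXT) and should_mute(OutputFormat.CSV)
    assert not should_mute(OutputFormat.TABLE)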
snowflake/cli/api/commands/decorators.py

@@ -53,6 +53,7 @@ from snowflake.cli.api.commands.flags import (
     SilentOption,
     TemporaryConnectionOption,
     TokenFilePathOption,
+    TokenOption,
     UserOption,
     VerboseOption,
     WarehouseOption,
@@ -279,6 +280,12 @@ GLOBAL_CONNECTION_OPTIONS = [
         annotation=Optional[str],
         default=MasterTokenOption,
     ),
+    inspect.Parameter(
+        "token",
+        inspect.Parameter.KEYWORD_ONLY,
+        annotation=Optional[str],
+        default=TokenOption,
+    ),
     inspect.Parameter(
         "token_file_path",
         inspect.Parameter.KEYWORD_ONLY,
snowflake/cli/api/commands/flags.py

@@ -212,6 +212,16 @@ MasterTokenOption = typer.Option(
     hidden=True,
 )
 
+
+TokenOption = typer.Option(
+    None,
+    "--token",
+    help="OAuth token to use when connecting to Snowflake.",
+    callback=_connection_callback("token"),
+    show_default=False,
+    rich_help_panel=_CONNECTION_SECTION,
+)
+
 TokenFilePathOption = typer.Option(
     None,
     "--token-file-path",
@@ -274,6 +284,7 @@ MfaPasscodeOption = typer.Option(
     rich_help_panel=_CONNECTION_SECTION,
 )
 
+
 EnableDiagOption = typer.Option(
     False,
     "--enable-diag",
@@ -435,6 +446,7 @@ OutputFormatOption = typer.Option(
     rich_help_panel=_CLI_BEHAVIOUR,
 )
 
+
 SilentOption = typer.Option(
     False,
     "--silent",
snowflake/cli/api/commands/snow_typer.py

@@ -46,6 +46,8 @@ from typer.core import TyperGroup
 
 log = logging.getLogger(__name__)
 
+PREVIEW_PREFIX = ""
+
 
 class SortedTyperGroup(TyperGroup):
     def list_commands(self, ctx: click.Context) -> List[str]:
@@ -92,6 +94,7 @@ class SnowTyper(typer.Typer):
         requires_connection: bool = False,
         is_enabled: Callable[[], bool] | None = None,
         require_warehouse: bool = False,
+        preview: bool = False,
         **kwargs,
     ):
         """
@@ -106,9 +109,18 @@ class SnowTyper(typer.Typer):
 
         def custom_command(command_callable):
             """Custom command wrapper similar to Typer.command."""
-            # Sanitize doc string which is used to create help in terminal
             command_callable.__doc__ = sanitize_for_terminal(command_callable.__doc__)
 
+            if preview and command_callable.__doc__:
+                if not command_callable.__doc__.strip().startswith(PREVIEW_PREFIX):
+                    command_callable.__doc__ = (
+                        f"{PREVIEW_PREFIX}{command_callable.__doc__.strip()}"
+                    )
+
+            if preview and "help" in kwargs and kwargs["help"]:
+                if not kwargs["help"].strip().startswith(PREVIEW_PREFIX):
+                    kwargs["help"] = f"{PREVIEW_PREFIX}{kwargs['help'].strip()}"
+
             if requires_connection:
                 command_callable = global_options_with_connection(command_callable)
             elif requires_global_options:
@@ -228,6 +240,7 @@ class SnowTyperFactory:
         short_help: Optional[str] = None,
         is_hidden: Optional[Callable[[], bool]] = None,
         deprecated: bool = False,
+        preview: bool = False,
         subcommand_metavar: Optional[str] = None,
     ):
         self.name = name
@@ -235,15 +248,21 @@ class SnowTyperFactory:
         self.short_help = short_help
         self.is_hidden = is_hidden
         self.deprecated = deprecated
+        self.preview = preview
         self.commands_to_register: List[SnowTyperCommandData] = []
         self.subapps_to_register: List[SnowTyperFactory] = []
         self.callbacks_to_register: List[Callable] = []
         self.subcommand_metavar = subcommand_metavar
 
     def create_instance(self) -> SnowTyper:
+        help_text = self.help
+        if self.preview and help_text:
+            if not help_text.strip().startswith(PREVIEW_PREFIX):
+                help_text = f"{PREVIEW_PREFIX}{help_text.strip()}"
+
         app = SnowTyper(
             name=self.name,
-            help=self.help,
+            help=help_text,
             short_help=self.short_help,
             hidden=self.is_hidden() if self.is_hidden else False,
             deprecated=self.deprecated,
@@ -251,6 +270,8 @@ class SnowTyperFactory:
         )
         # register commands
         for command in self.commands_to_register:
+            if self.preview and "preview" not in command.kwargs:
+                command.kwargs["preview"] = True
             app.command(*command.args, **command.kwargs)(command.func)
         # register callbacks
        for callback in self.callbacks_to_register:
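`preview` threads through both `SnowTyper.command` and `SnowTyperFactory`: when set, the command's help text (docstring or explicit `help=`) gets `PREVIEW_PREFIX` prepended unless it is already there, and a factory marked `preview=True` passes the flag on to every command it registers. A minimal sketch of the prefixing rule on its own (the prefix value is illustrative; the constant renders as an empty string in this diff):

    from typing import Optional

    PREVIEW_PREFIX = "(PREVIEW) "  # illustrative value, not taken from the package

    def mark_preview(help_text: Optional[str]) -> Optional[str]:
        # Prepend the marker exactly once, mirroring the logic added above.
        if not help_text:
            return help_text
        stripped = help_text.strip()
        return stripped if stripped.startswith(PREVIEW_PREFIX) else f"{PREVIEW_PREFIX}{stripped}"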
snowflake/cli/api/config.py

@@ -55,6 +55,7 @@ CONNECTIONS_SECTION = "connections"
 CLI_SECTION = "cli"
 LOGS_SECTION = "logs"
 PLUGINS_SECTION = "plugins"
+IGNORE_NEW_VERSION_WARNING_KEY = "ignore_new_version_warning"
 
 LOGS_SECTION_PATH = [CLI_SECTION, LOGS_SECTION]
 PLUGINS_SECTION_PATH = [CLI_SECTION, PLUGINS_SECTION]
@@ -204,10 +205,12 @@ def _read_config_file():
 
 def _initialise_logs_section():
     with _config_file() as conf_file_cache:
-        if conf_file_cache.get(CLI_SECTION) is None:
-            conf_file_cache[CLI_SECTION] = _DEFAULT_CLI_CONFIG
-        if conf_file_cache[CLI_SECTION].get(LOGS_SECTION) is None:
-            conf_file_cache[CLI_SECTION][LOGS_SECTION] = _DEFAULT_LOGS_CONFIG
+        conf_file_cache[CLI_SECTION][LOGS_SECTION] = _DEFAULT_LOGS_CONFIG
+
+
+def _initialise_cli_section():
+    with _config_file() as conf_file_cache:
+        conf_file_cache[CLI_SECTION] = {IGNORE_NEW_VERSION_WARNING_KEY: False}
 
 
 def set_config_value(path: List[str], value: Any) -> None:
@@ -297,7 +300,7 @@ def get_config_value(*path, key: str, default: Optional[Any] = Empty) -> Any:
         return env_variable
     try:
         return get_config_section(*path)[key]
-    except (KeyError, NonExistentKey, MissingConfigOptionError):
+    except (KeyError, NonExistentKey, MissingConfigOptionError, ConfigSourceError):
         if default is not Empty:
             return default
         raise
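Together with the new `_initialise_cli_section` above, a freshly created config file now carries `ignore_new_version_warning = false` under `[cli]`, and `get_config_value` tolerates a missing section by also catching `ConfigSourceError`. A hedged sketch of how the setting can be read back (the real consumer sits in version_check.py, which is not shown in this excerpt):

    from snowflake.cli.api.config import (
        CLI_SECTION,
        IGNORE_NEW_VERSION_WARNING_KEY,
        get_config_value,
    )

    # Falls back to False when the key or the whole [cli] section is absent.
    ignore_warning = get_config_value(
        CLI_SECTION, key=IGNORE_NEW_VERSION_WARNING_KEY, default=False
    )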
@@ -321,6 +324,7 @@ def _initialise_config(config_file: Path) -> None:
     config_file = SecurePath(config_file)
     config_file.parent.mkdir(parents=True, exist_ok=True)
     config_file.touch()
+    _initialise_cli_section()
     _initialise_logs_section()
     log.info("Created Snowflake configuration file at %s", CONFIG_MANAGER.file_path)
 
snowflake/cli/api/connections.py

@@ -48,6 +48,7 @@ class ConnectionContext:
     private_key_file: Optional[str] = None
     warehouse: Optional[str] = None
     mfa_passcode: Optional[str] = None
+    token: Optional[str] = None
     enable_diag: Optional[bool] = False
     diag_log_path: Optional[Path] = None
     diag_allowlist_path: Optional[Path] = None
snowflake/cli/api/constants.py

@@ -36,6 +36,7 @@ class ObjectNames:
 class ObjectType(Enum):
     COMPUTE_POOL = ObjectNames("compute-pool", "compute pool", "compute pools")
     DBT_PROJECT = ObjectNames("dbt-project", "dbt project", "dbt projects")
+    DCM_PROJECT = ObjectNames("dcm", "DCM Project", "DCM Projects")
     DATABASE = ObjectNames("database", "database", "databases")
     FUNCTION = ObjectNames("function", "function", "functions")
     INTEGRATION = ObjectNames("integration", "integration", "integrations")
@@ -48,7 +49,6 @@ class ObjectType(Enum):
     NETWORK_RULE = ObjectNames("network-rule", "network rule", "network rules")
     NOTEBOOK = ObjectNames("notebook", "notebook", "notebooks")
     PROCEDURE = ObjectNames("procedure", "procedure", "procedures")
-    PROJECT = ObjectNames("project", "project", "projects")
     ROLE = ObjectNames("role", "role", "roles")
     SCHEMA = ObjectNames("schema", "schema", "schemas")
     SERVICE = ObjectNames("service", "service", "services")
@@ -79,7 +79,7 @@ OBJECT_TO_NAMES = {o.value.cli_name: o.value for o in ObjectType}
 UNSUPPORTED_OBJECTS = {
     ObjectType.APPLICATION.value.cli_name,
     ObjectType.APPLICATION_PACKAGE.value.cli_name,
-    ObjectType.PROJECT.value.cli_name,
+    ObjectType.DCM_PROJECT.value.cli_name,
     ObjectType.DBT_PROJECT.value.cli_name,
 }
 SUPPORTED_OBJECTS = sorted(OBJECT_TO_NAMES.keys() - UNSUPPORTED_OBJECTS)
snowflake/cli/api/entities/common.py

@@ -4,7 +4,7 @@ from pathlib import Path
 from typing import Generic, List, Optional, Type, TypeVar, get_args
 
 from snowflake.cli._plugins.workspace.context import ActionContext, WorkspaceContext
-from snowflake.cli.api.cli_global_context import get_cli_context, span
+from snowflake.cli.api.cli_global_context import span
 from snowflake.cli.api.entities.resolver import Dependency, DependencyResolver
 from snowflake.cli.api.entities.utils import EntityActions, get_sql_executor
 from snowflake.cli.api.identifiers import FQN
@@ -102,8 +102,8 @@ class EntityBase(Generic[T]):
     ) -> SqlExecutor:
         return get_sql_executor()
 
-    def _execute_query(self, sql: str) -> SnowflakeCursor:
-        return self._sql_executor.execute_query(sql)
+    def _execute_query(self, sql: str, **kwargs) -> SnowflakeCursor:
+        return self._sql_executor.execute_query(sql, **kwargs)
 
     @functools.cached_property
     def _conn(self) -> SnowflakeConnection:
@@ -117,13 +117,6 @@ class EntityBase(Generic[T]):
     def schema(self) -> Optional[str]:
         return self.get_from_fqn_or_conn("schema")
 
-    @property
-    def snow_api_root(self) -> Optional[object]:
-        root = get_cli_context().snow_api_root
-        if root is None:
-            raise ValueError("snow_api_root is not set")
-        return root
-
     @property
     def model(self) -> T:
         return self._entity_model
@@ -140,12 +133,22 @@ class EntityBase(Generic[T]):
     def get_drop_sql(self) -> str:
         return f"DROP {self.model.type.upper()} {self.identifier};"  # type: ignore[attr-defined]
 
-    def _get_identifier(
+    def _get_fqn(
         self, schema: Optional[str] = None, database: Optional[str] = None
-    ) -> str:
+    ) -> FQN:
         schema_to_use = schema or self._entity_model.fqn.schema or self._conn.schema  # type: ignore
         db_to_use = database or self._entity_model.fqn.database or self._conn.database  # type: ignore
-        return f"{self._entity_model.fqn.set_schema(schema_to_use).set_database(db_to_use).sql_identifier}"  # type: ignore
+        return self._entity_model.fqn.set_schema(schema_to_use).set_database(db_to_use)  # type: ignore
+
+    def _get_sql_identifier(
+        self, schema: Optional[str] = None, database: Optional[str] = None
+    ) -> str:
+        return str(self._get_fqn(schema, database).sql_identifier)
+
+    def _get_identifier(
+        self, schema: Optional[str] = None, database: Optional[str] = None
+    ) -> str:
+        return str(self._get_fqn(schema, database).identifier)
 
     def get_from_fqn_or_conn(self, attribute_name: str) -> str:
         attribute = getattr(self.fqn, attribute_name, None) or getattr(
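The split into `_get_fqn`, `_get_sql_identifier`, and `_get_identifier` exists because some statements accept an `IDENTIFIER('...')` wrapper while others, such as the Streamlit `ADD LIVE VERSION FROM LAST` above, only take the bare qualified name. A rough illustration of the two renderings, assuming `FQN.sql_identifier` wraps the plain identifier in `IDENTIFIER('...')`:

    # Assumed renderings for a Streamlit app "app" in MY_DB.PUBLIC (values illustrative).
    plain = "MY_DB.PUBLIC.app"                 # what _get_identifier() would return
    wrapped = f"IDENTIFIER('{plain}')"         # what _get_sql_identifier() would return

    describe_sql = f"DESCRIBE STREAMLIT {wrapped};"                          # wrapper accepted
    add_live_sql = f"ALTER STREAMLIT {plain} ADD LIVE VERSION FROM LAST;"    # wrapper rejected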
snowflake/cli/api/entities/utils.py

@@ -86,11 +86,12 @@ def sync_deploy_root_with_stage(
     bundle_map: BundleMap,
     prune: bool,
     recursive: bool,
-    stage_path: StagePathParts,
+    stage_path_parts: StagePathParts,
     role: str | None = None,
     package_name: str | None = None,
     local_paths_to_sync: List[Path] | None = None,
     print_diff: bool = True,
+    force_overwrite: bool = False,
 ) -> DiffResult:
     """
     Ensures that the files on our remote stage match the artifacts we have in
@@ -101,27 +102,31 @@ def sync_deploy_root_with_stage(
         role (str): The name of the role to use for queries and commands.
         prune (bool): Whether to prune artifacts from the stage that don't exist locally.
         recursive (bool): Whether to traverse directories recursively.
-        stage_path (DefaultStagePathParts): stage path object.
+        stage_path_parts (StagePathParts): stage path parts object.
 
         package_name (str): supported for Native App compatibility. Should be None out of Native App context.
 
         local_paths_to_sync (List[Path], optional): List of local paths to sync. Defaults to None to sync all
         local paths. Note that providing an empty list here is equivalent to None.
         print_diff (bool): Whether to print the diff between the local files and the remote stage. Defaults to True
+        force_overwrite (bool): Some resources (e.g. streamlit) need to overwrite files on the stage. Defaults to False.
 
     Returns:
         A `DiffResult` instance describing the changes that were performed.
     """
-    if not package_name:
+    if stage_path_parts.is_vstage:
+        # vstages are created by FBE, so no need to do it manually
+        pass
+    elif not package_name:
         # ensure stage exists
-        stage_fqn = FQN.from_stage(stage_path.stage)
+        stage_fqn = FQN.from_stage(stage_path_parts.stage)
         console.step(f"Creating stage {stage_fqn} if not exists.")
         StageManager().create(fqn=stage_fqn)
     else:
         # ensure stage exists - nativeapp behavior
         sql_facade = get_snowflake_facade()
-        schema = stage_path.schema
-        stage_fqn = stage_path.stage
+        schema = stage_path_parts.schema
+        stage_fqn = stage_path_parts.stage
         # Does a stage already exist within the application package, or we need to create one?
         # Using "if not exists" should take care of either case.
         console.step(
@@ -134,12 +139,12 @@ def sync_deploy_root_with_stage(
     # Perform a diff operation and display results to the user for informational purposes
     if print_diff:
         console.step(
-            f"Performing a diff between the Snowflake stage: {stage_path.path} and your local deploy_root: {deploy_root.resolve()}."
+            f"Performing a diff between the Snowflake stage: {stage_path_parts.path} and your local deploy_root: {deploy_root.resolve()}."
         )
 
     diff: DiffResult = compute_stage_diff(
         local_root=deploy_root,
-        stage_path=stage_path,
+        stage_path=stage_path_parts,
     )
 
     if local_paths_to_sync:
@@ -201,7 +206,8 @@ def sync_deploy_root_with_stage(
         role=role,
         deploy_root_path=deploy_root,
         diff_result=diff,
-        stage_full_path=stage_path.full_path,
+        stage_full_path=stage_path_parts.full_path,
+        force_overwrite=force_overwrite,
     )
     return diff
 
snowflake/cli/api/feature_flags.py

@@ -50,16 +50,12 @@ class FeatureFlagMixin(Enum):
             is not None
         )
 
-    def env_variable(self):
+    def env_variable(self) -> str:
         return get_env_variable_name(*FEATURE_FLAGS_SECTION_PATH, key=self.value.name)
 
 
 @unique
 class FeatureFlag(FeatureFlagMixin):
-    ENABLE_STREAMLIT_EMBEDDED_STAGE = BooleanFlag(
-        "ENABLE_STREAMLIT_EMBEDDED_STAGE", False
-    )
-    ENABLE_STREAMLIT_NO_CHECKOUTS = BooleanFlag("ENABLE_STREAMLIT_NO_CHECKOUTS", False)
     ENABLE_STREAMLIT_VERSIONED_STAGE = BooleanFlag(
         "ENABLE_STREAMLIT_VERSIONED_STAGE", False
     )
@@ -76,3 +72,4 @@ class FeatureFlag(FeatureFlagMixin):
     ENABLE_NATIVE_APP_CHILDREN = BooleanFlag("ENABLE_NATIVE_APP_CHILDREN", False)
     # TODO 4.0: remove ENABLE_RELEASE_CHANNELS
     ENABLE_RELEASE_CHANNELS = BooleanFlag("ENABLE_RELEASE_CHANNELS", None)
+    ENABLE_SNOWFLAKE_PROJECTS = BooleanFlag("ENABLE_SNOWFLAKE_PROJECTS", False)
snowflake/cli/api/output/formats.py

@@ -18,3 +18,9 @@ from enum import Enum
 class OutputFormat(Enum):
     TABLE = "TABLE"
     JSON = "JSON"
+    JSON_EXT = "JSON_EXT"
+    CSV = "CSV"
+
+    @property
+    def is_json(self) -> bool:
+        return self in (OutputFormat.JSON, OutputFormat.JSON_EXT)
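`JSON_EXT` and `CSV` join the existing output formats, and `is_json` lets call sites treat `JSON` and `JSON_EXT` uniformly (the muting logic in cli_global_context.py above already relies on it). For example:

    from snowflake.cli.api.output.formats import OutputFormat

    assert OutputFormat.JSON.is_json
    assert OutputFormat.JSON_EXT.is_json
    assert not OutputFormat.CSV.is_json
    assert not OutputFormat.TABLE.is_json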
snowflake/cli/api/output/types.py

@@ -16,11 +16,22 @@ from __future__ import annotations
 
 import json
 import typing as t
+from enum import IntEnum
 
+from snowflake.cli.api.cli_global_context import get_cli_context
+from snowflake.cli.api.output.formats import OutputFormat
 from snowflake.connector import DictCursor
 from snowflake.connector.cursor import SnowflakeCursor
 
 
+class SnowflakeColumnType(IntEnum):
+    """Snowflake column type codes for JSON-capable data types."""
+
+    VARIANT = 5
+    OBJECT = 9
+    ARRAY = 10
+
+
 class CommandResult:
     @property
     def result(self):
@@ -69,13 +80,48 @@ class StreamResult(CommandResult):
 class QueryResult(CollectionResult):
     def __init__(self, cursor: SnowflakeCursor | DictCursor):
         self.column_names = [col.name for col in cursor.description]
+        # Store column type information to identify VARIANT columns (JSON data)
+        self.column_types = [col.type_code for col in cursor.description]
         super().__init__(elements=self._prepare_payload(cursor))
         self._query = cursor.query
 
     def _prepare_payload(self, cursor: SnowflakeCursor | DictCursor):
         if isinstance(cursor, DictCursor):
-            return (k for k in cursor)
-        return ({k: v for k, v in zip(self.column_names, row)} for row in cursor)
+            return (self._process_columns(k) for k in cursor)
+        return (
+            self._process_columns({k: v for k, v in zip(self.column_names, row)})
+            for row in cursor
+        )
+
+    def _process_columns(self, row_dict):
+        if get_cli_context().output_format != OutputFormat.JSON_EXT:
+            return row_dict
+
+        processed_row = {}
+        for i, (column_name, value) in enumerate(row_dict.items()):
+            # Check if this column can contain JSON data
+            if i < len(self.column_types) and self.column_types[i] in (
+                SnowflakeColumnType.VARIANT,
+                SnowflakeColumnType.OBJECT,
+                SnowflakeColumnType.ARRAY,
+            ):
+                # For ARRAY and OBJECT types, the values are always JSON strings that need parsing
+                # For VARIANT types, only parse if the value is a string
+                if self.column_types[i] in (
+                    SnowflakeColumnType.OBJECT,
+                    SnowflakeColumnType.ARRAY,
+                ) or isinstance(value, str):
+                    try:
+                        # Try to parse as JSON
+                        processed_row[column_name] = json.loads(value)
+                    except (json.JSONDecodeError, TypeError):
+                        # If parsing fails, keep the original value
+                        processed_row[column_name] = value
+                else:
+                    processed_row[column_name] = value
+            else:
+                processed_row[column_name] = value
+        return processed_row
 
     @property
     def query(self):
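The practical difference for users: under plain `JSON` output, VARIANT/OBJECT/ARRAY columns are emitted as the JSON strings the connector returns, while `JSON_EXT` parses them into nested structures. A self-contained sketch of that per-cell decision, reusing the same type codes as `SnowflakeColumnType` (cursor and CLI-context plumbing omitted):

    import json

    VARIANT, OBJECT, ARRAY = 5, 9, 10  # Snowflake type codes, as in SnowflakeColumnType

    def render_cell(type_code: int, value):
        """Parse JSON-capable columns the way QueryResult._process_columns does for JSON_EXT."""
        if type_code in (OBJECT, ARRAY) or (type_code == VARIANT and isinstance(value, str)):
            try:
                return json.loads(value)
            except (json.JSONDecodeError, TypeError):
                return value
        return value

    print(render_cell(VARIANT, '{"a": 1}'))  # -> {'a': 1} under JSON_EXT
    print(render_cell(VARIANT, None))        # -> None, left untouched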
snowflake/cli/api/project/schemas/entities/entities.py

@@ -16,6 +16,10 @@ from __future__ import annotations
 
 from typing import Dict, List, Union, get_args
 
+from snowflake.cli._plugins.dcm.dcm_project_entity_model import (
+    DCMProjectEntity,
+    DCMProjectEntityModel,
+)
 from snowflake.cli._plugins.nativeapp.entities.application import (
     ApplicationEntity,
     ApplicationEntityModel,
@@ -26,10 +30,6 @@ from snowflake.cli._plugins.nativeapp.entities.application_package import (
 )
 from snowflake.cli._plugins.notebook.notebook_entity import NotebookEntity
 from snowflake.cli._plugins.notebook.notebook_entity_model import NotebookEntityModel
-from snowflake.cli._plugins.project.project_entity_model import (
-    ProjectEntity,
-    ProjectEntityModel,
-)
 from snowflake.cli._plugins.snowpark.snowpark_entity import (
     FunctionEntity,
     ProcedureEntity,
@@ -62,7 +62,7 @@ Entity = Union[
     ApplicationPackageEntity,
     StreamlitEntity,
     ProcedureEntity,
-    ProjectEntity,
+    DCMProjectEntity,
     FunctionEntity,
     ComputePoolEntity,
     ImageRepositoryEntity,
@@ -79,7 +79,7 @@ EntityModel = Union[
     ImageRepositoryEntityModel,
     ServiceEntityModel,
     NotebookEntityModel,
-    ProjectEntityModel,
+    DCMProjectEntityModel,
 ]
 
 ALL_ENTITIES: List[Entity] = [*get_args(Entity)]