snowflake-cli 3.11.0__py3-none-any.whl → 3.12.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (38)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/cli_app.py +0 -1
  3. snowflake/cli/_app/printing.py +153 -19
  4. snowflake/cli/_plugins/dbt/commands.py +37 -8
  5. snowflake/cli/_plugins/dbt/manager.py +144 -10
  6. snowflake/cli/_plugins/dcm/commands.py +65 -90
  7. snowflake/cli/_plugins/dcm/manager.py +137 -50
  8. snowflake/cli/_plugins/logs/commands.py +7 -0
  9. snowflake/cli/_plugins/logs/manager.py +21 -1
  10. snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +3 -1
  11. snowflake/cli/_plugins/snowpark/common.py +1 -0
  12. snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +29 -5
  13. snowflake/cli/_plugins/snowpark/package_utils.py +44 -3
  14. snowflake/cli/_plugins/spcs/services/manager.py +5 -4
  15. snowflake/cli/_plugins/sql/lexer/types.py +1 -0
  16. snowflake/cli/_plugins/sql/repl.py +100 -26
  17. snowflake/cli/_plugins/sql/repl_commands.py +607 -0
  18. snowflake/cli/_plugins/sql/statement_reader.py +44 -20
  19. snowflake/cli/api/artifacts/bundle_map.py +32 -2
  20. snowflake/cli/api/artifacts/regex_resolver.py +54 -0
  21. snowflake/cli/api/artifacts/upload.py +5 -1
  22. snowflake/cli/api/artifacts/utils.py +12 -1
  23. snowflake/cli/api/cli_global_context.py +7 -0
  24. snowflake/cli/api/console/abc.py +13 -2
  25. snowflake/cli/api/console/console.py +20 -0
  26. snowflake/cli/api/constants.py +9 -0
  27. snowflake/cli/api/entities/utils.py +10 -6
  28. snowflake/cli/api/feature_flags.py +1 -0
  29. snowflake/cli/api/identifiers.py +18 -1
  30. snowflake/cli/api/project/schemas/entities/entities.py +0 -6
  31. snowflake/cli/api/rendering/sql_templates.py +2 -0
  32. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/METADATA +5 -5
  33. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/RECORD +36 -36
  34. snowflake/cli/_plugins/dcm/dcm_project_entity_model.py +0 -59
  35. snowflake/cli/_plugins/sql/snowsql_commands.py +0 -331
  36. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/WHEEL +0 -0
  37. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/entry_points.txt +0 -0
  38. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/licenses/LICENSE +0 -0
--- a/snowflake/cli/__about__.py
+++ b/snowflake/cli/__about__.py
@@ -16,7 +16,7 @@ from __future__ import annotations
 
  from enum import Enum, unique
 
- VERSION = "3.11.0"
+ VERSION = "3.12.0"
 
 
  @unique
--- a/snowflake/cli/_app/cli_app.py
+++ b/snowflake/cli/_app/cli_app.py
@@ -256,7 +256,6 @@ class CliAppFactory:
  "--commands-registration",
  help="Commands registration",
  hidden=True,
- is_eager=True,
  callback=self._commands_registration_callback(),
  ),
  ) -> None:
--- a/snowflake/cli/_app/printing.py
+++ b/snowflake/cli/_app/printing.py
@@ -22,7 +22,7 @@ from decimal import Decimal
  from json import JSONEncoder
  from pathlib import Path
  from textwrap import indent
- from typing import TextIO
+ from typing import Any, Dict, TextIO
 
  from rich import box, get_console
  from rich import print as rich_print
@@ -61,13 +61,114 @@ class CustomJSONEncoder(JSONEncoder):
  return list(o.result)
  if isinstance(o, (date, datetime, time)):
  return o.isoformat()
- if isinstance(o, (Path, Decimal)):
+ if isinstance(o, Path):
+ return o.as_posix()
+ if isinstance(o, Decimal):
  return str(o)
  if isinstance(o, bytearray):
  return o.hex()
  return super().default(o)
 
 
+ class StreamingJSONEncoder(JSONEncoder):
+ """Streaming JSON encoder that doesn't materialize generators into lists"""
+
+ def default(self, o):
+ if isinstance(o, str):
+ return sanitize_for_terminal(o)
+ if isinstance(o, (ObjectResult, MessageResult)):
+ return o.result
+ if isinstance(o, (CollectionResult, MultipleResults)):
+ raise TypeError(
+ f"CollectionResult should be handled by streaming functions, not encoder"
+ )
+ if isinstance(o, (date, datetime, time)):
+ return o.isoformat()
+ if isinstance(o, Path):
+ return o.as_posix()
+ if isinstance(o, Decimal):
+ return str(o)
+ if isinstance(o, bytearray):
+ return o.hex()
+ return super().default(o)
+
+
+ def _print_json_item_with_array_indentation(item: Any, indent: int):
+ """Print a JSON item with proper indentation for array context"""
+ if indent:
+ indented_output = json.dumps(item, cls=StreamingJSONEncoder, indent=indent)
+ indented_lines = indented_output.split("\n")
+ for i, line in enumerate(indented_lines):
+ if i == 0:
+ print(" " * indent + line, end="")
+ else:
+ print("\n" + " " * indent + line, end="")
+ else:
+ json.dump(item, sys.stdout, cls=StreamingJSONEncoder, separators=(",", ":"))
+
+
+ def _stream_collection_as_json(result: CollectionResult, indent: int = 4):
+ """Stream a CollectionResult as a JSON array without loading all data into memory"""
+ items = iter(result.result)
+ try:
+ first_item = next(items)
+ except StopIteration:
+ print("[]", end="")
+ return
+
+ print("[")
+
+ _print_json_item_with_array_indentation(first_item, indent)
+
+ for item in items:
+ print(",")
+ _print_json_item_with_array_indentation(item, indent)
+
+ print("\n]", end="")
+
+
+ def _stream_collection_as_csv(result: CollectionResult):
+ """Stream a CollectionResult as CSV without loading all data into memory"""
+ items = iter(result.result)
+ try:
+ first_item = next(items)
+ except StopIteration:
+ return
+
+ fieldnames = list(first_item.keys())
+ if not isinstance(first_item, dict):
+ raise TypeError("CSV output requires dictionary items")
+
+ writer = csv.DictWriter(sys.stdout, fieldnames=fieldnames, lineterminator="\n")
+ writer.writeheader()
+ _write_csv_row(writer, first_item)
+
+ for item in items:
+ _write_csv_row(writer, item)
+
+
+ def _write_csv_row(writer: csv.DictWriter, row_data: Dict[str, Any]):
+ """Write a single CSV row, handling special data types"""
+ processed_row = {}
+ for key, value in row_data.items():
+ if isinstance(value, str):
+ processed_row[key] = sanitize_for_terminal(value)
+ elif isinstance(value, (date, datetime, time)):
+ processed_row[key] = value.isoformat()
+ elif isinstance(value, Path):
+ processed_row[key] = value.as_posix()
+ elif isinstance(value, Decimal):
+ processed_row[key] = str(value)
+ elif isinstance(value, bytearray):
+ processed_row[key] = value.hex()
+ elif value is None:
+ processed_row[key] = ""
+ else:
+ processed_row[key] = str(value)
+
+ writer.writerow(processed_row)
+
+
  def _get_format_type() -> OutputFormat:
  output_format = get_cli_context().output_format
  if output_format:
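
The new _stream_collection_as_json helper never materializes the collection: it pulls one row up front to decide between printing "[]" and opening "[", then emits every later row behind a "," separator and closes the array at the end, so memory stays bounded by a single row. A minimal standalone sketch of the same pattern, assuming plain dict rows in place of the CLI's CollectionResult (the function name below is illustrative, not part of the package):

import json
import sys
from typing import Any, Dict, Iterator


def stream_as_json_array(rows: Iterator[Dict[str, Any]]) -> None:
    """Write rows as a JSON array without holding them all in memory."""
    try:
        first = next(rows)
    except StopIteration:
        sys.stdout.write("[]")  # empty iterator -> empty array
        return

    sys.stdout.write("[\n")
    sys.stdout.write(json.dumps(first))
    for row in rows:
        sys.stdout.write(",\n")  # separator goes before every subsequent row
        sys.stdout.write(json.dumps(row))
    sys.stdout.write("\n]")


# Example: rows produced lazily, e.g. by a cursor wrapper.
stream_as_json_array({"id": i} for i in range(3))
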
@@ -110,12 +211,13 @@ def is_structured_format(output_format):
  def print_structured(
  result: CommandResult, output_format: OutputFormat = OutputFormat.JSON
  ):
- """Handles outputs like json, yml and other structured and parsable formats."""
+ """Handles outputs like json, csv and other structured and parsable formats with streaming."""
  printed_end_line = False
+
  if isinstance(result, MultipleResults):
  if output_format == OutputFormat.CSV:
  for command_result in result.result:
- _print_csv_result(command_result)
+ _print_csv_result_streaming(command_result)
  print(flush=True)
  printed_end_line = True
  else:
@@ -125,35 +227,67 @@
  # instead of joining all the values into a JSON array or CSV entry set
  for r in result.result:
  if output_format == OutputFormat.CSV:
- _print_csv_result(r.result)
+ _print_csv_result_streaming(r)
  else:
- json.dump(r, sys.stdout, cls=CustomJSONEncoder)
+ json.dump(r, sys.stdout, cls=StreamingJSONEncoder)
  print(flush=True)
  printed_end_line = True
  else:
  if output_format == OutputFormat.CSV:
- _print_csv_result(result)
+ _print_csv_result_streaming(result)
  printed_end_line = True
  else:
- json.dump(result, sys.stdout, cls=CustomJSONEncoder, indent=4)
+ _print_json_result_streaming(result)
+
  # Adds empty line at the end
  if not printed_end_line:
  print(flush=True)
 
 
- def _print_csv_result(result: CommandResult):
- data = json.loads(json.dumps(result, cls=CustomJSONEncoder))
+ def _print_json_result_streaming(result: CommandResult):
+ """Print a single CommandResult as JSON with streaming support"""
+ if isinstance(result, CollectionResult):
+ _stream_collection_as_json(result, indent=4)
+ elif isinstance(result, (ObjectResult, MessageResult)):
+ json.dump(result, sys.stdout, cls=StreamingJSONEncoder, indent=4)
+ else:
+ json.dump(result, sys.stdout, cls=StreamingJSONEncoder, indent=4)
+
+
+ def _print_object_result_as_csv(result: ObjectResult):
+ """Print an ObjectResult as a single-row CSV.
+
+ Converts the object's key-value pairs into a CSV with headers
+ from the keys and a single data row from the values.
+ """
+ data = result.result
  if isinstance(data, dict):
- writer = csv.DictWriter(sys.stdout, [*data], lineterminator="\n")
- writer.writeheader()
- writer.writerow(data)
- elif isinstance(data, list):
- if not data:
- return
- writer = csv.DictWriter(sys.stdout, [*data[0]], lineterminator="\n")
+ writer = csv.DictWriter(
+ sys.stdout, fieldnames=list(data.keys()), lineterminator="\n"
+ )
  writer.writeheader()
- for entry in data:
- writer.writerow(entry)
+ _write_csv_row(writer, data)
+
+
+ def _print_message_result_as_csv(result: MessageResult):
+ """Print a MessageResult as CSV with a single 'message' column.
+
+ Creates a simple CSV structure with one column named 'message'
+ containing the sanitized message text.
+ """
+ writer = csv.DictWriter(sys.stdout, fieldnames=["message"], lineterminator="\n")
+ writer.writeheader()
+ writer.writerow({"message": sanitize_for_terminal(result.message)})
+
+
+ def _print_csv_result_streaming(result: CommandResult):
+ """Print a single CommandResult as CSV with streaming support"""
+ if isinstance(result, CollectionResult):
+ _stream_collection_as_csv(result)
+ elif isinstance(result, ObjectResult):
+ _print_object_result_as_csv(result)
+ elif isinstance(result, MessageResult):
+ _print_message_result_as_csv(result)
 
 
  def _stream_json(result):
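
_print_csv_result_streaming routes CollectionResult through the same first-row trick for CSV: the header comes from the first row's keys and each row is flushed through csv.DictWriter as soon as it is produced. A rough standalone equivalent, with illustrative names and sample rows that are not taken from the package:

import csv
import sys
from typing import Any, Dict, Iterator


def stream_as_csv(rows: Iterator[Dict[str, Any]]) -> None:
    """Write dict rows as CSV, taking the header from the first row's keys."""
    try:
        first = next(rows)
    except StopIteration:
        return  # nothing to print for an empty result

    writer = csv.DictWriter(sys.stdout, fieldnames=list(first.keys()), lineterminator="\n")
    writer.writeheader()
    writer.writerow(first)
    for row in rows:
        writer.writerow(row)  # each row is written as soon as it arrives


stream_as_csv(iter([{"name": "a", "rows": 1}, {"name": "b", "rows": 2}]))
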
--- a/snowflake/cli/_plugins/dbt/commands.py
+++ b/snowflake/cli/_plugins/dbt/commands.py
@@ -19,7 +19,6 @@ from typing import Optional
 
  import typer
  from click import types
- from rich.progress import Progress, SpinnerColumn, TextColumn
  from snowflake.cli._plugins.dbt.constants import (
  DBT_COMMANDS,
  OUTPUT_COLUMN_NAME,
@@ -31,7 +30,9 @@ from snowflake.cli._plugins.object.command_aliases import add_object_command_ali
  from snowflake.cli._plugins.object.commands import scope_option
  from snowflake.cli.api.commands.decorators import global_options_with_connection
  from snowflake.cli.api.commands.flags import identifier_argument, like_option
+ from snowflake.cli.api.commands.overrideable_parameter import OverrideableOption
  from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
+ from snowflake.cli.api.console.console import cli_console
  from snowflake.cli.api.constants import ObjectType
  from snowflake.cli.api.exceptions import CliError
  from snowflake.cli.api.feature_flags import FeatureFlag
@@ -59,6 +60,16 @@ DBTNameArgument = identifier_argument(sf_object="DBT Project", example="my_pipel
  DBTNameOrCommandArgument = identifier_argument(
  sf_object="DBT Project", example="my_pipeline", click_type=types.StringParamType()
  )
+ DefaultTargetOption = OverrideableOption(
+ None,
+ "--default-target",
+ mutually_exclusive=["unset_default_target"],
+ )
+ UnsetDefaultTargetOption = OverrideableOption(
+ False,
+ "--unset-default-target",
+ mutually_exclusive=["default_target"],
+ )
 
  add_object_command_aliases(
  app=app,
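
DefaultTargetOption and UnsetDefaultTargetOption declare each other as mutually exclusive, so a single invocation cannot both set and unset the default target. The enforcement lives in OverrideableOption elsewhere in the package; the guard below is only a sketch of the check such a pair implies, not the CLI's actual implementation:

from typing import Optional

import typer


def check_mutually_exclusive(
    default_target: Optional[str], unset_default_target: bool
) -> None:
    # Hypothetical stand-in for the mutually_exclusive=[...] declarations above.
    if default_target is not None and unset_default_target:
        raise typer.BadParameter(
            "Options '--default-target' and '--unset-default-target' are incompatible."
        )
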
@@ -92,6 +103,21 @@ def deploy_dbt(
  False,
  help="Overwrites conflicting files in the project, if any.",
  ),
+ default_target: Optional[str] = DefaultTargetOption(
+ help="Default target for the dbt project. Mutually exclusive with --unset-default-target.",
+ hidden=FeatureFlag.ENABLE_DBT_GA_FEATURES.is_disabled(),
+ ),
+ unset_default_target: Optional[bool] = UnsetDefaultTargetOption(
+ help="Unset the default target for the dbt project. Mutually exclusive with --default-target.",
+ hidden=FeatureFlag.ENABLE_DBT_GA_FEATURES.is_disabled(),
+ ),
+ external_access_integrations: Optional[list[str]] = typer.Option(
+ None,
+ "--external-access-integration",
+ show_default=False,
+ help="External access integration to be used by the dbt object.",
+ hidden=FeatureFlag.ENABLE_DBT_GA_FEATURES.is_disabled(),
+ ),
  **options,
  ) -> CommandResult:
  """
@@ -99,6 +125,11 @@
  provided; or create a new one if it doesn't exist; or update files and
  create a new version if it exists.
  """
+ if FeatureFlag.ENABLE_DBT_GA_FEATURES.is_disabled():
+ default_target = None
+ unset_default_target = False
+ external_access_integrations = None
+
  project_path = SecurePath(source) if source is not None else SecurePath.cwd()
  profiles_dir_path = SecurePath(profiles_dir) if profiles_dir else project_path
  return QueryResult(
@@ -107,6 +138,9 @@
  project_path.resolve(),
  profiles_dir_path.resolve(),
  force=force,
+ default_target=default_target,
+ unset_default_target=unset_default_target,
+ external_access_integrations=external_access_integrations,
  )
  )
 
@@ -161,13 +195,8 @@
  f"Command submitted. You can check the result with `snow sql -q \"select execution_status from table(information_schema.query_history_by_user()) where query_id in ('{result.sfqid}');\"`"
  )
 
- with Progress(
- SpinnerColumn(),
- TextColumn("[progress.description]{task.description}"),
- transient=True,
- ) as progress:
- progress.add_task(description=f"Executing 'dbt {dbt_command}'", total=None)
-
+ with cli_console.spinner() as spinner:
+ spinner.add_task(description=f"Executing 'dbt {dbt_command}'", total=None)
  result = dbt_manager.execute(*execute_args)
 
  try:
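
The inline rich Progress spinner is replaced by a cli_console.spinner() context manager, which is part of the +20 lines added to snowflake/cli/api/console/console.py and not shown in this excerpt. Its implementation is therefore an assumption here; a plausible sketch is that it simply wraps the same transient Progress the command previously built by hand:

from contextlib import contextmanager

from rich.progress import Progress, SpinnerColumn, TextColumn


@contextmanager
def spinner():
    # Assumed shape of cli_console.spinner(): yield a transient rich Progress
    # so callers can add_task() exactly as the old inline code did.
    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        transient=True,
    ) as progress:
        yield progress
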
--- a/snowflake/cli/_plugins/dbt/manager.py
+++ b/snowflake/cli/_plugins/dbt/manager.py
@@ -17,6 +17,7 @@ from __future__ import annotations
  from collections import defaultdict
  from pathlib import Path
  from tempfile import TemporaryDirectory
+ from typing import List, Optional, TypedDict
 
  import yaml
  from snowflake.cli._plugins.dbt.constants import PROFILES_FILENAME
@@ -29,6 +30,11 @@ from snowflake.cli.api.identifiers import FQN
  from snowflake.cli.api.secure_path import SecurePath
  from snowflake.cli.api.sql_execution import SqlExecutionMixin
  from snowflake.connector.cursor import SnowflakeCursor
+ from snowflake.connector.errors import ProgrammingError
+
+
+ class DBTObjectEditableAttributes(TypedDict):
+ default_target: Optional[str]
 
 
  class DBTManager(SqlExecutionMixin):
@@ -42,12 +48,44 @@ class DBTManager(SqlExecutionMixin):
  object_type=ObjectType.DBT_PROJECT.value.cli_name, fqn=name
  )
 
+ @staticmethod
+ def describe(name: FQN) -> SnowflakeCursor:
+ return ObjectManager().describe(
+ object_type=ObjectType.DBT_PROJECT.value.cli_name, fqn=name
+ )
+
+ @staticmethod
+ def get_dbt_object_attributes(name: FQN) -> Optional[DBTObjectEditableAttributes]:
+ """Get editable attributes of an existing DBT project, or None if it doesn't exist."""
+ try:
+ cursor = DBTManager().describe(name)
+ except ProgrammingError as exc:
+ if "DBT PROJECT" in exc.msg and "does not exist" in exc.msg:
+ return None
+ raise exc
+
+ rows = list(cursor)
+ if not rows:
+ return None
+
+ row = rows[0]
+ # Convert row to dict using column names
+ columns = [desc[0] for desc in cursor.description]
+ row_dict = dict(zip(columns, row))
+
+ return DBTObjectEditableAttributes(
+ default_target=row_dict.get("default_target")
+ )
+
  def deploy(
  self,
  fqn: FQN,
  path: SecurePath,
  profiles_path: SecurePath,
  force: bool,
+ default_target: Optional[str] = None,
+ unset_default_target: bool = False,
+ external_access_integrations: Optional[List[str]] = None,
  ) -> SnowflakeCursor:
  dbt_project_path = path / "dbt_project.yml"
  if not dbt_project_path.exists():
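
get_dbt_object_attributes flattens the single DESCRIBE row into a dict keyed by column name and returns None when the project does not exist, so deploy can decide between CREATE and ALTER and compare the stored default_target with the requested one. A hedged usage sketch (the project name is an example, and a configured connection is assumed):

from snowflake.cli._plugins.dbt.manager import DBTManager
from snowflake.cli.api.identifiers import FQN

attributes = DBTManager().get_dbt_object_attributes(
    FQN.from_string("my_db.my_schema.my_pipeline")  # example identifier
)
if attributes is None:
    print("project does not exist yet")        # deploy will CREATE it
else:
    print(attributes.get("default_target"))    # deploy may SET/UNSET DEFAULT_TARGET
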
@@ -62,13 +100,13 @@
  except KeyError:
  raise CliError("`profile` is not defined in dbt_project.yml")
 
- self._validate_profiles(profiles_path, profile)
+ self._validate_profiles(profiles_path, profile, default_target)
 
  with cli_console.phase("Creating temporary stage"):
  stage_manager = StageManager()
- stage_fqn = FQN.from_string(f"dbt_{fqn.name}_stage").using_context()
- stage_name = stage_manager.get_standard_stage_prefix(stage_fqn)
+ stage_fqn = FQN.from_resource(ObjectType.DBT_PROJECT, fqn, "STAGE")
  stage_manager.create(stage_fqn, temporary=True)
+ stage_name = stage_manager.get_standard_stage_prefix(stage_fqn)
 
  with cli_console.phase("Copying project files to stage"):
  with TemporaryDirectory() as tmp:
@@ -86,22 +124,109 @@
 
  with cli_console.phase("Creating DBT project"):
  if force is True:
- query = f"CREATE OR REPLACE DBT PROJECT {fqn}"
- elif self.exists(name=fqn):
- query = f"ALTER DBT PROJECT {fqn} ADD VERSION"
+ return self._deploy_create_or_replace(
+ fqn, stage_name, default_target, external_access_integrations
+ )
  else:
- query = f"CREATE DBT PROJECT {fqn}"
- query += f"\nFROM {stage_name}"
- return self.execute_query(query)
+ dbt_object_attributes = self.get_dbt_object_attributes(fqn)
+ if dbt_object_attributes is not None:
+ return self._deploy_alter(
+ fqn,
+ stage_name,
+ dbt_object_attributes,
+ default_target,
+ unset_default_target,
+ external_access_integrations,
+ )
+ else:
+ return self._deploy_create(
+ fqn, stage_name, default_target, external_access_integrations
+ )
+
+ def _deploy_alter(
+ self,
+ fqn: FQN,
+ stage_name: str,
+ dbt_object_attributes: DBTObjectEditableAttributes,
+ default_target: Optional[str],
+ unset_default_target: bool,
+ external_access_integrations: Optional[List[str]],
+ ) -> SnowflakeCursor:
+ query = f"ALTER DBT PROJECT {fqn} ADD VERSION"
+ query += f"\nFROM {stage_name}"
+ query = self._handle_external_access_integrations_query(
+ query, external_access_integrations
+ )
+ result = self.execute_query(query)
+ current_default_target = dbt_object_attributes.get("default_target")
+ if unset_default_target and current_default_target is not None:
+ unset_query = f"ALTER DBT PROJECT {fqn} UNSET DEFAULT_TARGET"
+ self.execute_query(unset_query)
+ elif default_target and (
+ current_default_target is None
+ or current_default_target.lower() != default_target.lower()
+ ):
+ set_default_query = (
+ f"ALTER DBT PROJECT {fqn} SET DEFAULT_TARGET='{default_target}'"
+ )
+ self.execute_query(set_default_query)
+ return result
+
+ def _deploy_create(
+ self,
+ fqn: FQN,
+ stage_name: str,
+ default_target: Optional[str],
+ external_access_integrations: Optional[List[str]],
+ ) -> SnowflakeCursor:
+ # Project doesn't exist - create new one
+ query = f"CREATE DBT PROJECT {fqn}"
+ query += f"\nFROM {stage_name}"
+ if default_target:
+ query += f" DEFAULT_TARGET='{default_target}'"
+ query = self._handle_external_access_integrations_query(
+ query, external_access_integrations
+ )
+ return self.execute_query(query)
+
+ @staticmethod
+ def _handle_external_access_integrations_query(
+ query: str, external_access_integrations: Optional[List[str]]
+ ) -> str:
+ if external_access_integrations:
+ integrations_str = ", ".join(external_access_integrations)
+ query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ({integrations_str})"
+ return query
+
+ def _deploy_create_or_replace(
+ self,
+ fqn: FQN,
+ stage_name: str,
+ default_target: Optional[str],
+ external_access_integrations: Optional[List[str]],
+ ) -> SnowflakeCursor:
+ query = f"CREATE OR REPLACE DBT PROJECT {fqn}"
+ query += f"\nFROM {stage_name}"
+ if default_target:
+ query += f" DEFAULT_TARGET='{default_target}'"
+ query = self._handle_external_access_integrations_query(
+ query, external_access_integrations
+ )
+ return self.execute_query(query)
 
  @staticmethod
- def _validate_profiles(profiles_path: SecurePath, target_profile: str) -> None:
+ def _validate_profiles(
+ profiles_path: SecurePath,
+ target_profile: str,
+ default_target: str | None = None,
+ ) -> None:
  """
  Validates that:
  * profiles.yml exists
  * contain profile specified in dbt_project.yml
  * no other profiles are defined there
  * does not contain any confidential data like passwords
+ * default_target (if specified) exists in the profile's outputs
  """
  profiles_file = profiles_path / PROFILES_FILENAME
  if not profiles_file.exists():
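
Taken together, the refactored deploy issues one of three statements (CREATE OR REPLACE, ALTER ... ADD VERSION, or CREATE) and appends DEFAULT_TARGET and EXTERNAL_ACCESS_INTEGRATIONS clauses when the new flags are used. The snippet below replays the string building from _deploy_create for hypothetical inputs; the object and stage names are made up and the stage prefix format is assumed:

# Hypothetical inputs, not taken from the diff.
fqn = "my_db.my_schema.my_pipeline"
stage_name = "@my_db.my_schema.dbt_my_pipeline_stage"  # assumed stage prefix
default_target = "dev"
external_access_integrations = ["my_eai"]

query = f"CREATE DBT PROJECT {fqn}"
query += f"\nFROM {stage_name}"
if default_target:
    query += f" DEFAULT_TARGET='{default_target}'"
if external_access_integrations:
    query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ({', '.join(external_access_integrations)})"
print(query)
# CREATE DBT PROJECT my_db.my_schema.my_pipeline
# FROM @my_db.my_schema.dbt_my_pipeline_stage DEFAULT_TARGET='dev'
# EXTERNAL_ACCESS_INTEGRATIONS = (my_eai)
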
@@ -152,6 +277,15 @@
  f"Value for type field is invalid. Should be set to `snowflake` in target {target_name}"
  )
 
+ if default_target is not None:
+ available_targets = set(profiles[target_profile]["outputs"].keys())
+ if default_target not in available_targets:
+ available_targets_str = ", ".join(sorted(available_targets))
+ errors["default_target"].append(
+ f"Default target '{default_target}' is not defined in profile '{target_profile}'. "
+ f"Available targets: {available_targets_str}"
+ )
+
  if errors:
  message = f"Found following errors in {PROFILES_FILENAME}. Please fix them before proceeding:"
  for target, issues in errors.items():
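
The new validation step assumes the usual dbt profiles.yml layout, in which targets live under the profile's outputs mapping: the requested default target must be one of those keys. A small illustration with a made-up profile, showing the kind of error the validator collects:

# Hypothetical parsed profiles.yml content; only the outputs keys matter here.
profiles = {
    "my_profile": {
        "outputs": {
            "dev": {"type": "snowflake"},
            "prod": {"type": "snowflake"},
        }
    }
}

default_target = "staging"
available_targets = set(profiles["my_profile"]["outputs"].keys())
if default_target not in available_targets:
    print(
        f"Default target '{default_target}' is not defined in profile 'my_profile'. "
        f"Available targets: {', '.join(sorted(available_targets))}"
    )
# -> Default target 'staging' is not defined in profile 'my_profile'. Available targets: dev, prod
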