snowflake-cli 3.11.0__py3-none-any.whl → 3.13.0__py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- snowflake/cli/__about__.py +1 -1
- snowflake/cli/_app/cli_app.py +43 -1
- snowflake/cli/_app/commands_registration/builtin_plugins.py +1 -1
- snowflake/cli/_app/commands_registration/command_plugins_loader.py +14 -1
- snowflake/cli/_app/printing.py +153 -19
- snowflake/cli/_app/telemetry.py +25 -10
- snowflake/cli/_plugins/auth/__init__.py +0 -2
- snowflake/cli/_plugins/connection/commands.py +1 -78
- snowflake/cli/_plugins/dbt/commands.py +44 -19
- snowflake/cli/_plugins/dbt/constants.py +1 -1
- snowflake/cli/_plugins/dbt/manager.py +252 -47
- snowflake/cli/_plugins/dcm/commands.py +65 -90
- snowflake/cli/_plugins/dcm/manager.py +137 -50
- snowflake/cli/_plugins/logs/commands.py +7 -0
- snowflake/cli/_plugins/logs/manager.py +21 -1
- snowflake/cli/_plugins/nativeapp/entities/application_package.py +4 -1
- snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +3 -1
- snowflake/cli/_plugins/object/manager.py +1 -0
- snowflake/cli/_plugins/snowpark/common.py +1 -0
- snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +29 -5
- snowflake/cli/_plugins/snowpark/package_utils.py +44 -3
- snowflake/cli/_plugins/spcs/services/commands.py +19 -1
- snowflake/cli/_plugins/spcs/services/manager.py +17 -4
- snowflake/cli/_plugins/spcs/services/service_entity_model.py +5 -0
- snowflake/cli/_plugins/sql/lexer/types.py +1 -0
- snowflake/cli/_plugins/sql/repl.py +100 -26
- snowflake/cli/_plugins/sql/repl_commands.py +607 -0
- snowflake/cli/_plugins/sql/statement_reader.py +44 -20
- snowflake/cli/_plugins/streamlit/streamlit_entity.py +28 -2
- snowflake/cli/_plugins/streamlit/streamlit_entity_model.py +24 -4
- snowflake/cli/api/artifacts/bundle_map.py +32 -2
- snowflake/cli/api/artifacts/regex_resolver.py +54 -0
- snowflake/cli/api/artifacts/upload.py +5 -1
- snowflake/cli/api/artifacts/utils.py +12 -1
- snowflake/cli/api/cli_global_context.py +7 -0
- snowflake/cli/api/commands/decorators.py +7 -0
- snowflake/cli/api/commands/flags.py +24 -1
- snowflake/cli/api/console/abc.py +13 -2
- snowflake/cli/api/console/console.py +20 -0
- snowflake/cli/api/constants.py +9 -0
- snowflake/cli/api/entities/utils.py +10 -6
- snowflake/cli/api/feature_flags.py +3 -2
- snowflake/cli/api/identifiers.py +18 -1
- snowflake/cli/api/project/schemas/entities/entities.py +0 -6
- snowflake/cli/api/rendering/sql_templates.py +2 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/METADATA +7 -7
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/RECORD +51 -54
- snowflake/cli/_plugins/auth/keypair/__init__.py +0 -0
- snowflake/cli/_plugins/auth/keypair/commands.py +0 -153
- snowflake/cli/_plugins/auth/keypair/manager.py +0 -331
- snowflake/cli/_plugins/dcm/dcm_project_entity_model.py +0 -59
- snowflake/cli/_plugins/sql/snowsql_commands.py +0 -331
- /snowflake/cli/_plugins/auth/{keypair/plugin_spec.py → plugin_spec.py} +0 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/WHEEL +0 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/licenses/LICENSE +0 -0

snowflake/cli/_plugins/dbt/manager.py

@@ -17,6 +17,7 @@ from __future__ import annotations
 from collections import defaultdict
 from pathlib import Path
 from tempfile import TemporaryDirectory
+from typing import Dict, List, Optional, TypedDict
 
 import yaml
 from snowflake.cli._plugins.dbt.constants import PROFILES_FILENAME
@@ -29,6 +30,12 @@ from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.secure_path import SecurePath
 from snowflake.cli.api.sql_execution import SqlExecutionMixin
 from snowflake.connector.cursor import SnowflakeCursor
+from snowflake.connector.errors import ProgrammingError
+
+
+class DBTObjectEditableAttributes(TypedDict):
+    default_target: Optional[str]
+    external_access_integrations: Optional[List[str]]
 
 
 class DBTManager(SqlExecutionMixin):
@@ -42,12 +49,59 @@ class DBTManager(SqlExecutionMixin):
             object_type=ObjectType.DBT_PROJECT.value.cli_name, fqn=name
         )
 
+    @staticmethod
+    def describe(name: FQN) -> SnowflakeCursor:
+        return ObjectManager().describe(
+            object_type=ObjectType.DBT_PROJECT.value.cli_name, fqn=name
+        )
+
+    @staticmethod
+    def get_dbt_object_attributes(name: FQN) -> Optional[DBTObjectEditableAttributes]:
+        """Get editable attributes of an existing DBT project, or None if it doesn't exist."""
+        try:
+            cursor = DBTManager().describe(name)
+        except ProgrammingError as exc:
+            if "DBT PROJECT" in exc.msg and "does not exist" in exc.msg:
+                return None
+            raise exc
+
+        rows = list(cursor)
+        if not rows:
+            return None
+
+        row = rows[0]
+        # Convert row to dict using column names
+        columns = [desc[0].lower() for desc in cursor.description]
+        row_dict = dict(zip(columns, row))
+
+        external_access_integrations = row_dict.get("external_access_integrations")
+        if external_access_integrations:
+            if isinstance(external_access_integrations, str):
+                external_access_integrations = [
+                    x.strip()
+                    for x in external_access_integrations.strip("[]").split(",")
+                    if x.strip()
+                ]
+            elif not isinstance(external_access_integrations, list):
+                external_access_integrations = None
+        else:
+            external_access_integrations = None
+
+        return DBTObjectEditableAttributes(
+            default_target=row_dict.get("default_target"),
+            external_access_integrations=external_access_integrations,
+        )
+
     def deploy(
         self,
         fqn: FQN,
         path: SecurePath,
         profiles_path: SecurePath,
         force: bool,
+        default_target: Optional[str] = None,
+        unset_default_target: bool = False,
+        external_access_integrations: Optional[List[str]] = None,
+        install_local_deps: bool = False,
     ) -> SnowflakeCursor:
         dbt_project_path = path / "dbt_project.yml"
         if not dbt_project_path.exists():
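
The string handling in get_dbt_object_attributes above accepts DESCRIBE output that reports EXTERNAL_ACCESS_INTEGRATIONS either as a list or as a bracketed string. A standalone sketch of that parsing, with a hypothetical sample value that is not taken from the package:

# Standalone sketch of the bracketed-string parsing used in
# get_dbt_object_attributes above; the sample value below is hypothetical.
from typing import List, Optional


def parse_integrations(raw: object) -> Optional[List[str]]:
    # Mirrors the diff: keep a list as-is, split a "[A, B]"-style string,
    # and treat anything else (including empty values) as None.
    if not raw:
        return None
    if isinstance(raw, str):
        return [x.strip() for x in raw.strip("[]").split(",") if x.strip()]
    if isinstance(raw, list):
        return raw
    return None


print(parse_integrations("[MY_INTEGRATION, OTHER_INTEGRATION]"))
# -> ['MY_INTEGRATION', 'OTHER_INTEGRATION']
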
@@ -62,13 +116,13 @@ class DBTManager(SqlExecutionMixin):
         except KeyError:
             raise CliError("`profile` is not defined in dbt_project.yml")
 
-        self._validate_profiles(profiles_path, profile)
+        self._validate_profiles(profiles_path, profile, default_target)
 
         with cli_console.phase("Creating temporary stage"):
             stage_manager = StageManager()
-            stage_fqn = FQN.
-            stage_name = stage_manager.get_standard_stage_prefix(stage_fqn)
+            stage_fqn = FQN.from_resource(ObjectType.DBT_PROJECT, fqn, "STAGE")
             stage_manager.create(stage_fqn, temporary=True)
+            stage_name = stage_manager.get_standard_stage_prefix(stage_fqn)
 
         with cli_console.phase("Copying project files to stage"):
             with TemporaryDirectory() as tmp:
@@ -86,22 +140,166 @@ class DBTManager(SqlExecutionMixin):
 
         with cli_console.phase("Creating DBT project"):
             if force is True:
-
-
-
+                return self._deploy_create_or_replace(
+                    fqn,
+                    stage_name,
+                    default_target,
+                    external_access_integrations,
+                    install_local_deps,
+                )
             else:
-
-
-
+                dbt_object_attributes = self.get_dbt_object_attributes(fqn)
+                if dbt_object_attributes is not None:
+                    return self._deploy_alter(
+                        fqn,
+                        stage_name,
+                        dbt_object_attributes,
+                        default_target,
+                        unset_default_target,
+                        external_access_integrations,
+                        install_local_deps,
+                    )
+                else:
+                    return self._deploy_create(
+                        fqn,
+                        stage_name,
+                        default_target,
+                        external_access_integrations,
+                        install_local_deps,
+                    )
+
+    def _deploy_alter(
+        self,
+        fqn: FQN,
+        stage_name: str,
+        dbt_object_attributes: DBTObjectEditableAttributes,
+        default_target: Optional[str],
+        unset_default_target: bool,
+        external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
+    ) -> SnowflakeCursor:
+        query = f"ALTER DBT PROJECT {fqn} ADD VERSION"
+        query += f"\nFROM {stage_name}"
+        result = self.execute_query(query)
+
+        set_properties = []
+        unset_properties = []
+
+        current_default_target = dbt_object_attributes.get("default_target")
+        if unset_default_target and current_default_target is not None:
+            unset_properties.append("DEFAULT_TARGET")
+        elif default_target and (
+            current_default_target is None
+            or current_default_target.lower() != default_target.lower()
+        ):
+            set_properties.append(f"DEFAULT_TARGET='{default_target}'")
+
+        current_external_access_integrations = dbt_object_attributes.get(
+            "external_access_integrations"
+        )
+        if self._should_update_external_access_integrations(
+            current_external_access_integrations,
+            external_access_integrations,
+            install_local_deps,
+        ):
+            if external_access_integrations:
+                integrations_str = ", ".join(sorted(external_access_integrations))
+                set_properties.append(
+                    f"EXTERNAL_ACCESS_INTEGRATIONS=({integrations_str})"
+                )
+            elif install_local_deps:
+                set_properties.append("EXTERNAL_ACCESS_INTEGRATIONS=()")
+
+        if set_properties or unset_properties:
+            self._execute_property_updates(fqn, set_properties, unset_properties)
+
+        return result
 
     @staticmethod
-    def
+    def _should_update_external_access_integrations(
+        current: Optional[List[str]],
+        requested: Optional[List[str]],
+        install_local_deps: bool,
+    ) -> bool:
+        if requested is not None:
+            current_set = set(current) if current else set()
+            requested_set = set(requested)
+            return current_set != requested_set
+        elif install_local_deps:
+            current_set = set(current) if current else set()
+            return current_set != set()
+        return False
+
+    def _execute_property_updates(
+        self, fqn: FQN, set_clauses: List[str], unset_properties: List[str]
+    ) -> None:
+        if set_clauses:
+            query = f"ALTER DBT PROJECT {fqn} SET {', '.join(set_clauses)}"
+            self.execute_query(query)
+
+        for property_name in unset_properties:
+            query = f"ALTER DBT PROJECT {fqn} UNSET {property_name}"
+            self.execute_query(query)
+
+    def _deploy_create(
+        self,
+        fqn: FQN,
+        stage_name: str,
+        default_target: Optional[str],
+        external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
+    ) -> SnowflakeCursor:
+        query = f"CREATE DBT PROJECT {fqn}"
+        query += f"\nFROM {stage_name}"
+        if default_target:
+            query += f" DEFAULT_TARGET='{default_target}'"
+        query = self._handle_external_access_integrations_query(
+            query, external_access_integrations, install_local_deps
+        )
+        return self.execute_query(query)
+
+    @staticmethod
+    def _handle_external_access_integrations_query(
+        query: str,
+        external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
+    ) -> str:
+        # Providing external access integrations will trigger installation of local deps as well
+        if external_access_integrations:
+            integrations_str = ", ".join(external_access_integrations)
+            query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ({integrations_str})"
+        elif install_local_deps:
+            query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ()"
+        return query
+
+    def _deploy_create_or_replace(
+        self,
+        fqn: FQN,
+        stage_name: str,
+        default_target: Optional[str],
+        external_access_integrations: Optional[List[str]],
+        install_local_deps: bool,
+    ) -> SnowflakeCursor:
+        query = f"CREATE OR REPLACE DBT PROJECT {fqn}"
+        query += f"\nFROM {stage_name}"
+        if default_target:
+            query += f" DEFAULT_TARGET='{default_target}'"
+        query = self._handle_external_access_integrations_query(
+            query, external_access_integrations, install_local_deps
+        )
+        return self.execute_query(query)
+
+    def _validate_profiles(
+        self,
+        profiles_path: SecurePath,
+        profile_name: str,
+        default_target: str | None = None,
+    ) -> None:
         """
         Validates that:
         * profiles.yml exists
         * contain profile specified in dbt_project.yml
-        *
-        * does not contain any confidential data like passwords
+        * default_target (if specified) exists in the profile's outputs
         """
         profiles_file = profiles_path / PROFILES_FILENAME
         if not profiles_file.exists():
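
The new deploy helpers above assemble plain SQL text. A minimal sketch of the statement a create-style deploy would produce, using hypothetical project and stage names rather than anything from the package:

# Sketch of the SQL text built by the _deploy_create / _deploy_create_or_replace
# helpers above; the project and stage names are hypothetical.
from typing import List, Optional


def build_create_query(
    fqn: str,
    stage_name: str,
    default_target: Optional[str] = None,
    external_access_integrations: Optional[List[str]] = None,
    install_local_deps: bool = False,
    replace: bool = False,
) -> str:
    verb = "CREATE OR REPLACE" if replace else "CREATE"
    query = f"{verb} DBT PROJECT {fqn}\nFROM {stage_name}"
    if default_target:
        query += f" DEFAULT_TARGET='{default_target}'"
    # Passing integrations (or asking for local deps) adds the EAI clause.
    if external_access_integrations:
        query += f"\nEXTERNAL_ACCESS_INTEGRATIONS = ({', '.join(external_access_integrations)})"
    elif install_local_deps:
        query += "\nEXTERNAL_ACCESS_INTEGRATIONS = ()"
    return query


print(build_create_query("my_db.public.my_dbt_project", "@my_stage", "dev", ["MY_EAI"]))
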
@@ -111,53 +309,60 @@ class DBTManager(SqlExecutionMixin):
         with profiles_file.open(read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as fd:
             profiles = yaml.safe_load(fd)
 
-        if
+        if profile_name not in profiles:
             raise CliError(
-                f"
+                f"Profile {profile_name} is not defined in {PROFILES_FILENAME}."
             )
 
         errors = defaultdict(list)
-
-
-
-
+        profile = profiles[profile_name]
+        target_name = default_target or profile.get("target")
+        available_targets = set(profile["outputs"].keys())
+        if target_name in available_targets:
+            target = profile["outputs"][target_name]
+            target_errors = self._validate_target(target_name, target)
+            if target_errors:
+                errors[profile_name].extend(target_errors)
+        else:
+            available_targets_str = ", ".join(sorted(available_targets))
+            errors[profile_name].append(
+                f"Target '{target_name}' is not defined in profile '{profile_name}'. "
+                f"Available targets: {available_targets_str}"
+            )
 
+        if errors:
+            message = f"Found following errors in {PROFILES_FILENAME}. Please fix them before proceeding:"
+            for target, issues in errors.items():
+                message += f"\n{target}"
+                message += "\n * " + "\n * ".join(issues)
+            raise CliError(message)
+
+    def _validate_target(
+        self, target_name: str, target_details: Dict[str, str]
+    ) -> List[str]:
+        errors = []
         required_fields = {
-            "account",
             "database",
             "role",
             "schema",
             "type",
-            "user",
-            "warehouse",
-        }
-        supported_fields = {
-            "threads",
         }
-
-
-
-
-
-        if (
-
-
-            - supported_fields
-        ):
-            errors[target_profile].append(
-                f"Unsupported fields found: {', '.join(sorted(unsupported_keys))} in target {target_name}"
-            )
-        if "type" in target and target["type"].lower() != "snowflake":
-            errors[target_profile].append(
-                f"Value for type field is invalid. Should be set to `snowflake` in target {target_name}"
-            )
+        if missing_keys := required_fields - set(target_details.keys()):
+            errors.append(
+                f"Missing required fields: {', '.join(sorted(missing_keys))} in target {target_name}"
+            )
+        if role := target_details.get("role"):
+            if not self._validate_role(role_name=role):
+                errors.append(f"Role '{role}' does not exist or is not accessible.")
+        return errors
 
-
-
-
-
-
-
+    def _validate_role(self, role_name: str) -> bool:
+        try:
+            with self.use_role(role_name):
+                self.execute_query("select 1")
+            return True
+        except ProgrammingError:
+            return False
 
     @staticmethod
     def _prepare_profiles_file(profiles_path: Path, tmp_path: Path):
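
The rewritten validation above narrows the required profile fields and checks the role by actually switching to it. A small sketch of the required-fields check, run against a hypothetical profiles.yml target:

# Sketch of the required-fields check performed by _validate_target above;
# the sample target dictionary is hypothetical.
from typing import Dict, List

REQUIRED_FIELDS = {"database", "role", "schema", "type"}


def missing_field_errors(target_name: str, target_details: Dict[str, str]) -> List[str]:
    errors: List[str] = []
    if missing_keys := REQUIRED_FIELDS - set(target_details.keys()):
        errors.append(
            f"Missing required fields: {', '.join(sorted(missing_keys))} in target {target_name}"
        )
    return errors


sample_target = {"type": "snowflake", "database": "MY_DB", "schema": "PUBLIC"}
print(missing_field_errors("dev", sample_target))
# -> ['Missing required fields: role in target dev']
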

snowflake/cli/_plugins/dcm/commands.py

@@ -14,29 +14,23 @@
 from typing import List, Optional
 
 import typer
-from snowflake.cli._plugins.dcm.dcm_project_entity_model import (
-    DCMProjectEntityModel,
-)
 from snowflake.cli._plugins.dcm.manager import DCMProjectManager
 from snowflake.cli._plugins.object.command_aliases import add_object_command_aliases
 from snowflake.cli._plugins.object.commands import scope_option
 from snowflake.cli._plugins.object.manager import ObjectManager
-from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage
-from snowflake.cli.api.cli_global_context import get_cli_context
-from snowflake.cli.api.commands.decorators import with_project_definition
 from snowflake.cli.api.commands.flags import (
     IfExistsOption,
     IfNotExistsOption,
     OverrideableOption,
-    entity_argument,
     identifier_argument,
     like_option,
     variables_option,
 )
 from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
-from snowflake.cli.api.commands.utils import get_entity_for_operation
 from snowflake.cli.api.console.console import cli_console
-from snowflake.cli.api.constants import
+from snowflake.cli.api.constants import (
+    ObjectType,
+)
 from snowflake.cli.api.exceptions import CliError
 from snowflake.cli.api.feature_flags import FeatureFlag
 from snowflake.cli.api.identifiers import FQN
@@ -45,7 +39,7 @@ from snowflake.cli.api.output.types import (
     QueryJsonValueResult,
     QueryResult,
 )
-from snowflake.cli.api.
+from snowflake.cli.api.utils.path_utils import is_stage_path
 
 app = SnowTyperFactory(
     name="dcm",
@@ -63,18 +57,10 @@ configuration_flag = typer.Option(
     help="Configuration of the DCM Project to use. If not specified default configuration is used.",
     show_default=False,
 )
-from_option =
+from_option = typer.Option(
     None,
     "--from",
-
-    show_default=False,
-)
-
-prune_option = OverrideableOption(
-    False,
-    "--prune",
-    help="Remove unused artifacts from the stage during sync. Mutually exclusive with --from.",
-    mutually_exclusive=["from_stage"],
+    help="Source location: stage path (starting with '@') or local directory path. Omit to use current directory.",
     show_default=False,
 )
 
@@ -122,61 +108,65 @@ add_object_command_aliases(
 @app.command(requires_connection=True)
 def deploy(
     identifier: FQN = dcm_identifier,
-
-        help="Deploy DCM Project deployment from a given stage."
-    ),
+    from_location: Optional[str] = from_option,
     variables: Optional[List[str]] = variables_flag,
     configuration: Optional[str] = configuration_flag,
     alias: Optional[str] = alias_option,
-    prune: bool = prune_option(),
     **options,
 ):
     """
     Applies changes defined in DCM Project to Snowflake.
     """
-
-
-
-
-
-
-
-
+    manager = DCMProjectManager()
+    effective_stage = _get_effective_stage(identifier, from_location)
+
+    with cli_console.spinner() as spinner:
+        spinner.add_task(description=f"Deploying dcm project {identifier}", total=None)
+        result = manager.execute(
+            project_identifier=identifier,
+            configuration=configuration,
+            from_stage=effective_stage,
+            variables=variables,
+            alias=alias,
+            output_path=None,
+        )
     return QueryJsonValueResult(result)
 
 
 @app.command(requires_connection=True)
 def plan(
     identifier: FQN = dcm_identifier,
-
-        help="Plan DCM Project deployment from a given stage."
-    ),
+    from_location: Optional[str] = from_option,
     variables: Optional[List[str]] = variables_flag,
     configuration: Optional[str] = configuration_flag,
-    prune: bool = prune_option(),
     output_path: Optional[str] = output_path_option(
-        help="
+        help="Path where the deployment plan output will be stored. Can be a stage path (starting with '@') or a local directory path."
     ),
     **options,
 ):
     """
     Plans a DCM Project deployment (validates without executing).
     """
-
-
-
-
-
-
-
-
+    manager = DCMProjectManager()
+    effective_stage = _get_effective_stage(identifier, from_location)
+
+    with cli_console.spinner() as spinner:
+        spinner.add_task(description=f"Planning dcm project {identifier}", total=None)
+        result = manager.execute(
+            project_identifier=identifier,
+            configuration=configuration,
+            from_stage=effective_stage,
+            dry_run=True,
+            variables=variables,
+            output_path=output_path,
+        )
+
     return QueryJsonValueResult(result)
 
 
 @app.command(requires_connection=True)
-@with_project_definition()
 def create(
-
+    identifier: FQN = dcm_identifier,
     if_not_exists: bool = IfNotExistsOption(
         help="Do nothing if the project already exists."
     ),
@@ -185,28 +175,18 @@ def create(
     """
     Creates a DCM Project in Snowflake.
    """
-    cli_context = get_cli_context()
-    project: DCMProjectEntityModel = get_entity_for_operation(
-        cli_context=cli_context,
-        entity_id=entity_id,
-        project_definition=cli_context.project_definition,
-        entity_type="dcm",
-    )
     om = ObjectManager()
-    if om.object_exists(object_type="dcm", fqn=
-        message = f"DCM Project '{
+    if om.object_exists(object_type="dcm", fqn=identifier):
+        message = f"DCM Project '{identifier}' already exists."
         if if_not_exists:
             return MessageResult(message)
         raise CliError(message)
 
-    if om.object_exists(object_type="stage", fqn=FQN.from_stage(project.stage)):
-        raise CliError(f"Stage '{project.stage}' already exists.")
-
     dpm = DCMProjectManager()
-    with cli_console.phase(f"Creating DCM Project '{
-        dpm.create(
+    with cli_console.phase(f"Creating DCM Project '{identifier}'"):
+        dpm.create(project_identifier=identifier)
 
-    return MessageResult(f"DCM Project '{
+    return MessageResult(f"DCM Project '{identifier}' successfully created.")
 
 
 @app.command(requires_connection=True)
@@ -218,56 +198,51 @@ def list_deployments(
     Lists deployments of given DCM Project.
     """
     pm = DCMProjectManager()
-    results = pm.
+    results = pm.list_deployments(project_identifier=identifier)
     return QueryResult(results)
 
 
 @app.command(requires_connection=True)
 def drop_deployment(
     identifier: FQN = dcm_identifier,
-
-        help="Name or alias of the
+    deployment_name: str = typer.Argument(
+        help="Name or alias of the deployment to drop. For names containing '$', use single quotes to prevent shell expansion (e.g., 'DEPLOYMENT$1').",
         show_default=False,
     ),
-    if_exists: bool = IfExistsOption(
+    if_exists: bool = IfExistsOption(
+        help="Do nothing if the deployment does not exist."
+    ),
     **options,
 ):
     """
-    Drops a
+    Drops a deployment from the DCM Project.
     """
     # Detect potential shell expansion issues
-    if
+    if deployment_name and deployment_name.upper() == "DEPLOYMENT":
         cli_console.warning(
-            f"
-            f"If you meant to use a
+            f"Deployment name '{deployment_name}' might be truncated due to shell expansion. "
+            f"If you meant to use a deployment like 'DEPLOYMENT$1', try using single quotes: 'DEPLOYMENT$1'."
        )
 
     dpm = DCMProjectManager()
     dpm.drop_deployment(
-
-
+        project_identifier=identifier,
+        deployment_name=deployment_name,
         if_exists=if_exists,
     )
     return MessageResult(
-        f"
+        f"Deployment '{deployment_name}' dropped from DCM Project '{identifier}'."
     )
 
 
-def
-
-
-
-
-
-
-
-
-    with cli_console.phase("Syncing local files to stage"):
-        sync_artifacts_with_stage(
-            project_paths=ProjectPaths(project_root=cli_context.project_root),
-            stage_root=project_entity.stage,
-            artifacts=project_entity.artifacts,
-            prune=prune,
+def _get_effective_stage(identifier: FQN, from_location: Optional[str]):
+    manager = DCMProjectManager()
+    if not from_location:
+        from_stage = manager.sync_local_files(project_identifier=identifier)
+    elif is_stage_path(from_location):
+        from_stage = from_location
+    else:
+        from_stage = manager.sync_local_files(
+            project_identifier=identifier, source_directory=from_location
        )
-
-    return project_entity.stage
+    return from_stage