snowflake-cli 3.0.2__py3-none-any.whl → 3.1.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- snowflake/cli/__about__.py +1 -1
- snowflake/cli/_app/cli_app.py +3 -0
- snowflake/cli/_app/dev/docs/templates/overview.rst.jinja2 +1 -1
- snowflake/cli/_app/dev/docs/templates/usage.rst.jinja2 +2 -2
- snowflake/cli/_app/telemetry.py +69 -4
- snowflake/cli/_plugins/connection/commands.py +40 -2
- snowflake/cli/_plugins/git/commands.py +6 -3
- snowflake/cli/_plugins/git/manager.py +5 -0
- snowflake/cli/_plugins/nativeapp/artifacts.py +13 -3
- snowflake/cli/_plugins/nativeapp/codegen/artifact_processor.py +1 -1
- snowflake/cli/_plugins/nativeapp/codegen/compiler.py +7 -0
- snowflake/cli/_plugins/nativeapp/codegen/sandbox.py +10 -10
- snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py +2 -2
- snowflake/cli/_plugins/nativeapp/codegen/snowpark/extension_function_utils.py +1 -1
- snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +8 -8
- snowflake/cli/_plugins/nativeapp/commands.py +135 -186
- snowflake/cli/_plugins/nativeapp/entities/application.py +176 -24
- snowflake/cli/_plugins/nativeapp/entities/application_package.py +112 -136
- snowflake/cli/_plugins/nativeapp/exceptions.py +12 -0
- snowflake/cli/_plugins/nativeapp/manager.py +3 -26
- snowflake/cli/_plugins/nativeapp/v2_conversions/{v2_to_v1_decorator.py → compat.py} +131 -72
- snowflake/cli/_plugins/nativeapp/version/commands.py +30 -29
- snowflake/cli/_plugins/nativeapp/version/version_processor.py +1 -43
- snowflake/cli/_plugins/snowpark/common.py +60 -18
- snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +2 -2
- snowflake/cli/_plugins/spcs/image_repository/commands.py +4 -37
- snowflake/cli/_plugins/spcs/image_repository/manager.py +4 -1
- snowflake/cli/_plugins/spcs/services/commands.py +36 -4
- snowflake/cli/_plugins/spcs/services/manager.py +36 -4
- snowflake/cli/_plugins/stage/commands.py +8 -3
- snowflake/cli/_plugins/stage/diff.py +16 -16
- snowflake/cli/_plugins/stage/manager.py +164 -73
- snowflake/cli/_plugins/stage/md5.py +1 -1
- snowflake/cli/_plugins/workspace/commands.py +21 -1
- snowflake/cli/_plugins/workspace/context.py +38 -0
- snowflake/cli/_plugins/workspace/manager.py +23 -13
- snowflake/cli/api/cli_global_context.py +3 -3
- snowflake/cli/api/commands/flags.py +23 -7
- snowflake/cli/api/config.py +7 -4
- snowflake/cli/api/connections.py +12 -1
- snowflake/cli/api/entities/common.py +4 -2
- snowflake/cli/api/entities/utils.py +17 -37
- snowflake/cli/api/exceptions.py +32 -0
- snowflake/cli/api/identifiers.py +8 -0
- snowflake/cli/api/project/definition_conversion.py +139 -40
- snowflake/cli/api/project/schemas/entities/common.py +11 -0
- snowflake/cli/api/project/schemas/project_definition.py +30 -25
- snowflake/cli/api/sql_execution.py +5 -7
- snowflake/cli/api/stage_path.py +241 -0
- snowflake/cli/api/utils/definition_rendering.py +3 -5
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/METADATA +11 -11
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/RECORD +55 -55
- snowflake/cli/_plugins/nativeapp/teardown_processor.py +0 -70
- snowflake/cli/_plugins/workspace/action_context.py +0 -18
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/WHEEL +0 -0
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/licenses/LICENSE +0 -0

snowflake/cli/_plugins/stage/manager.py

@@ -19,6 +19,7 @@ import glob
 import logging
 import re
 import sys
+import time
 from contextlib import nullcontext
 from dataclasses import dataclass
 from os import path
@@ -39,6 +40,7 @@ from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.project.util import to_string_literal
 from snowflake.cli.api.secure_path import SecurePath
 from snowflake.cli.api.sql_execution import SqlExecutionMixin
+from snowflake.cli.api.stage_path import StagePath
 from snowflake.cli.api.utils.path_utils import path_resolver
 from snowflake.connector import DictCursor, ProgrammingError
 from snowflake.connector.cursor import SnowflakeCursor
@@ -90,12 +92,12 @@ class StagePathParts:
         raise NotImplementedError

     def get_full_stage_path(self, path: str):
-        if prefix := FQN.
+        if prefix := FQN.from_stage_path(self.stage).prefix:
             return prefix + "." + path
         return path

     def get_standard_stage_path(self) -> str:
-        path = self.path
+        path = self.get_full_stage_path(self.path)
         return f"@{path}{'/'if self.is_directory and not path.endswith('/') else ''}"

     def get_standard_stage_directory_path(self) -> str:
@@ -104,6 +106,9 @@ class StagePathParts:
             return path + "/"
         return path

+    def strip_stage_prefix(self, path: str):
+        raise NotImplementedError
+

 @dataclass
 class DefaultStagePathParts(StagePathParts):
@@ -141,6 +146,13 @@ class DefaultStagePathParts(StagePathParts):
         file_path_without_prefix = Path(file_path).parts[OMIT_FIRST]
         return f"{stage}/{'/'.join(file_path_without_prefix)}"

+    def strip_stage_prefix(self, file_path: str) -> str:
+        if file_path.startswith("@"):
+            file_path = file_path[OMIT_FIRST]
+        if file_path.startswith(self.stage_name):
+            return file_path[len(self.stage_name) :]
+        return file_path
+
     def add_stage_prefix(self, file_path: str) -> str:
         stage = self.stage.rstrip("/")
         return f"{stage}/{file_path.lstrip('/')}"
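
For context, a minimal standalone sketch of what the new strip_stage_prefix does to a default stage path, assuming OMIT_FIRST simply drops the leading "@" (the real method lives on DefaultStagePathParts and reads stage_name from the parsed path):

# Hypothetical free-standing version of the strip_stage_prefix logic above.
def strip_stage_prefix(file_path: str, stage_name: str) -> str:
    if file_path.startswith("@"):
        file_path = file_path[1:]  # assumes OMIT_FIRST drops the "@"
    if file_path.startswith(stage_name):
        return file_path[len(stage_name):]
    return file_path

print(strip_stage_prefix("@my_stage/scripts/run.sql", "my_stage"))  # -> /scripts/run.sql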
@@ -197,6 +209,10 @@ class StageManager(SqlExecutionMixin):
         super().__init__()
         self._python_exe_procedure = None

+    @staticmethod
+    def build_path(stage_path: str) -> StagePath:
+        return StagePath.from_stage_str(stage_path)
+
     @staticmethod
     def get_standard_stage_prefix(name: str | FQN) -> str:
         if isinstance(name, FQN):
@@ -234,9 +250,14 @@ class StageManager(SqlExecutionMixin):
             return uri
         return to_string_literal(uri)

-    def list_files(
-        stage_name =
-
+    def list_files(
+        self, stage_name: str | StagePath, pattern: str | None = None
+    ) -> DictCursor:
+        if not isinstance(stage_name, StagePath):
+            stage_path = self.build_path(stage_name).path_for_sql()
+        else:
+            stage_path = stage_name.path_for_sql()
+        query = f"ls {stage_path}"
         if pattern is not None:
             query += f" pattern = '{pattern}'"
         return self._execute_query(query, cursor_class=DictCursor)
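
The reworked list_files accepts either a raw string or a StagePath and always goes through path_for_sql() before building the LS query. A rough sketch of the query text it produces; the "@my_stage/scripts" form is an assumed example of path_for_sql() output, not taken from the diff:

# Hypothetical illustration of the LS query assembled by list_files.
def list_files_query(stage_path_for_sql: str, pattern: str | None = None) -> str:
    query = f"ls {stage_path_for_sql}"
    if pattern is not None:
        query += f" pattern = '{pattern}'"
    return query

print(list_files_query("@my_stage/scripts", pattern=r".*\.sql"))
# ls @my_stage/scripts pattern = '.*\.sql'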
@@ -251,27 +272,27 @@
     def get(
         self, stage_path: str, dest_path: Path, parallel: int = 4
     ) -> SnowflakeCursor:
-
+        spath = self.build_path(stage_path)
         self._assure_is_existing_directory(dest_path)
         dest_directory = f"{dest_path}/"
         return self._execute_query(
-            f"get {
+            f"get {spath.path_for_sql()} {self._to_uri(dest_directory)} parallel={parallel}"
         )

     def get_recursive(
         self, stage_path: str, dest_path: Path, parallel: int = 4
     ) -> List[SnowflakeCursor]:
-
+        stage_root = self.build_path(stage_path)

         results = []
-        for file_path in self.iter_stage(
-
-
-
-            self._assure_is_existing_directory(
+        for file_path in self.iter_stage(stage_root):
+            local_dir = file_path.get_local_target_path(
+                target_dir=dest_path, stage_root=stage_root
+            )
+            self._assure_is_existing_directory(local_dir)

             result = self._execute_query(
-                f"get {
+                f"get {file_path.path_for_sql()} {self._to_uri(f'{local_dir}/')} parallel={parallel}"
             )
             results.append(result)

@@ -293,28 +314,31 @@
         and switch back to the original role for the next commands to run.
         """
         with self.use_role(role) if role else nullcontext():
-
+            spath = self.build_path(stage_path)
             local_resolved_path = path_resolver(str(local_path))
             log.info("Uploading %s to %s", local_resolved_path, stage_path)
             cursor = self._execute_query(
-                f"put {self._to_uri(local_resolved_path)} {
+                f"put {self._to_uri(local_resolved_path)} {spath.path_for_sql()} "
                 f"auto_compress={str(auto_compress).lower()} parallel={parallel} overwrite={overwrite}"
             )
         return cursor

     def copy_files(self, source_path: str, destination_path: str) -> SnowflakeCursor:
-
-
+        source_stage_path = self.build_path(source_path)
+        # We copy only into stage
+        destination_stage_path = StagePath.from_stage_str(destination_path)

-        if
+        if destination_stage_path.is_user_stage():
             raise ClickException(
                 "Destination path cannot be a user stage. Please provide a named stage."
             )

-
-
-
-
+        log.info(
+            "Copying files from %s to %s", source_stage_path, destination_stage_path
+        )
+        # Destination needs to end with /
+        dest = destination_stage_path.absolute_path().rstrip("/") + "/"
+        query = f"copy files into {dest} from {source_stage_path}"
         return self._execute_query(query)

     def remove(
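
copy_files now refuses user stages as a destination and normalizes the target to end with a slash before issuing COPY FILES. A sketch of the resulting statement, using made-up stage names and assuming absolute_path() returns the "@db.schema.stage/dir" form:

# Hypothetical example of the COPY FILES statement built by copy_files.
source = "@db.schema.src_stage/scripts"
destination = "@db.schema.dst_stage/scripts"
dest = destination.rstrip("/") + "/"  # destination needs to end with /
query = f"copy files into {dest} from {source}"
print(query)
# copy files into @db.schema.dst_stage/scripts/ from @db.schema.src_stage/scripts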
@@ -327,29 +351,48 @@
         and switch back to the original role for the next commands to run.
         """
         with self.use_role(role) if role else nullcontext():
-
-
-            quoted_stage_name = self.quote_stage_name(f"{stage_name}{path}")
-            return self._execute_query(f"remove {quoted_stage_name}")
+            stage_path = self.build_path(stage_name) / path
+            return self._execute_query(f"remove {stage_path.path_for_sql()}")

-    def create(
-
+    def create(
+        self, fqn: FQN, comment: Optional[str] = None, temporary: bool = False
+    ) -> SnowflakeCursor:
+        temporary_str = "temporary " if temporary else ""
+        query = f"create {temporary_str}stage if not exists {fqn.sql_identifier}"
         if comment:
             query += f" comment='{comment}'"
         return self._execute_query(query)

-    def iter_stage(self, stage_path:
-        for file in self.list_files(stage_path).fetchall():
-
+    def iter_stage(self, stage_path: StagePath):
+        for file in self.list_files(stage_path.absolute_path()).fetchall():
+            if stage_path.is_user_stage():
+                path = StagePath.get_user_stage() / file["name"]
+            else:
+                path = self.build_path(file["name"])
+            yield path

     def execute(
         self,
-
+        stage_path_str: str,
         on_error: OnErrorType,
         variables: Optional[List[str]] = None,
+        requires_temporary_stage: bool = False,
     ):
-
-
+        if requires_temporary_stage:
+            (
+                stage_path_parts,
+                original_path_parts,
+            ) = self._create_temporary_copy_of_stage(stage_path_str)
+            stage_path = StagePath.from_stage_str(
+                stage_path_parts.get_standard_stage_path()
+            )
+        else:
+            stage_path_parts = self._stage_path_part_factory(stage_path_str)
+            stage_path = self.build_path(stage_path_str)
+
+        all_files_list = self._get_files_list_from_stage(stage_path.root_path())
+        if not all_files_list:
+            raise ClickException(f"No files found on stage '{stage_path}'")

         all_files_with_stage_name_prefix = [
             stage_path_parts.get_directory(file) for file in all_files_list
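
create() gains a temporary flag that only changes the generated DDL. A standalone sketch of the statement it builds, with a literal identifier standing in for fqn.sql_identifier:

# Hypothetical reconstruction of the CREATE STAGE statement produced by create().
def create_stage_sql(identifier: str, comment: str | None = None, temporary: bool = False) -> str:
    temporary_str = "temporary " if temporary else ""
    query = f"create {temporary_str}stage if not exists {identifier}"
    if comment:
        query += f" comment='{comment}'"
    return query

print(create_stage_sql("db.schema.my_stage", temporary=True))
# create temporary stage if not exists db.schema.my_stage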
@@ -370,42 +413,75 @@

         parsed_variables = parse_key_value_variables(variables)
         sql_variables = self._parse_execute_variables(parsed_variables)
-        python_variables =
+        python_variables = self._parse_python_variables(parsed_variables)
         results = []

         if any(file.endswith(".py") for file in sorted_file_path_list):
             self._python_exe_procedure = self._bootstrap_snowpark_execution_environment(
-
+                stage_path
             )

         for file_path in sorted_file_path_list:
             file_stage_path = stage_path_parts.add_stage_prefix(file_path)
+
+            # For better reporting push down the information about original
+            # path if execution happens from temporary stage
+            if requires_temporary_stage:
+                original_path = original_path_parts.add_stage_prefix(file_path)
+            else:
+                original_path = file_stage_path
+
             if file_path.endswith(".py"):
                 result = self._execute_python(
                     file_stage_path=file_stage_path,
                     on_error=on_error,
                     variables=python_variables,
+                    original_file=original_path,
                 )
             else:
                 result = self._call_execute_immediate(
                     file_stage_path=file_stage_path,
                     variables=sql_variables,
                     on_error=on_error,
+                    original_file=original_path,
                 )
             results.append(result)

         return results

-    def
-        self,
-    ) ->
-
-            stage_path_parts.stage, pattern=pattern
-        ).fetchall()
+    def _create_temporary_copy_of_stage(
+        self, stage_path: str
+    ) -> tuple[StagePathParts, StagePathParts]:
+        sm = StageManager()

-
-
+        # Rewrite stage paths to temporary stage paths. Git paths become stage paths
+        original_path_parts = self._stage_path_part_factory(stage_path)  # noqa: SLF001

+        tmp_stage_name = f"snowflake_cli_tmp_stage_{int(time.time())}"
+        tmp_stage_fqn = FQN.from_stage(tmp_stage_name).using_connection(conn=self._conn)
+        tmp_stage = tmp_stage_fqn.identifier
+        stage_path_parts = sm._stage_path_part_factory(  # noqa: SLF001
+            tmp_stage + "/" + original_path_parts.directory
+        )
+
+        # Create temporary stage, it will be dropped with end of session
+        sm.create(tmp_stage_fqn, temporary=True)
+
+        # Copy the content
+        self.copy_files(
+            source_path=original_path_parts.get_full_stage_path(
+                original_path_parts.stage_name
+            ),
+            destination_path=stage_path_parts.get_full_stage_path(
+                stage_path_parts.stage_name
+            ),
+        )
+        return stage_path_parts, original_path_parts
+
+    def _get_files_list_from_stage(
+        self, stage_path: StagePath, pattern: str | None = None
+    ) -> List[str]:
+        files_list_result = self.list_files(stage_path, pattern=pattern).fetchall()
         return [f["name"] for f in files_list_result]

     def _filter_files_list(
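
_create_temporary_copy_of_stage stitches the pieces above together: create a session-scoped temporary stage, copy the original (possibly git) stage content into it, and return both path descriptions so execution can still report against the original paths. A rough outline of the statements this implies, with placeholder stage names rather than the CLI's FQN handling:

# Hypothetical outline of the temporary-stage flow; names are placeholders.
import time

tmp_stage = f"snowflake_cli_tmp_stage_{int(time.time())}"
statements = [
    f"create temporary stage if not exists {tmp_stage}",
    f"copy files into @{tmp_stage}/ from @my_repo/branches/main/",  # original (e.g. git) stage
    f"ls @{tmp_stage}",  # files are then executed from the temporary copy
]
print("\n".join(statements))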
@@ -444,6 +520,17 @@
         query_parameters = [f"{v.key}=>{v.value}" for v in variables]
         return f" using ({', '.join(query_parameters)})"

+    @staticmethod
+    def _parse_python_variables(variables: List[Variable]) -> Dict:
+        def _unwrap(s: str):
+            if s.startswith("'") and s.endswith("'"):
+                return s[1:-1]
+            if s.startswith('"') and s.endswith('"'):
+                return s[1:-1]
+            return s
+
+        return {str(v.key): _unwrap(v.value) for v in variables}
+
     @staticmethod
     def _success_result(file: str):
         cli_console.warning(f"SUCCESS - {file}")
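
_parse_python_variables strips one layer of matching quotes so values passed as SQL-style literals arrive in Python as plain strings. A self-contained sketch with a stand-in Variable type (the CLI's own Variable class is not imported here):

# Hypothetical stand-in for the CLI's Variable; only key/value are needed.
from collections import namedtuple

Variable = namedtuple("Variable", ["key", "value"])

def unwrap(s: str) -> str:
    if s.startswith("'") and s.endswith("'"):
        return s[1:-1]
    if s.startswith('"') and s.endswith('"'):
        return s[1:-1]
    return s

variables = [Variable("name", "'John'"), Variable("count", "3")]
print({str(v.key): unwrap(v.value) for v in variables})  # {'name': 'John', 'count': '3'}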
@@ -464,16 +551,17 @@
         file_stage_path: str,
         variables: Optional[str],
         on_error: OnErrorType,
+        original_file: str,
     ) -> Dict:
         try:
             query = f"execute immediate from {self.quote_stage_name(file_stage_path)}"
             if variables:
                 query += variables
             self._execute_query(query)
-            return StageManager._success_result(file=
+            return StageManager._success_result(file=original_file)
         except ProgrammingError as e:
             StageManager._handle_execution_exception(on_error=on_error, exception=e)
-            return StageManager._error_result(file=
+            return StageManager._error_result(file=original_file, msg=e.msg)

     @staticmethod
     def _stage_path_part_factory(stage_path: str) -> StagePathParts:
@@ -482,32 +570,34 @@
             return UserStagePathParts(stage_path)
         return DefaultStagePathParts(stage_path)

-    def _check_for_requirements_file(
-        self, stage_path_parts: StagePathParts
-    ) -> List[str]:
+    def _check_for_requirements_file(self, stage_path: StagePath) -> List[str]:
         """Looks for requirements.txt file on stage."""
+        current_dir = stage_path.parent if stage_path.is_file() else stage_path
         req_files_on_stage = self._get_files_list_from_stage(
-
+            current_dir, pattern=r".*requirements\.txt$"
         )
         if not req_files_on_stage:
             return []

         # Construct all possible path for requirements file for this context
-        # We don't use os.path or pathlib to preserve compatibility on Windows
         req_file_name = "requirements.txt"
-        path_parts = stage_path_parts.path.split("/")
         possible_req_files = []
+        while not current_dir.is_root():
+            current_file = current_dir / req_file_name
+            possible_req_files.append(current_file)
+            current_dir = current_dir.parent

-
-
-            possible_req_files.append(str(current_file))
-            path_parts = path_parts[:-1]
+        current_file = current_dir / req_file_name
+        possible_req_files.append(current_file)

         # Now for every possible path check if the file exists on stage,
         # if yes break, we use the first possible file
-        requirements_file = None
+        requirements_file: StagePath | None = None
         for req_file in possible_req_files:
-            if
+            if (
+                req_file.absolute_path(no_fqn=True, at_prefix=False)
+                in req_files_on_stage
+            ):
                 requirements_file = req_file
                 break

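
The requirements.txt lookup now walks the StagePath hierarchy instead of splitting strings by "/". The same walk can be sketched with PurePosixPath standing in for StagePath:

# Hypothetical sketch of the candidate-path walk, using PurePosixPath instead of StagePath.
from pathlib import PurePosixPath

def candidate_requirement_files(current_dir: PurePosixPath) -> list[PurePosixPath]:
    candidates = []
    while current_dir != current_dir.parent:  # analogous to "while not current_dir.is_root()"
        candidates.append(current_dir / "requirements.txt")
        current_dir = current_dir.parent
    candidates.append(current_dir / "requirements.txt")  # finally, the root itself
    return candidates

print(candidate_requirement_files(PurePosixPath("/scripts/jobs")))
# [/scripts/jobs/requirements.txt, /scripts/requirements.txt, /requirements.txt]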
@@ -516,37 +606,34 @@
             return []

         # req_file at this moment is the first found requirements file
+        requirements_path = requirements_file.with_stage(stage_path.stage)
         with SecurePath.temporary_directory() as tmp_dir:
-            self.get(
-                stage_path_parts.get_full_stage_path(requirements_file), tmp_dir.path
-            )
+            self.get(str(requirements_path), tmp_dir.path)
             requirements = parse_requirements(
                 requirements_file=tmp_dir / "requirements.txt"
             )

         return [req.package_name for req in requirements]

-    def _bootstrap_snowpark_execution_environment(
-        self, stage_path_parts: StagePathParts
-    ):
+    def _bootstrap_snowpark_execution_environment(self, stage_path: StagePath):
         """Prepares Snowpark session for executing Python code remotely."""
         if sys.version_info >= PYTHON_3_12:
             raise ClickException(
-                f"Executing
+                f"Executing Python files is not supported in Python >= 3.12. Current version: {sys.version}"
             )

         from snowflake.snowpark.functions import sproc

         self.snowpark_session.add_packages("snowflake-snowpark-python")
         self.snowpark_session.add_packages("snowflake.core")
-        requirements = self._check_for_requirements_file(
+        requirements = self._check_for_requirements_file(stage_path)
         self.snowpark_session.add_packages(*requirements)

-        @sproc(is_permanent=False)
+        @sproc(is_permanent=False, session=self.snowpark_session)
         def _python_execution_procedure(
             _: Session, file_path: str, variables: Dict | None = None
         ) -> None:
-            """Snowpark session-scoped stored procedure to execute content of provided
+            """Snowpark session-scoped stored procedure to execute content of provided Python file."""
             import json

             from snowflake.snowpark.files import SnowflakeFile
@@ -566,7 +653,11 @@
         return _python_execution_procedure

     def _execute_python(
-        self,
+        self,
+        file_stage_path: str,
+        on_error: OnErrorType,
+        variables: Dict,
+        original_file: str,
     ):
         """
         Executes Python file from stage using a Snowpark temporary procedure.
@@ -575,8 +666,8 @@
         from snowflake.snowpark.exceptions import SnowparkSQLException

         try:
-            self._python_exe_procedure(self.get_standard_stage_prefix(file_stage_path), variables)  # type: ignore
-            return StageManager._success_result(file=
+            self._python_exe_procedure(self.get_standard_stage_prefix(file_stage_path), variables, session=self.snowpark_session)  # type: ignore
+            return StageManager._success_result(file=original_file)
         except SnowparkSQLException as e:
             StageManager._handle_execution_exception(on_error=on_error, exception=e)
-            return StageManager._error_result(file=
+            return StageManager._error_result(file=original_file, msg=e.message)

snowflake/cli/_plugins/stage/md5.py

@@ -110,7 +110,7 @@ def file_matches_md5sum(local_file: Path, remote_md5: str | None) -> bool:
     to a file that has a given remote md5sum.

     Handles the multi-part md5sums generated by e.g. AWS S3, using values
-    from the
+    from the Python connector to make educated guesses on chunk size.

     Assumes that upload time would dominate local hashing time.
     """

snowflake/cli/_plugins/workspace/commands.py

@@ -15,11 +15,13 @@
 from __future__ import annotations

 import logging
+from io import StringIO
 from pathlib import Path
 from textwrap import dedent
 from typing import List, Optional

 import typer
+import yaml
 from click import MissingParameter
 from snowflake.cli._plugins.nativeapp.artifacts import BundleMap
 from snowflake.cli._plugins.nativeapp.common_flags import (
@@ -43,6 +45,24 @@ ws = SnowTyperFactory(
 log = logging.getLogger(__name__)


+@ws.command(requires_connection=False, hidden=True)
+@with_project_definition()
+def dump(**options):
+    """
+    Dumps the project definition.
+    """
+    cli_context = get_cli_context()
+    pd = cli_context.project_definition
+    io = StringIO()
+    yaml.safe_dump(
+        pd.model_dump(mode="json", by_alias=True),
+        io,
+        sort_keys=False,
+        width=float("inf"),  # Don't break lines
+    )
+    return MessageResult(io.getvalue())
+
+
 @ws.command(requires_connection=True, hidden=True)
 @with_project_definition()
 def bundle(
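
The new hidden dump command round-trips the resolved project definition back to YAML. The serialization step can be reproduced in isolation with PyYAML; the dict below is a stand-in for pd.model_dump():

# Hypothetical stand-alone version of the YAML serialization done by `dump`.
from io import StringIO

import yaml

model = {"definition_version": "2", "entities": {"pkg": {"type": "application package"}}}
buf = StringIO()
yaml.safe_dump(model, buf, sort_keys=False, width=float("inf"))  # don't break long lines
print(buf.getvalue())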
@@ -299,7 +319,7 @@ def version_drop(
     )
     ws.perform_action(
         entity_id,
-        EntityActions.
+        EntityActions.VERSION_DROP,
         version=version,
         interactive=interactive,
         force=force,

snowflake/cli/_plugins/workspace/context.py (new file)

@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from functools import cached_property
+from pathlib import Path
+from typing import Callable
+
+from snowflake.cli.api.console.abc import AbstractConsole
+
+
+@dataclass
+class WorkspaceContext:
+    """
+    An object that is passed to each entity when instantiated by WorkspaceManager
+    to allow access to the CLI context without requiring the entities to use
+    get_cli_context().
+    """
+
+    console: AbstractConsole
+    project_root: Path
+    get_default_role: Callable[[], str]
+    get_default_warehouse: Callable[[], str | None]
+
+    @cached_property
+    def default_role(self) -> str:
+        return self.get_default_role()
+
+    @cached_property
+    def default_warehouse(self) -> str | None:
+        return self.get_default_warehouse()
+
+
+@dataclass
+class ActionContext:
+    """
+    An object that is passed to each action when called by WorkspaceManager
+    to provide access to metadata about the entity and project being acted upon.
+    """
+
+    get_entity: Callable
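
WorkspaceContext stores callables and exposes them through cached_property, so the default role and warehouse are resolved lazily and at most once per context. A toy demonstration of that behaviour (Ctx is a stand-in, not the CLI class):

# Hypothetical toy showing the lazy, call-once behaviour of cached_property.
from dataclasses import dataclass
from functools import cached_property
from typing import Callable

@dataclass
class Ctx:
    get_default_role: Callable[[], str]

    @cached_property
    def default_role(self) -> str:
        return self.get_default_role()

calls = []
ctx = Ctx(get_default_role=lambda: calls.append("query") or "ACCOUNTADMIN")
print(ctx.default_role, ctx.default_role)  # ACCOUNTADMIN ACCOUNTADMIN
print(calls)  # ['query'] - the callback ran only once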

snowflake/cli/_plugins/workspace/manager.py

@@ -1,7 +1,7 @@
 from pathlib import Path
 from typing import Dict

-from snowflake.cli._plugins.workspace.
+from snowflake.cli._plugins.workspace.context import ActionContext, WorkspaceContext
 from snowflake.cli.api.cli_global_context import get_cli_context
 from snowflake.cli.api.console import cli_console as cc
 from snowflake.cli.api.entities.common import EntityActions, get_sql_executor
@@ -31,13 +31,6 @@ class WorkspaceManager:
         self._entities_cache: Dict[str, Entity] = {}
         self._project_definition: DefinitionV20 = project_definition
         self._project_root = project_root
-        self._default_role = default_role()
-        if self._default_role is None:
-            self._default_role = get_sql_executor().current_role()
-        self.default_warehouse = None
-        cli_context = get_cli_context()
-        if cli_context.connection.warehouse:
-            self.default_warehouse = to_identifier(cli_context.connection.warehouse)

     def get_entity(self, entity_id: str):
         """
@@ -50,7 +43,13 @@
             raise ValueError(f"No such entity ID: {entity_id}")
         entity_model_cls = entity_model.__class__
         entity_cls = v2_entity_model_to_entity_map[entity_model_cls]
-
+        workspace_ctx = WorkspaceContext(
+            console=cc,
+            project_root=self.project_root,
+            get_default_role=_get_default_role,
+            get_default_warehouse=_get_default_warehouse,
+        )
+        self._entities_cache[entity_id] = entity_cls(entity_model, workspace_ctx)
         return self._entities_cache[entity_id]

     def perform_action(self, entity_id: str, action: EntityActions, *args, **kwargs):
@@ -60,15 +59,26 @@
         entity = self.get_entity(entity_id)
         if entity.supports(action):
             action_ctx = ActionContext(
-                console=cc,
-                project_root=self.project_root(),
-                default_role=self._default_role,
-                default_warehouse=self.default_warehouse,
                 get_entity=self.get_entity,
             )
             return entity.perform(action, action_ctx, *args, **kwargs)
         else:
             raise ValueError(f'This entity type does not support "{action.value}"')

+    @property
     def project_root(self) -> Path:
         return self._project_root
+
+
+def _get_default_role() -> str:
+    role = default_role()
+    if role is None:
+        role = get_sql_executor().current_role()
+    return role
+
+
+def _get_default_warehouse() -> str | None:
+    warehouse = get_cli_context().connection.warehouse
+    if warehouse:
+        warehouse = to_identifier(warehouse)
+    return warehouse

snowflake/cli/api/cli_global_context.py

@@ -54,9 +54,9 @@ class _CliGlobalContextManager:
     project_env_overrides_args: dict[str, str] = field(default_factory=dict)

     # FIXME: this property only exists to help implement
-    # nativeapp_definition_v2_to_v1
-    # this calculation is provided to commands
-    # this logic (then make project_definition a non-cloned @property)
+    # nativeapp_definition_v2_to_v1 and single_app_and_package.
+    # Consider changing the way this calculation is provided to commands
+    # in order to remove this logic (then make project_definition a non-cloned @property)
     override_project_definition: ProjectDefinition | None = None

     _definition_manager: DefinitionManager | None = None