snowflake-cli 3.5.0__py3-none-any.whl → 3.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/cli/__about__.py +13 -1
- snowflake/cli/_app/commands_registration/builtin_plugins.py +4 -0
- snowflake/cli/_app/loggers.py +2 -2
- snowflake/cli/_app/snow_connector.py +7 -6
- snowflake/cli/_app/telemetry.py +3 -15
- snowflake/cli/_app/version_check.py +4 -4
- snowflake/cli/_plugins/auth/__init__.py +11 -0
- snowflake/cli/_plugins/auth/keypair/__init__.py +0 -0
- snowflake/cli/_plugins/auth/keypair/commands.py +151 -0
- snowflake/cli/_plugins/auth/keypair/manager.py +331 -0
- snowflake/cli/_plugins/auth/keypair/plugin_spec.py +30 -0
- snowflake/cli/_plugins/connection/commands.py +78 -1
- snowflake/cli/_plugins/helpers/commands.py +25 -1
- snowflake/cli/_plugins/helpers/snowsl_vars_reader.py +133 -0
- snowflake/cli/_plugins/init/commands.py +9 -6
- snowflake/cli/_plugins/logs/__init__.py +0 -0
- snowflake/cli/_plugins/logs/commands.py +105 -0
- snowflake/cli/_plugins/logs/manager.py +107 -0
- snowflake/cli/_plugins/logs/plugin_spec.py +16 -0
- snowflake/cli/_plugins/logs/utils.py +60 -0
- snowflake/cli/_plugins/nativeapp/entities/application.py +4 -1
- snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +33 -6
- snowflake/cli/_plugins/notebook/commands.py +3 -0
- snowflake/cli/_plugins/notebook/notebook_entity.py +16 -27
- snowflake/cli/_plugins/object/command_aliases.py +3 -1
- snowflake/cli/_plugins/object/manager.py +4 -2
- snowflake/cli/_plugins/project/commands.py +89 -48
- snowflake/cli/_plugins/project/manager.py +57 -23
- snowflake/cli/_plugins/project/project_entity_model.py +22 -3
- snowflake/cli/_plugins/snowpark/commands.py +15 -2
- snowflake/cli/_plugins/spcs/compute_pool/commands.py +17 -5
- snowflake/cli/_plugins/sql/manager.py +43 -52
- snowflake/cli/_plugins/sql/source_reader.py +230 -0
- snowflake/cli/_plugins/stage/manager.py +25 -12
- snowflake/cli/_plugins/streamlit/commands.py +3 -0
- snowflake/cli/_plugins/streamlit/manager.py +19 -15
- snowflake/cli/api/artifacts/upload.py +30 -34
- snowflake/cli/api/artifacts/utils.py +8 -6
- snowflake/cli/api/cli_global_context.py +7 -2
- snowflake/cli/api/commands/decorators.py +11 -2
- snowflake/cli/api/commands/flags.py +35 -4
- snowflake/cli/api/commands/snow_typer.py +20 -2
- snowflake/cli/api/config.py +5 -3
- snowflake/cli/api/constants.py +2 -0
- snowflake/cli/api/entities/utils.py +29 -16
- snowflake/cli/api/errno.py +1 -0
- snowflake/cli/api/exceptions.py +75 -27
- snowflake/cli/api/feature_flags.py +1 -0
- snowflake/cli/api/identifiers.py +2 -0
- snowflake/cli/api/plugins/plugin_config.py +2 -2
- snowflake/cli/api/project/schemas/template.py +3 -3
- snowflake/cli/api/rendering/project_templates.py +3 -3
- snowflake/cli/api/rendering/sql_templates.py +2 -2
- snowflake/cli/api/rest_api.py +2 -3
- snowflake/cli/{_app → api}/secret.py +4 -1
- snowflake/cli/api/secure_path.py +16 -4
- snowflake/cli/api/sql_execution.py +8 -4
- snowflake/cli/api/utils/definition_rendering.py +14 -8
- snowflake/cli/api/utils/templating_functions.py +4 -4
- {snowflake_cli-3.5.0.dist-info → snowflake_cli-3.7.0.dist-info}/METADATA +11 -11
- {snowflake_cli-3.5.0.dist-info → snowflake_cli-3.7.0.dist-info}/RECORD +64 -52
- {snowflake_cli-3.5.0.dist-info → snowflake_cli-3.7.0.dist-info}/WHEEL +0 -0
- {snowflake_cli-3.5.0.dist-info → snowflake_cli-3.7.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli-3.5.0.dist-info → snowflake_cli-3.7.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import enum
|
|
2
|
+
import io
|
|
3
|
+
import re
|
|
4
|
+
import urllib.error
|
|
5
|
+
from typing import Any, Callable, Generator, Literal, Sequence
|
|
6
|
+
from urllib.request import urlopen
|
|
7
|
+
|
|
8
|
+
from jinja2 import UndefinedError
|
|
9
|
+
from snowflake.cli.api.secure_path import UNLIMITED, SecurePath
|
|
10
|
+
from snowflake.connector.util_text import split_statements
|
|
11
|
+
|
|
12
|
+
# Matches a statement-leading "!source <path>" / "!load <path>" command:
# optional surrounding quotes around the path, optional trailing semicolon.
SOURCE_PATTERN = re.compile(
    r"^!(source|load)\s+[\"']?(.*?)[\"']?\s*(?:;|$)",
    flags=re.IGNORECASE,
)

# Splits "scheme://rest" into (scheme, //rest); used to detect http(s) URLs.
URL_PATTERN = re.compile(r"^(\w+?):\/(\/.*)", flags=re.IGNORECASE)

# Shape of the generator returned by the connector's split_statements:
# yields (statement_text, flag) pairs; the second element is unused here.
# NOTE: name kept as-is ("Splited") — it is referenced elsewhere in this module.
SplitedStatements = Generator[
    tuple[str, bool | None] | tuple[str, Literal[False]],
    Any,
    None,
]

# A single transformation applied to a raw SQL statement (e.g. template rendering).
SqlTransformFunc = Callable[[str], str]
# An ordered pipeline of statement transformations.
OperatorFunctions = Sequence[SqlTransformFunc]
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class SourceType(enum.Enum):
|
|
30
|
+
FILE = "file"
|
|
31
|
+
QUERY = "query"
|
|
32
|
+
UNKNOWN = "unknown"
|
|
33
|
+
URL = "url"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ParsedSource:
|
|
37
|
+
"""Container for parsed statement.
|
|
38
|
+
|
|
39
|
+
Holds:
|
|
40
|
+
- source: statement on command content
|
|
41
|
+
- source_type: type of source
|
|
42
|
+
- source_path: in case of URL or FILE path of the origin
|
|
43
|
+
- error: optional message
|
|
44
|
+
"""
|
|
45
|
+
|
|
46
|
+
__slots__ = ("source", "source_type", "source_path", "error")
|
|
47
|
+
__match_args__ = ("source_type", "error")
|
|
48
|
+
|
|
49
|
+
source: io.StringIO
|
|
50
|
+
source_type: SourceType | None
|
|
51
|
+
source_path: str | None
|
|
52
|
+
error: str | None
|
|
53
|
+
|
|
54
|
+
def __init__(
|
|
55
|
+
self,
|
|
56
|
+
source: str,
|
|
57
|
+
source_type: SourceType,
|
|
58
|
+
source_path: str | None,
|
|
59
|
+
error: str | None = None,
|
|
60
|
+
):
|
|
61
|
+
self.source = io.StringIO(source)
|
|
62
|
+
self.source_type = source_type
|
|
63
|
+
self.source_path = source_path
|
|
64
|
+
self.error = error
|
|
65
|
+
|
|
66
|
+
def __bool__(self):
|
|
67
|
+
return not self.error
|
|
68
|
+
|
|
69
|
+
def __eq__(self, other):
|
|
70
|
+
result = (
|
|
71
|
+
self.source_type == other.source_type,
|
|
72
|
+
self.source_path == other.source_path,
|
|
73
|
+
self.error == other.error,
|
|
74
|
+
self.source.read() == other.source.read(),
|
|
75
|
+
)
|
|
76
|
+
self.source.seek(0)
|
|
77
|
+
other.source.seek(0)
|
|
78
|
+
return all(result)
|
|
79
|
+
|
|
80
|
+
def __repr__(self):
|
|
81
|
+
return f"{self.__class__.__name__}(source_type={self.source_type}, source_path={self.source_path}, error={self.error})"
|
|
82
|
+
|
|
83
|
+
@classmethod
|
|
84
|
+
def from_url(cls, path_part: str, raw_source: str) -> "ParsedSource":
|
|
85
|
+
"""Constructor for loading from URL."""
|
|
86
|
+
try:
|
|
87
|
+
payload = urlopen(path_part, timeout=10.0).read().decode()
|
|
88
|
+
return cls(payload, SourceType.URL, path_part)
|
|
89
|
+
|
|
90
|
+
except urllib.error.HTTPError as err:
|
|
91
|
+
error = f"Could not fetch {path_part}: {err}"
|
|
92
|
+
return cls(path_part, SourceType.URL, raw_source, error)
|
|
93
|
+
|
|
94
|
+
@classmethod
|
|
95
|
+
def from_file(cls, path_part: str, raw_source: str) -> "ParsedSource":
|
|
96
|
+
"""Constructor for loading from file."""
|
|
97
|
+
path = SecurePath(path_part)
|
|
98
|
+
|
|
99
|
+
if path.is_file():
|
|
100
|
+
payload = path.read_text(file_size_limit_mb=UNLIMITED)
|
|
101
|
+
return cls(payload, SourceType.FILE, path.as_posix())
|
|
102
|
+
|
|
103
|
+
error_msg = f"Could not read: {path_part}"
|
|
104
|
+
return cls(path_part, SourceType.FILE, raw_source, error_msg)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
# Generator of ParsedSource items produced by the (possibly recursive) readers below.
RecursiveStatementReader = Generator[ParsedSource, Any, Any]
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def parse_source(source: str, operators: OperatorFunctions) -> ParsedSource:
    """Evaluates templating and source commands.

    Returns parsed source according to origin."""
    rendered = source
    try:
        for transform in operators:
            rendered = transform(rendered)
    except UndefinedError as e:
        return ParsedSource(
            source, SourceType.UNKNOWN, source, f"SQL template rendering error: {e}"
        )

    parts = [chunk.strip() for chunk in SOURCE_PATTERN.split(rendered, maxsplit=1)]

    # No !source/!load command detected: a plain SQL statement.
    if len(parts) == 1:
        return ParsedSource(rendered, SourceType.QUERY, None)

    _, command, source_path, *_ = parts
    url_parts = URL_PATTERN.split(source_path.lower())

    if command.lower() in ("source", "load"):
        # URL_PATTERN.split on a match yields ["", scheme, rest, ...].
        if len(url_parts) >= 2 and url_parts[0] == "" and url_parts[1] in ("http", "https"):
            # Remote content referenced by an http(s) URL.
            return ParsedSource.from_url(source_path, rendered)
        if len(url_parts) == 1:
            # A bare path: treat it as a local file.
            return ParsedSource.from_file(source_path, rendered)

    error_msg = f"Unknown source: {source_path}"
    return ParsedSource(source_path, SourceType.UNKNOWN, source, error_msg)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def recursive_source_reader(
    source: SplitedStatements,
    seen_files: list,
    operators: OperatorFunctions,
    remove_comments: bool,
) -> RecursiveStatementReader:
    """Based on detected source command reads content of the source and tracks for recursion cycles."""
    for stmt, _ in source:
        if not stmt:
            continue
        parsed_source = parse_source(stmt, operators)

        match parsed_source:
            # Successfully resolved file/URL source (error is None): descend
            # into its statements recursively.
            case ParsedSource(SourceType.FILE | SourceType.URL, None):
                if parsed_source.source_path in seen_files:
                    # Cycle: this file/URL is already on the inclusion stack.
                    error = f"Recursion detected: {' -> '.join(seen_files)}"
                    parsed_source.error = error
                    yield parsed_source
                    continue

                # Push onto the inclusion stack before recursing ...
                seen_files.append(parsed_source.source_path)

                yield from recursive_source_reader(
                    split_statements(parsed_source.source, remove_comments),
                    seen_files,
                    operators,
                    remove_comments,
                )

                # ... and pop once this file's statements are exhausted.
                seen_files.pop()

            # URL that failed to load: surface the errored item to the caller.
            case ParsedSource(SourceType.URL, error) if error:
                yield parsed_source

            # Plain statement (or other errored source): pass through unchanged.
            case _:
                yield parsed_source
    return
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def files_reader(
    paths: Sequence[SecurePath],
    operators: OperatorFunctions,
    remove_comments: bool = False,
) -> RecursiveStatementReader:
    """Entry point for reading statements from files.

    Returns a generator with statements."""
    for file_path in paths:
        # Seed the recursion stack with this file so self-inclusion is caught.
        with file_path.open(read_file_limit_mb=UNLIMITED) as file_handle:
            yield from recursive_source_reader(
                split_statements(io.StringIO(file_handle.read()), remove_comments),
                [file_path.as_posix()],
                operators,
                remove_comments,
            )
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def query_reader(
    source: str,
    operators: OperatorFunctions,
    remove_comments: bool = False,
) -> RecursiveStatementReader:
    """Entry point for reading statements from query.

    Returns a generator with statements."""
    # An ad-hoc query starts with an empty inclusion stack.
    yield from recursive_source_reader(
        split_statements(io.StringIO(source), remove_comments),
        [],
        operators,
        remove_comments,
    )
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def compile_statements(source: RecursiveStatementReader):
    """Tracks statements evaluation and collects errors.

    Returns a tuple of (error messages, query count, compiled statement texts).
    """
    collected_errors: list = []
    compiled_statements: list = []
    query_count = 0

    for parsed in source:
        is_query = parsed.source_type == SourceType.QUERY
        if is_query:
            query_count += 1
        if parsed.error:
            # Errors are collected for every source type, not only queries.
            collected_errors.append(parsed.error)
        elif is_query:
            compiled_statements.append(parsed.source.read())

    return collected_errors, query_count, compiled_statements
|
|
@@ -65,7 +65,7 @@ EXECUTE_SUPPORTED_FILES_FORMATS = (
|
|
|
65
65
|
|
|
66
66
|
# Replace magic numbers with constants
|
|
67
67
|
OMIT_FIRST = slice(1, None)
|
|
68
|
-
STAGE_PATH_REGEX = rf"(?P<prefix
|
|
68
|
+
STAGE_PATH_REGEX = rf"(?P<prefix>(@|{re.escape('snow://')}))?(?:(?P<first_qualifier>{VALID_IDENTIFIER_REGEX})\.)?(?:(?P<second_qualifier>{VALID_IDENTIFIER_REGEX})\.)?(?P<name>{VALID_IDENTIFIER_REGEX})/?(?P<directory>([^/]*/?)*)?"
|
|
69
69
|
|
|
70
70
|
|
|
71
71
|
@dataclass
|
|
@@ -119,6 +119,14 @@ class StagePathParts:
|
|
|
119
119
|
raise NotImplementedError
|
|
120
120
|
|
|
121
121
|
|
|
122
|
+
def _strip_standard_stage_prefix(path: str) -> str:
|
|
123
|
+
"""Removes '@' or 'snow://' prefix from given string"""
|
|
124
|
+
for prefix in ["@", "snow://"]:
|
|
125
|
+
if path.startswith(prefix):
|
|
126
|
+
path = path.removeprefix(prefix)
|
|
127
|
+
return path
|
|
128
|
+
|
|
129
|
+
|
|
122
130
|
@dataclass
|
|
123
131
|
class DefaultStagePathParts(StagePathParts):
|
|
124
132
|
"""
|
|
@@ -126,8 +134,8 @@ class DefaultStagePathParts(StagePathParts):
|
|
|
126
134
|
directory = dir
|
|
127
135
|
stage = @db.schema.stage
|
|
128
136
|
stage_name = stage
|
|
129
|
-
For
|
|
130
|
-
stage ->
|
|
137
|
+
For `snow://stage/dir` to
|
|
138
|
+
stage -> snow://stage
|
|
131
139
|
stage_name -> stage
|
|
132
140
|
directory -> dir
|
|
133
141
|
"""
|
|
@@ -138,12 +146,12 @@ class DefaultStagePathParts(StagePathParts):
|
|
|
138
146
|
raise ClickException("Invalid stage path")
|
|
139
147
|
self.directory = match.group("directory")
|
|
140
148
|
self._schema = match.group("second_qualifier") or match.group("first_qualifier")
|
|
149
|
+
self._prefix = match.group("prefix") or "@"
|
|
141
150
|
self.stage = stage_path.removesuffix(self.directory).rstrip("/")
|
|
142
151
|
|
|
143
152
|
stage_name = FQN.from_stage(self.stage).name
|
|
144
|
-
stage_name
|
|
145
|
-
stage_name
|
|
146
|
-
)
|
|
153
|
+
if stage_name.startswith(self._prefix):
|
|
154
|
+
stage_name = stage_name.removeprefix(self._prefix)
|
|
147
155
|
self.stage_name = stage_name
|
|
148
156
|
self.is_directory = True if stage_path.endswith("/") else False
|
|
149
157
|
|
|
@@ -167,13 +175,12 @@ class DefaultStagePathParts(StagePathParts):
|
|
|
167
175
|
return self._schema
|
|
168
176
|
|
|
169
177
|
def replace_stage_prefix(self, file_path: str) -> str:
|
|
170
|
-
|
|
178
|
+
file_path = _strip_standard_stage_prefix(file_path)
|
|
171
179
|
file_path_without_prefix = Path(file_path).parts[OMIT_FIRST]
|
|
172
|
-
return f"{stage}/{'/'.join(file_path_without_prefix)}"
|
|
180
|
+
return f"{self.stage}/{'/'.join(file_path_without_prefix)}"
|
|
173
181
|
|
|
174
182
|
def strip_stage_prefix(self, file_path: str) -> str:
|
|
175
|
-
|
|
176
|
-
file_path = file_path[OMIT_FIRST]
|
|
183
|
+
file_path = _strip_standard_stage_prefix(file_path)
|
|
177
184
|
if file_path.startswith(self.stage_name):
|
|
178
185
|
return file_path[len(self.stage_name) :]
|
|
179
186
|
return file_path
|
|
@@ -439,9 +446,13 @@ class StageManager(SqlExecutionMixin):
|
|
|
439
446
|
# We end if we reach the root directory
|
|
440
447
|
if directory == temp_dir_with_copy:
|
|
441
448
|
break
|
|
442
|
-
|
|
443
449
|
# Add parent directory to the list if it's not already there
|
|
444
|
-
if directory.parent not in deepest_dirs_list
|
|
450
|
+
if directory.parent not in deepest_dirs_list and not any(
|
|
451
|
+
(
|
|
452
|
+
existing_dir.is_relative_to(directory.parent)
|
|
453
|
+
for existing_dir in deepest_dirs_list
|
|
454
|
+
)
|
|
455
|
+
):
|
|
445
456
|
deepest_dirs_list.append(directory.parent)
|
|
446
457
|
|
|
447
458
|
# Remove the directory so the parent directory will contain only files
|
|
@@ -703,6 +714,7 @@ class StageManager(SqlExecutionMixin):
|
|
|
703
714
|
original_file: str,
|
|
704
715
|
) -> Dict:
|
|
705
716
|
try:
|
|
717
|
+
log.info("Executing SQL file: %s", file_stage_path)
|
|
706
718
|
query = f"execute immediate from {self.quote_stage_name(file_stage_path)}"
|
|
707
719
|
if variables:
|
|
708
720
|
query += variables
|
|
@@ -816,6 +828,7 @@ class StageManager(SqlExecutionMixin):
|
|
|
816
828
|
from snowflake.snowpark.exceptions import SnowparkSQLException
|
|
817
829
|
|
|
818
830
|
try:
|
|
831
|
+
log.info("Executing Python file: %s", file_stage_path)
|
|
819
832
|
self._python_exe_procedure(self.get_standard_stage_prefix(file_stage_path), variables, session=self.snowpark_session) # type: ignore
|
|
820
833
|
return StageManager._success_result(file=original_file)
|
|
821
834
|
except SnowparkSQLException as e:
|
|
@@ -37,6 +37,7 @@ from snowflake.cli.api.commands.decorators import (
|
|
|
37
37
|
with_project_definition,
|
|
38
38
|
)
|
|
39
39
|
from snowflake.cli.api.commands.flags import (
|
|
40
|
+
PruneOption,
|
|
40
41
|
ReplaceOption,
|
|
41
42
|
entity_argument,
|
|
42
43
|
identifier_argument,
|
|
@@ -136,6 +137,7 @@ def streamlit_deploy(
|
|
|
136
137
|
help="Replaces the Streamlit app if it already exists. It only uploads new and overwrites existing files, "
|
|
137
138
|
"but does not remove any files already on the stage."
|
|
138
139
|
),
|
|
140
|
+
prune: bool = PruneOption(),
|
|
139
141
|
entity_id: str = entity_argument("streamlit"),
|
|
140
142
|
open_: bool = OpenOption,
|
|
141
143
|
**options,
|
|
@@ -168,6 +170,7 @@ def streamlit_deploy(
|
|
|
168
170
|
streamlit=streamlit,
|
|
169
171
|
streamlit_project_paths=streamlit_project_paths,
|
|
170
172
|
replace=replace,
|
|
173
|
+
prune=prune,
|
|
171
174
|
)
|
|
172
175
|
|
|
173
176
|
if open_:
|
|
@@ -31,7 +31,7 @@ from snowflake.cli._plugins.streamlit.streamlit_entity_model import (
|
|
|
31
31
|
from snowflake.cli._plugins.streamlit.streamlit_project_paths import (
|
|
32
32
|
StreamlitProjectPaths,
|
|
33
33
|
)
|
|
34
|
-
from snowflake.cli.api.artifacts.upload import
|
|
34
|
+
from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage
|
|
35
35
|
from snowflake.cli.api.commands.experimental_behaviour import (
|
|
36
36
|
experimental_behaviour_enabled,
|
|
37
37
|
)
|
|
@@ -56,18 +56,20 @@ class StreamlitManager(SqlExecutionMixin):
|
|
|
56
56
|
f"grant usage on streamlit {streamlit_name.sql_identifier} to role {to_role}"
|
|
57
57
|
)
|
|
58
58
|
|
|
59
|
-
def
|
|
59
|
+
def _upload_artifacts(
|
|
60
60
|
self,
|
|
61
61
|
streamlit_project_paths: StreamlitProjectPaths,
|
|
62
62
|
stage_root: str,
|
|
63
|
+
prune: bool,
|
|
63
64
|
artifacts: Optional[List[PathMapping]] = None,
|
|
64
65
|
):
|
|
65
|
-
cli_console.
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
66
|
+
with cli_console.phase(f"Deploying files to {stage_root}"):
|
|
67
|
+
sync_artifacts_with_stage(
|
|
68
|
+
project_paths=streamlit_project_paths,
|
|
69
|
+
stage_root=stage_root,
|
|
70
|
+
prune=prune,
|
|
71
|
+
artifacts=artifacts,
|
|
72
|
+
)
|
|
71
73
|
|
|
72
74
|
def _create_streamlit(
|
|
73
75
|
self,
|
|
@@ -126,6 +128,7 @@ class StreamlitManager(SqlExecutionMixin):
|
|
|
126
128
|
streamlit: StreamlitEntityModel,
|
|
127
129
|
streamlit_project_paths: StreamlitProjectPaths,
|
|
128
130
|
replace: bool = False,
|
|
131
|
+
prune: bool = False,
|
|
129
132
|
):
|
|
130
133
|
streamlit_id = streamlit.fqn.using_connection(self._conn)
|
|
131
134
|
if (
|
|
@@ -182,10 +185,11 @@ class StreamlitManager(SqlExecutionMixin):
|
|
|
182
185
|
else:
|
|
183
186
|
stage_root = f"{embedded_stage_name}/default_checkout"
|
|
184
187
|
|
|
185
|
-
self.
|
|
188
|
+
self._upload_artifacts(
|
|
186
189
|
streamlit_project_paths,
|
|
187
190
|
stage_root,
|
|
188
|
-
|
|
191
|
+
prune=prune,
|
|
192
|
+
artifacts=streamlit.artifacts,
|
|
189
193
|
)
|
|
190
194
|
else:
|
|
191
195
|
"""
|
|
@@ -198,15 +202,15 @@ class StreamlitManager(SqlExecutionMixin):
|
|
|
198
202
|
stage_name = streamlit.stage or "streamlit"
|
|
199
203
|
stage_name = FQN.from_string(stage_name).using_connection(self._conn)
|
|
200
204
|
|
|
201
|
-
cli_console.step(f"Creating {stage_name} stage")
|
|
202
|
-
stage_manager.create(fqn=stage_name)
|
|
203
|
-
|
|
204
205
|
stage_root = stage_manager.get_standard_stage_prefix(
|
|
205
206
|
f"{stage_name}/{streamlit_name_for_root_location}"
|
|
206
207
|
)
|
|
207
208
|
|
|
208
|
-
self.
|
|
209
|
-
streamlit_project_paths,
|
|
209
|
+
self._upload_artifacts(
|
|
210
|
+
streamlit_project_paths,
|
|
211
|
+
stage_root,
|
|
212
|
+
prune=prune,
|
|
213
|
+
artifacts=streamlit.artifacts,
|
|
210
214
|
)
|
|
211
215
|
|
|
212
216
|
self._create_streamlit(
|
|
@@ -1,51 +1,47 @@
|
|
|
1
|
-
from pathlib import PurePosixPath
|
|
2
1
|
from typing import List, Optional
|
|
3
2
|
|
|
4
3
|
from snowflake.cli._plugins.stage.manager import StageManager
|
|
5
|
-
from snowflake.cli.api.artifacts.
|
|
6
|
-
from snowflake.cli.api.artifacts.utils import symlink_or_copy
|
|
4
|
+
from snowflake.cli.api.artifacts.utils import bundle_artifacts
|
|
7
5
|
from snowflake.cli.api.console import cli_console
|
|
6
|
+
from snowflake.cli.api.entities.utils import sync_deploy_root_with_stage
|
|
8
7
|
from snowflake.cli.api.project.project_paths import ProjectPaths
|
|
9
8
|
from snowflake.cli.api.project.schemas.entities.common import PathMapping
|
|
10
9
|
|
|
11
10
|
|
|
12
|
-
def
|
|
11
|
+
def sync_artifacts_with_stage(
|
|
13
12
|
project_paths: ProjectPaths,
|
|
14
13
|
stage_root: str,
|
|
14
|
+
prune: bool = False,
|
|
15
15
|
artifacts: Optional[List[PathMapping]] = None,
|
|
16
16
|
):
|
|
17
|
-
if
|
|
18
|
-
|
|
19
|
-
|
|
17
|
+
if artifacts is None:
|
|
18
|
+
artifacts = []
|
|
19
|
+
|
|
20
|
+
bundle_map = bundle_artifacts(project_paths, artifacts)
|
|
21
|
+
stage_path_parts = StageManager().stage_path_parts_from_str(stage_root)
|
|
20
22
|
# We treat the bundle root as deploy root
|
|
21
|
-
|
|
22
|
-
|
|
23
|
+
sync_deploy_root_with_stage(
|
|
24
|
+
console=cli_console,
|
|
23
25
|
deploy_root=project_paths.bundle_root,
|
|
26
|
+
bundle_map=bundle_map,
|
|
27
|
+
prune=prune,
|
|
28
|
+
recursive=True,
|
|
29
|
+
stage_path=stage_path_parts,
|
|
30
|
+
print_diff=True,
|
|
24
31
|
)
|
|
25
|
-
for artifact in artifacts:
|
|
26
|
-
bundle_map.add(PathMapping(src=str(artifact.src), dest=artifact.dest))
|
|
27
32
|
|
|
28
|
-
# Clean up bundle root
|
|
29
|
-
project_paths.remove_up_bundle_root()
|
|
30
33
|
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
.parent
|
|
46
|
-
)
|
|
47
|
-
full_stage_path = f"{stage_root}/{stage_path}".rstrip("/")
|
|
48
|
-
cli_console.step(f"Uploading {absolute_dest} to {full_stage_path}")
|
|
49
|
-
stage_manager.put(
|
|
50
|
-
local_path=absolute_dest, stage_path=full_stage_path, overwrite=True
|
|
51
|
-
)
|
|
34
|
+
def put_files(
    project_paths: ProjectPaths,
    stage_root: str,
    artifacts: Optional[List[PathMapping]] = None,
):
    """Upload the given artifacts to *stage_root* without pruning remote files.

    Does nothing when *artifacts* is empty or None.
    """
    if artifacts:
        sync_artifacts_with_stage(
            project_paths=project_paths,
            stage_root=stage_root,
            prune=False,
            artifacts=artifacts,
        )
|
|
@@ -69,14 +69,16 @@ def bundle_artifacts(project_paths: ProjectPaths, artifacts: Artifacts) -> Bundl
|
|
|
69
69
|
bundle_map.add(artifact)
|
|
70
70
|
|
|
71
71
|
project_paths.remove_up_bundle_root()
|
|
72
|
+
SecurePath(project_paths.bundle_root).mkdir(parents=True, exist_ok=True)
|
|
72
73
|
for absolute_src, absolute_dest in bundle_map.all_mappings(
|
|
73
74
|
absolute=True, expand_directories=True
|
|
74
75
|
):
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
76
|
+
if absolute_src.is_file():
|
|
77
|
+
# We treat the bundle root as deploy root
|
|
78
|
+
symlink_or_copy(
|
|
79
|
+
absolute_src,
|
|
80
|
+
absolute_dest,
|
|
81
|
+
deploy_root=project_paths.bundle_root,
|
|
82
|
+
)
|
|
81
83
|
|
|
82
84
|
return bundle_map
|
|
@@ -22,7 +22,7 @@ from pathlib import Path
|
|
|
22
22
|
from typing import TYPE_CHECKING, Iterator, Optional
|
|
23
23
|
|
|
24
24
|
from snowflake.cli.api.connections import ConnectionContext, OpenConnectionCache
|
|
25
|
-
from snowflake.cli.api.exceptions import
|
|
25
|
+
from snowflake.cli.api.exceptions import MissingConfigurationError
|
|
26
26
|
from snowflake.cli.api.metrics import CLIMetrics
|
|
27
27
|
from snowflake.cli.api.output.formats import OutputFormat
|
|
28
28
|
from snowflake.cli.api.rendering.jinja import CONTEXT_KEY
|
|
@@ -61,6 +61,7 @@ class _CliGlobalContextManager:
|
|
|
61
61
|
override_project_definition: ProjectDefinition | None = None
|
|
62
62
|
|
|
63
63
|
_definition_manager: DefinitionManager | None = None
|
|
64
|
+
enhanced_exit_codes: bool = False
|
|
64
65
|
|
|
65
66
|
# which properties invalidate our current DefinitionManager?
|
|
66
67
|
DEFINITION_MANAGER_DEPENDENCIES = [
|
|
@@ -126,7 +127,7 @@ class _CliGlobalContextManager:
|
|
|
126
127
|
{CONTEXT_KEY: {"env": self.project_env_overrides_args}},
|
|
127
128
|
)
|
|
128
129
|
if not dm.has_definition_file and not self.project_is_optional:
|
|
129
|
-
raise
|
|
130
|
+
raise MissingConfigurationError(
|
|
130
131
|
"Cannot find project definition (snowflake.yml). Please provide a path to the project or run this command in a valid project directory."
|
|
131
132
|
)
|
|
132
133
|
self._definition_manager = dm
|
|
@@ -209,6 +210,10 @@ class _CliGlobalContextAccess:
|
|
|
209
210
|
else:
|
|
210
211
|
return None
|
|
211
212
|
|
|
213
|
+
@property
|
|
214
|
+
def enhanced_exit_codes(self) -> bool:
|
|
215
|
+
return self._manager.enhanced_exit_codes
|
|
216
|
+
|
|
212
217
|
|
|
213
218
|
_CLI_CONTEXT_MANAGER: ContextVar[_CliGlobalContextManager | None] = ContextVar(
|
|
214
219
|
"cli_context", default=None
|
|
@@ -29,6 +29,7 @@ from snowflake.cli.api.commands.flags import (
|
|
|
29
29
|
DiagAllowlistPathOption,
|
|
30
30
|
DiagLogPathOption,
|
|
31
31
|
EnableDiagOption,
|
|
32
|
+
EnhancedExitCodesOption,
|
|
32
33
|
HostOption,
|
|
33
34
|
MasterTokenOption,
|
|
34
35
|
MfaPasscodeOption,
|
|
@@ -78,7 +79,6 @@ def global_options_with_connection(func: Callable):
|
|
|
78
79
|
|
|
79
80
|
def with_project_definition(is_optional: bool = False):
|
|
80
81
|
def _decorator(func: Callable):
|
|
81
|
-
|
|
82
82
|
return _options_decorator_factory(
|
|
83
83
|
func,
|
|
84
84
|
additional_options=[
|
|
@@ -159,7 +159,10 @@ def _options_decorator_factory(
|
|
|
159
159
|
execute_before_command_using_new_options(**options)
|
|
160
160
|
return func(**options)
|
|
161
161
|
|
|
162
|
-
wrapper.__signature__ = _extend_signature_with_additional_options(
|
|
162
|
+
wrapper.__signature__ = _extend_signature_with_additional_options( # type: ignore
|
|
163
|
+
func, additional_options
|
|
164
|
+
)
|
|
165
|
+
|
|
163
166
|
return wrapper
|
|
164
167
|
|
|
165
168
|
|
|
@@ -353,6 +356,12 @@ GLOBAL_OPTIONS = [
|
|
|
353
356
|
annotation=Optional[bool],
|
|
354
357
|
default=SilentOption,
|
|
355
358
|
),
|
|
359
|
+
inspect.Parameter(
|
|
360
|
+
"enhanced_exit_codes",
|
|
361
|
+
inspect.Parameter.KEYWORD_ONLY,
|
|
362
|
+
annotation=Optional[bool],
|
|
363
|
+
default=EnhancedExitCodesOption,
|
|
364
|
+
),
|
|
356
365
|
]
|
|
357
366
|
|
|
358
367
|
|