snowflake-cli 3.7.1__py3-none-any.whl → 3.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/cli/__about__.py +1 -1
- snowflake/cli/_app/snow_connector.py +14 -0
- snowflake/cli/_app/telemetry.py +11 -0
- snowflake/cli/_plugins/connection/commands.py +4 -2
- snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py +1 -1
- snowflake/cli/_plugins/nativeapp/entities/application_package.py +20 -7
- snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +5 -3
- snowflake/cli/_plugins/project/commands.py +16 -6
- snowflake/cli/_plugins/snowpark/common.py +31 -0
- snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +3 -0
- snowflake/cli/_plugins/snowpark/snowpark_entity.py +21 -1
- snowflake/cli/_plugins/snowpark/snowpark_entity_model.py +23 -1
- snowflake/cli/_plugins/spcs/common.py +7 -0
- snowflake/cli/_plugins/spcs/image_repository/commands.py +7 -2
- snowflake/cli/_plugins/spcs/image_repository/manager.py +6 -2
- snowflake/cli/_plugins/spcs/services/commands.py +2 -2
- snowflake/cli/_plugins/spcs/services/manager.py +36 -1
- snowflake/cli/_plugins/sql/commands.py +57 -6
- snowflake/cli/_plugins/sql/lexer/__init__.py +7 -0
- snowflake/cli/_plugins/sql/lexer/completer.py +12 -0
- snowflake/cli/_plugins/sql/lexer/functions.py +421 -0
- snowflake/cli/_plugins/sql/lexer/keywords.py +529 -0
- snowflake/cli/_plugins/sql/lexer/lexer.py +56 -0
- snowflake/cli/_plugins/sql/lexer/types.py +37 -0
- snowflake/cli/_plugins/sql/manager.py +43 -9
- snowflake/cli/_plugins/sql/repl.py +221 -0
- snowflake/cli/_plugins/sql/snowsql_commands.py +331 -0
- snowflake/cli/_plugins/sql/statement_reader.py +296 -0
- snowflake/cli/_plugins/streamlit/commands.py +30 -15
- snowflake/cli/_plugins/streamlit/manager.py +0 -183
- snowflake/cli/_plugins/streamlit/streamlit_entity.py +163 -23
- snowflake/cli/api/artifacts/upload.py +5 -0
- snowflake/cli/api/artifacts/utils.py +0 -2
- snowflake/cli/api/cli_global_context.py +7 -3
- snowflake/cli/api/commands/decorators.py +70 -0
- snowflake/cli/api/commands/flags.py +95 -3
- snowflake/cli/api/config.py +10 -0
- snowflake/cli/api/connections.py +10 -0
- snowflake/cli/api/console/abc.py +8 -2
- snowflake/cli/api/console/console.py +16 -0
- snowflake/cli/api/console/enum.py +1 -1
- snowflake/cli/api/entities/common.py +99 -10
- snowflake/cli/api/entities/utils.py +1 -0
- snowflake/cli/api/feature_flags.py +6 -0
- snowflake/cli/api/project/project_paths.py +5 -0
- snowflake/cli/api/rendering/sql_templates.py +2 -1
- snowflake/cli/api/sql_execution.py +16 -4
- snowflake/cli/api/utils/path_utils.py +15 -0
- snowflake/cli/api/utils/python_api_utils.py +12 -0
- {snowflake_cli-3.7.1.dist-info → snowflake_cli-3.8.0.dist-info}/METADATA +12 -8
- {snowflake_cli-3.7.1.dist-info → snowflake_cli-3.8.0.dist-info}/RECORD +54 -46
- snowflake/cli/_plugins/nativeapp/feature_flags.py +0 -28
- snowflake/cli/_plugins/sql/source_reader.py +0 -230
- {snowflake_cli-3.7.1.dist-info → snowflake_cli-3.8.0.dist-info}/WHEEL +0 -0
- {snowflake_cli-3.7.1.dist-info → snowflake_cli-3.8.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli-3.7.1.dist-info → snowflake_cli-3.8.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,296 @@
|
|
|
1
|
+
import enum
|
|
2
|
+
import io
|
|
3
|
+
import re
|
|
4
|
+
import urllib.error
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Any, Callable, Generator, List, Literal, Sequence, Tuple
|
|
7
|
+
from urllib.request import urlopen
|
|
8
|
+
|
|
9
|
+
from jinja2 import UndefinedError
|
|
10
|
+
from snowflake.cli._plugins.sql.snowsql_commands import (
|
|
11
|
+
SnowSQLCommand,
|
|
12
|
+
compile_snowsql_command,
|
|
13
|
+
)
|
|
14
|
+
from snowflake.cli.api.secure_path import UNLIMITED, SecurePath
|
|
15
|
+
from snowflake.connector.util_text import split_statements
|
|
16
|
+
|
|
17
|
+
# Matches a leading "!command args" line, e.g. "!source file.sql" or
# "!queries amount=3;". Group 1 captures the command name, group 2 its
# arguments with surrounding quotes and a trailing semicolon stripped.
COMMAND_PATTERN = re.compile(
    r"^!(\w+)\s*[\"']?(.*?)[\"']?\s*(?:;|$)",
    flags=re.IGNORECASE,
)
# Splits "scheme://rest" into scheme and remainder; used to decide whether
# a !source/!load argument is a URL or a local file path.
URL_PATTERN = re.compile(r"^(\w+?):\/(\/.*)", flags=re.IGNORECASE)

# Statements ending with ";>" are scheduled for asynchronous execution.
ASYNC_SUFFIX = ";>"


# Shape yielded by snowflake.connector.util_text.split_statements:
# (statement_text, flag) — the flag presumably marks PUT/GET statements;
# confirm against the connector's documentation.
SplitedStatements = Generator[
    tuple[str, bool | None] | tuple[str, Literal[False]],
    Any,
    None,
]

# A transformation applied to raw SQL text (e.g. template rendering).
SqlTransformFunc = Callable[[str], str]
# An ordered pipeline of SQL text transformations.
OperatorFunctions = Sequence[SqlTransformFunc]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class StatementType(enum.Enum):
|
|
37
|
+
FILE = "file"
|
|
38
|
+
QUERY = "query"
|
|
39
|
+
UNKNOWN = "unknown"
|
|
40
|
+
URL = "url"
|
|
41
|
+
SNOWSQL_COMMAND = "snowsql_command"
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class ParsedStatement:
|
|
45
|
+
"""Container for parsed statement.
|
|
46
|
+
|
|
47
|
+
Holds:
|
|
48
|
+
- source: statement on command content
|
|
49
|
+
- source_type: type of source
|
|
50
|
+
- source_path: in case of URL or FILE path of the origin
|
|
51
|
+
- error: optional message
|
|
52
|
+
"""
|
|
53
|
+
|
|
54
|
+
__slots__ = ("statement", "statement_type", "source_path", "error")
|
|
55
|
+
__match_args__ = ("statement_type", "error")
|
|
56
|
+
|
|
57
|
+
statement: io.StringIO
|
|
58
|
+
statement_type: StatementType | None
|
|
59
|
+
source_path: str | None
|
|
60
|
+
error: str | None
|
|
61
|
+
|
|
62
|
+
def __init__(
|
|
63
|
+
self,
|
|
64
|
+
statement: str,
|
|
65
|
+
source_type: StatementType,
|
|
66
|
+
source_path: str | None,
|
|
67
|
+
error: str | None = None,
|
|
68
|
+
):
|
|
69
|
+
self.statement = io.StringIO(statement)
|
|
70
|
+
self.statement_type = source_type
|
|
71
|
+
self.source_path = source_path
|
|
72
|
+
self.error = error
|
|
73
|
+
|
|
74
|
+
def __bool__(self):
|
|
75
|
+
return not self.error
|
|
76
|
+
|
|
77
|
+
def __eq__(self, other):
|
|
78
|
+
result = (
|
|
79
|
+
self.statement_type == other.statement_type,
|
|
80
|
+
self.source_path == other.source_path,
|
|
81
|
+
self.error == other.error,
|
|
82
|
+
self.statement.read() == other.statement.read(),
|
|
83
|
+
)
|
|
84
|
+
self.statement.seek(0)
|
|
85
|
+
other.statement.seek(0)
|
|
86
|
+
return all(result)
|
|
87
|
+
|
|
88
|
+
def __repr__(self):
|
|
89
|
+
return f"{self.__class__.__name__}(statement_type={self.statement_type}, source_path={self.source_path}, error={self.error})"
|
|
90
|
+
|
|
91
|
+
@classmethod
|
|
92
|
+
def from_url(cls, path_part: str, raw_source: str) -> "ParsedStatement":
|
|
93
|
+
"""Constructor for loading from URL."""
|
|
94
|
+
try:
|
|
95
|
+
payload = urlopen(path_part, timeout=10.0).read().decode()
|
|
96
|
+
return cls(payload, StatementType.URL, path_part)
|
|
97
|
+
|
|
98
|
+
except urllib.error.HTTPError as err:
|
|
99
|
+
error = f"Could not fetch {path_part}: {err}"
|
|
100
|
+
return cls(path_part, StatementType.URL, raw_source, error)
|
|
101
|
+
|
|
102
|
+
@classmethod
|
|
103
|
+
def from_file(cls, path_part: str, raw_source: str) -> "ParsedStatement":
|
|
104
|
+
"""Constructor for loading from file."""
|
|
105
|
+
path = SecurePath(path_part)
|
|
106
|
+
|
|
107
|
+
if path.is_file():
|
|
108
|
+
payload = path.read_text(file_size_limit_mb=UNLIMITED)
|
|
109
|
+
return cls(payload, StatementType.FILE, path.as_posix())
|
|
110
|
+
|
|
111
|
+
error_msg = f"Could not read: {path_part}"
|
|
112
|
+
return cls(path_part, StatementType.FILE, raw_source, error_msg)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# Generator of ParsedStatement produced by the (possibly recursive) readers.
RecursiveStatementReader = Generator[ParsedStatement, Any, Any]
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def parse_statement(source: str, operators: OperatorFunctions) -> ParsedStatement:
    """Evaluates templating and source commands.

    Returns parsed source according to origin.

    Args:
        source: raw statement text.
        operators: SQL text transformations (e.g. template rendering)
            applied in order before command detection.
    """
    try:
        statement = source
        for operator in operators:
            statement = operator(statement)
    except UndefinedError as e:
        # Template rendering failed — report the original text as UNKNOWN.
        error_msg = f"SQL template rendering error: {e}"
        return ParsedStatement(source, StatementType.UNKNOWN, source, error_msg)

    # COMMAND_PATTERN.split yields [prefix, command, args, rest] when the
    # text starts with "!command ..."; a single-element result means no
    # command was found.
    split_result = COMMAND_PATTERN.split(statement, maxsplit=1)
    split_result = [p.strip() for p in split_result]

    if len(split_result) == 1:
        # No "!command" prefix — plain SQL query.
        return ParsedStatement(statement, StatementType.QUERY, None)

    _, command, command_args, *_ = split_result
    # URL_PATTERN.split produces ["", scheme, rest] for "scheme://...",
    # or the single-element [args] for a plain path.
    _path_match = URL_PATTERN.split(command_args.lower())

    match command.lower(), _path_match:
        # load content from an URL
        case "source" | "load", ("", "http" | "https", *_):
            return ParsedStatement.from_url(command_args, statement)

        # load content from a local file
        case "source" | "load", (str(),):
            return ParsedStatement.from_file(command_args, statement)

        # unsupported scheme (e.g. ftp://) for !source/!load
        case "source" | "load", _:
            return ParsedStatement(
                statement,
                StatementType.UNKNOWN,
                command_args,
                f"Unknown source: {command_args}",
            )

        # recognized SnowSQL commands, compiled later by compile_statements
        case "queries" | "result" | "abort", (str(),):
            return ParsedStatement(statement, StatementType.SNOWSQL_COMMAND, None)

        case _:
            error_msg = f"Unknown command: {command}"

    return ParsedStatement(statement, StatementType.UNKNOWN, None, error_msg)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def recursive_statement_reader(
    source: SplitedStatements,
    seen_files: list,
    operators: OperatorFunctions,
    remove_comments: bool,
) -> RecursiveStatementReader:
    """Based on detected source command reads content of the source and tracks for recursion cycles.

    Args:
        source: statements as produced by split_statements.
        seen_files: stack of file/URL origins currently being expanded;
            mutated in place (push before recursing, pop after) to detect
            recursion cycles.
        operators: SQL text transformations applied to every statement.
        remove_comments: forwarded to split_statements for nested sources.
    """
    for stmt, _ in source:
        if not stmt:
            continue
        parsed_source = parse_statement(stmt, operators)

        match parsed_source:
            # FILE/URL source that loaded cleanly (error is None):
            # expand its contents recursively.
            case ParsedStatement(StatementType.FILE | StatementType.URL, None):
                if parsed_source.source_path in seen_files:
                    error = f"Recursion detected: {' -> '.join(seen_files)}"
                    parsed_source.error = error
                    yield parsed_source
                    continue

                seen_files.append(parsed_source.source_path)

                yield from recursive_statement_reader(
                    split_statements(parsed_source.statement, remove_comments),
                    seen_files,
                    operators,
                    remove_comments,
                )

                seen_files.pop()

            # URL source that failed to load: pass the error through.
            case ParsedStatement(StatementType.URL, error) if error:
                yield parsed_source

            # Everything else (queries, commands, unknowns) is yielded as-is.
            case _:
                yield parsed_source
    return
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def files_reader(
    paths: Sequence[SecurePath],
    operators: OperatorFunctions,
    remove_comments: bool = False,
) -> RecursiveStatementReader:
    """Read statements from the given files, expanding nested sources.

    Yields ParsedStatement objects; each file seeds the recursion stack
    with its own path so self-inclusion is detected."""
    for file_path in paths:
        with file_path.open(read_file_limit_mb=UNLIMITED) as handle:
            content = io.StringIO(handle.read())
            statements = split_statements(content, remove_comments)
            yield from recursive_statement_reader(
                statements,
                [file_path.as_posix()],
                operators,
                remove_comments,
            )
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def query_reader(
    source: str,
    operators: OperatorFunctions,
    remove_comments: bool = False,
) -> RecursiveStatementReader:
    """Read statements from a raw query string, expanding nested sources.

    Yields ParsedStatement objects.

    Known issue of split_statements (doesn't work in SnowSQL either):
    when the line starts with a command, e.g. '!queries amount=3; select 3;',
    it is treated as a single statement."""
    buffered_source = io.StringIO(source)
    statements = split_statements(buffered_source, remove_comments)
    yield from recursive_statement_reader(statements, [], operators, remove_comments)
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
@dataclass
class CompiledStatement:
    """A single executable unit: plain SQL or a compiled SnowSQL command."""

    # SQL text to execute; None when this entry is a SnowSQL command
    statement: str | None = None
    # True when the statement ended with the ";>" async suffix
    execute_async: bool = False
    # compiled SnowSQL command; None when this entry is plain SQL
    command: SnowSQLCommand | None = None
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def _is_empty_statement(statement: str) -> bool:
|
|
247
|
+
# checks whether all lines from the statement are empty or start with comment
|
|
248
|
+
for line in statement.splitlines():
|
|
249
|
+
if line.strip() and not line.lstrip().startswith("--"):
|
|
250
|
+
# nonempty uncommented line
|
|
251
|
+
return False
|
|
252
|
+
return True
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
def compile_statements(
    source: RecursiveStatementReader,
) -> Tuple[List[str], int, List[CompiledStatement]]:
    """Tracks statements evaluation and collects errors.

    Returns a tuple of:
    - errors: messages gathered from parsing and command compilation
    - expected_results_cnt: number of synchronous statements (each is
      expected to produce a result)
    - compiled: statements/commands ready for execution
    """
    errors = []
    expected_results_cnt = 0
    compiled = []

    for stmt in source:
        if stmt.statement_type == StatementType.QUERY:
            statement = stmt.statement.read()
            if not stmt.error and not _is_empty_statement(statement):
                # ";>" marks asynchronous execution; async statements do
                # not contribute to the expected result count.
                is_async = statement.endswith(ASYNC_SUFFIX)
                compiled.append(
                    CompiledStatement(
                        statement=statement.removesuffix(ASYNC_SUFFIX),
                        execute_async=is_async,
                    )
                )
                if not is_async:
                    expected_results_cnt += 1

        if stmt.statement_type == StatementType.SNOWSQL_COMMAND:
            if not stmt.error:
                # "!command arg1 arg2": strip suffixes, then tokenize into
                # command name and arguments.
                cmd = (
                    stmt.statement.read()
                    .removesuffix(ASYNC_SUFFIX)
                    .removesuffix(";")
                    .split()
                )
                parsed_command = compile_snowsql_command(
                    command=cmd[0], cmd_args=cmd[1:]
                )
                if parsed_command.error_message:
                    errors.append(parsed_command.error_message)
                else:
                    compiled.append(CompiledStatement(command=parsed_command.command))

        if stmt.error:
            errors.append(stmt.error)

    return errors, expected_results_cnt, compiled
|
|
@@ -25,12 +25,8 @@ from snowflake.cli._plugins.object.command_aliases import (
|
|
|
25
25
|
scope_option,
|
|
26
26
|
)
|
|
27
27
|
from snowflake.cli._plugins.streamlit.manager import StreamlitManager
|
|
28
|
-
from snowflake.cli._plugins.streamlit.
|
|
29
|
-
|
|
30
|
-
)
|
|
31
|
-
from snowflake.cli._plugins.streamlit.streamlit_project_paths import (
|
|
32
|
-
StreamlitProjectPaths,
|
|
33
|
-
)
|
|
28
|
+
from snowflake.cli._plugins.streamlit.streamlit_entity import StreamlitEntity
|
|
29
|
+
from snowflake.cli._plugins.workspace.context import ActionContext, WorkspaceContext
|
|
34
30
|
from snowflake.cli.api.cli_global_context import get_cli_context
|
|
35
31
|
from snowflake.cli.api.commands.decorators import (
|
|
36
32
|
with_experimental_behaviour,
|
|
@@ -45,7 +41,9 @@ from snowflake.cli.api.commands.flags import (
|
|
|
45
41
|
)
|
|
46
42
|
from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
|
|
47
43
|
from snowflake.cli.api.commands.utils import get_entity_for_operation
|
|
44
|
+
from snowflake.cli.api.console.console import CliConsole
|
|
48
45
|
from snowflake.cli.api.constants import ObjectType
|
|
46
|
+
from snowflake.cli.api.entities.utils import EntityActions
|
|
49
47
|
from snowflake.cli.api.exceptions import NoProjectDefinitionError
|
|
50
48
|
from snowflake.cli.api.identifiers import FQN
|
|
51
49
|
from snowflake.cli.api.output.types import (
|
|
@@ -158,18 +156,24 @@ def streamlit_deploy(
|
|
|
158
156
|
)
|
|
159
157
|
pd = convert_project_definition_to_v2(cli_context.project_root, pd)
|
|
160
158
|
|
|
161
|
-
streamlit:
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
159
|
+
streamlit: StreamlitEntity = StreamlitEntity(
|
|
160
|
+
entity_model=get_entity_for_operation(
|
|
161
|
+
cli_context=cli_context,
|
|
162
|
+
entity_id=entity_id,
|
|
163
|
+
project_definition=pd,
|
|
164
|
+
entity_type=ObjectType.STREAMLIT.value.cli_name,
|
|
165
|
+
),
|
|
166
|
+
workspace_ctx=_get_current_workspace_context(),
|
|
166
167
|
)
|
|
167
168
|
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
169
|
+
url = streamlit.perform(
|
|
170
|
+
EntityActions.DEPLOY,
|
|
171
|
+
ActionContext(
|
|
172
|
+
get_entity=lambda *args: None,
|
|
173
|
+
),
|
|
174
|
+
_open=open_,
|
|
172
175
|
replace=replace,
|
|
176
|
+
experimental=options.get("experimental"),
|
|
173
177
|
prune=prune,
|
|
174
178
|
)
|
|
175
179
|
|
|
@@ -190,3 +194,14 @@ def get_url(
|
|
|
190
194
|
if open_:
|
|
191
195
|
typer.launch(url)
|
|
192
196
|
return MessageResult(url)
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def _get_current_workspace_context():
    """Build a WorkspaceContext from the current CLI global context.

    Role and warehouse are resolved lazily via lambdas, so they reflect
    the active connection at call time rather than at context creation.
    """
    ctx = get_cli_context()

    return WorkspaceContext(
        console=CliConsole(),
        project_root=ctx.project_root,
        get_default_role=lambda: ctx.connection.role,
        get_default_warehouse=lambda: ctx.connection.warehouse,
    )
|
|
@@ -15,33 +15,18 @@
|
|
|
15
15
|
from __future__ import annotations
|
|
16
16
|
|
|
17
17
|
import logging
|
|
18
|
-
from typing import List, Optional
|
|
19
18
|
|
|
20
|
-
from click import ClickException
|
|
21
19
|
from snowflake.cli._plugins.connection.util import (
|
|
22
20
|
MissingConnectionAccountError,
|
|
23
21
|
MissingConnectionRegionError,
|
|
24
22
|
make_snowsight_url,
|
|
25
23
|
)
|
|
26
|
-
from snowflake.cli._plugins.object.manager import ObjectManager
|
|
27
|
-
from snowflake.cli._plugins.stage.manager import StageManager
|
|
28
24
|
from snowflake.cli._plugins.streamlit.streamlit_entity_model import (
|
|
29
25
|
StreamlitEntityModel,
|
|
30
26
|
)
|
|
31
|
-
from snowflake.cli._plugins.streamlit.streamlit_project_paths import (
|
|
32
|
-
StreamlitProjectPaths,
|
|
33
|
-
)
|
|
34
|
-
from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage
|
|
35
|
-
from snowflake.cli.api.commands.experimental_behaviour import (
|
|
36
|
-
experimental_behaviour_enabled,
|
|
37
|
-
)
|
|
38
|
-
from snowflake.cli.api.console import cli_console
|
|
39
|
-
from snowflake.cli.api.feature_flags import FeatureFlag
|
|
40
27
|
from snowflake.cli.api.identifiers import FQN
|
|
41
|
-
from snowflake.cli.api.project.schemas.entities.common import PathMapping
|
|
42
28
|
from snowflake.cli.api.sql_execution import SqlExecutionMixin
|
|
43
29
|
from snowflake.connector.cursor import SnowflakeCursor
|
|
44
|
-
from snowflake.connector.errors import ProgrammingError
|
|
45
30
|
|
|
46
31
|
log = logging.getLogger(__name__)
|
|
47
32
|
|
|
@@ -56,174 +41,6 @@ class StreamlitManager(SqlExecutionMixin):
|
|
|
56
41
|
f"grant usage on streamlit {streamlit_name.sql_identifier} to role {to_role}"
|
|
57
42
|
)
|
|
58
43
|
|
|
59
|
-
def _upload_artifacts(
|
|
60
|
-
self,
|
|
61
|
-
streamlit_project_paths: StreamlitProjectPaths,
|
|
62
|
-
stage_root: str,
|
|
63
|
-
prune: bool,
|
|
64
|
-
artifacts: Optional[List[PathMapping]] = None,
|
|
65
|
-
):
|
|
66
|
-
with cli_console.phase(f"Deploying files to {stage_root}"):
|
|
67
|
-
sync_artifacts_with_stage(
|
|
68
|
-
project_paths=streamlit_project_paths,
|
|
69
|
-
stage_root=stage_root,
|
|
70
|
-
prune=prune,
|
|
71
|
-
artifacts=artifacts,
|
|
72
|
-
)
|
|
73
|
-
|
|
74
|
-
def _create_streamlit(
|
|
75
|
-
self,
|
|
76
|
-
streamlit: StreamlitEntityModel,
|
|
77
|
-
replace: Optional[bool] = None,
|
|
78
|
-
experimental: Optional[bool] = None,
|
|
79
|
-
from_stage_name: Optional[str] = None,
|
|
80
|
-
):
|
|
81
|
-
streamlit_id = streamlit.fqn.using_connection(self._conn)
|
|
82
|
-
cli_console.step(f"Creating {streamlit_id} Streamlit")
|
|
83
|
-
query = []
|
|
84
|
-
if replace:
|
|
85
|
-
query.append(f"CREATE OR REPLACE STREAMLIT {streamlit_id.sql_identifier}")
|
|
86
|
-
elif experimental:
|
|
87
|
-
# For experimental behaviour, we need to use CREATE STREAMLIT IF NOT EXISTS
|
|
88
|
-
# for a streamlit app with an embedded stage
|
|
89
|
-
# because this is analogous to the behavior for non-experimental
|
|
90
|
-
# deploy which does CREATE STAGE IF NOT EXISTS
|
|
91
|
-
query.append(
|
|
92
|
-
f"CREATE STREAMLIT IF NOT EXISTS {streamlit_id.sql_identifier}"
|
|
93
|
-
)
|
|
94
|
-
else:
|
|
95
|
-
query.append(f"CREATE STREAMLIT {streamlit_id.sql_identifier}")
|
|
96
|
-
|
|
97
|
-
if from_stage_name:
|
|
98
|
-
query.append(f"ROOT_LOCATION = '{from_stage_name}'")
|
|
99
|
-
|
|
100
|
-
query.append(f"MAIN_FILE = '{streamlit.main_file}'")
|
|
101
|
-
if streamlit.imports:
|
|
102
|
-
query.append(streamlit.get_imports_sql())
|
|
103
|
-
|
|
104
|
-
if not streamlit.query_warehouse:
|
|
105
|
-
cli_console.warning(
|
|
106
|
-
"[Deprecation] In next major version we will remove default query_warehouse='streamlit'."
|
|
107
|
-
)
|
|
108
|
-
query.append(f"QUERY_WAREHOUSE = 'streamlit'")
|
|
109
|
-
else:
|
|
110
|
-
query.append(f"QUERY_WAREHOUSE = {streamlit.query_warehouse}")
|
|
111
|
-
|
|
112
|
-
if streamlit.title:
|
|
113
|
-
query.append(f"TITLE = '{streamlit.title}'")
|
|
114
|
-
|
|
115
|
-
if streamlit.comment:
|
|
116
|
-
query.append(f"COMMENT = '{streamlit.comment}'")
|
|
117
|
-
|
|
118
|
-
if streamlit.external_access_integrations:
|
|
119
|
-
query.append(streamlit.get_external_access_integrations_sql())
|
|
120
|
-
|
|
121
|
-
if streamlit.secrets:
|
|
122
|
-
query.append(streamlit.get_secrets_sql())
|
|
123
|
-
|
|
124
|
-
self.execute_query("\n".join(query))
|
|
125
|
-
|
|
126
|
-
def deploy(
|
|
127
|
-
self,
|
|
128
|
-
streamlit: StreamlitEntityModel,
|
|
129
|
-
streamlit_project_paths: StreamlitProjectPaths,
|
|
130
|
-
replace: bool = False,
|
|
131
|
-
prune: bool = False,
|
|
132
|
-
):
|
|
133
|
-
streamlit_id = streamlit.fqn.using_connection(self._conn)
|
|
134
|
-
if (
|
|
135
|
-
ObjectManager().object_exists(object_type="streamlit", fqn=streamlit_id)
|
|
136
|
-
and not replace
|
|
137
|
-
):
|
|
138
|
-
raise ClickException(
|
|
139
|
-
f"Streamlit {streamlit.fqn} already exist. If you want to replace it use --replace flag."
|
|
140
|
-
)
|
|
141
|
-
|
|
142
|
-
# for backwards compatibility - quoted stage path might be case-sensitive
|
|
143
|
-
# https://docs.snowflake.com/en/sql-reference/identifiers-syntax#double-quoted-identifiers
|
|
144
|
-
streamlit_name_for_root_location = streamlit_id.name
|
|
145
|
-
use_versioned_stage = FeatureFlag.ENABLE_STREAMLIT_VERSIONED_STAGE.is_enabled()
|
|
146
|
-
if (
|
|
147
|
-
experimental_behaviour_enabled()
|
|
148
|
-
or FeatureFlag.ENABLE_STREAMLIT_EMBEDDED_STAGE.is_enabled()
|
|
149
|
-
or use_versioned_stage
|
|
150
|
-
):
|
|
151
|
-
"""
|
|
152
|
-
1. Create streamlit object
|
|
153
|
-
2. Upload files to embedded stage
|
|
154
|
-
"""
|
|
155
|
-
# TODO: Support from_stage
|
|
156
|
-
# from_stage_stmt = f"FROM_STAGE = '{stage_name}'" if stage_name else ""
|
|
157
|
-
self._create_streamlit(
|
|
158
|
-
streamlit=streamlit,
|
|
159
|
-
replace=replace,
|
|
160
|
-
experimental=True,
|
|
161
|
-
)
|
|
162
|
-
try:
|
|
163
|
-
if use_versioned_stage:
|
|
164
|
-
self.execute_query(
|
|
165
|
-
f"ALTER STREAMLIT {streamlit_id.identifier} ADD LIVE VERSION FROM LAST"
|
|
166
|
-
)
|
|
167
|
-
elif not FeatureFlag.ENABLE_STREAMLIT_NO_CHECKOUTS.is_enabled():
|
|
168
|
-
self.execute_query(
|
|
169
|
-
f"ALTER streamlit {streamlit_id.identifier} CHECKOUT"
|
|
170
|
-
)
|
|
171
|
-
except ProgrammingError as e:
|
|
172
|
-
# If an error is raised because a CHECKOUT has already occurred or a LIVE VERSION already exists, simply skip it and continue
|
|
173
|
-
if "Checkout already exists" in str(
|
|
174
|
-
e
|
|
175
|
-
) or "There is already a live version" in str(e):
|
|
176
|
-
log.info("Checkout already exists, continuing")
|
|
177
|
-
else:
|
|
178
|
-
raise
|
|
179
|
-
|
|
180
|
-
stage_path = streamlit_id.identifier
|
|
181
|
-
embedded_stage_name = f"snow://streamlit/{stage_path}"
|
|
182
|
-
if use_versioned_stage:
|
|
183
|
-
# "LIVE" is the only supported version for now, but this may change later.
|
|
184
|
-
stage_root = f"{embedded_stage_name}/versions/live"
|
|
185
|
-
else:
|
|
186
|
-
stage_root = f"{embedded_stage_name}/default_checkout"
|
|
187
|
-
|
|
188
|
-
self._upload_artifacts(
|
|
189
|
-
streamlit_project_paths,
|
|
190
|
-
stage_root,
|
|
191
|
-
prune=prune,
|
|
192
|
-
artifacts=streamlit.artifacts,
|
|
193
|
-
)
|
|
194
|
-
else:
|
|
195
|
-
"""
|
|
196
|
-
1. Create stage
|
|
197
|
-
2. Upload files to created stage
|
|
198
|
-
3. Create streamlit from stage
|
|
199
|
-
"""
|
|
200
|
-
stage_manager = StageManager()
|
|
201
|
-
|
|
202
|
-
stage_name = streamlit.stage or "streamlit"
|
|
203
|
-
stage_name = FQN.from_string(stage_name).using_connection(self._conn)
|
|
204
|
-
|
|
205
|
-
stage_root = stage_manager.get_standard_stage_prefix(
|
|
206
|
-
f"{stage_name}/{streamlit_name_for_root_location}"
|
|
207
|
-
)
|
|
208
|
-
|
|
209
|
-
self._upload_artifacts(
|
|
210
|
-
streamlit_project_paths,
|
|
211
|
-
stage_root,
|
|
212
|
-
prune=prune,
|
|
213
|
-
artifacts=streamlit.artifacts,
|
|
214
|
-
)
|
|
215
|
-
|
|
216
|
-
self._create_streamlit(
|
|
217
|
-
streamlit=streamlit,
|
|
218
|
-
replace=replace,
|
|
219
|
-
from_stage_name=stage_root,
|
|
220
|
-
experimental=False,
|
|
221
|
-
)
|
|
222
|
-
|
|
223
|
-
self.grant_privileges(streamlit)
|
|
224
|
-
|
|
225
|
-
return self.get_url(streamlit_name=streamlit_id)
|
|
226
|
-
|
|
227
44
|
def grant_privileges(self, entity_model: StreamlitEntityModel):
|
|
228
45
|
if not entity_model.grants:
|
|
229
46
|
return
|