snowflake-cli-labs 2.3.1__py3-none-any.whl → 2.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/cli/__about__.py +1 -1
- snowflake/cli/api/__init__.py +2 -0
- snowflake/cli/api/cli_global_context.py +8 -1
- snowflake/cli/api/commands/decorators.py +2 -2
- snowflake/cli/api/commands/flags.py +49 -4
- snowflake/cli/api/commands/snow_typer.py +2 -0
- snowflake/cli/api/console/abc.py +2 -0
- snowflake/cli/api/console/console.py +6 -5
- snowflake/cli/api/constants.py +5 -0
- snowflake/cli/api/exceptions.py +12 -0
- snowflake/cli/api/identifiers.py +123 -0
- snowflake/cli/api/plugins/command/__init__.py +2 -0
- snowflake/cli/api/plugins/plugin_config.py +2 -0
- snowflake/cli/api/project/definition.py +2 -0
- snowflake/cli/api/project/errors.py +3 -3
- snowflake/cli/api/project/schemas/identifier_model.py +35 -0
- snowflake/cli/api/project/schemas/native_app/native_app.py +4 -0
- snowflake/cli/api/project/schemas/native_app/path_mapping.py +21 -3
- snowflake/cli/api/project/schemas/project_definition.py +58 -6
- snowflake/cli/api/project/schemas/snowpark/argument.py +2 -0
- snowflake/cli/api/project/schemas/snowpark/callable.py +8 -17
- snowflake/cli/api/project/schemas/streamlit/streamlit.py +2 -2
- snowflake/cli/api/project/schemas/updatable_model.py +2 -0
- snowflake/cli/api/project/util.py +2 -0
- snowflake/cli/api/secure_path.py +2 -0
- snowflake/cli/api/sql_execution.py +14 -54
- snowflake/cli/api/utils/cursor.py +2 -0
- snowflake/cli/api/utils/models.py +23 -0
- snowflake/cli/api/utils/naming_utils.py +0 -27
- snowflake/cli/api/utils/rendering.py +178 -23
- snowflake/cli/app/api_impl/plugin/plugin_config_provider_impl.py +2 -0
- snowflake/cli/app/cli_app.py +4 -1
- snowflake/cli/app/commands_registration/builtin_plugins.py +8 -0
- snowflake/cli/app/commands_registration/command_plugins_loader.py +2 -0
- snowflake/cli/app/commands_registration/commands_registration_with_callbacks.py +2 -0
- snowflake/cli/app/commands_registration/typer_registration.py +2 -0
- snowflake/cli/app/dev/pycharm_remote_debug.py +2 -0
- snowflake/cli/app/loggers.py +2 -0
- snowflake/cli/app/main_typer.py +1 -1
- snowflake/cli/app/printing.py +3 -1
- snowflake/cli/app/snow_connector.py +2 -2
- snowflake/cli/plugins/connection/commands.py +5 -14
- snowflake/cli/plugins/connection/util.py +1 -1
- snowflake/cli/plugins/cortex/__init__.py +0 -0
- snowflake/cli/plugins/cortex/commands.py +312 -0
- snowflake/cli/plugins/cortex/constants.py +3 -0
- snowflake/cli/plugins/cortex/manager.py +175 -0
- snowflake/cli/plugins/cortex/plugin_spec.py +16 -0
- snowflake/cli/plugins/cortex/types.py +8 -0
- snowflake/cli/plugins/git/commands.py +15 -0
- snowflake/cli/plugins/nativeapp/artifacts.py +368 -123
- snowflake/cli/plugins/nativeapp/codegen/artifact_processor.py +45 -0
- snowflake/cli/plugins/nativeapp/codegen/compiler.py +104 -0
- snowflake/cli/plugins/nativeapp/codegen/sandbox.py +2 -0
- snowflake/cli/plugins/nativeapp/codegen/snowpark/callback_source.py.jinja +181 -0
- snowflake/cli/plugins/nativeapp/codegen/snowpark/extension_function_utils.py +196 -0
- snowflake/cli/plugins/nativeapp/codegen/snowpark/models.py +47 -0
- snowflake/cli/plugins/nativeapp/codegen/snowpark/python_processor.py +489 -0
- snowflake/cli/plugins/nativeapp/commands.py +11 -4
- snowflake/cli/plugins/nativeapp/common_flags.py +12 -5
- snowflake/cli/plugins/nativeapp/manager.py +49 -16
- snowflake/cli/plugins/nativeapp/policy.py +2 -0
- snowflake/cli/plugins/nativeapp/run_processor.py +2 -0
- snowflake/cli/plugins/nativeapp/teardown_processor.py +80 -8
- snowflake/cli/plugins/nativeapp/utils.py +7 -6
- snowflake/cli/plugins/nativeapp/version/commands.py +6 -5
- snowflake/cli/plugins/nativeapp/version/version_processor.py +2 -0
- snowflake/cli/plugins/notebook/commands.py +21 -0
- snowflake/cli/plugins/notebook/exceptions.py +6 -0
- snowflake/cli/plugins/notebook/manager.py +46 -3
- snowflake/cli/plugins/notebook/types.py +2 -0
- snowflake/cli/plugins/object/command_aliases.py +80 -0
- snowflake/cli/plugins/object/commands.py +10 -6
- snowflake/cli/plugins/object/common.py +2 -0
- snowflake/cli/plugins/object_stage_deprecated/__init__.py +1 -0
- snowflake/cli/plugins/object_stage_deprecated/plugin_spec.py +20 -0
- snowflake/cli/plugins/snowpark/commands.py +62 -6
- snowflake/cli/plugins/snowpark/common.py +17 -6
- snowflake/cli/plugins/spcs/compute_pool/commands.py +22 -1
- snowflake/cli/plugins/spcs/compute_pool/manager.py +2 -0
- snowflake/cli/plugins/spcs/image_repository/commands.py +25 -1
- snowflake/cli/plugins/spcs/image_repository/manager.py +3 -1
- snowflake/cli/plugins/spcs/services/commands.py +39 -5
- snowflake/cli/plugins/spcs/services/manager.py +2 -0
- snowflake/cli/plugins/sql/commands.py +13 -5
- snowflake/cli/plugins/sql/manager.py +40 -19
- snowflake/cli/plugins/stage/commands.py +29 -3
- snowflake/cli/plugins/stage/diff.py +2 -0
- snowflake/cli/plugins/streamlit/commands.py +26 -10
- snowflake/cli/plugins/streamlit/manager.py +9 -10
- {snowflake_cli_labs-2.3.1.dist-info → snowflake_cli_labs-2.4.0.dist-info}/METADATA +4 -2
- {snowflake_cli_labs-2.3.1.dist-info → snowflake_cli_labs-2.4.0.dist-info}/RECORD +96 -76
- /snowflake/cli/plugins/{object/stage_deprecated → object_stage_deprecated}/commands.py +0 -0
- {snowflake_cli_labs-2.3.1.dist-info → snowflake_cli_labs-2.4.0.dist-info}/WHEEL +0 -0
- {snowflake_cli_labs-2.3.1.dist-info → snowflake_cli_labs-2.4.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli_labs-2.3.1.dist-info → snowflake_cli_labs-2.4.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,489 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from textwrap import dedent
|
|
7
|
+
from typing import Any, Dict, List, Optional, Set
|
|
8
|
+
|
|
9
|
+
from click import ClickException
|
|
10
|
+
from snowflake.cli.api.console import cli_console as cc
|
|
11
|
+
from snowflake.cli.api.project.errors import SchemaValidationError
|
|
12
|
+
from snowflake.cli.api.project.schemas.native_app.native_app import NativeApp
|
|
13
|
+
from snowflake.cli.api.project.schemas.native_app.path_mapping import (
|
|
14
|
+
PathMapping,
|
|
15
|
+
ProcessorMapping,
|
|
16
|
+
)
|
|
17
|
+
from snowflake.cli.api.utils.rendering import jinja_render_from_file
|
|
18
|
+
from snowflake.cli.plugins.nativeapp.artifacts import (
|
|
19
|
+
BundleMap,
|
|
20
|
+
find_setup_script_file,
|
|
21
|
+
)
|
|
22
|
+
from snowflake.cli.plugins.nativeapp.codegen.artifact_processor import ArtifactProcessor
|
|
23
|
+
from snowflake.cli.plugins.nativeapp.codegen.sandbox import (
|
|
24
|
+
ExecutionEnvironmentType,
|
|
25
|
+
SandboxExecutionError,
|
|
26
|
+
execute_script_in_sandbox,
|
|
27
|
+
)
|
|
28
|
+
from snowflake.cli.plugins.nativeapp.codegen.snowpark.extension_function_utils import (
|
|
29
|
+
deannotate_module_source,
|
|
30
|
+
ensure_all_string_literals,
|
|
31
|
+
ensure_string_literal,
|
|
32
|
+
get_function_type_signature_for_grant,
|
|
33
|
+
get_qualified_object_name,
|
|
34
|
+
get_sql_argument_signature,
|
|
35
|
+
get_sql_object_type,
|
|
36
|
+
)
|
|
37
|
+
from snowflake.cli.plugins.nativeapp.codegen.snowpark.models import (
|
|
38
|
+
ExtensionFunctionTypeEnum,
|
|
39
|
+
NativeAppExtensionFunction,
|
|
40
|
+
)
|
|
41
|
+
from snowflake.cli.plugins.stage.diff import to_stage_path
|
|
42
|
+
|
|
43
|
+
# Maximum number of seconds to wait for the sandboxed annotation-discovery
# script to finish before giving up on a given python file.
DEFAULT_TIMEOUT = 30

# Jinja template rendered into the callback script executed in the sandbox;
# lives next to this module.
TEMPLATE_PATH = Path(__file__).parent / "callback_source.py.jinja"

# Name of the Snowpark runtime package that must always be present in the
# PACKAGES clause of generated extension functions.
SNOWPARK_LIB_NAME = "snowflake-snowpark-python"

# Matches a (quoted) requirement spec for the Snowpark package, with an
# optional version constraint.
SNOWPARK_LIB_REGEX = rf"'{SNOWPARK_LIB_NAME}\s*((<|<=|!=|==|>=|>|~=|===)\s*[a-zA-Z0-9_.*+!-]+)?'"  # support PEP 508, even though not all of it is supported in Snowflake yet

# Leading character that identifies an import path already expressed as a
# stage reference (e.g. "@stage/path").
STAGE_PREFIX = "@"
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _determine_virtual_env(
    project_root: Path, processor: ProcessorMapping
) -> Dict[str, Any]:
    """
    Determines a virtual environment to run the Snowpark processor in, either through the project definition or by querying the current environment.

    Args:
        project_root: Root of the project; used to resolve a relative venv path.
        processor: Processor mapping from the project definition; its optional
            ``properties["env"]`` dict may carry ``type`` (conda/venv/current)
            plus a conda ``name`` or a venv ``path``.

    Returns:
        Keyword arguments for ``execute_script_in_sandbox`` describing the
        environment, or an empty dict when nothing usable is configured.
    """
    # No env configuration at all -> let the sandbox pick its default.
    if (processor.properties is None) or ("env" not in processor.properties):
        return {}

    env_props = processor.properties["env"]
    env_type = env_props.get("type", None)

    if env_type is None:
        return {}

    if env_type.upper() == ExecutionEnvironmentType.CONDA.name:
        env_name = env_props.get("name", None)
        if env_name is None:
            # A missing name is tolerated: the sandbox will try to detect the
            # active conda environment instead.
            cc.warning(
                "No name found in project definition file for the conda environment to run the Snowpark processor in. Will attempt to auto-detect the current conda environment."
            )
        return {"env_type": ExecutionEnvironmentType.CONDA, "name": env_name}
    elif env_type.upper() == ExecutionEnvironmentType.VENV.name:
        env_path_str = env_props.get("path", None)
        if env_path_str is None:
            # Bug fix: this warning previously said "conda environment" even
            # though it is emitted for the venv branch.
            cc.warning(
                "No path found in project definition file for the venv environment to run the Snowpark processor in. Will attempt to auto-detect the current venv path."
            )
            env_path = None
        else:
            env_path = Path(env_path_str)
            # Relative venv paths are resolved against the project root.
            if not env_path.is_absolute():
                env_path = project_root / env_path
        return {
            "env_type": ExecutionEnvironmentType.VENV,
            "path": env_path,
        }
    elif env_type.upper() == ExecutionEnvironmentType.CURRENT.name:
        return {
            "env_type": ExecutionEnvironmentType.CURRENT,
        }
    # Unrecognized env type: silently fall back to the sandbox default.
    return {}
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def _is_python_file_artifact(src: Path, dest: Path):
|
|
95
|
+
return src.is_file() and src.suffix == ".py"
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _execute_in_sandbox(
    py_file: str, deploy_root: Path, kwargs: Dict[str, Any]
) -> Optional[List[Dict[str, Any]]]:
    """Run the annotation-discovery callback against *py_file* in a sandbox.

    Renders the jinja callback template for the given python file, executes it
    in the environment described by *kwargs* (see ``_determine_virtual_env``),
    and parses the JSON the script writes to stdout. Any failure — sandbox
    error, non-zero exit code, or unparsable output — is reported as a warning
    and surfaced as ``None`` so the caller can skip this file and continue.
    """
    # Render the snippet that will be executed inside the sandbox.
    script_source = jinja_render_from_file(
        template_path=TEMPLATE_PATH, data={"py_file": py_file}
    )

    try:
        result = execute_script_in_sandbox(
            script_source=script_source,
            cwd=deploy_root,
            timeout=DEFAULT_TIMEOUT,
            **kwargs,
        )
    except SandboxExecutionError as sdbx_err:
        cc.warning(
            f"Could not fetch Snowpark objects from {py_file} due to {sdbx_err}, continuing execution for the rest of the python files."
        )
        return None
    except Exception as err:
        # Anything else raised while launching the sandbox is treated the same
        # way: warn and move on.
        cc.warning(
            f"Could not fetch Snowpark objects from {py_file} due to {err}, continuing execution for the rest of the python files."
        )
        return None

    # The script ran but reported failure.
    if result.returncode != 0:
        cc.warning(
            f"Could not fetch Snowpark objects from {py_file} due to the following error:\n {result.stderr}"
        )
        cc.warning("Continuing execution for the rest of the python files.")
        return None

    # The script's stdout is expected to be a JSON list of extension-function
    # dictionaries.
    try:
        return json.loads(result.stdout)
    except Exception as exc:
        cc.warning(
            f"Could not load JSON into python due to the following exception: {exc}"
        )
        cc.warning(f"Continuing execution for the rest of the python files.")
        return None
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
class SnowparkAnnotationProcessor(ArtifactProcessor):
    """
    Built-in Processor to discover Snowpark-annotated objects in a given set of python files,
    and generate SQL code for creation of extension functions based on those discovered objects.
    """

    def __init__(
        self,
        project_definition: NativeApp,
        project_root: Path,
        deploy_root: Path,
        generated_root: Path,
    ):
        """Initialize the processor and validate the generated-code directory.

        Raises:
            ClickException: if ``generated_root`` already exists — the
            processor refuses to write into a pre-existing directory.
        """
        super().__init__(
            project_definition=project_definition,
            project_root=project_root,
            deploy_root=deploy_root,
            generated_root=generated_root,
        )
        self.project_definition = project_definition
        self.project_root = project_root
        self.deploy_root = deploy_root
        self.generated_root = generated_root

        # Fail fast rather than mixing generated SQL with user files.
        if self.generated_root.exists():
            raise ClickException(
                f"Path {self.generated_root} already exists. Please choose a different name for your generated directory in the project definition file."
            )

    def process(
        self,
        artifact_to_process: PathMapping,
        processor_mapping: Optional[ProcessorMapping],
        **kwargs,
    ) -> None:
        """
        Collects code annotations from Snowpark python files containing extension functions and augments the existing
        setup script with generated SQL that registers these functions.
        """

        # Map this artifact's sources into the deploy root so we can process
        # the deployed copies of the python files.
        bundle_map = BundleMap(
            project_root=self.project_root, deploy_root=self.deploy_root
        )
        bundle_map.add(artifact_to_process)

        # dest python file -> list of extension functions discovered in it
        collected_extension_functions_by_path = self.collect_extension_functions(
            bundle_map, processor_mapping
        )

        # NOTE(review): collected_output accumulates grant statements but is
        # never read after the loop — possibly vestigial. Confirm before removal.
        collected_output = []
        collected_sql_files: List[Path] = []
        for py_file, extension_fns in collected_extension_functions_by_path.items():
            # One generated .sql file per source python file.
            sql_file = self.generate_new_sql_file_name(
                py_file=py_file,
            )
            collected_sql_files.append(sql_file)
            for extension_fn in extension_fns:
                create_stmt = generate_create_sql_ddl_statement(extension_fn)
                if create_stmt is None:
                    # Unsupported function type; already warned.
                    continue

                relative_py_file = py_file.relative_to(bundle_map.deploy_root())

                grant_statements = generate_grant_sql_ddl_statements(extension_fn)
                if grant_statements is not None:
                    collected_output.append(grant_statements)

                # Append so multiple functions from the same file share one
                # SQL file.
                with open(sql_file, "a") as file:
                    file.write(
                        f"-- Generated by the Snowflake CLI from {relative_py_file}\n"
                    )
                    file.write(f"-- DO NOT EDIT\n")
                    file.write(create_stmt)
                    if grant_statements is not None:
                        file.write("\n")
                        file.write(grant_statements)

            # Strip the Snowpark decorators from the deployed python file —
            # registration is now handled by the generated SQL.
            self.deannotate(py_file, extension_fns)

        if collected_sql_files:
            # Wire the generated SQL files into the setup script via
            # EXECUTE IMMEDIATE.
            edit_setup_script_with_exec_imm_sql(
                collected_sql_files=collected_sql_files,
                deploy_root=bundle_map.deploy_root(),
                generated_root=self.generated_root,
            )

    def _normalize_imports(
        self,
        extension_fn: NativeAppExtensionFunction,
        py_file: Path,
        deploy_root: Path,
    ):
        """Normalize the function's imports to stage paths, in sorted order.

        Mutates ``extension_fn.imports`` in place. ``py_file`` is the
        deploy-root-relative path of the function's own source file.
        """
        normalized_imports: Set[str] = set()
        # Add the py_file, which is the source of the extension function
        normalized_imports.add(f"/{to_stage_path(py_file)}")

        for raw_import in extension_fn.imports:
            if not Path(deploy_root, raw_import).exists():
                # This should capture import_str of different forms: stagenames, malformed paths etc
                # But this will also return True if import_str == "/". Regardless, we append it all to normalized_imports
                cc.warning(
                    f"{raw_import} does not exist in the deploy root. Skipping validation of this import."
                )

            # Stage references and absolute stage paths pass through untouched;
            # everything else is converted to a rooted stage path.
            if raw_import.startswith(STAGE_PREFIX) or raw_import.startswith("/"):
                normalized_imports.add(raw_import)
            else:
                normalized_imports.add(f"/{to_stage_path(Path(raw_import))}")

        # To ensure order when running tests
        sorted_imports = list(normalized_imports)
        sorted_imports.sort()
        extension_fn.imports = sorted_imports

    def _normalize(
        self,
        extension_fn: NativeAppExtensionFunction,
        py_file: Path,
        deploy_root: Path,
    ):
        """Fill in defaulted fields of *extension_fn* (mutated in place):
        name, fully-qualified handler, mandatory Snowpark package, imports.
        """
        if extension_fn.name is None:
            # The extension function was not named explicitly, use the name of the Python function object as its name
            extension_fn.name = extension_fn.handler

        # Compute the fully qualified handler
        # If user defined their udf as @udf(lambda: x, ...) then extension_fn.handler is <lambda>.
        extension_fn.handler = f"{py_file.stem}.{extension_fn.handler}"

        # Guarantee that snowflake-snowpark-python appears in the packages
        # list (with or without a version constraint).
        snowpark_lib_found = False
        snowpark_lib_regex = re.compile(SNOWPARK_LIB_REGEX)
        for pkg in extension_fn.packages:
            if snowpark_lib_regex.fullmatch(ensure_string_literal(pkg.strip())):
                snowpark_lib_found = True
                break
        if not snowpark_lib_found:
            extension_fn.packages.append(SNOWPARK_LIB_NAME)

        if extension_fn.imports is None:
            extension_fn.imports = []
        self._normalize_imports(
            extension_fn=extension_fn,
            py_file=py_file,
            deploy_root=deploy_root,
        )

    def collect_extension_functions(
        self, bundle_map: BundleMap, processor_mapping: Optional[ProcessorMapping]
    ) -> Dict[Path, List[NativeAppExtensionFunction]]:
        """Discover extension functions in every deployed python file.

        Each python artifact is executed in a sandbox to extract its Snowpark
        annotations as JSON; results are validated and normalized. Files that
        fail are warned about and skipped.

        Returns:
            Mapping of deploy-root python file path to the functions found in it.
        """
        kwargs = (
            _determine_virtual_env(self.project_root, processor_mapping)
            if processor_mapping is not None
            else {}
        )

        collected_extension_fns_by_path: Dict[
            Path, List[NativeAppExtensionFunction]
        ] = {}

        for src_file, dest_file in bundle_map.all_mappings(
            absolute=True, expand_directories=True, predicate=_is_python_file_artifact
        ):
            cc.step(
                "Processing Snowpark annotations from {}".format(
                    dest_file.relative_to(bundle_map.deploy_root())
                )
            )
            collected_extension_function_json = _execute_in_sandbox(
                py_file=str(dest_file.resolve()),
                deploy_root=self.deploy_root,
                kwargs=kwargs,
            )

            if collected_extension_function_json is None:
                # Sandbox execution or JSON parsing failed; warnings were
                # already emitted with details.
                cc.warning(f"Error processing extension functions in {src_file}")
                cc.warning("Skipping generating code of all objects from this file.")
                continue

            collected_extension_functions = []
            for extension_function_json in collected_extension_function_json:
                try:
                    extension_fn = NativeAppExtensionFunction(**extension_function_json)
                    self._normalize(
                        extension_fn,
                        py_file=dest_file.relative_to(bundle_map.deploy_root()),
                        deploy_root=bundle_map.deploy_root(),
                    )
                    collected_extension_functions.append(extension_fn)
                except SchemaValidationError:
                    # Skip only the malformed definition, keep the rest.
                    cc.warning("Invalid extension function definition")

            if collected_extension_functions:
                collected_extension_fns_by_path[
                    dest_file
                ] = collected_extension_functions

        return collected_extension_fns_by_path

    def generate_new_sql_file_name(self, py_file: Path) -> Path:
        """
        Generates a SQL filename for the generated root from the python file, and creates its parent directories.
        """
        # Mirror the python file's deploy-root-relative location under the
        # generated root, swapping the suffix to .sql.
        relative_py_file = py_file.relative_to(self.deploy_root)
        sql_file = Path(self.generated_root, relative_py_file.with_suffix(".sql"))
        if sql_file.exists():
            cc.warning(
                f"""\
File {sql_file} already exists, will append SQL statements to this file.
"""
            )
        sql_file.parent.mkdir(exist_ok=True, parents=True)
        return sql_file

    def deannotate(
        self, py_file: Path, extension_fns: List[NativeAppExtensionFunction]
    ):
        """Rewrite *py_file* in place with its Snowpark annotations removed.

        Symlinks are unlinked first so the rewrite does not leak through to
        the original file outside the deploy root.
        """
        with open(py_file, "r", encoding="utf-8") as f:
            code = f.read()

        if py_file.is_symlink():
            # if the file is a symlink, make sure we don't overwrite the original
            py_file.unlink()

        new_code = deannotate_module_source(code, extension_fns)

        with open(py_file, "w", encoding="utf-8") as f:
            f.write(new_code)
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
def generate_create_sql_ddl_statement(
    extension_fn: NativeAppExtensionFunction,
) -> Optional[str]:
    """
    Generates a "CREATE FUNCTION/PROCEDURE ... " SQL DDL statement based on an extension function definition.
    Logic for this create statement has been lifted from snowflake-snowpark-python v1.15.0 package.

    Returns None (after warning) when the function type is not supported.
    """

    object_type = get_sql_object_type(extension_fn)
    if object_type is None:
        cc.warning(f"Unsupported extension function type: {extension_fn.function_type}")
        return None

    arguments_in_sql = ", ".join(
        get_sql_argument_signature(arg) for arg in extension_fn.signature
    )

    # Assemble the statement as a list of newline-joined clauses, starting
    # with the mandatory header.
    clauses = [
        dedent(
            f"""
            CREATE OR REPLACE
            {object_type} {get_qualified_object_name(extension_fn)}({arguments_in_sql})
            RETURNS {extension_fn.returns}
            LANGUAGE PYTHON
            RUNTIME_VERSION={extension_fn.runtime}
            """
        ).strip()
    ]

    if extension_fn.imports:
        clauses.append(
            f"IMPORTS=({', '.join(ensure_all_string_literals(extension_fn.imports))})"
        )

    if extension_fn.packages:
        clauses.append(
            f"PACKAGES=({', '.join(ensure_all_string_literals([pkg.strip() for pkg in extension_fn.packages]))})"
        )

    if extension_fn.external_access_integrations:
        clauses.append(
            f"EXTERNAL_ACCESS_INTEGRATIONS=({', '.join(ensure_all_string_literals(extension_fn.external_access_integrations))})"
        )

    if extension_fn.secrets:
        secret_assignments = ", ".join(
            f"{ensure_string_literal(k)}={v}" for k, v in extension_fn.secrets.items()
        )
        clauses.append(f"SECRETS=({secret_assignments})")

    clauses.append(f"HANDLER={ensure_string_literal(extension_fn.handler)}")

    # Only procedures carry an EXECUTE AS clause.
    if extension_fn.function_type == ExtensionFunctionTypeEnum.PROCEDURE:
        if extension_fn.execute_as_caller:
            clauses.append("EXECUTE AS CALLER")
        else:
            clauses.append("EXECUTE AS OWNER")

    return "\n".join(clauses) + ";\n"
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
def generate_grant_sql_ddl_statements(
    extension_fn: NativeAppExtensionFunction,
) -> Optional[str]:
    """
    Generates a "GRANT USAGE TO ... " SQL DDL statement based on a dictionary of extension function properties.
    If no application roles are present, then the function returns None.
    """

    if not extension_fn.application_roles:
        cc.warning(
            "Skipping generation of 'GRANT USAGE ON ...' SQL statement for this object due to lack of application roles."
        )
        return None

    # One GRANT statement per application role, joined by newlines.
    return "\n".join(
        dedent(
            f"""\
            GRANT USAGE ON {get_sql_object_type(extension_fn)} {get_qualified_object_name(extension_fn)}({get_function_type_signature_for_grant(extension_fn)})
            TO APPLICATION ROLE {app_role};
            """
        ).strip()
        for app_role in extension_fn.application_roles
    )
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+
def edit_setup_script_with_exec_imm_sql(
    collected_sql_files: List[Path], deploy_root: Path, generated_root: Path
):
    """
    Adds an 'execute immediate' to setup script for every SQL file in the map

    Writes a single aggregator SQL file under ``generated_root`` that executes
    each collected SQL file, then appends one EXECUTE IMMEDIATE for that
    aggregator to the application's setup script in ``deploy_root``.
    Bails out (with a warning) if the aggregator file already exists.
    """
    # Create a __generated.sql in the __generated folder
    generated_file_path = Path(generated_root, f"{generated_root.stem}.sql")
    generated_file_path.parent.mkdir(exist_ok=True, parents=True)

    if generated_file_path.exists():
        # A pre-existing aggregator means a previous/conflicting run; abort
        # rather than duplicating EXECUTE IMMEDIATE entries.
        cc.warning(
            f"""\
File {generated_file_path} already exists.
Could not complete code generation of Snowpark Extension Functions.
"""
        )
        return

    # For every SQL file, add SQL statement 'execute immediate' to __generated.sql script.
    with open(generated_file_path, "a") as file:
        for sql_file in collected_sql_files:
            sql_file_relative_path = sql_file.relative_to(
                deploy_root
            )  # Path on stage, without the leading slash
            file.write(f"EXECUTE IMMEDIATE FROM '/{sql_file_relative_path}';\n")

    # Find the setup script in the deploy root.
    setup_file_path = find_setup_script_file(deploy_root=deploy_root)
    with open(setup_file_path, "r", encoding="utf-8") as file:
        code = file.read()
    # Unlink to prevent over-writing source file
    if setup_file_path.is_symlink():
        setup_file_path.unlink()

    # Write original contents and the execute immediate sql to the setup script
    generated_file_relative_path = generated_file_path.relative_to(deploy_root)
    with open(setup_file_path, "w", encoding="utf-8") as file:
        file.write(code)
        file.write(f"\nEXECUTE IMMEDIATE FROM '/{generated_file_relative_path}';")
        file.write(f"\n")
|
|
@@ -1,3 +1,5 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
1
3
|
import logging
|
|
2
4
|
from pathlib import Path
|
|
3
5
|
from typing import List, Optional
|
|
@@ -31,7 +33,6 @@ from snowflake.cli.plugins.nativeapp.teardown_processor import (
|
|
|
31
33
|
)
|
|
32
34
|
from snowflake.cli.plugins.nativeapp.utils import (
|
|
33
35
|
get_first_paragraph_from_markdown_file,
|
|
34
|
-
is_tty_interactive,
|
|
35
36
|
shallow_git_clone,
|
|
36
37
|
)
|
|
37
38
|
from snowflake.cli.plugins.nativeapp.version.commands import app as versions_app
|
|
@@ -152,7 +153,7 @@ def app_run(
|
|
|
152
153
|
The command fails if no release directive exists for your Snowflake account for a given application package, which is determined from the project definition file. Default: unset.""",
|
|
153
154
|
is_flag=True,
|
|
154
155
|
),
|
|
155
|
-
interactive:
|
|
156
|
+
interactive: bool = InteractiveOption,
|
|
156
157
|
force: Optional[bool] = ForceOption,
|
|
157
158
|
**options,
|
|
158
159
|
) -> CommandResult:
|
|
@@ -164,7 +165,7 @@ def app_run(
|
|
|
164
165
|
is_interactive = False
|
|
165
166
|
if force:
|
|
166
167
|
policy = AllowAlwaysPolicy()
|
|
167
|
-
elif interactive
|
|
168
|
+
elif interactive:
|
|
168
169
|
is_interactive = True
|
|
169
170
|
policy = AskAlwaysPolicy()
|
|
170
171
|
else:
|
|
@@ -214,6 +215,12 @@ def app_open(
|
|
|
214
215
|
@with_project_definition("native_app")
|
|
215
216
|
def app_teardown(
|
|
216
217
|
force: Optional[bool] = ForceOption,
|
|
218
|
+
cascade: Optional[bool] = typer.Option(
|
|
219
|
+
None,
|
|
220
|
+
help=f"""Whether to drop all application objects owned by the application within the account. Default: false.""",
|
|
221
|
+
show_default=False,
|
|
222
|
+
),
|
|
223
|
+
interactive: bool = InteractiveOption,
|
|
217
224
|
**options,
|
|
218
225
|
) -> CommandResult:
|
|
219
226
|
"""
|
|
@@ -223,7 +230,7 @@ def app_teardown(
|
|
|
223
230
|
project_definition=cli_context.project_definition,
|
|
224
231
|
project_root=cli_context.project_root,
|
|
225
232
|
)
|
|
226
|
-
processor.process(force)
|
|
233
|
+
processor.process(interactive, force, cascade)
|
|
227
234
|
return MessageResult(f"Teardown is now complete.")
|
|
228
235
|
|
|
229
236
|
|
|
@@ -1,11 +1,18 @@
|
|
|
1
1
|
import typer
|
|
2
|
+
from snowflake.cli.plugins.nativeapp.utils import is_tty_interactive
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def interactive_callback(val):
    """Typer callback resolving the effective value of the interactive flag.

    When the user did not pass the flag (``val is None``), fall back to
    TTY-based auto-detection; otherwise return the explicit value unchanged.
    """
    return is_tty_interactive() if val is None else val
|
|
9
|
+
|
|
2
10
|
|
|
3
11
|
InteractiveOption = typer.Option(
|
|
4
|
-
|
|
5
|
-
"
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
is_flag=True,
|
|
12
|
+
None,
|
|
13
|
+
help=f"""When enabled, this option displays prompts even if the standard input and output are not terminal devices. Defaults to True in an interactive shell environment, and False otherwise.""",
|
|
14
|
+
callback=interactive_callback,
|
|
15
|
+
show_default=False,
|
|
9
16
|
)
|
|
10
17
|
|
|
11
18
|
ForceOption = typer.Option(
|