snowflake-cli 3.0.2__py3-none-any.whl → 3.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/cli/__about__.py +1 -1
- snowflake/cli/_app/cli_app.py +3 -0
- snowflake/cli/_app/dev/docs/templates/overview.rst.jinja2 +1 -1
- snowflake/cli/_app/dev/docs/templates/usage.rst.jinja2 +2 -2
- snowflake/cli/_app/telemetry.py +69 -4
- snowflake/cli/_plugins/connection/commands.py +152 -99
- snowflake/cli/_plugins/connection/util.py +54 -9
- snowflake/cli/_plugins/cortex/manager.py +1 -1
- snowflake/cli/_plugins/git/commands.py +6 -3
- snowflake/cli/_plugins/git/manager.py +9 -4
- snowflake/cli/_plugins/nativeapp/artifacts.py +77 -13
- snowflake/cli/_plugins/nativeapp/codegen/artifact_processor.py +1 -1
- snowflake/cli/_plugins/nativeapp/codegen/compiler.py +7 -0
- snowflake/cli/_plugins/nativeapp/codegen/sandbox.py +10 -10
- snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py +2 -2
- snowflake/cli/_plugins/nativeapp/codegen/snowpark/extension_function_utils.py +1 -1
- snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +8 -8
- snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py +5 -3
- snowflake/cli/_plugins/nativeapp/commands.py +144 -188
- snowflake/cli/_plugins/nativeapp/constants.py +1 -0
- snowflake/cli/_plugins/nativeapp/entities/application.py +564 -351
- snowflake/cli/_plugins/nativeapp/entities/application_package.py +583 -929
- snowflake/cli/_plugins/nativeapp/entities/models/event_sharing_telemetry.py +58 -0
- snowflake/cli/_plugins/nativeapp/exceptions.py +12 -0
- snowflake/cli/_plugins/nativeapp/same_account_install_method.py +0 -2
- snowflake/cli/_plugins/nativeapp/sf_facade.py +30 -0
- snowflake/cli/_plugins/nativeapp/sf_facade_constants.py +25 -0
- snowflake/cli/_plugins/nativeapp/sf_facade_exceptions.py +117 -0
- snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +525 -0
- snowflake/cli/_plugins/nativeapp/v2_conversions/{v2_to_v1_decorator.py → compat.py} +88 -117
- snowflake/cli/_plugins/nativeapp/version/commands.py +36 -32
- snowflake/cli/_plugins/notebook/manager.py +2 -2
- snowflake/cli/_plugins/object/commands.py +10 -1
- snowflake/cli/_plugins/object/manager.py +13 -5
- snowflake/cli/_plugins/snowpark/common.py +63 -21
- snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +3 -3
- snowflake/cli/_plugins/spcs/common.py +29 -0
- snowflake/cli/_plugins/spcs/compute_pool/manager.py +7 -9
- snowflake/cli/_plugins/spcs/image_registry/manager.py +2 -2
- snowflake/cli/_plugins/spcs/image_repository/commands.py +4 -37
- snowflake/cli/_plugins/spcs/image_repository/manager.py +4 -1
- snowflake/cli/_plugins/spcs/services/commands.py +100 -17
- snowflake/cli/_plugins/spcs/services/manager.py +108 -16
- snowflake/cli/_plugins/sql/commands.py +9 -1
- snowflake/cli/_plugins/sql/manager.py +9 -4
- snowflake/cli/_plugins/stage/commands.py +28 -19
- snowflake/cli/_plugins/stage/diff.py +17 -17
- snowflake/cli/_plugins/stage/manager.py +304 -84
- snowflake/cli/_plugins/stage/md5.py +1 -1
- snowflake/cli/_plugins/streamlit/manager.py +5 -5
- snowflake/cli/_plugins/workspace/commands.py +27 -4
- snowflake/cli/_plugins/workspace/context.py +38 -0
- snowflake/cli/_plugins/workspace/manager.py +23 -13
- snowflake/cli/api/cli_global_context.py +4 -3
- snowflake/cli/api/commands/flags.py +23 -7
- snowflake/cli/api/config.py +30 -9
- snowflake/cli/api/connections.py +12 -1
- snowflake/cli/api/console/console.py +4 -19
- snowflake/cli/api/entities/common.py +4 -2
- snowflake/cli/api/entities/utils.py +36 -69
- snowflake/cli/api/errno.py +2 -0
- snowflake/cli/api/exceptions.py +41 -0
- snowflake/cli/api/identifiers.py +8 -0
- snowflake/cli/api/metrics.py +223 -7
- snowflake/cli/api/output/types.py +1 -1
- snowflake/cli/api/project/definition_conversion.py +293 -77
- snowflake/cli/api/project/schemas/entities/common.py +11 -0
- snowflake/cli/api/project/schemas/project_definition.py +30 -25
- snowflake/cli/api/rest_api.py +26 -4
- snowflake/cli/api/secure_utils.py +1 -1
- snowflake/cli/api/sql_execution.py +40 -29
- snowflake/cli/api/stage_path.py +244 -0
- snowflake/cli/api/utils/definition_rendering.py +3 -5
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.2.0.dist-info}/METADATA +14 -15
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.2.0.dist-info}/RECORD +78 -77
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.2.0.dist-info}/WHEEL +1 -1
- snowflake/cli/_plugins/nativeapp/manager.py +0 -415
- snowflake/cli/_plugins/nativeapp/project_model.py +0 -211
- snowflake/cli/_plugins/nativeapp/run_processor.py +0 -184
- snowflake/cli/_plugins/nativeapp/teardown_processor.py +0 -70
- snowflake/cli/_plugins/nativeapp/version/version_processor.py +0 -98
- snowflake/cli/_plugins/workspace/action_context.py +0 -18
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.2.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.2.0.dist-info}/licenses/LICENSE +0 -0

snowflake/cli/_plugins/git/manager.py

@@ -26,6 +26,7 @@ from snowflake.cli._plugins.stage.manager import (
     UserStagePathParts,
 )
 from snowflake.cli.api.identifiers import FQN
+from snowflake.cli.api.stage_path import StagePath
 from snowflake.connector.cursor import SnowflakeCursor

 # Replace magic numbers with constants
@@ -78,14 +79,18 @@ class GitStagePathParts(StagePathParts):


 class GitManager(StageManager):
+    @staticmethod
+    def build_path(stage_path: str) -> StagePathParts:
+        return StagePath.from_git_str(stage_path)
+
     def show_branches(self, repo_name: str, like: str) -> SnowflakeCursor:
-        return self.
+        return self.execute_query(f"show git branches like '{like}' in {repo_name}")

     def show_tags(self, repo_name: str, like: str) -> SnowflakeCursor:
-        return self.
+        return self.execute_query(f"show git tags like '{like}' in {repo_name}")

     def fetch(self, fqn: FQN) -> SnowflakeCursor:
-        return self.
+        return self.execute_query(f"alter git repository {fqn} fetch")

     def create(
         self, repo_name: FQN, api_integration: str, url: str, secret: str
@@ -99,7 +104,7 @@ class GitManager(StageManager):
         )
         if secret is not None:
             query += f"git_credentials = {secret}\n"
-        return self.
+        return self.execute_query(query)

     @staticmethod
     def get_stage_from_path(path: str):
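For reference, a standalone sketch (not part of the package) of the SQL text the rewritten GitManager methods now pass to execute_query; the identifier values below are made up:

    # Hypothetical sample values; this only prints the query strings shown in the diff above.
    repo_name = "my_db.public.my_repo"
    like = "main%"
    fqn = "my_db.public.my_repo"

    print(f"show git branches like '{like}' in {repo_name}")
    print(f"show git tags like '{like}' in {repo_name}")
    print(f"alter git repository {fqn} fetch")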
snowflake/cli/_plugins/nativeapp/artifacts.py

@@ -16,6 +16,7 @@ from __future__ import annotations

 import itertools
 import os
+from collections import namedtuple
 from pathlib import Path
 from textwrap import dedent
 from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union
@@ -164,7 +165,7 @@ class _ArtifactPathMap:
         if src_is_dir:
             # mark all subdirectories of this source as directories so that we can
             # detect accidental clobbering
-            for
+            for root, _, files in os.walk(absolute_src, followlinks=True):
                 canonical_subdir = Path(root).relative_to(absolute_src)
                 canonical_dest_subdir = dest / canonical_subdir
                 self._update_dest_is_dir(canonical_dest_subdir, is_dir=True)
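A minimal standalone sketch of the os.walk(..., followlinks=True) behavior this loop relies on: symlinked directories under the source are traversed like real ones (the temporary paths below are illustrative only):

    import os
    import tempfile
    from pathlib import Path

    root = Path(tempfile.mkdtemp())
    (root / "real").mkdir()
    (root / "real" / "file.txt").write_text("hi")
    try:
        os.symlink(root / "real", root / "link", target_is_directory=True)
    except OSError:
        pass  # symlink creation may be unavailable (e.g. unprivileged Windows)

    # With followlinks=True the walk descends into "link" as well as "real".
    for dirpath, dirnames, filenames in os.walk(root, followlinks=True):
        print(dirpath, dirnames, filenames)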
@@ -225,9 +226,7 @@ class _ArtifactPathMap:

         current_is_dir = self._dest_is_dir.get(dest, None)
         if current_is_dir is not None and current_is_dir != is_dir:
-            raise ArtifactError(
-                "Conflicting type for destination path: {canonical_dest}"
-            )
+            raise ArtifactError(f"Conflicting type for destination path: {dest}")

         parent = dest.parent
         if parent != dest:
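Besides collapsing the call onto one line, this change fixes a small bug: the old string was missing the f prefix, so the braces were emitted literally. A one-file illustration (the destination path is hypothetical):

    dest = "app/setup.sql"  # hypothetical destination path
    print("Conflicting type for destination path: {canonical_dest}")  # old: braces printed literally
    print(f"Conflicting type for destination path: {dest}")           # new: value interpolated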
@@ -240,9 +239,21 @@ class BundleMap:
     """
     Computes the mapping between project directory artifacts (aka source artifacts) to their deploy root location
    (aka destination artifact). This information is primarily used when bundling a native applications project.
+
+    :param project_root: The root directory of the project and base for all relative paths. Must be an absolute path.
+    :param deploy_root: The directory where artifacts should be copied to. Must be an absolute path.
     """

     def __init__(self, *, project_root: Path, deploy_root: Path):
+        # If a relative path ends up here, it's a bug in the app and can lead to other
+        # subtle bugs as paths would be resolved relative to the current working directory.
+        assert (
+            project_root.is_absolute()
+        ), f"Project root {project_root} must be an absolute path."
+        assert (
+            deploy_root.is_absolute()
+        ), f"Deploy root {deploy_root} must be an absolute path."
+
         self._project_root: Path = resolve_without_follow(project_root)
         self._deploy_root: Path = resolve_without_follow(deploy_root)
         self._artifact_map = _ArtifactPathMap(project_root=self._project_root)
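A minimal sketch, with made-up directory names, of how a caller is expected to hand BundleMap absolute paths; resolving inputs up front avoids the cwd-dependent behavior the new assertions guard against:

    from pathlib import Path

    project_root = Path("my_app").resolve()                        # relative input resolved to an absolute path
    deploy_root = (project_root / "output" / "deploy").resolve()

    assert project_root.is_absolute() and deploy_root.is_absolute()
    print(project_root, deploy_root)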
@@ -373,7 +384,7 @@ class BundleMap:
         if absolute_src.is_dir() and expand_directories:
             # both src and dest are directories, and expanding directories was requested. Traverse src, and map each
             # file to the dest directory
-            for
+            for root, subdirs, files in os.walk(absolute_src, followlinks=True):
                 relative_root = Path(root).relative_to(absolute_src)
                 for name in itertools.chain(subdirs, files):
                     src_file_for_output = src_for_output / relative_root / name
@@ -664,16 +675,27 @@ def build_bundle(
     if resolved_root.exists():
         delete(resolved_root)

-    bundle_map =
-    for artifact in artifacts:
-        bundle_map.add(artifact)
-
+    bundle_map = bundle_artifacts(project_root, deploy_root, artifacts)
     if bundle_map.is_empty():
         raise ArtifactError(
             "No artifacts mapping found in project definition, nothing to do."
         )

-
+    return bundle_map
+
+
+def bundle_artifacts(
+    project_root: Path, deploy_root: Path, artifacts: list[PathMapping]
+):
+    """
+    Internal implementation of build_bundle that assumes
+    that validation is being done by the caller.
+    """
+    bundle_map = BundleMap(project_root=project_root, deploy_root=deploy_root)
+    for artifact in artifacts:
+        bundle_map.add(artifact)
+
+    for absolute_src, absolute_dest in bundle_map.all_mappings(
         absolute=True, expand_directories=False
     ):
         symlink_or_copy(absolute_src, absolute_dest, deploy_root=deploy_root)
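A standalone sketch (not the CLI's own code) of the refactor pattern above: build_bundle keeps the validation and empty-mapping check, while the core mapping step is factored into a helper that callers doing their own validation can reuse. All names and paths below are stand-ins:

    from pathlib import Path

    def core_bundle(project_root: Path, deploy_root: Path, artifacts: list) -> dict:
        # stand-in for bundle_artifacts(): map each artifact path under the deploy root
        return {src: deploy_root / src for src in artifacts}

    def build(project_root: Path, deploy_root: Path, artifacts: list) -> dict:
        # stand-in for build_bundle(): validation stays in the public entry point
        assert project_root.is_absolute() and deploy_root.is_absolute()
        mapping = core_bundle(project_root, deploy_root, artifacts)
        if not mapping:
            raise ValueError("No artifacts mapping found in project definition, nothing to do.")
        return mapping

    print(build(Path.cwd(), Path.cwd() / "output" / "deploy", ["app/manifest.yml"]))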
@@ -706,7 +728,7 @@ def find_and_read_manifest_file(deploy_root: Path) -> Dict[str, Any]:
         "r", read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB
     ) as file:
         manifest_content = safe_load(file.read())
-    return manifest_content
+    return manifest_content or {}


 def find_setup_script_file(deploy_root: Path) -> Path:
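The `or {}` guard matters because yaml's safe_load returns None for an empty document, and callers immediately call .get(...) on the result. A short illustration (assumes PyYAML, which the module already uses, is installed):

    from yaml import safe_load

    manifest_content = safe_load("")                 # an empty manifest.yml parses to None
    print(manifest_content)                          # None
    print((manifest_content or {}).get("version"))   # safe lookup instead of AttributeError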
@@ -733,19 +755,24 @@ def find_setup_script_file(deploy_root: Path) -> Path:
     )


+VersionInfo = namedtuple("VersionInfo", ["version_name", "patch_number", "label"])
+
+
 def find_version_info_in_manifest_file(
     deploy_root: Path,
-) ->
+) -> VersionInfo:
     """
     Find version and patch, if available, in the manifest.yml file.
     """
     name_field = "name"
     patch_field = "patch"
+    label_field = "label"

     manifest_content = find_and_read_manifest_file(deploy_root=deploy_root)

     version_name: Optional[str] = None
     patch_number: Optional[int] = None
+    label: Optional[str] = None

     version_info = manifest_content.get("version", None)
     if version_info:
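A standalone sketch of the new return type: the namedtuple unpacks positionally like a plain tuple and also exposes named fields (the values below are made up):

    from collections import namedtuple

    VersionInfo = namedtuple("VersionInfo", ["version_name", "patch_number", "label"])

    info = VersionInfo("V1_0", 2, "Public preview")
    version_name, patch_number, label = info          # positional unpacking still works
    print(info.version_name, info.patch_number, info.label)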
@@ -753,5 +780,42 @@ def find_version_info_in_manifest_file(
             version_name = to_identifier(str(version_info[name_field]))
         if patch_field in version_info:
             patch_number = int(version_info[patch_field])
+        if label_field in version_info:
+            label = str(version_info[label_field])
+
+    return VersionInfo(version_name, patch_number, label)
+
+
+def find_events_definitions_in_manifest_file(
+    deploy_root: Path,
+) -> List[Dict[str, str]]:
+    """
+    Find events definitions, if available, in the manifest.yml file.
+    Events definitions can be found under this section in the manifest.yml file:
+
+    configuration:
+        telemetry_event_definitions:
+            - type: ERRORS_AND_WARNINGS
+              sharing: MANDATORY
+            - type: DEBUG_LOGS
+              sharing: OPTIONAL
+    """
+    manifest_content = find_and_read_manifest_file(deploy_root=deploy_root)
+
+    configuration_section = manifest_content.get("configuration", None)
+    events_definitions = []
+    if configuration_section and isinstance(configuration_section, dict):
+        telemetry_section = configuration_section.get("telemetry_event_definitions", [])
+        if isinstance(telemetry_section, list):
+            for event in telemetry_section:
+                if isinstance(event, dict):
+                    event_type = event.get("type", "")
+                    events_definitions.append(
+                        {
+                            "name": f"SNOWFLAKE${event_type}",
+                            "type": event_type,
+                            "sharing": event.get("sharing", ""),
+                        }
+                    )

-    return
+    return events_definitions
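A standalone sketch of what the new parser produces for the manifest example in its docstring; the loop below mirrors the logic added above, applied to an inline dictionary instead of a real deploy root:

    manifest_content = {
        "configuration": {
            "telemetry_event_definitions": [
                {"type": "ERRORS_AND_WARNINGS", "sharing": "MANDATORY"},
                {"type": "DEBUG_LOGS", "sharing": "OPTIONAL"},
            ]
        }
    }

    events_definitions = []
    for event in manifest_content["configuration"]["telemetry_event_definitions"]:
        event_type = event.get("type", "")
        events_definitions.append(
            {"name": f"SNOWFLAKE${event_type}", "type": event_type, "sharing": event.get("sharing", "")}
        )

    print(events_definitions)
    # [{'name': 'SNOWFLAKE$ERRORS_AND_WARNINGS', 'type': 'ERRORS_AND_WARNINGS', 'sharing': 'MANDATORY'},
    #  {'name': 'SNOWFLAKE$DEBUG_LOGS', 'type': 'DEBUG_LOGS', 'sharing': 'OPTIONAL'}]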
snowflake/cli/_plugins/nativeapp/codegen/artifact_processor.py

@@ -36,7 +36,7 @@ class UnsupportedArtifactProcessorError(ClickException):


 def is_python_file_artifact(src: Path, _: Path):
-    """Determines whether the provided source path is an existing
+    """Determines whether the provided source path is an existing Python file."""
     return src.is_file() and src.suffix == ".py"


snowflake/cli/_plugins/nativeapp/codegen/compiler.py

@@ -65,10 +65,17 @@ class NativeAppCompiler:
         self,
         bundle_ctx: BundleContext,
     ):
+        self._assert_absolute_paths(bundle_ctx)
         self._bundle_ctx = bundle_ctx
         # dictionary of all processors created and shared between different artifact objects.
         self.cached_processors: Dict[str, ArtifactProcessor] = {}

+    @staticmethod
+    def _assert_absolute_paths(bundle_ctx: BundleContext):
+        for name in ["Project", "Deploy", "Bundle", "Generated"]:
+            path = getattr(bundle_ctx, f"{name.lower()}_root")
+            assert path.is_absolute(), f"{name} root {path} must be an absolute path."
+
     def compile_artifacts(self):
         """
         Go through every artifact object in the project definition of a native app, and execute processors in order of specification for each of the artifact object.
snowflake/cli/_plugins/nativeapp/codegen/sandbox.py

@@ -29,10 +29,10 @@ EnvVars = Mapping[str, str] # Only support str -> str for cross-platform compat


 class SandboxExecutionError(ClickException):
-    """An error occurred while executing a
+    """An error occurred while executing a Python script."""

     def __init__(self, error: str):
-        super().__init__(f"Failed to execute
+        super().__init__(f"Failed to execute Python script. {error}")


 def _get_active_venv_dir() -> Optional[str]:
@@ -63,7 +63,7 @@ def _execute_python_interpreter(
     env_vars: Optional[EnvVars],
 ) -> subprocess.CompletedProcess:
     if not python_executable:
-        raise SandboxExecutionError("No
+        raise SandboxExecutionError("No Python executable found")

     if isinstance(python_executable, str) or isinstance(python_executable, Path):
         args = [python_executable]
@@ -106,7 +106,7 @@ def _execute_in_venv(
             f"venv path must be an existing directory: {resolved_venv_path}"
         )

-    # find the
+    # find the Python interpreter for this environment. There is no need to activate environment prior to invoking the
     # interpreter, as venv maintains the invariant that invoking any of the scripts will set up the virtual environment
     # correctly. activation scripts are only used for convenience in interactive shells.
     if _is_ms_windows():
@@ -116,7 +116,7 @@ def _execute_in_venv(

     if not python_executable.is_file():
         raise SandboxExecutionError(
-            f"No venv
+            f"No venv Python executable found: {resolved_venv_path}"
         )

     return _execute_python_interpreter(
@@ -189,14 +189,14 @@ def execute_script_in_sandbox(
     **kwargs,
 ) -> subprocess.CompletedProcess:
     """
-    Executes a
+    Executes a Python script in a sandboxed environment, and returns its output. The script is executed in a different
     process. The execution environment is determined by the `env_type` argument. By default, the logic will attempt
     to auto-detect the correct environment by looking for an active venv or conda environment. If none can be found, it
-    will use the system's default
-
+    will use the system's default Python executable, as determined by the user's path. As a last resort, the current
+    Python execution environment will be used (still in a subprocess).

     Parameters:
-        script_source (str): The
+        script_source (str): The Python script to be executed, as a string.
         env_type: The type of execution environment to use (default: ExecutionEnvironmentType.AUTO_DETECT).
         cwd (Optional[Union[str, Path]]): An optional path to use as the current directory when executing the script.
         timeout (Optional[int]): An optional timeout in seconds when executing the script. Defaults to no timeout.
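A usage sketch, assuming snowflake-cli is installed and using only the parameters the docstring above documents; the real signature may accept further keyword arguments for specific environment types:

    from snowflake.cli._plugins.nativeapp.codegen.sandbox import execute_script_in_sandbox

    result = execute_script_in_sandbox(
        script_source="print('hello from the sandbox')",  # the script text, per the docstring
        timeout=30,                                        # optional timeout in seconds
    )
    print(result.returncode, result.stdout)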
@@ -248,7 +248,7 @@ def execute_script_in_sandbox(
 class SandboxEnvBuilder(EnvBuilder):
     """
     A virtual environment builder that can be used to build an environment suitable for
-    executing user-provided
+    executing user-provided Python scripts in an isolated sandbox.
     """

     def __init__(self, path: Path, **kwargs) -> None:
snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py

@@ -167,14 +167,14 @@ class NativeAppSetupProcessor(ArtifactProcessor):
             )
         except Exception as e:
             raise ClickException(
-                f"Exception while executing
+                f"Exception while executing Python setup script logic: {e}"
             )

         if result.returncode == 0:
             return json.loads(result.stdout)
         else:
             raise ClickException(
-                f"Failed to execute
+                f"Failed to execute Python setup script logic: {result.stderr}"
             )

     def _edit_setup_sql(self, modifications: List[dict]) -> None:
snowflake/cli/_plugins/nativeapp/codegen/snowpark/extension_function_utils.py

@@ -194,7 +194,7 @@ def deannotate_module_source(

     module_lines = module_source.splitlines()
     for definition in definitions:
-        # Comment out all decorators. As per the
+        # Comment out all decorators. As per the Python grammar, decorators must be terminated by a
         # new line, so the line ranges can't overlap.
         for decorator in definition.decorator_list:
             decorator_id = _get_decorator_id(decorator)
snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py

@@ -132,12 +132,12 @@ def _execute_in_sandbox(
         )
     except SandboxExecutionError as sdbx_err:
         cc.warning(
-            f"Could not fetch Snowpark objects from {py_file} due to {sdbx_err}, continuing execution for the rest of the
+            f"Could not fetch Snowpark objects from {py_file} due to {sdbx_err}, continuing execution for the rest of the Python files."
         )
         return None
     except Exception as err:
         cc.warning(
-            f"Could not fetch Snowpark objects from {py_file} due to {err}, continuing execution for the rest of the
+            f"Could not fetch Snowpark objects from {py_file} due to {err}, continuing execution for the rest of the Python files."
         )
         return None

@@ -145,22 +145,22 @@ def _execute_in_sandbox(
         cc.warning(
             f"Could not fetch Snowpark objects from {py_file} due to the following error:\n {completed_process.stderr}"
         )
-        cc.warning("Continuing execution for the rest of the
+        cc.warning("Continuing execution for the rest of the Python files.")
         return None

     try:
         return json.loads(completed_process.stdout)
     except Exception as exc:
         cc.warning(
-            f"Could not load JSON into
+            f"Could not load JSON into Python due to the following exception: {exc}"
         )
-        cc.warning(f"Continuing execution for the rest of the
+        cc.warning(f"Continuing execution for the rest of the Python files.")
         return None


 class SnowparkAnnotationProcessor(ArtifactProcessor):
     """
-    Built-in Processor to discover Snowpark-annotated objects in a given set of
+    Built-in Processor to discover Snowpark-annotated objects in a given set of Python files,
     and generate SQL code for creation of extension functions based on those discovered objects.
     """

@@ -174,7 +174,7 @@ class SnowparkAnnotationProcessor(ArtifactProcessor):
         **kwargs,
     ) -> None:
         """
-        Collects code annotations from Snowpark
+        Collects code annotations from Snowpark Python files containing extension functions and augments the existing
         setup script with generated SQL that registers these functions.
         """

@@ -360,7 +360,7 @@ class SnowparkAnnotationProcessor(ArtifactProcessor):

     def generate_new_sql_file_name(self, py_file: Path) -> Path:
         """
-        Generates a SQL filename for the generated root from the
+        Generates a SQL filename for the generated root from the Python file, and creates its parent directories.
         """
         relative_py_file = py_file.relative_to(self._bundle_ctx.deploy_root)
         sql_file = Path(
snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py

@@ -15,7 +15,7 @@
 from __future__ import annotations

 from pathlib import Path
-from typing import Optional
+from typing import Any, Optional

 import jinja2
 from snowflake.cli._plugins.nativeapp.artifacts import BundleMap
@@ -49,7 +49,9 @@ class TemplatesProcessor(ArtifactProcessor):
     Processor class to perform template expansion on all relevant artifacts (specified in the project definition file).
     """

-    def expand_templates_in_file(
+    def expand_templates_in_file(
+        self, src: Path, dest: Path, template_context: dict[str, Any] | None = None
+    ) -> None:
         """
         Expand templates in the file.
         """
@@ -74,7 +76,7 @@ class TemplatesProcessor(ArtifactProcessor):
             else get_client_side_jinja_env()
         )
         expanded_template = jinja_env.from_string(file.contents).render(
-            get_cli_context().template_context
+            template_context or get_cli_context().template_context
        )

         # For now, we are printing the source file path in the error message