snowflake-cli 3.11.0__py3-none-any.whl → 3.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- snowflake/cli/__about__.py +1 -1
- snowflake/cli/_app/cli_app.py +0 -1
- snowflake/cli/_app/printing.py +153 -19
- snowflake/cli/_plugins/dbt/commands.py +37 -8
- snowflake/cli/_plugins/dbt/manager.py +144 -10
- snowflake/cli/_plugins/dcm/commands.py +65 -90
- snowflake/cli/_plugins/dcm/manager.py +137 -50
- snowflake/cli/_plugins/logs/commands.py +7 -0
- snowflake/cli/_plugins/logs/manager.py +21 -1
- snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +3 -1
- snowflake/cli/_plugins/snowpark/common.py +1 -0
- snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +29 -5
- snowflake/cli/_plugins/snowpark/package_utils.py +44 -3
- snowflake/cli/_plugins/spcs/services/manager.py +5 -4
- snowflake/cli/_plugins/sql/lexer/types.py +1 -0
- snowflake/cli/_plugins/sql/repl.py +100 -26
- snowflake/cli/_plugins/sql/repl_commands.py +607 -0
- snowflake/cli/_plugins/sql/statement_reader.py +44 -20
- snowflake/cli/api/artifacts/bundle_map.py +32 -2
- snowflake/cli/api/artifacts/regex_resolver.py +54 -0
- snowflake/cli/api/artifacts/upload.py +5 -1
- snowflake/cli/api/artifacts/utils.py +12 -1
- snowflake/cli/api/cli_global_context.py +7 -0
- snowflake/cli/api/console/abc.py +13 -2
- snowflake/cli/api/console/console.py +20 -0
- snowflake/cli/api/constants.py +9 -0
- snowflake/cli/api/entities/utils.py +10 -6
- snowflake/cli/api/feature_flags.py +1 -0
- snowflake/cli/api/identifiers.py +18 -1
- snowflake/cli/api/project/schemas/entities/entities.py +0 -6
- snowflake/cli/api/rendering/sql_templates.py +2 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/METADATA +5 -5
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/RECORD +36 -36
- snowflake/cli/_plugins/dcm/dcm_project_entity_model.py +0 -59
- snowflake/cli/_plugins/sql/snowsql_commands.py +0 -331
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/WHEEL +0 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/entry_points.txt +0 -0
- {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.12.0.dist-info}/licenses/LICENSE +0 -0
(Long removed lines are truncated by the diff rendering below; they appear as bare `-` markers or cut-off strings.)

snowflake/cli/_plugins/dcm/commands.py

```diff
@@ -14,29 +14,23 @@
 from typing import List, Optional
 
 import typer
-from snowflake.cli._plugins.dcm.dcm_project_entity_model import (
-    DCMProjectEntityModel,
-)
 from snowflake.cli._plugins.dcm.manager import DCMProjectManager
 from snowflake.cli._plugins.object.command_aliases import add_object_command_aliases
 from snowflake.cli._plugins.object.commands import scope_option
 from snowflake.cli._plugins.object.manager import ObjectManager
-from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage
-from snowflake.cli.api.cli_global_context import get_cli_context
-from snowflake.cli.api.commands.decorators import with_project_definition
 from snowflake.cli.api.commands.flags import (
     IfExistsOption,
     IfNotExistsOption,
     OverrideableOption,
-    entity_argument,
     identifier_argument,
     like_option,
     variables_option,
 )
 from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
-from snowflake.cli.api.commands.utils import get_entity_for_operation
 from snowflake.cli.api.console.console import cli_console
-from snowflake.cli.api.constants import
+from snowflake.cli.api.constants import (
+    ObjectType,
+)
 from snowflake.cli.api.exceptions import CliError
 from snowflake.cli.api.feature_flags import FeatureFlag
 from snowflake.cli.api.identifiers import FQN
@@ -45,7 +39,7 @@ from snowflake.cli.api.output.types import (
     QueryJsonValueResult,
     QueryResult,
 )
-from snowflake.cli.api.
+from snowflake.cli.api.utils.path_utils import is_stage_path
 
 app = SnowTyperFactory(
     name="dcm",
@@ -63,18 +57,10 @@ configuration_flag = typer.Option(
     help="Configuration of the DCM Project to use. If not specified default configuration is used.",
     show_default=False,
 )
-from_option =
+from_option = typer.Option(
     None,
     "--from",
-
-    show_default=False,
-)
-
-prune_option = OverrideableOption(
-    False,
-    "--prune",
-    help="Remove unused artifacts from the stage during sync. Mutually exclusive with --from.",
-    mutually_exclusive=["from_stage"],
+    help="Source location: stage path (starting with '@') or local directory path. Omit to use current directory.",
     show_default=False,
 )
 
@@ -122,61 +108,65 @@ add_object_command_aliases(
 @app.command(requires_connection=True)
 def deploy(
     identifier: FQN = dcm_identifier,
-
-        help="Deploy DCM Project deployment from a given stage."
-    ),
+    from_location: Optional[str] = from_option,
     variables: Optional[List[str]] = variables_flag,
     configuration: Optional[str] = configuration_flag,
     alias: Optional[str] = alias_option,
-    prune: bool = prune_option(),
     **options,
 ):
     """
     Applies changes defined in DCM Project to Snowflake.
     """
-
-
-
-
-
-
-
-
+    manager = DCMProjectManager()
+    effective_stage = _get_effective_stage(identifier, from_location)
+
+    with cli_console.spinner() as spinner:
+        spinner.add_task(description=f"Deploying dcm project {identifier}", total=None)
+        result = manager.execute(
+            project_identifier=identifier,
+            configuration=configuration,
+            from_stage=effective_stage,
+            variables=variables,
+            alias=alias,
+            output_path=None,
+        )
     return QueryJsonValueResult(result)
 
 
 @app.command(requires_connection=True)
 def plan(
     identifier: FQN = dcm_identifier,
-
-        help="Plan DCM Project deployment from a given stage."
-    ),
+    from_location: Optional[str] = from_option,
     variables: Optional[List[str]] = variables_flag,
     configuration: Optional[str] = configuration_flag,
-    prune: bool = prune_option(),
     output_path: Optional[str] = output_path_option(
-        help="
+        help="Path where the deployment plan output will be stored. Can be a stage path (starting with '@') or a local directory path."
     ),
     **options,
 ):
     """
     Plans a DCM Project deployment (validates without executing).
     """
-
-
-
-
-
-
-
-
+    manager = DCMProjectManager()
+    effective_stage = _get_effective_stage(identifier, from_location)
+
+    with cli_console.spinner() as spinner:
+        spinner.add_task(description=f"Planning dcm project {identifier}", total=None)
+        result = manager.execute(
+            project_identifier=identifier,
+            configuration=configuration,
+            from_stage=effective_stage,
+            dry_run=True,
+            variables=variables,
+            output_path=output_path,
+        )
+
     return QueryJsonValueResult(result)
 
 
 @app.command(requires_connection=True)
-@with_project_definition()
 def create(
-
+    identifier: FQN = dcm_identifier,
     if_not_exists: bool = IfNotExistsOption(
         help="Do nothing if the project already exists."
     ),
@@ -185,28 +175,18 @@ def create(
     """
     Creates a DCM Project in Snowflake.
     """
-    cli_context = get_cli_context()
-    project: DCMProjectEntityModel = get_entity_for_operation(
-        cli_context=cli_context,
-        entity_id=entity_id,
-        project_definition=cli_context.project_definition,
-        entity_type="dcm",
-    )
     om = ObjectManager()
-    if om.object_exists(object_type="dcm", fqn=
-        message = f"DCM Project '{
+    if om.object_exists(object_type="dcm", fqn=identifier):
+        message = f"DCM Project '{identifier}' already exists."
         if if_not_exists:
             return MessageResult(message)
         raise CliError(message)
 
-    if om.object_exists(object_type="stage", fqn=FQN.from_stage(project.stage)):
-        raise CliError(f"Stage '{project.stage}' already exists.")
-
     dpm = DCMProjectManager()
-    with cli_console.phase(f"Creating DCM Project '{
-        dpm.create(
+    with cli_console.phase(f"Creating DCM Project '{identifier}'"):
+        dpm.create(project_identifier=identifier)
 
-    return MessageResult(f"DCM Project '{
+    return MessageResult(f"DCM Project '{identifier}' successfully created.")
 
 
 @app.command(requires_connection=True)
@@ -218,56 +198,51 @@ def list_deployments(
     Lists deployments of given DCM Project.
     """
     pm = DCMProjectManager()
-    results = pm.
+    results = pm.list_deployments(project_identifier=identifier)
     return QueryResult(results)
 
 
 @app.command(requires_connection=True)
 def drop_deployment(
     identifier: FQN = dcm_identifier,
-
-        help="Name or alias of the
+    deployment_name: str = typer.Argument(
+        help="Name or alias of the deployment to drop. For names containing '$', use single quotes to prevent shell expansion (e.g., 'DEPLOYMENT$1').",
         show_default=False,
     ),
-    if_exists: bool = IfExistsOption(
+    if_exists: bool = IfExistsOption(
+        help="Do nothing if the deployment does not exist."
+    ),
     **options,
 ):
     """
-    Drops a
+    Drops a deployment from the DCM Project.
     """
     # Detect potential shell expansion issues
-    if
+    if deployment_name and deployment_name.upper() == "DEPLOYMENT":
         cli_console.warning(
-            f"
-            f"If you meant to use a
+            f"Deployment name '{deployment_name}' might be truncated due to shell expansion. "
+            f"If you meant to use a deployment like 'DEPLOYMENT$1', try using single quotes: 'DEPLOYMENT$1'."
         )
 
     dpm = DCMProjectManager()
     dpm.drop_deployment(
-
-
+        project_identifier=identifier,
+        deployment_name=deployment_name,
         if_exists=if_exists,
     )
     return MessageResult(
-        f"
+        f"Deployment '{deployment_name}' dropped from DCM Project '{identifier}'."
    )
 
 
-def
-
-
-
-
-
-
-
-
-    with cli_console.phase("Syncing local files to stage"):
-        sync_artifacts_with_stage(
-            project_paths=ProjectPaths(project_root=cli_context.project_root),
-            stage_root=project_entity.stage,
-            artifacts=project_entity.artifacts,
-            prune=prune,
+def _get_effective_stage(identifier: FQN, from_location: Optional[str]):
+    manager = DCMProjectManager()
+    if not from_location:
+        from_stage = manager.sync_local_files(project_identifier=identifier)
+    elif is_stage_path(from_location):
+        from_stage = from_location
+    else:
+        from_stage = manager.sync_local_files(
+            project_identifier=identifier, source_directory=from_location
         )
-
-    return project_entity.stage
+    return from_stage
```
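The deploy and plan commands now resolve their source through `_get_effective_stage`. Below is a minimal, illustrative sketch of that resolution rule only; `DCMProjectManager.sync_local_files` and `is_stage_path` are replaced with stand-ins, so it is not the CLI's actual code path.

```python
from typing import Optional


def resolve_source(from_location: Optional[str]) -> str:
    """Illustrative stub of the --from resolution used by `snow dcm deploy` / `snow dcm plan`."""

    def looks_like_stage(path: str) -> bool:
        # Stand-in for snowflake.cli.api.utils.path_utils.is_stage_path;
        # per the option's help text, stage paths start with '@'.
        return path.startswith("@")

    if not from_location:
        # No --from: the current directory is synced to a stage first.
        return "<stage returned by sync_local_files(cwd)>"
    if looks_like_stage(from_location):
        # Stage path: used as-is.
        return from_location
    # Local directory: synced to a stage first.
    return "<stage returned by sync_local_files(from_location)>"


print(resolve_source(None))                    # current directory is uploaded
print(resolve_source("@db.schema.stage/src"))  # stage path used directly
print(resolve_source("./definitions"))         # local directory is uploaded
```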
snowflake/cli/_plugins/dcm/manager.py

```diff
@@ -11,21 +11,75 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from contextlib import contextmanager, nullcontext
+from pathlib import Path
+from typing import Generator, List
 
-
-
-from snowflake.cli._plugins.dcm.dcm_project_entity_model import DCMProjectEntityModel
+import yaml
 from snowflake.cli._plugins.stage.manager import StageManager
+from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage
 from snowflake.cli.api.commands.utils import parse_key_value_variables
+from snowflake.cli.api.console.console import cli_console
+from snowflake.cli.api.constants import (
+    DEFAULT_SIZE_LIMIT_MB,
+    ObjectType,
+    PatternMatchingType,
+)
+from snowflake.cli.api.exceptions import CliError
 from snowflake.cli.api.identifiers import FQN
+from snowflake.cli.api.project.project_paths import ProjectPaths
+from snowflake.cli.api.project.schemas.entities.common import PathMapping
+from snowflake.cli.api.secure_path import SecurePath
 from snowflake.cli.api.sql_execution import SqlExecutionMixin
 from snowflake.cli.api.stage_path import StagePath
+from snowflake.cli.api.utils.path_utils import is_stage_path
+
+MANIFEST_FILE_NAME = "manifest.yml"
+DCM_PROJECT_TYPE = "dcm_project"
 
 
 class DCMProjectManager(SqlExecutionMixin):
+    @contextmanager
+    def _collect_output(
+        self, project_identifier: FQN, output_path: str
+    ) -> Generator[str, None, None]:
+        """
+        Context manager for handling output path - creates temporary stage for local paths,
+        downloads files after execution, and ensures proper cleanup.
+
+        Args:
+            project_identifier: The DCM project identifier
+            output_path: Either a stage path (@stage/path) or local directory path
+
+        Yields:
+            str: The effective output path to use in the DCM command
+        """
+        temp_stage_for_local_output = None
+        stage_manager = StageManager()
+
+        if should_download_files := not is_stage_path(output_path):
+            temp_stage_fqn = FQN.from_resource(
+                ObjectType.DCM_PROJECT, project_identifier, "OUTPUT_TMP_STAGE"
+            )
+            stage_manager.create(temp_stage_fqn, temporary=True)
+            effective_output_path = StagePath.from_stage_str(temp_stage_fqn.identifier)
+            temp_stage_for_local_output = (temp_stage_fqn.identifier, Path(output_path))
+        else:
+            effective_output_path = StagePath.from_stage_str(output_path)
+
+        yield effective_output_path.absolute_path()
+
+        if should_download_files:
+            assert temp_stage_for_local_output is not None
+            stage_path, local_path = temp_stage_for_local_output
+            stage_manager.get_recursive(stage_path=stage_path, dest_path=local_path)
+            cli_console.step(f"Plan output saved to: {local_path.resolve()}")
+        else:
+            cli_console.step(f"Plan output saved to: {output_path}")
+
     def execute(
         self,
-
+        project_identifier: FQN,
         from_stage: str,
         configuration: str | None = None,
         variables: List[str] | None = None,
```
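The `MANIFEST_FILE_NAME` and `DCM_PROJECT_TYPE` constants above are consumed by the new `sync_local_files` method shown in the next hunk, which rejects directories whose `manifest.yml` is missing or declares the wrong `type`. A hedged sketch of a manifest that would pass those checks; only `type` and `include_definitions` are read by the code shown here, and the file names listed under `include_definitions` are made up for illustration.

```python
import yaml

# Hypothetical manifest.yml content; the keys mirror what sync_local_files reads.
manifest_text = """
type: dcm_project
include_definitions:
  - definitions/tables.sql
"""

manifest = yaml.safe_load(manifest_text)
assert manifest.get("type", "").lower() == "dcm_project"

definitions = list(manifest.get("include_definitions", []))
if "manifest.yml" not in definitions:
    definitions.append("manifest.yml")  # the manifest itself is always uploaded
print(definitions)  # ['definitions/tables.sql', 'manifest.yml']
```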
```diff
@@ -33,64 +87,97 @@ class DCMProjectManager(SqlExecutionMixin):
         alias: str | None = None,
         output_path: str | None = None,
     ):
+        with self._collect_output(project_identifier, output_path) if (
+            output_path and dry_run
+        ) else nullcontext() as output_stage:
+            query = f"EXECUTE DCM PROJECT {project_identifier.sql_identifier}"
+            if dry_run:
+                query += " PLAN"
+            else:
+                query += " DEPLOY"
+            if alias:
+                query += f' AS "{alias}"'
+            if configuration or variables:
+                query += f" USING"
+            if configuration:
+                query += f" CONFIGURATION {configuration}"
+            if variables:
+                query += StageManager.parse_execute_variables(
+                    parse_key_value_variables(variables)
+                ).removeprefix(" using")
+            stage_path = StagePath.from_stage_str(from_stage)
+            query += f" FROM {stage_path.absolute_path()}"
+            if output_stage is not None:
+                query += f" OUTPUT_PATH {output_stage}"
+            result = self.execute_query(query=query)
 
-
-        if dry_run:
-            query += " PLAN"
-        else:
-            query += " DEPLOY"
-        if alias:
-            query += f" AS {alias}"
-        if configuration or variables:
-            query += f" USING"
-        if configuration:
-            query += f" CONFIGURATION {configuration}"
-        if variables:
-            query += StageManager.parse_execute_variables(
-                parse_key_value_variables(variables)
-            ).removeprefix(" using")
-        stage_path = StagePath.from_stage_str(from_stage)
-        query += f" FROM {stage_path.absolute_path()}"
-        if output_path:
-            output_stage_path = StagePath.from_stage_str(output_path)
-            query += f" OUTPUT_PATH {output_stage_path.absolute_path()}"
-        return self.execute_query(query=query)
+        return result
 
-    def create(self,
-        query = f"CREATE DCM PROJECT {
+    def create(self, project_identifier: FQN) -> None:
+        query = f"CREATE DCM PROJECT {project_identifier.sql_identifier}"
         self.execute_query(query)
 
-    def
-
-        project_name: FQN,
-        from_stage: str,
-        alias: str | None = None,
-        comment: str | None = None,
-    ):
-        stage_path = StagePath.from_stage_str(from_stage)
-        query = f"ALTER DCM PROJECT {project_name.identifier} ADD VERSION"
-        if alias:
-            query += f" IF NOT EXISTS {alias}"
-        query += f" FROM {stage_path.absolute_path(at_prefix=True)}"
-        if comment:
-            query += f" COMMENT = '{comment}'"
-        return self.execute_query(query=query)
-
-    def list_versions(self, project_name: FQN):
-        query = f"SHOW VERSIONS IN DCM PROJECT {project_name.identifier}"
+    def list_deployments(self, project_identifier: FQN):
+        query = f"SHOW DEPLOYMENTS IN DCM PROJECT {project_identifier.identifier}"
         return self.execute_query(query=query)
 
     def drop_deployment(
         self,
-
-
+        project_identifier: FQN,
+        deployment_name: str,
         if_exists: bool = False,
     ):
         """
-        Drops a
+        Drops a deployment from the DCM Project.
         """
-        query = f"ALTER DCM PROJECT {
+        query = f"ALTER DCM PROJECT {project_identifier.identifier} DROP DEPLOYMENT"
         if if_exists:
             query += " IF EXISTS"
-        query += f"
+        query += f' "{deployment_name}"'
         return self.execute_query(query=query)
+
+    @staticmethod
+    def sync_local_files(
+        project_identifier: FQN, source_directory: str | None = None
+    ) -> str:
+        source_path = (
+            SecurePath(source_directory).resolve()
+            if source_directory
+            else SecurePath.cwd()
+        )
+
+        dcm_manifest_file = source_path / MANIFEST_FILE_NAME
+        if not dcm_manifest_file.exists():
+            raise CliError(
+                f"{MANIFEST_FILE_NAME} was not found in directory {source_path.path}"
+            )
+
+        with dcm_manifest_file.open(read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as fd:
+            dcm_manifest = yaml.safe_load(fd)
+        object_type = dcm_manifest.get("type") if dcm_manifest else None
+        if object_type is None:
+            raise CliError(
+                f"Manifest file type is undefined. Expected {DCM_PROJECT_TYPE}"
+            )
+        if object_type.lower() != DCM_PROJECT_TYPE:
+            raise CliError(
+                f"Manifest file is defined for type {object_type}. Expected {DCM_PROJECT_TYPE}"
+            )
+
+        definitions = list(dcm_manifest.get("include_definitions", list()))
+        if MANIFEST_FILE_NAME not in definitions:
+            definitions.append(MANIFEST_FILE_NAME)
+
+        with cli_console.phase(f"Uploading definition files"):
+            stage_fqn = FQN.from_resource(
+                ObjectType.DCM_PROJECT, project_identifier, "TMP_STAGE"
+            )
+            sync_artifacts_with_stage(
+                project_paths=ProjectPaths(project_root=source_path.path),
+                stage_root=stage_fqn.identifier,
+                use_temporary_stage=True,
+                artifacts=[PathMapping(src=definition) for definition in definitions],
+                pattern_type=PatternMatchingType.REGEX,
+            )
+
+            return stage_fqn.identifier
```
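For reference, a simplified mirror of the statement the rewritten `execute()` assembles. It ignores variables and the FQN/StagePath quoting the real manager uses, so treat it as an illustration of the statement shape rather than the exact output.

```python
def build_execute_query(project: str, from_stage: str, dry_run: bool = False,
                        alias: str | None = None, configuration: str | None = None,
                        output_path: str | None = None) -> str:
    """Simplified mirror of DCMProjectManager.execute's query building."""
    query = f"EXECUTE DCM PROJECT {project}"
    query += " PLAN" if dry_run else " DEPLOY"
    if alias:
        query += f' AS "{alias}"'
    if configuration:
        query += f" USING CONFIGURATION {configuration}"
    query += f" FROM @{from_stage.lstrip('@')}"
    if output_path and dry_run:
        query += f" OUTPUT_PATH {output_path}"
    return query


print(build_execute_query("my_project", "@db.schema.stage"))
# EXECUTE DCM PROJECT my_project DEPLOY FROM @db.schema.stage
print(build_execute_query("my_project", "@db.schema.stage", dry_run=True,
                          output_path="@db.schema.out"))
# EXECUTE DCM PROJECT my_project PLAN FROM @db.schema.stage OUTPUT_PATH @db.schema.out
```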
snowflake/cli/_plugins/logs/commands.py

```diff
@@ -48,6 +48,11 @@ def get_logs(
         "--log-level",
         help="The log level to filter by. If not provided, INFO will be used",
     ),
+    partial_match: bool = typer.Option(
+        False,
+        "--partial",
+        help="Enable partial, case-insensitive matching for object names",
+    ),
     **options,
 ):
     """
@@ -75,6 +80,7 @@ def get_logs(
             refresh_time=refresh_time,
             event_table=event_table,
             log_level=log_level,
+            partial_match=partial_match,
         )
         logs = itertools.chain(
             (MessageResult(log.log_message) for logs in logs_stream for log in logs)
@@ -87,6 +93,7 @@ def get_logs(
             to_time=to_time,
             event_table=event_table,
             log_level=log_level,
+            partial_match=partial_match,
         )
         logs = (MessageResult(log.log_message) for log in logs_iterable)  # type: ignore
 
```
snowflake/cli/_plugins/logs/manager.py

```diff
@@ -11,6 +11,7 @@ from snowflake.cli._plugins.logs.utils import (
 )
 from snowflake.cli._plugins.object.commands import NameArgument, ObjectArgument
 from snowflake.cli.api.identifiers import FQN
+from snowflake.cli.api.project.util import escape_like_pattern
 from snowflake.cli.api.sql_execution import SqlExecutionMixin
 from snowflake.connector.cursor import SnowflakeCursor
 
@@ -24,6 +25,7 @@ class LogsManager(SqlExecutionMixin):
         from_time: Optional[datetime] = None,
         event_table: Optional[str] = None,
         log_level: Optional[str] = "INFO",
+        partial_match: bool = False,
     ) -> Iterable[List[LogsQueryRow]]:
         try:
             previous_end = from_time
@@ -36,6 +38,7 @@ class LogsManager(SqlExecutionMixin):
                     to_time=None,
                     event_table=event_table,
                     log_level=log_level,
+                    partial_match=partial_match,
                 ).fetchall()
 
                 if raw_logs:
@@ -56,6 +59,7 @@ class LogsManager(SqlExecutionMixin):
         to_time: Optional[datetime] = None,
         event_table: Optional[str] = None,
         log_level: Optional[str] = "INFO",
+        partial_match: bool = False,
     ) -> Iterable[LogsQueryRow]:
         """
         Basic function to get a single batch of logs from the server
@@ -68,6 +72,7 @@ class LogsManager(SqlExecutionMixin):
             to_time=to_time,
             event_table=event_table,
             log_level=log_level,
+            partial_match=partial_match,
         )
 
         return sanitize_logs(logs)
@@ -80,10 +85,25 @@ class LogsManager(SqlExecutionMixin):
         to_time: Optional[datetime] = None,
         event_table: Optional[str] = None,
         log_level: Optional[str] = "INFO",
+        partial_match: bool = False,
     ) -> SnowflakeCursor:
 
         table = event_table if event_table else "SNOWFLAKE.TELEMETRY.EVENTS"
 
+        # Escape single quotes in object_name to prevent SQL injection
+        escaped_object_name = str(object_name).replace("'", "''")
+
+        # Build the object name condition based on partial_match flag
+        if partial_match:
+            # Use ILIKE for case-insensitive partial matching with wildcards
+            escaped_pattern = escape_like_pattern(
+                escaped_object_name, escape_sequence="\\"
+            )
+            object_condition = f"object_name ILIKE '%{escaped_pattern}%'"
+        else:
+            # Use exact match (original behavior)
+            object_condition = f"object_name = '{escaped_object_name}'"
+
         query = dedent(
             f"""
             SELECT
@@ -96,7 +116,7 @@ class LogsManager(SqlExecutionMixin):
             FROM {table}
             WHERE record_type = 'LOG'
             AND (record:severity_text IN ({parse_log_levels_for_query((log_level))}) or record:severity_text is NULL )
-            AND
+            AND {object_condition}
             {get_timestamp_query(from_time, to_time)}
             ORDER BY timestamp;
             """
```
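The new `--partial` flag from `logs/commands.py` flows through `partial_match` into this `object_condition`. A standalone sketch of the two filter shapes, with `escape_like_pattern` approximated by backslash-escaping the LIKE wildcards (the real helper lives in `snowflake.cli.api.project.util` and may differ in detail).

```python
def build_object_condition(object_name: str, partial_match: bool) -> str:
    """Approximation of LogsManager's object_name filter construction."""
    escaped = str(object_name).replace("'", "''")  # guard against quote injection
    if not partial_match:
        return f"object_name = '{escaped}'"
    # Approximate escape_like_pattern: escape LIKE wildcards with a backslash.
    like_escaped = (
        escaped.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
    )
    return f"object_name ILIKE '%{like_escaped}%'"


print(build_object_condition("my_service", partial_match=False))
# object_name = 'my_service'
print(build_object_condition("my_service", partial_match=True))
# object_name ILIKE '%my\_service%'
```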
snowflake/cli/_plugins/nativeapp/sf_sql_facade.py

```diff
@@ -632,6 +632,7 @@ class SnowflakeSQLFacade:
         role: str | None = None,
         database: str | None = None,
         schema: str | None = None,
+        temporary: bool = False,
     ):
         """
         Creates a stage.
@@ -641,13 +642,14 @@ class SnowflakeSQLFacade:
         @param [Optional] role: Role to switch to while running this script. Current role will be used if no role is passed in.
         @param [Optional] database: Database to use while running this script, unless the stage name is database-qualified.
         @param [Optional] schema: Schema to use while running this script, unless the stage name is schema-qualified.
+        @param [Optional] temporary: determines if stage should be temporary. Default is false.
         """
         fqn = FQN.from_string(name)
         identifier = to_identifier(fqn.name)
         database = fqn.database or database
         schema = fqn.schema or schema
 
-        query = f"create stage if not exists {identifier}"
+        query = f"create{' temporary' if temporary else ''} stage if not exists {identifier}"
         if encryption_type:
             query += f" encryption = (type = '{encryption_type}')"
         if enable_directory:
```
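The new `temporary` parameter only toggles a keyword in the generated statement; a minimal sketch of the query prefix built by the facade's stage-creation method (encryption and directory options omitted).

```python
def create_stage_query(identifier: str, temporary: bool = False) -> str:
    """Mirrors the query prefix shown in the hunk above; remaining options are appended separately."""
    return f"create{' temporary' if temporary else ''} stage if not exists {identifier}"


print(create_stage_query("my_stage"))                  # create stage if not exists my_stage
print(create_stage_query("my_stage", temporary=True))  # create temporary stage if not exists my_stage
```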
snowflake/cli/_plugins/snowpark/common.py

```diff
@@ -349,6 +349,7 @@ def user_to_sql_type_mapper(user_provided_type: str) -> str:
             "FLOAT4",
             "FLOAT8",
         ),
+        ("DECFLOAT", ""): ("DECFLOAT",),
         ("TIMESTAMP_NTZ", ""): ("TIMESTAMP_NTZ", "TIMESTAMPNTZ", "DATETIME"),
         ("TIMESTAMP_LTZ", ""): ("TIMESTAMP_LTZ", "TIMESTAMPLTZ"),
         ("TIMESTAMP_TZ", ""): ("TIMESTAMP_TZ", "TIMESTAMPTZ"),
```
snowflake/cli/_plugins/snowpark/package/anaconda_packages.py

```diff
@@ -153,13 +153,37 @@ class AnacondaPackages:
     ):
         """Saves requirements to a file in format accepted by Snowflake SQL commands."""
         log.info("Writing requirements into file %s", file_path.path)
-
+
+        # Deduplicate requirements by package name, keeping the first occurrence
+        seen_packages = set()
+        deduplicated_requirements = []
+        duplicate_packages = set()
+
         for requirement in requirements:
             if requirement.name and requirement.name in self._packages:
-
-
-
-
+                if requirement.name in seen_packages:
+                    duplicate_packages.add(requirement.name)
+                    log.warning(
+                        "Duplicate package '%s' found in Anaconda requirements. "
+                        "Ignoring: %s",
+                        requirement.name,
+                        requirement.name_and_version,
+                    )
+                else:
+                    seen_packages.add(requirement.name)
+                    deduplicated_requirements.append(requirement)
+
+        if duplicate_packages:
+            log.warning(
+                "Found duplicate Anaconda packages: %s. "
+                "Consider consolidating package versions in requirements.txt.",
+                ", ".join(sorted(duplicate_packages)),
+            )
+
+        formatted_requirements = []
+        for requirement in deduplicated_requirements:
+            snowflake_name = self._packages[requirement.name].snowflake_name
+            formatted_requirements.append(snowflake_name + requirement.formatted_specs)
 
         if formatted_requirements:
             file_path.write_text("\n".join(formatted_requirements))
```