snowflake-cli 3.1.0__py3-none-any.whl → 3.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/dev/docs/templates/usage.rst.jinja2 +1 -1
  3. snowflake/cli/_plugins/connection/commands.py +124 -109
  4. snowflake/cli/_plugins/connection/util.py +54 -9
  5. snowflake/cli/_plugins/cortex/manager.py +1 -1
  6. snowflake/cli/_plugins/git/manager.py +4 -4
  7. snowflake/cli/_plugins/nativeapp/artifacts.py +64 -10
  8. snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py +5 -3
  9. snowflake/cli/_plugins/nativeapp/commands.py +10 -3
  10. snowflake/cli/_plugins/nativeapp/constants.py +1 -0
  11. snowflake/cli/_plugins/nativeapp/entities/application.py +501 -440
  12. snowflake/cli/_plugins/nativeapp/entities/application_package.py +563 -885
  13. snowflake/cli/_plugins/nativeapp/entities/models/event_sharing_telemetry.py +58 -0
  14. snowflake/cli/_plugins/nativeapp/same_account_install_method.py +0 -2
  15. snowflake/cli/_plugins/nativeapp/sf_facade.py +30 -0
  16. snowflake/cli/_plugins/nativeapp/sf_facade_constants.py +25 -0
  17. snowflake/cli/_plugins/nativeapp/sf_facade_exceptions.py +117 -0
  18. snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +525 -0
  19. snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py +1 -89
  20. snowflake/cli/_plugins/nativeapp/version/commands.py +6 -3
  21. snowflake/cli/_plugins/notebook/manager.py +2 -2
  22. snowflake/cli/_plugins/object/commands.py +10 -1
  23. snowflake/cli/_plugins/object/manager.py +13 -5
  24. snowflake/cli/_plugins/snowpark/common.py +3 -3
  25. snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +1 -1
  26. snowflake/cli/_plugins/spcs/common.py +29 -0
  27. snowflake/cli/_plugins/spcs/compute_pool/manager.py +7 -9
  28. snowflake/cli/_plugins/spcs/image_registry/manager.py +2 -2
  29. snowflake/cli/_plugins/spcs/image_repository/manager.py +1 -1
  30. snowflake/cli/_plugins/spcs/services/commands.py +64 -13
  31. snowflake/cli/_plugins/spcs/services/manager.py +75 -15
  32. snowflake/cli/_plugins/sql/commands.py +9 -1
  33. snowflake/cli/_plugins/sql/manager.py +9 -4
  34. snowflake/cli/_plugins/stage/commands.py +20 -16
  35. snowflake/cli/_plugins/stage/diff.py +1 -1
  36. snowflake/cli/_plugins/stage/manager.py +140 -11
  37. snowflake/cli/_plugins/streamlit/manager.py +5 -5
  38. snowflake/cli/_plugins/workspace/commands.py +6 -3
  39. snowflake/cli/api/cli_global_context.py +1 -0
  40. snowflake/cli/api/config.py +23 -5
  41. snowflake/cli/api/console/console.py +4 -19
  42. snowflake/cli/api/entities/utils.py +19 -32
  43. snowflake/cli/api/errno.py +2 -0
  44. snowflake/cli/api/exceptions.py +9 -0
  45. snowflake/cli/api/metrics.py +223 -7
  46. snowflake/cli/api/output/types.py +1 -1
  47. snowflake/cli/api/project/definition_conversion.py +179 -62
  48. snowflake/cli/api/rest_api.py +26 -4
  49. snowflake/cli/api/secure_utils.py +1 -1
  50. snowflake/cli/api/sql_execution.py +35 -22
  51. snowflake/cli/api/stage_path.py +5 -2
  52. {snowflake_cli-3.1.0.dist-info → snowflake_cli-3.2.1.dist-info}/METADATA +7 -8
  53. {snowflake_cli-3.1.0.dist-info → snowflake_cli-3.2.1.dist-info}/RECORD +56 -55
  54. {snowflake_cli-3.1.0.dist-info → snowflake_cli-3.2.1.dist-info}/WHEEL +1 -1
  55. snowflake/cli/_plugins/nativeapp/manager.py +0 -392
  56. snowflake/cli/_plugins/nativeapp/project_model.py +0 -211
  57. snowflake/cli/_plugins/nativeapp/run_processor.py +0 -184
  58. snowflake/cli/_plugins/nativeapp/version/version_processor.py +0 -56
  59. {snowflake_cli-3.1.0.dist-info → snowflake_cli-3.2.1.dist-info}/entry_points.txt +0 -0
  60. {snowflake_cli-3.1.0.dist-info → snowflake_cli-3.2.1.dist-info}/licenses/LICENSE +0 -0
@@ -11,8 +11,25 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
+ from __future__ import annotations
14
15
 
15
- from typing import Dict, Optional
16
+ import time
17
+ import uuid
18
+ from contextlib import contextmanager
19
+ from dataclasses import dataclass, field, replace
20
+ from heapq import nsmallest
21
+ from typing import ClassVar, Dict, Iterator, List, Optional, Set
22
+
23
+
24
+ class CLIMetricsInvalidUsageError(RuntimeError):
25
+ """
26
+ Indicative of bug in the code where a call to CLIMetrics was made erroneously
27
+
28
+ We do not want metrics errors to break the execution of commands,
29
+ so only raise this error in the event that an invariant was broken during setup
30
+ """
31
+
32
+ pass
16
33
 
17
34
 
18
35
  class _TypePrefix:
@@ -52,20 +69,135 @@ class CLICounterField:
52
69
  f"{_TypePrefix.FEATURES}.{_DomainPrefix.APP}.post_deploy_scripts"
53
70
  )
54
71
  PACKAGE_SCRIPTS = f"{_TypePrefix.FEATURES}.{_DomainPrefix.APP}.package_scripts"
72
+ EVENT_SHARING = f"{_TypePrefix.FEATURES}.{_DomainPrefix.APP}.event_sharing"
73
+ EVENT_SHARING_WARNING = (
74
+ f"{_TypePrefix.FEATURES}.{_DomainPrefix.APP}.event_sharing_warning"
75
+ )
76
+ EVENT_SHARING_ERROR = (
77
+ f"{_TypePrefix.FEATURES}.{_DomainPrefix.APP}.event_sharing_error"
78
+ )
79
+
80
+
81
+ @dataclass
82
+ class CLIMetricsSpan:
83
+ """
84
+ class for holding metrics span data and encapsulating related operations
85
+ """
86
+
87
+ # keys for dict representation
88
+ ID_KEY: ClassVar[str] = "id"
89
+ NAME_KEY: ClassVar[str] = "name"
90
+ PARENT_KEY: ClassVar[str] = "parent"
91
+ PARENT_ID_KEY: ClassVar[str] = "parent_id"
92
+ START_TIME_KEY: ClassVar[str] = "start_time"
93
+ EXECUTION_TIME_KEY: ClassVar[str] = "execution_time"
94
+ ERROR_KEY: ClassVar[str] = "error"
95
+ # total number of spans started under this span, inclusive of itself and its children's children (recursively)
96
+ SPAN_COUNT_IN_SUBTREE_KEY: ClassVar[str] = "span_count_in_subtree"
97
+ # the number of spans in the path between the current span and the topmost parent span, inclusive of both
98
+ SPAN_DEPTH_KEY: ClassVar[str] = "span_depth"
99
+ # denotes whether direct children were trimmed from telemetry payload
100
+ TRIMMED_KEY: ClassVar[str] = "trimmed"
101
+
102
+ # constructor vars
103
+ name: str
104
+ start_time: float # relative to when the command first started executing
105
+ parent: Optional[CLIMetricsSpan] = None
106
+
107
+ # vars for reporting
108
+ span_id: str = field(init=False, default_factory=lambda: uuid.uuid4().hex)
109
+ execution_time: Optional[float] = field(init=False, default=None)
110
+ error: Optional[BaseException] = field(init=False, default=None)
111
+ span_depth: int = field(init=False, default=1)
112
+ span_count_in_subtree: int = field(init=False, default=1)
113
+
114
+ # vars for postprocessing
115
+ # spans started directly under this one
116
+ children: Set[CLIMetricsSpan] = field(init=False, default_factory=set)
117
+
118
+ # private state
119
+ # start time of the step from the monotonic clock in order to calculate execution time
120
+ _monotonic_start: float = field(
121
+ init=False, default_factory=lambda: time.monotonic()
122
+ )
123
+
124
+ def __hash__(self) -> int:
125
+ return hash(self.span_id)
126
+
127
+ def __post_init__(self):
128
+ if not self.name:
129
+ raise CLIMetricsInvalidUsageError("span name must not be empty")
130
+
131
+ if self.parent:
132
+ self.parent.add_child(self)
133
+ self.span_depth = self.parent.span_depth + 1
134
+
135
+ def increment_subtree_node_count(self) -> None:
136
+ self.span_count_in_subtree += 1
137
+
138
+ if self.parent:
139
+ self.parent.increment_subtree_node_count()
140
+
141
+ def add_child(self, child: CLIMetricsSpan) -> None:
142
+ self.children.add(child)
143
+ self.increment_subtree_node_count()
144
+
145
+ def finish(self, error: Optional[BaseException] = None) -> None:
146
+ """
147
+ Sets the execution time and (optionally) error raised for the span
148
+
149
+ If already called, this method is a no-op
150
+ """
151
+ if self.execution_time is not None:
152
+ return
153
+
154
+ if error:
155
+ self.error = error
55
156
 
157
+ self.execution_time = time.monotonic() - self._monotonic_start
56
158
 
159
+ def to_dict(self) -> Dict:
160
+ """
161
+ Custom dict conversion function to be used for reporting telemetry
162
+ """
163
+
164
+ return {
165
+ self.ID_KEY: self.span_id,
166
+ self.NAME_KEY: self.name,
167
+ self.START_TIME_KEY: self.start_time,
168
+ self.PARENT_KEY: self.parent.name if self.parent is not None else None,
169
+ self.PARENT_ID_KEY: (
170
+ self.parent.span_id if self.parent is not None else None
171
+ ),
172
+ self.EXECUTION_TIME_KEY: self.execution_time,
173
+ self.ERROR_KEY: type(self.error).__name__ if self.error else None,
174
+ self.SPAN_COUNT_IN_SUBTREE_KEY: self.span_count_in_subtree,
175
+ self.SPAN_DEPTH_KEY: self.span_depth,
176
+ }
177
+
178
+
179
+ @dataclass
57
180
  class CLIMetrics:
58
181
  """
59
182
  Class to track various metrics across the execution of a command
60
183
  """
61
184
 
62
- def __init__(self):
63
- self._counters: Dict[str, int] = {}
185
+ # limits for reporting purposes
186
+ SPAN_DEPTH_LIMIT: ClassVar[int] = 5
187
+ SPAN_TOTAL_LIMIT: ClassVar[int] = 100
188
+
189
+ _counters: Dict[str, int] = field(init=False, default_factory=dict)
190
+ # stack of in progress spans as command is executing
191
+ _in_progress_spans: List[CLIMetricsSpan] = field(init=False, default_factory=list)
192
+ # list of finished steps for telemetry to process
193
+ _completed_spans: List[CLIMetricsSpan] = field(init=False, default_factory=list)
194
+ # monotonic clock time of when this class was initialized to approximate when the command first started executing
195
+ _monotonic_start: float = field(
196
+ init=False, default_factory=lambda: time.monotonic(), compare=False
197
+ )
64
198
 
65
- def __eq__(self, other):
66
- if isinstance(other, CLIMetrics):
67
- return self._counters == other._counters
68
- return False
199
+ def clone(self) -> CLIMetrics:
200
+ return replace(self)
69
201
 
70
202
  def get_counter(self, name: str) -> Optional[int]:
71
203
  return self._counters.get(name)
@@ -86,7 +218,91 @@ class CLIMetrics:
86
218
  else:
87
219
  self._counters[name] += value
88
220
 
221
+ @property
222
+ def current_span(self) -> Optional[CLIMetricsSpan]:
223
+ return self._in_progress_spans[-1] if len(self._in_progress_spans) > 0 else None
224
+
225
+ @contextmanager
226
+ def start_span(self, name: str) -> Iterator[CLIMetricsSpan]:
227
+ """
228
+ Starts a new span that tracks various metrics throughout its execution
229
+
230
+ Assumes that parent spans contain the entirety of their child spans
231
+ If not provided, parent spans are automatically populated as the most recently executed spans
232
+
233
+ Spans are not emitted in telemetry if depth/total limits are exceeded
234
+
235
+ :raises CliMetricsInvalidUsageError: if the step name is empty
236
+ """
237
+ new_span = CLIMetricsSpan(
238
+ name=name,
239
+ start_time=time.monotonic() - self._monotonic_start,
240
+ parent=self.current_span,
241
+ )
242
+
243
+ self._in_progress_spans.append(new_span)
244
+
245
+ try:
246
+ yield new_span
247
+ except BaseException as err:
248
+ new_span.finish(error=err)
249
+ raise
250
+ else:
251
+ new_span.finish()
252
+ finally:
253
+ self._completed_spans.append(new_span)
254
+ self._in_progress_spans.remove(new_span)
255
+
89
256
  @property
90
257
  def counters(self) -> Dict[str, int]:
91
258
  # return a copy of the original dict to avoid mutating the original
92
259
  return self._counters.copy()
260
+
261
+ @property
262
+ def num_spans_past_depth_limit(self) -> int:
263
+ return len(
264
+ [
265
+ span
266
+ for span in self._completed_spans
267
+ if span.span_depth > self.SPAN_DEPTH_LIMIT
268
+ ]
269
+ )
270
+
271
+ @property
272
+ def num_spans_past_total_limit(self) -> int:
273
+ return max(0, len(self._completed_spans) - self.SPAN_TOTAL_LIMIT)
274
+
275
+ @property
276
+ def completed_spans(self) -> List[Dict]:
277
+ """
278
+ Returns the completed spans tracked throughout a command, sorted by start time, for reporting telemetry
279
+
280
+ Ensures that the spans we send are within the configured limits and marks
281
+ certain spans as trimmed if their children would bypass the limits we set
282
+ """
283
+ # take spans breadth-first within the depth and total limits
284
+ # since we care more about the big picture than granularity
285
+ spans_to_report = set(
286
+ nsmallest(
287
+ n=self.SPAN_TOTAL_LIMIT,
288
+ iterable=(
289
+ span
290
+ for span in self._completed_spans
291
+ if span.span_depth <= self.SPAN_DEPTH_LIMIT
292
+ ),
293
+ key=lambda span: (span.span_depth, span.start_time),
294
+ )
295
+ )
296
+
297
+ # sort by start time to make reading the payload easier
298
+ sorted_spans_to_report = sorted(
299
+ spans_to_report, key=lambda span: span.start_time
300
+ )
301
+
302
+ return [
303
+ {
304
+ **span.to_dict(),
305
+ CLIMetricsSpan.TRIMMED_KEY: not span.children <= spans_to_report,
306
+ }
307
+ for span in sorted_spans_to_report
308
+ ]
@@ -37,7 +37,7 @@ class ObjectResult(CommandResult):
37
37
 
38
38
 
39
39
  class CollectionResult(CommandResult):
40
- def __init__(self, elements: t.Iterable[t.Dict]):
40
+ def __init__(self, elements: t.Iterable[t.Dict] | t.Generator[t.Dict, None, None]):
41
41
  self._elements = elements
42
42
 
43
43
  @property
@@ -1,13 +1,21 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import logging
4
+ import shutil
5
+ import tempfile
6
+ from copy import deepcopy
4
7
  from pathlib import Path
5
8
  from tempfile import TemporaryDirectory, mkstemp
6
9
  from typing import Any, Dict, Literal, Optional
7
10
 
8
11
  from click import ClickException
9
12
  from snowflake.cli._plugins.nativeapp.artifacts import (
10
- BundleMap,
13
+ bundle_artifacts,
14
+ )
15
+ from snowflake.cli._plugins.nativeapp.bundle_context import BundleContext
16
+ from snowflake.cli._plugins.nativeapp.codegen.compiler import TEMPLATES_PROCESSOR
17
+ from snowflake.cli._plugins.nativeapp.codegen.templates.templates_processor import (
18
+ TemplatesProcessor,
11
19
  )
12
20
  from snowflake.cli._plugins.nativeapp.entities.application_package import (
13
21
  ApplicationPackageEntityModel,
@@ -46,6 +54,7 @@ from snowflake.cli.api.project.schemas.v1.snowpark.snowpark import Snowpark
46
54
  from snowflake.cli.api.project.schemas.v1.streamlit.streamlit import Streamlit
47
55
  from snowflake.cli.api.rendering.jinja import get_basic_jinja_env
48
56
  from snowflake.cli.api.utils.definition_rendering import render_definition_template
57
+ from snowflake.cli.api.utils.dict_utils import deep_merge_dicts
49
58
 
50
59
  log = logging.getLogger(__name__)
51
60
 
@@ -107,12 +116,10 @@ def convert_project_definition_to_v2(
107
116
  if definition_v1.streamlit
108
117
  else {}
109
118
  )
110
- native_app_data = (
111
- convert_native_app_to_v2_data(
112
- project_root, definition_v1.native_app, template_context
113
- )
119
+ native_app_data, native_app_template_context = (
120
+ convert_native_app_to_v2_data(definition_v1.native_app, template_context)
114
121
  if definition_v1.native_app
115
- else {}
122
+ else ({}, {})
116
123
  )
117
124
  envs = convert_envs_to_v2(definition_v1)
118
125
 
@@ -137,7 +144,16 @@ def convert_project_definition_to_v2(
137
144
 
138
145
  # If the user's files have any template tags in them, they
139
146
  # also need to be migrated to point to the v2 entities
140
- _convert_templates_in_files(project_root, definition_v1, definition_v2, in_memory)
147
+ replacement_template_context = deepcopy(template_context) or {}
148
+ deep_merge_dicts(replacement_template_context, native_app_template_context)
149
+ if replacement_template_context:
150
+ _convert_templates_in_files(
151
+ project_root,
152
+ definition_v1,
153
+ definition_v2,
154
+ in_memory,
155
+ replacement_template_context,
156
+ )
141
157
 
142
158
  return definition_v2
143
159
 
@@ -243,10 +259,9 @@ def convert_streamlit_to_v2_data(streamlit: Streamlit) -> Dict[str, Any]:
243
259
 
244
260
 
245
261
  def convert_native_app_to_v2_data(
246
- project_root: Path,
247
262
  native_app: NativeApp,
248
263
  template_context: Optional[Dict[str, Any]] = None,
249
- ) -> Dict[str, Any]:
264
+ ) -> tuple[dict[str, Any], dict[str, Any]]:
250
265
  def _make_meta(obj: Application | Package):
251
266
  meta = {}
252
267
  if obj.role:
@@ -257,39 +272,6 @@ def convert_native_app_to_v2_data(
257
272
  meta["post_deploy"] = obj.post_deploy
258
273
  return meta
259
274
 
260
- def _find_manifest():
261
- # We don't know which file in the project directory is the actual manifest,
262
- # and we can't iterate through the artifacts property since the src can contain
263
- # glob patterns. The simplest solution is to bundle the app and find the
264
- # manifest file from the resultant BundleMap, since the bundle process ensures
265
- # that only a single source path can map to the corresponding destination path
266
- bundle_map = BundleMap(
267
- project_root=project_root, deploy_root=project_root / native_app.deploy_root
268
- )
269
- for artifact in native_app.artifacts:
270
- bundle_map.add(artifact)
271
-
272
- manifest_path = next(
273
- (
274
- src
275
- for src, dest in bundle_map.all_mappings(
276
- absolute=True, expand_directories=True
277
- )
278
- if dest.name == "manifest.yml"
279
- ),
280
- None,
281
- )
282
- if not manifest_path:
283
- # The manifest field is required, so we can't gracefully handle it being missing
284
- raise ClickException(
285
- "manifest.yml file not found in any Native App artifact sources, "
286
- "unable to perform migration"
287
- )
288
-
289
- # Use a POSIX path to be consistent with other migrated fields
290
- # which use POSIX paths as default values
291
- return manifest_path.relative_to(project_root).as_posix()
292
-
293
275
  package_entity_name = "pkg"
294
276
  if (
295
277
  native_app.package
@@ -305,7 +287,6 @@ def convert_native_app_to_v2_data(
305
287
  package = {
306
288
  "type": "application package",
307
289
  "identifier": package_identifier,
308
- "manifest": _find_manifest(),
309
290
  "artifacts": native_app.artifacts,
310
291
  }
311
292
 
@@ -360,12 +341,63 @@ def convert_native_app_to_v2_data(
360
341
  ):
361
342
  app["debug"] = native_app.application.debug
362
343
 
363
- return {
344
+ pdfv2_yml = {
364
345
  "entities": {
365
346
  package_entity_name: package,
366
347
  app_entity_name: app,
367
348
  }
368
349
  }
350
+ template_replacements = {
351
+ "ctx": {
352
+ "native_app": {
353
+ "name": native_app.name, # This is a literal since there's no equivalent in v2
354
+ # omitting "artifacts" since lists are not supported in templates
355
+ "bundle_root": _make_template(
356
+ f"ctx.entities.{package_entity_name}.bundle_root"
357
+ ),
358
+ "deploy_root": _make_template(
359
+ f"ctx.entities.{package_entity_name}.deploy_root"
360
+ ),
361
+ "generated_root": _make_template(
362
+ f"ctx.entities.{package_entity_name}.generated_root"
363
+ ),
364
+ "source_stage": _make_template(
365
+ f"ctx.entities.{package_entity_name}.stage"
366
+ ),
367
+ "scratch_stage": _make_template(
368
+ f"ctx.entities.{package_entity_name}.scratch_stage"
369
+ ),
370
+ "package": {
371
+ # omitting "scripts" since lists are not supported in templates
372
+ "role": _make_template(
373
+ f"ctx.entities.{package_entity_name}.meta.role"
374
+ ),
375
+ "name": _make_template(
376
+ f"ctx.entities.{package_entity_name}.identifier"
377
+ ),
378
+ "warehouse": _make_template(
379
+ f"ctx.entities.{package_entity_name}.meta.warehouse"
380
+ ),
381
+ "distribution": _make_template(
382
+ f"ctx.entities.{package_entity_name}.distribution"
383
+ ),
384
+ # omitting "post_deploy" since lists are not supported in templates
385
+ },
386
+ "application": {
387
+ "role": _make_template(f"ctx.entities.{app_entity_name}.meta.role"),
388
+ "name": _make_template(
389
+ f"ctx.entities.{app_entity_name}.identifier"
390
+ ),
391
+ "warehouse": _make_template(
392
+ f"ctx.entities.{app_entity_name}.meta.warehouse"
393
+ ),
394
+ "debug": _make_template(f"ctx.entities.{app_entity_name}.debug"),
395
+ # omitting "post_deploy" since lists are not supported in templates
396
+ },
397
+ }
398
+ }
399
+ }
400
+ return pdfv2_yml, template_replacements
369
401
 
370
402
 
371
403
  def convert_envs_to_v2(pd: ProjectDefinition):
@@ -380,33 +412,106 @@ def _convert_templates_in_files(
380
412
  definition_v1: ProjectDefinition,
381
413
  definition_v2: ProjectDefinitionV2,
382
414
  in_memory: bool,
415
+ replacement_template_context: dict[str, Any],
383
416
  ):
384
417
  """Converts templates in other files to the new format"""
385
- # TODO handle artifacts using the "templates" processor
386
- # For now this only handles Native App package scripts
418
+ # Set up fakers so that references to ctx.env. and fn.
419
+ # get templated to the same literal, since those references
420
+ # are the same in v1 and v2
421
+ replacement_template_context["ctx"]["env"] = _EnvFaker()
422
+ replacement_template_context["fn"] = _FnFaker()
423
+
387
424
  metrics = get_cli_context().metrics
388
- metrics.set_counter_default(CLICounterField.PACKAGE_SCRIPTS, 0)
389
425
 
390
- if (na := definition_v1.native_app) and (pkg := na.package) and pkg.scripts:
391
- metrics.set_counter(CLICounterField.PACKAGE_SCRIPTS, 1)
392
- cli_console.warning(
393
- "WARNING: native_app.package.scripts is deprecated. Please migrate to using native_app.package.post_deploy."
394
- )
395
- # If the v1 definition has a Native App with a package, we know
426
+ if na := definition_v1.native_app:
427
+ # If the v1 definition has a Native App, we know
396
428
  # that the v2 definition will have exactly one application package entity
397
- pkg_entity: ApplicationPackageEntityModel = list(
429
+ pkg_model: ApplicationPackageEntityModel = list(
398
430
  definition_v2.get_entities_by_type(
399
431
  ApplicationPackageEntityModel.get_type()
400
432
  ).values()
401
433
  )[0]
402
- converted_post_deploy_hooks = _convert_package_script_files(
403
- project_root, pkg.scripts, pkg_entity, in_memory
404
- )
405
- if pkg_entity.meta is None:
406
- pkg_entity.meta = MetaField()
407
- if pkg_entity.meta.post_deploy is None:
408
- pkg_entity.meta.post_deploy = []
409
- pkg_entity.meta.post_deploy += converted_post_deploy_hooks
434
+
435
+ # Convert templates in artifacts by passing them through the TemplatesProcessor
436
+ # but providing a context that maps v1 template references to the equivalent v2
437
+ # references instead of resolving to literals
438
+ # For example, replacement_template_context might look like
439
+ # {
440
+ # "ctx": {
441
+ # "native_app": {
442
+ # "bundle_root": "<% ctx.entities.pkg.bundle_root %>",
443
+ # "deploy_root": "<% ctx.entities.pkg.deploy_root %>",
444
+ # "application": {
445
+ # "name": "<% ctx.entities.app.identifier %>",
446
+ # }
447
+ # and so on...
448
+ # }
449
+ # }
450
+ # }
451
+ # We only convert files on-disk if the "templates" processor is used in the artifacts
452
+ # and if we're doing a permanent conversion. If we're doing an in-memory conversion,
453
+ # the CLI global template context is already populated with the v1 definition, so
454
+ # we don't want to convert the v1 template references in artifact files
455
+ metrics.set_counter_default(CLICounterField.TEMPLATES_PROCESSOR, 0)
456
+ artifacts_to_template = [
457
+ artifact
458
+ for artifact in pkg_model.artifacts
459
+ for processor in artifact.processors
460
+ if processor.name == TEMPLATES_PROCESSOR
461
+ ]
462
+ if not in_memory and artifacts_to_template:
463
+ metrics.set_counter(CLICounterField.TEMPLATES_PROCESSOR, 1)
464
+
465
+ # Create a temporary directory to hold the expanded templates,
466
+ # as if a bundle step had been run but without affecting any
467
+ # files on disk outside of the artifacts we want to convert
468
+ with tempfile.TemporaryDirectory() as d:
469
+ deploy_root = Path(d)
470
+ bundle_ctx = BundleContext(
471
+ package_name=pkg_model.identifier,
472
+ artifacts=pkg_model.artifacts,
473
+ project_root=project_root,
474
+ bundle_root=project_root / pkg_model.bundle_root,
475
+ deploy_root=deploy_root,
476
+ generated_root=(
477
+ project_root / deploy_root / pkg_model.generated_root
478
+ ),
479
+ )
480
+ template_processor = TemplatesProcessor(bundle_ctx)
481
+ bundle_map = bundle_artifacts(
482
+ project_root, deploy_root, artifacts_to_template
483
+ )
484
+ for src, dest in bundle_map.all_mappings(
485
+ absolute=True, expand_directories=True
486
+ ):
487
+ if src.is_dir():
488
+ continue
489
+ # We call the implementation directly instead of calling process()
490
+ # since we need access to the BundleMap to copy files anyways
491
+ template_processor.expand_templates_in_file(
492
+ src, dest, replacement_template_context
493
+ )
494
+ # Copy the expanded file back to its original source location if it was modified
495
+ if not dest.is_symlink():
496
+ shutil.copyfile(dest, src)
497
+
498
+ # Convert package script files to post-deploy hooks
499
+ metrics.set_counter_default(CLICounterField.PACKAGE_SCRIPTS, 0)
500
+ if (pkg := na.package) and pkg.scripts:
501
+ metrics.set_counter(CLICounterField.PACKAGE_SCRIPTS, 1)
502
+ cli_console.warning(
503
+ "WARNING: native_app.package.scripts is deprecated. "
504
+ "Please migrate to using native_app.package.post_deploy."
505
+ )
506
+
507
+ converted_post_deploy_hooks = _convert_package_script_files(
508
+ project_root, pkg.scripts, pkg_model, in_memory
509
+ )
510
+ if pkg_model.meta is None:
511
+ pkg_model.meta = MetaField()
512
+ if pkg_model.meta.post_deploy is None:
513
+ pkg_model.meta.post_deploy = []
514
+ pkg_model.meta.post_deploy += converted_post_deploy_hooks
410
515
 
411
516
 
412
517
  def _convert_package_script_files(
@@ -447,6 +552,18 @@ def _convert_package_script_files(
447
552
  return post_deploy_hooks
448
553
 
449
554
 
555
+ class _EnvFaker:
556
+ def __getitem__(self, item):
557
+ return _make_template(f"ctx.env.{item}")
558
+
559
+
560
+ class _FnFaker:
561
+ def __getitem__(self, item):
562
+ return lambda *args: _make_template(
563
+ f"fn.{item}({', '.join(repr(a) for a in args)})"
564
+ )
565
+
566
+
450
567
  def _make_template(template: str) -> str:
451
568
  return f"{PROJECT_TEMPLATE_VARIABLE_OPENING} {template} {PROJECT_TEMPLATE_VARIABLE_CLOSING}"
452
569
 
@@ -17,6 +17,7 @@ from __future__ import annotations
17
17
  import json
18
18
  import logging
19
19
  from typing import Any, Dict, Optional
20
+ from urllib.parse import parse_qsl, urlencode, urlparse
20
21
 
21
22
  from click import ClickException
22
23
  from snowflake.cli.api.constants import SF_REST_API_URL_PREFIX
@@ -107,7 +108,9 @@ class RestApi:
107
108
  url = f"{SF_REST_API_URL_PREFIX}/databases/{db_name}/schemas/{schema_name}"
108
109
  return self._fetch_endpoint_exists(url)
109
110
 
110
- def determine_url_for_create_query(self, object_type: str) -> str:
111
+ def determine_url_for_create_query(
112
+ self, object_type: str, replace: bool = False, if_not_exists: bool = False
113
+ ) -> str:
111
114
  """
112
115
  Determine an url for creating an object of given type via REST API.
113
116
  If URL cannot be determined, the function throws CannotDetermineCreateURLException exception.
@@ -124,10 +127,16 @@ class RestApi:
124
127
  """
125
128
  plural_object_type = _pluralize_object_type(object_type)
126
129
 
130
+ query_params = {}
131
+
132
+ if replace or if_not_exists:
133
+ param = "ifNotExists" if if_not_exists else "orReplace"
134
+ query_params = {"createMode": param}
135
+
127
136
  if self.get_endpoint_exists(
128
137
  url := f"{SF_REST_API_URL_PREFIX}/{plural_object_type}/"
129
138
  ):
130
- return url
139
+ return self._add_query_parameters_to_url(url, query_params)
131
140
 
132
141
  db = self.conn.database
133
142
  if not db:
@@ -139,7 +148,7 @@ class RestApi:
139
148
  if self.get_endpoint_exists(
140
149
  url := f"{SF_REST_API_URL_PREFIX}/databases/{db}/{plural_object_type}/"
141
150
  ):
142
- return url
151
+ return self._add_query_parameters_to_url(url, query_params)
143
152
 
144
153
  schema = self.conn.schema
145
154
  if not schema:
@@ -151,12 +160,25 @@ class RestApi:
151
160
  if self.get_endpoint_exists(
152
161
  url := f"{SF_REST_API_URL_PREFIX}/databases/{self.conn.database}/schemas/{self.conn.schema}/{plural_object_type}/"
153
162
  ):
154
- return url
163
+ return self._add_query_parameters_to_url(url, query_params)
155
164
 
156
165
  raise CannotDetermineCreateURLException(
157
166
  f"Create operation for type {object_type} is not supported. Try using `sql -q 'CREATE ...'` command."
158
167
  )
159
168
 
169
+ @staticmethod
170
+ def _add_query_parameters_to_url(url: str, query_params: Dict[str, Any]) -> str:
171
+ """
172
+ Updates existing url with new query parameters.
173
+ They should be passed as key-value pairs in query_params dict.
174
+ """
175
+ if not query_params:
176
+ return url
177
+ url_parts = urlparse(url)
178
+ query = dict(parse_qsl(url_parts.query))
179
+ query.update(query_params)
180
+ return url_parts._replace(query=urlencode(query)).geturl()
181
+
160
182
 
161
183
  class DatabaseNotDefinedException(ClickException):
162
184
  pass
@@ -108,7 +108,7 @@ def _windows_restrict_file_permissions(path: Path) -> None:
108
108
 
109
109
  for user in windows_get_not_whitelisted_users_with_access(path):
110
110
  log.info("Removing permissions of user %s from file %s", user, path)
111
- subprocess.run(["icacls", str(path), "/DENY", f"{user}:F"])
111
+ subprocess.run(["icacls", str(path), "/remove:g", f"{user}"])
112
112
 
113
113
 
114
114
  def restrict_file_permissions(file_path: Path) -> None: