snowflake-cli 3.0.2__py3-none-any.whl → 3.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/cli_app.py +3 -0
  3. snowflake/cli/_app/dev/docs/templates/overview.rst.jinja2 +1 -1
  4. snowflake/cli/_app/dev/docs/templates/usage.rst.jinja2 +2 -2
  5. snowflake/cli/_app/telemetry.py +69 -4
  6. snowflake/cli/_plugins/connection/commands.py +40 -2
  7. snowflake/cli/_plugins/git/commands.py +6 -3
  8. snowflake/cli/_plugins/git/manager.py +5 -0
  9. snowflake/cli/_plugins/nativeapp/artifacts.py +13 -3
  10. snowflake/cli/_plugins/nativeapp/codegen/artifact_processor.py +1 -1
  11. snowflake/cli/_plugins/nativeapp/codegen/compiler.py +7 -0
  12. snowflake/cli/_plugins/nativeapp/codegen/sandbox.py +10 -10
  13. snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py +2 -2
  14. snowflake/cli/_plugins/nativeapp/codegen/snowpark/extension_function_utils.py +1 -1
  15. snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +8 -8
  16. snowflake/cli/_plugins/nativeapp/commands.py +135 -186
  17. snowflake/cli/_plugins/nativeapp/entities/application.py +176 -24
  18. snowflake/cli/_plugins/nativeapp/entities/application_package.py +112 -136
  19. snowflake/cli/_plugins/nativeapp/exceptions.py +12 -0
  20. snowflake/cli/_plugins/nativeapp/manager.py +3 -26
  21. snowflake/cli/_plugins/nativeapp/v2_conversions/{v2_to_v1_decorator.py → compat.py} +131 -72
  22. snowflake/cli/_plugins/nativeapp/version/commands.py +30 -29
  23. snowflake/cli/_plugins/nativeapp/version/version_processor.py +1 -43
  24. snowflake/cli/_plugins/snowpark/common.py +60 -18
  25. snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +2 -2
  26. snowflake/cli/_plugins/spcs/image_repository/commands.py +4 -37
  27. snowflake/cli/_plugins/spcs/image_repository/manager.py +4 -1
  28. snowflake/cli/_plugins/spcs/services/commands.py +36 -4
  29. snowflake/cli/_plugins/spcs/services/manager.py +36 -4
  30. snowflake/cli/_plugins/stage/commands.py +8 -3
  31. snowflake/cli/_plugins/stage/diff.py +16 -16
  32. snowflake/cli/_plugins/stage/manager.py +164 -73
  33. snowflake/cli/_plugins/stage/md5.py +1 -1
  34. snowflake/cli/_plugins/workspace/commands.py +21 -1
  35. snowflake/cli/_plugins/workspace/context.py +38 -0
  36. snowflake/cli/_plugins/workspace/manager.py +23 -13
  37. snowflake/cli/api/cli_global_context.py +3 -3
  38. snowflake/cli/api/commands/flags.py +23 -7
  39. snowflake/cli/api/config.py +7 -4
  40. snowflake/cli/api/connections.py +12 -1
  41. snowflake/cli/api/entities/common.py +4 -2
  42. snowflake/cli/api/entities/utils.py +17 -37
  43. snowflake/cli/api/exceptions.py +32 -0
  44. snowflake/cli/api/identifiers.py +8 -0
  45. snowflake/cli/api/project/definition_conversion.py +139 -40
  46. snowflake/cli/api/project/schemas/entities/common.py +11 -0
  47. snowflake/cli/api/project/schemas/project_definition.py +30 -25
  48. snowflake/cli/api/sql_execution.py +5 -7
  49. snowflake/cli/api/stage_path.py +241 -0
  50. snowflake/cli/api/utils/definition_rendering.py +3 -5
  51. {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/METADATA +11 -11
  52. {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/RECORD +55 -55
  53. snowflake/cli/_plugins/nativeapp/teardown_processor.py +0 -70
  54. snowflake/cli/_plugins/workspace/action_context.py +0 -18
  55. {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/WHEEL +0 -0
  56. {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/entry_points.txt +0 -0
  57. {snowflake_cli-3.0.2.dist-info → snowflake_cli-3.1.0.dist-info}/licenses/LICENSE +0 -0
@@ -33,6 +33,7 @@ from snowflake.cli.api.connections import ConnectionContext
 from snowflake.cli.api.console import cli_console
 from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.output.formats import OutputFormat
+from snowflake.cli.api.stage_path import StagePath

 DEFAULT_CONTEXT_SETTINGS = {"help_option_names": ["--help", "-h"]}

@@ -83,7 +84,7 @@ ConnectionOption = typer.Option(
     "--connection",
     "-c",
     "--environment",
-    help=f"Name of the connection, as defined in your `config.toml`. Default: `default`.",
+    help=f"Name of the connection, as defined in your `config.toml` file. Default: `default`.",
     callback=_connection_callback("connection_name"),
     show_default=False,
     rich_help_panel=_CONNECTION_SECTION,
@@ -276,7 +277,7 @@ MfaPasscodeOption = typer.Option(
 EnableDiagOption = typer.Option(
     False,
     "--enable-diag",
-    help="Run python connector diagnostic test",
+    help="Run Python connector diagnostic test",
     callback=_connection_callback("enable_diag"),
     show_default=False,
     is_flag=True,
@@ -349,7 +350,7 @@ VerboseOption = typer.Option(
 DebugOption = typer.Option(
     False,
     "--debug",
-    help="Displays log entries for log levels `debug` and higher; debug logs contains additional information.",
+    help="Displays log entries for log levels `debug` and higher; debug logs contain additional information.",
     callback=_context_callback("enable_tracebacks"),
     is_flag=True,
     rich_help_panel=_CLI_BEHAVIOUR,
@@ -406,9 +407,9 @@ def variables_option(description: str):


 ExecuteVariablesOption = variables_option(
-    'Variables for the execution context. For example: `-D "<key>=<value>"`. '
-    "For SQL files variables are use to expand the template and any unknown variable will cause an error. "
-    "For Python files variables are used to update os.environ dictionary. Provided keys are capitalized to adhere to best practices."
+    'Variables for the execution context; for example: `-D "<key>=<value>"`. '
+    "For SQL files, variables are used to expand the template, and any unknown variable will cause an error (consider embedding quoting in the file)."
+    "For Python files, variables are used to update the os.environ dictionary. Provided keys are capitalized to adhere to best practices. "
     "In case of SQL files string values must be quoted in `''` (consider embedding quoting in the file).",
 )

@@ -473,6 +474,13 @@ class IdentifierStageType(click.ParamType):
         return FQN.from_stage(value)


+class IdentifierStagePathType(click.ParamType):
+    name = "TEXT"
+
+    def convert(self, value, param, ctx):
+        return StagePath.from_stage_str(value)
+
+
 def identifier_argument(
     sf_object: str,
     example: str,
@@ -481,7 +489,7 @@ def identifier_argument(
 ) -> typer.Argument:
     return typer.Argument(
         ...,
-        help=f"Identifier of the {sf_object}. For example: {example}",
+        help=f"Identifier of the {sf_object}; for example: {example}",
         show_default=False,
         click_type=click_type,
         callback=callback,
@@ -496,6 +504,14 @@ def identifier_stage_argument(
     )


+def identifier_stage_path_argument(
+    sf_object: str, example: str, callback: Callable | None = None
+) -> typer.Argument:
+    return identifier_argument(
+        sf_object, example, click_type=IdentifierStagePathType(), callback=callback
+    )
+
+
 def execution_identifier_argument(sf_object: str, example: str) -> typer.Argument:
     return typer.Argument(
         ...,
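For orientation, a minimal sketch (not part of the package) of how a Typer command could consume the new helper added to flags.py above; the command and argument values are hypothetical, and the value arrives already converted to a StagePath by IdentifierStagePathType:

    import typer
    from snowflake.cli.api.commands.flags import identifier_stage_path_argument

    app = typer.Typer()

    @app.command()
    def describe(
        stage_path: str = identifier_stage_path_argument("stage path", example="@my_stage/dir/"),
    ):
        # click_type=IdentifierStagePathType() converts the raw string into a StagePath
        typer.echo(stage_path)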
@@ -346,10 +346,13 @@ def _dump_config(conf_file_cache: Dict):


 def _check_default_config_files_permissions() -> None:
-    if CONNECTIONS_FILE.exists() and not file_permissions_are_strict(CONNECTIONS_FILE):
-        raise ConfigFileTooWidePermissionsError(CONNECTIONS_FILE)
-    if CONFIG_FILE.exists() and not file_permissions_are_strict(CONFIG_FILE):
-        raise ConfigFileTooWidePermissionsError(CONFIG_FILE)
+    if not IS_WINDOWS:
+        if CONNECTIONS_FILE.exists() and not file_permissions_are_strict(
+            CONNECTIONS_FILE
+        ):
+            raise ConfigFileTooWidePermissionsError(CONNECTIONS_FILE)
+        if CONFIG_FILE.exists() and not file_permissions_are_strict(CONFIG_FILE):
+            raise ConfigFileTooWidePermissionsError(CONFIG_FILE)


 from typing import Literal
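On Windows the POSIX permission bits checked above are not meaningful, so the check is now skipped entirely there. As a rough standalone illustration only (not the package's file_permissions_are_strict implementation), a "strict" check of this kind usually means the file grants no access to group or others:

    import os
    import stat

    def permissions_look_strict(path: str) -> bool:
        # True when neither group nor others have any read/write/execute bits set
        mode = os.stat(path).st_mode
        return (mode & (stat.S_IRWXG | stat.S_IRWXO)) == 0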
@@ -22,7 +22,7 @@ from dataclasses import asdict, dataclass, field, fields, replace
 from pathlib import Path
 from typing import Optional

-from snowflake.cli.api.config import get_default_connection_name
+from snowflake.cli.api.config import get_connection_dict, get_default_connection_name
 from snowflake.cli.api.exceptions import InvalidSchemaError
 from snowflake.connector import SnowflakeConnection
 from snowflake.connector.compat import IS_WINDOWS
@@ -79,6 +79,17 @@ class ConnectionContext:
                 raise KeyError(f"{key} is not a field of {self.__class__.__name__}")
             setattr(self, key, value)

+    def update_from_config(self) -> ConnectionContext:
+        connection_config = get_connection_dict(connection_name=self.connection_name)
+        if "private_key_path" in connection_config:
+            connection_config["private_key_file"] = connection_config[
+                "private_key_path"
+            ]
+            del connection_config["private_key_path"]
+
+        self.update(**connection_config)
+        return self
+
     def __repr__(self) -> str:
         """Minimal repr where None values have their keys omitted."""
         items = [f"{k}={repr(v)}" for (k, v) in self.present_values_as_dict().items()]
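A minimal sketch of the new hydration helper, assuming a connection named "default" exists in config.toml and that ConnectionContext (a dataclass with optional fields) can be constructed from connection_name alone:

    from snowflake.cli.api.connections import ConnectionContext

    ctx = ConnectionContext(connection_name="default")
    ctx.update_from_config()  # copies config.toml values onto the context; private_key_path is mapped to private_key_file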
@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Generic, Type, TypeVar, get_args

-from snowflake.cli._plugins.workspace.action_context import ActionContext
+from snowflake.cli._plugins.workspace.context import ActionContext, WorkspaceContext
 from snowflake.cli.api.sql_execution import SqlExecutor


@@ -10,6 +10,7 @@ class EntityActions(str, Enum):
     DEPLOY = "action_deploy"
     DROP = "action_drop"
     VALIDATE = "action_validate"
+    EVENTS = "action_events"

     VERSION_LIST = "action_version_list"
     VERSION_CREATE = "action_version_create"
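A trivial check of the new action value (the enum lives in snowflake/cli/api/entities/common.py, file 41 in the list above):

    from snowflake.cli.api.entities.common import EntityActions

    print(EntityActions.EVENTS.value)  # "action_events"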
@@ -24,8 +25,9 @@ class EntityBase(Generic[T]):
     Base class for the fully-featured entity classes.
     """

-    def __init__(self, entity_model: T):
+    def __init__(self, entity_model: T, workspace_ctx: WorkspaceContext):
         self._entity_model = entity_model
+        self._workspace_ctx = workspace_ctx

     @classmethod
     def get_entity_model_type(cls) -> Type[T]:
@@ -1,6 +1,5 @@
 import os
 from pathlib import Path
-from textwrap import dedent
 from typing import Any, List, NoReturn, Optional

 import jinja2
@@ -16,7 +15,7 @@ from snowflake.cli._plugins.nativeapp.exceptions import (
 from snowflake.cli._plugins.nativeapp.utils import verify_exists, verify_no_directories
 from snowflake.cli._plugins.stage.diff import (
     DiffResult,
-    StagePath,
+    StagePathType,
     compute_stage_diff,
     preserve_from_diff,
     sync_local_diff_with_stage,
@@ -30,7 +29,11 @@ from snowflake.cli.api.errno import (
     DOES_NOT_EXIST_OR_CANNOT_BE_PERFORMED,
     NO_WAREHOUSE_SELECTED_IN_SESSION,
 )
-from snowflake.cli.api.exceptions import SnowflakeSQLExecutionError
+from snowflake.cli.api.exceptions import (
+    DoesNotExistOrUnauthorizedError,
+    NoWarehouseSelectedInSessionError,
+    SnowflakeSQLExecutionError,
+)
 from snowflake.cli.api.metrics import CLICounterField
 from snowflake.cli.api.project.schemas.entities.common import PostDeployHook
 from snowflake.cli.api.rendering.sql_templates import (
@@ -41,46 +44,21 @@ from snowflake.connector import ProgrammingError
 from snowflake.connector.cursor import SnowflakeCursor


-def generic_sql_error_handler(
-    err: ProgrammingError, role: Optional[str] = None, warehouse: Optional[str] = None
-) -> NoReturn:
+def generic_sql_error_handler(err: ProgrammingError) -> NoReturn:
     # Potential refactor: If moving away from Python 3.8 and 3.9 to >= 3.10, use match ... case
-    if err.errno == DOES_NOT_EXIST_OR_CANNOT_BE_PERFORMED:
-        raise ProgrammingError(
-            msg=dedent(
-                f"""\
-                Received error message '{err.msg}' while executing SQL statement.
-                '{role}' may not have access to warehouse '{warehouse}'.
-                Please grant usage privilege on warehouse to this role.
-                """
-            ),
-            errno=err.errno,
-        )
+    if (
+        err.errno == DOES_NOT_EXIST_OR_CANNOT_BE_PERFORMED
+        or "does not exist or not authorized" in err.msg
+    ):
+        raise DoesNotExistOrUnauthorizedError(msg=err.msg) from err
     elif err.errno == NO_WAREHOUSE_SELECTED_IN_SESSION:
-        raise ProgrammingError(
-            msg=dedent(
-                f"""\
-                Received error message '{err.msg}' while executing SQL statement.
-                Please provide a warehouse for the active session role in your project definition file, config.toml file, or via command line.
-                """
-            ),
-            errno=err.errno,
-        )
-    elif "does not exist or not authorized" in err.msg:
-        raise ProgrammingError(
-            msg=dedent(
-                f"""\
-                Received error message '{err.msg}' while executing SQL statement.
-                Please check the name of the resource you are trying to query or the permissions of the role you are using to run the query.
-                """
-            )
-        )
+        raise NoWarehouseSelectedInSessionError(msg=err.msg) from err
     raise err


 def _get_stage_paths_to_sync(
     local_paths_to_sync: List[Path], deploy_root: Path
-) -> List[StagePath]:
+) -> List[StagePathType]:
     """
     Takes a list of paths (files and directories), returning a list of all files recursively relative to the deploy root.
     """
@@ -254,9 +232,11 @@ def execute_post_deploy_hooks(

     with console.phase(f"Executing {deployed_object_type} post-deploy actions"):
         sql_scripts_paths = []
+        display_paths = []
         for hook in post_deploy_hooks:
             if hook.sql_script:
                 sql_scripts_paths.append(hook.sql_script)
+                display_paths.append(hook.display_path)
             else:
                 raise ValueError(
                     f"Unsupported {deployed_object_type} post-deploy hook type: {hook}"
@@ -268,7 +248,7 @@ def execute_post_deploy_hooks(
             sql_scripts_paths,
         )

-        for index, sql_script_path in enumerate(sql_scripts_paths):
+        for index, sql_script_path in enumerate(display_paths):
             console.step(f"Executing SQL script: {sql_script_path}")
             _execute_sql_script(
                 script_content=scripts_content_list[index],
@@ -188,3 +188,35 @@ class IncompatibleParametersError(UsageError):
         super().__init__(
             f"Parameters {comma_separated_options} and {options_with_quotes[-1]} are incompatible and cannot be used simultaneously."
         )
+
+
+class NoWarehouseSelectedInSessionError(ClickException):
+    def __init__(self, msg: str):
+        super().__init__(
+            "Received the following error message while executing SQL statement:\n"
+            f"'{msg}'\n"
+            "Please provide a warehouse for the active session role in your project definition file, config.toml file, or via command line."
+        )
+
+
+class DoesNotExistOrUnauthorizedError(ClickException):
+    def __init__(self, msg: str):
+        super().__init__(
+            "Received the following error message while executing SQL statement:\n"
+            f"'{msg}'\n"
+            "Please check the name of the resource you are trying to query or the permissions of the role you are using to run the query."
+        )
+
+
+class CouldNotUseObjectError(ClickException):
+    def __init__(self, object_type: ObjectType, name: str):
+        super().__init__(
+            f"Could not use {object_type} {name}. Object does not exist, or operation cannot be performed."
+        )
+
+
+class ShowSpecificObjectMultipleRowsError(RuntimeError):
+    def __init__(self, show_obj_query: str):
+        super().__init__(
+            f"Received multiple rows from result of SQL statement: {show_obj_query}. Usage of 'show_specific_object' may not be properly scoped."
+        )
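A sketch of how the simplified generic_sql_error_handler from entities/utils.py (earlier hunks) maps onto these new exception classes; the ProgrammingError is constructed by hand here, with a made-up message containing the phrase the handler matches on:

    from snowflake.connector import ProgrammingError
    from snowflake.cli.api.entities.utils import generic_sql_error_handler
    from snowflake.cli.api.exceptions import DoesNotExistOrUnauthorizedError

    err = ProgrammingError(msg="Object 'MYDB.PUBLIC.FOO' does not exist or not authorized.")
    try:
        generic_sql_error_handler(err)
    except DoesNotExistOrUnauthorizedError as wrapped:
        print(wrapped.message)  # original message plus remediation advice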
@@ -15,6 +15,7 @@
 from __future__ import annotations

 import re
+from pathlib import Path

 from click import ClickException
 from snowflake.cli.api.exceptions import FQNInconsistencyError, FQNNameError
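The next hunk adds FQN.from_stage_path and a user-stage branch to FQN.from_stage; a small sketch of how they could be called (stage names are made up, and only repr() is printed since the rest of the FQN surface is not shown in this diff):

    from snowflake.cli.api.identifiers import FQN

    fqn = FQN.from_stage_path("@db.schema.my_stage/scripts/setup.sql")
    print(repr(fqn))  # Path(...).parts[0] is "@db.schema.my_stage"; the leading "@" is stripped

    user_stage = FQN.from_stage("~")
    print(repr(user_stage))  # the new branch short-circuits user stages to name "~"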
@@ -121,8 +122,15 @@ class FQN:
         name = stage
         if stage.startswith("@"):
             name = stage[1:]
+        if stage.startswith("~"):
+            return cls(name="~", database=None, schema=None)
         return cls.from_string(name)

+    @classmethod
+    def from_stage_path(cls, stage_path: str) -> "FQN":
+        stage = Path(stage_path).parts[0]
+        return cls.from_stage(stage)
+
     @classmethod
     def from_identifier_model_v1(cls, model: ObjectIdentifierBaseModel) -> "FQN":
         """Create an instance from object model."""
@@ -2,13 +2,19 @@ from __future__ import annotations

 import logging
 from pathlib import Path
+from tempfile import TemporaryDirectory, mkstemp
 from typing import Any, Dict, Literal, Optional

 from click import ClickException
 from snowflake.cli._plugins.nativeapp.artifacts import (
     BundleMap,
 )
+from snowflake.cli._plugins.nativeapp.entities.application_package import (
+    ApplicationPackageEntityModel,
+)
 from snowflake.cli._plugins.snowpark.common import is_name_a_templated_one
+from snowflake.cli.api.cli_global_context import get_cli_context
+from snowflake.cli.api.console import cli_console
 from snowflake.cli.api.constants import (
     DEFAULT_ENV_FILE,
     DEFAULT_PAGES_DIR,
@@ -17,7 +23,9 @@ from snowflake.cli.api.constants import (
     SNOWPARK_SHARED_MIXIN,
 )
 from snowflake.cli.api.entities.utils import render_script_template
+from snowflake.cli.api.metrics import CLICounterField
 from snowflake.cli.api.project.schemas.entities.common import (
+    MetaField,
     SqlScriptHookType,
 )
 from snowflake.cli.api.project.schemas.project_definition import (
@@ -37,9 +45,25 @@ from snowflake.cli.api.project.schemas.v1.snowpark.callable import (
 from snowflake.cli.api.project.schemas.v1.snowpark.snowpark import Snowpark
 from snowflake.cli.api.project.schemas.v1.streamlit.streamlit import Streamlit
 from snowflake.cli.api.rendering.jinja import get_basic_jinja_env
+from snowflake.cli.api.utils.definition_rendering import render_definition_template

 log = logging.getLogger(__name__)

+# A directory to hold temporary files created during in-memory definition conversion
+# We need a global reference to this directory to prevent the object from being
+# garbage collected before the files in the directory are used by other parts
+# of the CLI. The directory will then be deleted on interpreter exit
+_IN_MEMORY_CONVERSION_TEMP_DIR: TemporaryDirectory | None = None
+
+
+def _get_temp_dir() -> TemporaryDirectory:
+    global _IN_MEMORY_CONVERSION_TEMP_DIR
+    if _IN_MEMORY_CONVERSION_TEMP_DIR is None:
+        _IN_MEMORY_CONVERSION_TEMP_DIR = TemporaryDirectory(
+            suffix="_pdf_conversion", ignore_cleanup_errors=True
+        )
+    return _IN_MEMORY_CONVERSION_TEMP_DIR
+

 def _is_field_defined(template_context: Optional[Dict[str, Any]], *path: str) -> bool:
     """
@@ -66,20 +90,31 @@ def _is_field_defined(template_context: Optional[Dict[str, Any]], *path: str) ->

 def convert_project_definition_to_v2(
     project_root: Path,
-    pd: ProjectDefinition,
+    definition_v1: ProjectDefinition,
     accept_templates: bool = False,
     template_context: Optional[Dict[str, Any]] = None,
+    in_memory: bool = False,
 ) -> ProjectDefinitionV2:
-    _check_if_project_definition_meets_requirements(pd, accept_templates)
+    _check_if_project_definition_meets_requirements(definition_v1, accept_templates)

-    snowpark_data = convert_snowpark_to_v2_data(pd.snowpark) if pd.snowpark else {}
-    streamlit_data = convert_streamlit_to_v2_data(pd.streamlit) if pd.streamlit else {}
+    snowpark_data = (
+        convert_snowpark_to_v2_data(definition_v1.snowpark)
+        if definition_v1.snowpark
+        else {}
+    )
+    streamlit_data = (
+        convert_streamlit_to_v2_data(definition_v1.streamlit)
+        if definition_v1.streamlit
+        else {}
+    )
     native_app_data = (
-        convert_native_app_to_v2_data(project_root, pd.native_app, template_context)
-        if pd.native_app
+        convert_native_app_to_v2_data(
+            project_root, definition_v1.native_app, template_context
+        )
+        if definition_v1.native_app
         else {}
     )
-    envs = convert_envs_to_v2(pd)
+    envs = convert_envs_to_v2(definition_v1)

     data = {
         "definition_version": "2",
@@ -89,10 +124,22 @@ def convert_project_definition_to_v2(
             native_app_data.get("entities", {}),
         ),
         "mixins": snowpark_data.get("mixins", None),
-        "env": envs,
     }
+    if envs is not None:
+        data["env"] = envs
+
+    if in_memory:
+        # If this is an in-memory conversion, we need to evaluate templates right away
+        # since the file won't be re-read as it would be for a permanent conversion
+        definition_v2 = render_definition_template(data, {}).project_definition
+    else:
+        definition_v2 = ProjectDefinitionV2(**data)

-    return ProjectDefinitionV2(**data)
+    # If the user's files have any template tags in them, they
+    # also need to be migrated to point to the v2 entities
+    _convert_templates_in_files(project_root, definition_v1, definition_v2, in_memory)
+
+    return definition_v2


 def convert_snowpark_to_v2_data(snowpark: Snowpark) -> Dict[str, Any]:
@@ -196,7 +243,7 @@ def convert_streamlit_to_v2_data(streamlit: Streamlit) -> Dict[str, Any]:


 def convert_native_app_to_v2_data(
-    project_root,
+    project_root: Path,
     native_app: NativeApp,
     template_context: Optional[Dict[str, Any]] = None,
 ) -> Dict[str, Any]:
@@ -217,7 +264,7 @@ def convert_native_app_to_v2_data(
     # manifest file from the resultant BundleMap, since the bundle process ensures
     # that only a single source path can map to the corresponding destination path
     bundle_map = BundleMap(
-        project_root=project_root, deploy_root=Path(native_app.deploy_root)
+        project_root=project_root, deploy_root=project_root / native_app.deploy_root
     )
     for artifact in native_app.artifacts:
         bundle_map.add(artifact)
@@ -243,29 +290,6 @@ def convert_native_app_to_v2_data(
         # which use POSIX paths as default values
         return manifest_path.relative_to(project_root).as_posix()

-    def _make_template(template: str) -> str:
-        return f"{PROJECT_TEMPLATE_VARIABLE_OPENING} {template} {PROJECT_TEMPLATE_VARIABLE_CLOSING}"
-
-    def _convert_package_script_files(package_scripts: list[str]):
-        # PDFv2 doesn't support package scripts, only post-deploy scripts, so we
-        # need to convert the Jinja syntax from {{ }} to <% %>
-        # Luckily, package scripts only support {{ package_name }}, so let's convert that tag
-        # to v2 template syntax by running it though the template process with a fake
-        # package name that's actually a valid v2 template, which will be evaluated
-        # when the script is used as a post-deploy script
-        fake_package_replacement_template = _make_template(
-            f"ctx.entities.{package_entity_name}.identifier"
-        )
-        jinja_context = dict(package_name=fake_package_replacement_template)
-        post_deploy_hooks = []
-        for script_file in package_scripts:
-            new_contents = render_script_template(
-                project_root, jinja_context, script_file, get_basic_jinja_env()
-            )
-            (project_root / script_file).write_text(new_contents)
-            post_deploy_hooks.append(SqlScriptHookType(sql_script=script_file))
-        return post_deploy_hooks
-
     package_entity_name = "pkg"
     if (
         native_app.package
@@ -303,12 +327,11 @@ def convert_native_app_to_v2_data(
             package["distribution"] = native_app.package.distribution
         package_meta = _make_meta(native_app.package)
         if native_app.package.scripts:
-            converted_post_deploy_hooks = _convert_package_script_files(
-                native_app.package.scripts
-            )
-            package_meta["post_deploy"] = (
-                package_meta.get("post_deploy", []) + converted_post_deploy_hooks
-            )
+            # Package scripts are not supported in PDFv2 but we
+            # don't convert them here, conversion is deferred until
+            # the final v2 Pydantic model is available
+            # (see _convert_templates_in_files())
+            pass
         if package_meta:
             package["meta"] = package_meta

@@ -352,6 +375,82 @@ def convert_envs_to_v2(pd: ProjectDefinition):
     return None


+def _convert_templates_in_files(
+    project_root: Path,
+    definition_v1: ProjectDefinition,
+    definition_v2: ProjectDefinitionV2,
+    in_memory: bool,
+):
+    """Converts templates in other files to the new format"""
+    # TODO handle artifacts using the "templates" processor
+    # For now this only handles Native App package scripts
+    metrics = get_cli_context().metrics
+    metrics.set_counter_default(CLICounterField.PACKAGE_SCRIPTS, 0)
+
+    if (na := definition_v1.native_app) and (pkg := na.package) and pkg.scripts:
+        metrics.set_counter(CLICounterField.PACKAGE_SCRIPTS, 1)
+        cli_console.warning(
+            "WARNING: native_app.package.scripts is deprecated. Please migrate to using native_app.package.post_deploy."
+        )
+        # If the v1 definition has a Native App with a package, we know
+        # that the v2 definition will have exactly one application package entity
+        pkg_entity: ApplicationPackageEntityModel = list(
+            definition_v2.get_entities_by_type(
+                ApplicationPackageEntityModel.get_type()
+            ).values()
+        )[0]
+        converted_post_deploy_hooks = _convert_package_script_files(
+            project_root, pkg.scripts, pkg_entity, in_memory
+        )
+        if pkg_entity.meta is None:
+            pkg_entity.meta = MetaField()
+        if pkg_entity.meta.post_deploy is None:
+            pkg_entity.meta.post_deploy = []
+        pkg_entity.meta.post_deploy += converted_post_deploy_hooks
+
+
+def _convert_package_script_files(
+    project_root: Path,
+    package_scripts: list[str],
+    pkg_model: ApplicationPackageEntityModel,
+    in_memory: bool,
+):
+    # PDFv2 doesn't support package scripts, only post-deploy scripts, so we
+    # need to convert the Jinja syntax from {{ }} to <% %>
+    # Luckily, package scripts only support {{ package_name }}, so let's convert that tag
+    # to v2 template syntax by running it though the template process with a fake
+    # package name that's actually a valid v2 template, which will be evaluated
+    # when the script is used as a post-deploy script
+    # If we're doing an in-memory conversion, we can just hardcode the converted
+    # package name directly into the script since it's being written to a temporary file
+    package_name_replacement = (
+        pkg_model.fqn.name
+        if in_memory
+        else _make_template(f"ctx.entities.{pkg_model.entity_id}.identifier")
+    )
+    jinja_context = dict(package_name=package_name_replacement)
+    post_deploy_hooks = []
+    for script_file in package_scripts:
+        original_script_file = script_file
+        new_contents = render_script_template(
+            project_root, jinja_context, script_file, get_basic_jinja_env()
+        )
+        if in_memory:
+            # If we're converting the definition in-memory, we can't touch
+            # the package scripts on disk, so we'll write them to a temporary file
+            d = _get_temp_dir().name
+            _, script_file = mkstemp(dir=d, suffix="_converted.sql", text=True)
+        (project_root / script_file).write_text(new_contents)
+        hook = SqlScriptHookType(sql_script=script_file)
+        hook._display_path = original_script_file  # noqa: SLF001
+        post_deploy_hooks.append(hook)
+    return post_deploy_hooks
+
+
+def _make_template(template: str) -> str:
+    return f"{PROJECT_TEMPLATE_VARIABLE_OPENING} {template} {PROJECT_TEMPLATE_VARIABLE_CLOSING}"
+
+

 def _check_if_project_definition_meets_requirements(
     pd: ProjectDefinition, accept_templates: bool
@@ -28,6 +28,17 @@ from snowflake.cli.api.project.schemas.updatable_model import (
 class SqlScriptHookType(UpdatableModel):
     sql_script: str = Field(title="SQL file path relative to the project root")

+    # Used to store a user-friendly path for this script, when the
+    # value of `sql_script` is a path to a different file
+    # This is used in the UI to display the path relative to the
+    # project root when `sql_script` is a actually path to a temp file
+    # generated by the in-memory PDF v1 to v2 conversion
+    _display_path: str = PrivateAttr(default="")
+
+    @property
+    def display_path(self):
+        return self._display_path or self.sql_script
+

 # Currently sql_script is the only supported hook type. Change to a Union once other hook types are added
 PostDeployHook = SqlScriptHookType
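A minimal sketch of the new display_path fallback on SqlScriptHookType (module path per file 46 above); until the in-memory conversion sets the private _display_path, the property simply echoes sql_script:

    from snowflake.cli.api.project.schemas.entities.common import SqlScriptHookType

    hook = SqlScriptHookType(sql_script="scripts/post_deploy.sql")
    print(hook.display_path)  # "scripts/post_deploy.sql"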