snowflake-cli-labs 3.0.0rc0__py3-none-any.whl → 3.0.0rc2__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (66)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/cli_app.py +10 -1
  3. snowflake/cli/_app/snow_connector.py +91 -37
  4. snowflake/cli/_app/telemetry.py +8 -4
  5. snowflake/cli/_app/version_check.py +74 -0
  6. snowflake/cli/_plugins/connection/commands.py +3 -2
  7. snowflake/cli/_plugins/git/commands.py +55 -14
  8. snowflake/cli/_plugins/git/manager.py +14 -6
  9. snowflake/cli/_plugins/nativeapp/codegen/compiler.py +18 -2
  10. snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py +123 -42
  11. snowflake/cli/_plugins/nativeapp/codegen/setup/setup_driver.py.source +5 -2
  12. snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +6 -11
  13. snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py +111 -0
  14. snowflake/cli/_plugins/nativeapp/exceptions.py +3 -3
  15. snowflake/cli/_plugins/nativeapp/manager.py +74 -144
  16. snowflake/cli/_plugins/nativeapp/project_model.py +2 -9
  17. snowflake/cli/_plugins/nativeapp/run_processor.py +56 -260
  18. snowflake/cli/_plugins/nativeapp/same_account_install_method.py +74 -0
  19. snowflake/cli/_plugins/nativeapp/teardown_processor.py +17 -246
  20. snowflake/cli/_plugins/nativeapp/v2_conversions/v2_to_v1_decorator.py +91 -17
  21. snowflake/cli/_plugins/snowpark/commands.py +5 -65
  22. snowflake/cli/_plugins/snowpark/common.py +17 -1
  23. snowflake/cli/_plugins/snowpark/models.py +2 -1
  24. snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +1 -35
  25. snowflake/cli/_plugins/sql/commands.py +1 -2
  26. snowflake/cli/_plugins/stage/commands.py +2 -2
  27. snowflake/cli/_plugins/stage/manager.py +46 -15
  28. snowflake/cli/_plugins/streamlit/commands.py +4 -63
  29. snowflake/cli/_plugins/streamlit/manager.py +13 -0
  30. snowflake/cli/_plugins/workspace/action_context.py +7 -0
  31. snowflake/cli/_plugins/workspace/commands.py +145 -32
  32. snowflake/cli/_plugins/workspace/manager.py +21 -4
  33. snowflake/cli/api/cli_global_context.py +136 -313
  34. snowflake/cli/api/commands/decorators.py +1 -1
  35. snowflake/cli/api/commands/flags.py +106 -102
  36. snowflake/cli/api/commands/snow_typer.py +15 -6
  37. snowflake/cli/api/config.py +18 -5
  38. snowflake/cli/api/connections.py +214 -0
  39. snowflake/cli/api/console/abc.py +4 -2
  40. snowflake/cli/api/constants.py +11 -0
  41. snowflake/cli/api/entities/application_entity.py +687 -2
  42. snowflake/cli/api/entities/application_package_entity.py +407 -9
  43. snowflake/cli/api/entities/common.py +7 -2
  44. snowflake/cli/api/entities/utils.py +80 -20
  45. snowflake/cli/api/exceptions.py +12 -2
  46. snowflake/cli/api/feature_flags.py +0 -2
  47. snowflake/cli/api/identifiers.py +3 -0
  48. snowflake/cli/api/project/definition.py +35 -1
  49. snowflake/cli/api/project/definition_conversion.py +352 -0
  50. snowflake/cli/api/project/schemas/entities/application_package_entity_model.py +17 -0
  51. snowflake/cli/api/project/schemas/entities/common.py +0 -12
  52. snowflake/cli/api/project/schemas/identifier_model.py +2 -2
  53. snowflake/cli/api/project/schemas/project_definition.py +102 -43
  54. snowflake/cli/api/rendering/jinja.py +2 -16
  55. snowflake/cli/api/rendering/project_definition_templates.py +5 -1
  56. snowflake/cli/api/rendering/sql_templates.py +14 -4
  57. snowflake/cli/api/secure_path.py +13 -18
  58. snowflake/cli/api/secure_utils.py +90 -1
  59. snowflake/cli/api/sql_execution.py +13 -0
  60. snowflake/cli/api/utils/definition_rendering.py +7 -7
  61. {snowflake_cli_labs-3.0.0rc0.dist-info → snowflake_cli_labs-3.0.0rc2.dist-info}/METADATA +9 -9
  62. {snowflake_cli_labs-3.0.0rc0.dist-info → snowflake_cli_labs-3.0.0rc2.dist-info}/RECORD +65 -61
  63. snowflake/cli/api/commands/typer_pre_execute.py +0 -26
  64. {snowflake_cli_labs-3.0.0rc0.dist-info → snowflake_cli_labs-3.0.0rc2.dist-info}/WHEEL +0 -0
  65. {snowflake_cli_labs-3.0.0rc0.dist-info → snowflake_cli_labs-3.0.0rc2.dist-info}/entry_points.txt +0 -0
  66. {snowflake_cli_labs-3.0.0rc0.dist-info → snowflake_cli_labs-3.0.0rc2.dist-info}/licenses/LICENSE +0 -0

snowflake/cli/api/feature_flags.py
@@ -52,5 +52,3 @@ class FeatureFlag(FeatureFlagMixin):
     ENABLE_STREAMLIT_VERSIONED_STAGE = BooleanFlag(
         "ENABLE_STREAMLIT_VERSIONED_STAGE", False
     )
-    # TODO: remove in 3.0
-    ENABLE_PROJECT_DEFINITION_V2 = BooleanFlag("ENABLE_PROJECT_DEFINITION_V2", True)

snowflake/cli/api/identifiers.py
@@ -184,3 +184,6 @@ class FQN:
         from snowflake.cli.api.cli_global_context import get_cli_context

         return self.using_connection(get_cli_context().connection)
+
+    def to_dict(self) -> dict:
+        return {"name": self.name, "schema": self.schema, "database": self.database}

snowflake/cli/api/project/definition.py
@@ -18,10 +18,12 @@ from pathlib import Path
 from typing import List, Optional

 import yaml
+from click import ClickException
 from snowflake.cli.api.cli_global_context import get_cli_context
 from snowflake.cli.api.constants import DEFAULT_SIZE_LIMIT_MB
 from snowflake.cli.api.project.schemas.project_definition import (
     ProjectProperties,
+    YamlOverride,
 )
 from snowflake.cli.api.project.util import (
     append_to_identifier,
@@ -36,6 +38,7 @@ from snowflake.cli.api.utils.definition_rendering import (
 )
 from snowflake.cli.api.utils.dict_utils import deep_merge_dicts
 from snowflake.cli.api.utils.types import Context, Definition
+from yaml import MappingNode, SequenceNode

 DEFAULT_USERNAME = "unknown_user"

@@ -45,8 +48,14 @@ def _get_merged_definitions(paths: List[Path]) -> Optional[Definition]:
     if len(spaths) == 0:
         return None

+    loader = yaml.BaseLoader
+    loader.add_constructor(
+        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _no_duplicates_constructor
+    )
+    loader.add_constructor("!override", _override_tag)
+
     with spaths[0].open("r", read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as base_yml:
-        definition = yaml.load(base_yml.read(), Loader=yaml.loader.BaseLoader) or {}
+        definition = yaml.load(base_yml.read(), Loader=loader) or {}

     for override_path in spaths[1:]:
         with override_path.open(
@@ -90,3 +99,28 @@ def default_role():
 def default_application(project_name: str):
     user = sanitize_identifier(get_env_username() or DEFAULT_USERNAME).lower()
     return append_to_identifier(to_identifier(project_name), f"_{user}")
+
+
+def _no_duplicates_constructor(loader, node, deep=False):
+    """
+    Raises error it there are duplicated keys on the same level in the yaml file
+    """
+    mapping = {}
+
+    for key_node, value_node in node.value:
+        key = loader.construct_object(key_node, deep=deep)
+        value = loader.construct_object(value_node, deep=deep)
+        if key in mapping.keys():
+            raise ClickException(
+                f"While loading the project definition file, duplicate key was found: {key}"
+            )
+        mapping[key] = value
+    return loader.construct_mapping(node, deep)
+
+
+def _override_tag(loader, node, deep=False):
+    if isinstance(node, SequenceNode):
+        return YamlOverride(data=loader.construct_sequence(node, deep))
+    if isinstance(node, MappingNode):
+        return YamlOverride(data=loader.construct_mapping(node, deep))
+    return node.value
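
The two loader hooks above change how project definition files are parsed: duplicate keys now fail fast, and nodes tagged !override are wrapped in YamlOverride, presumably so the later deep-merge step replaces them rather than merging. A rough usage sketch, assuming the same module scope as definition.py (i.e. _no_duplicates_constructor, _override_tag and YamlOverride are in scope):

    import yaml

    loader = yaml.BaseLoader
    loader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _no_duplicates_constructor
    )
    loader.add_constructor("!override", _override_tag)

    # A sequence or mapping tagged with !override comes back wrapped in YamlOverride.
    doc = yaml.load("env:\n  tags: !override [a, b]\n", Loader=loader)
    print(type(doc["env"]["tags"]).__name__)   # YamlOverride

    # Duplicate keys on the same level now raise instead of silently overwriting:
    # yaml.load("name: a\nname: b\n", Loader=loader)   # -> ClickException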

snowflake/cli/api/project/definition_conversion.py (new file)
@@ -0,0 +1,352 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+from typing import Any, Dict, Literal, Optional
+
+from click import ClickException
+from snowflake.cli._plugins.nativeapp.artifacts import (
+    build_bundle,
+)
+from snowflake.cli._plugins.snowpark.common import is_name_a_templated_one
+from snowflake.cli.api.constants import (
+    DEFAULT_ENV_FILE,
+    DEFAULT_PAGES_DIR,
+    PROJECT_TEMPLATE_VARIABLE_CLOSING,
+    PROJECT_TEMPLATE_VARIABLE_OPENING,
+    SNOWPARK_SHARED_MIXIN,
+)
+from snowflake.cli.api.entities.utils import render_script_template
+from snowflake.cli.api.project.schemas.entities.common import (
+    SqlScriptHookType,
+)
+from snowflake.cli.api.project.schemas.native_app.application import (
+    Application,
+    ApplicationV11,
+)
+from snowflake.cli.api.project.schemas.native_app.native_app import NativeApp
+from snowflake.cli.api.project.schemas.native_app.package import Package, PackageV11
+from snowflake.cli.api.project.schemas.project_definition import (
+    ProjectDefinition,
+    ProjectDefinitionV2,
+)
+from snowflake.cli.api.project.schemas.snowpark.callable import (
+    FunctionSchema,
+    ProcedureSchema,
+)
+from snowflake.cli.api.project.schemas.snowpark.snowpark import Snowpark
+from snowflake.cli.api.project.schemas.streamlit.streamlit import Streamlit
+from snowflake.cli.api.rendering.jinja import get_basic_jinja_env
+
+log = logging.getLogger(__name__)
+
+
+def convert_project_definition_to_v2(
+    project_root: Path, pd: ProjectDefinition, accept_templates: bool = False
+) -> ProjectDefinitionV2:
+    _check_if_project_definition_meets_requirements(pd, accept_templates)
+
+    snowpark_data = convert_snowpark_to_v2_data(pd.snowpark) if pd.snowpark else {}
+    streamlit_data = convert_streamlit_to_v2_data(pd.streamlit) if pd.streamlit else {}
+    native_app_data = (
+        convert_native_app_to_v2_data(project_root, pd.native_app)
+        if pd.native_app
+        else {}
+    )
+    envs = convert_envs_to_v2(pd)
+
+    data = {
+        "definition_version": "2",
+        "entities": get_list_of_all_entities(
+            snowpark_data.get("entities", {}),
+            streamlit_data.get("entities", {}),
+            native_app_data.get("entities", {}),
+        ),
+        "mixins": snowpark_data.get("mixins", None),
+        "env": envs,
+    }
+
+    return ProjectDefinitionV2(**data)
+
+
+def convert_snowpark_to_v2_data(snowpark: Snowpark) -> Dict[str, Any]:
+    artifact_mapping = {"src": snowpark.src}
+    if snowpark.project_name:
+        artifact_mapping["dest"] = snowpark.project_name
+
+    data: dict = {
+        "mixins": {
+            SNOWPARK_SHARED_MIXIN: {
+                "stage": snowpark.stage_name,
+                "artifacts": [artifact_mapping],
+            }
+        },
+        "entities": {},
+    }
+
+    for index, entity in enumerate([*snowpark.procedures, *snowpark.functions]):
+        identifier = {"name": entity.name}
+        if entity.database is not None:
+            identifier["database"] = entity.database
+        if entity.schema_name is not None:
+            identifier["schema"] = entity.schema_name
+
+        entity_name = (
+            f"snowpark_entity_{index}"
+            if is_name_a_templated_one(entity.name)
+            else entity.name
+        )
+
+        if entity_name in data["entities"]:
+            raise ClickException(
+                f"Entity with name {entity_name} seems to be duplicated. Please rename it and try again."
+            )
+
+        v2_entity = {
+            "type": "function" if isinstance(entity, FunctionSchema) else "procedure",
+            "stage": snowpark.stage_name,
+            "handler": entity.handler,
+            "returns": entity.returns,
+            "signature": entity.signature,
+            "runtime": entity.runtime,
+            "external_access_integrations": entity.external_access_integrations,
+            "secrets": entity.secrets,
+            "imports": entity.imports,
+            "identifier": identifier,
+            "meta": {"use_mixins": [SNOWPARK_SHARED_MIXIN]},
+        }
+        if isinstance(entity, ProcedureSchema):
+            v2_entity["execute_as_caller"] = entity.execute_as_caller
+
+        data["entities"][entity_name] = v2_entity
+
+    return data
+
+
+def convert_streamlit_to_v2_data(streamlit: Streamlit) -> Dict[str, Any]:
+    # Process env file and pages dir
+    environment_file = _process_streamlit_files(streamlit.env_file, "environment")
+    pages_dir = _process_streamlit_files(streamlit.pages_dir, "pages")
+
+    # Build V2 definition
+    artifacts = [
+        streamlit.main_file,
+        environment_file,
+        pages_dir,
+    ]
+    artifacts = [a for a in artifacts if a is not None]
+
+    if streamlit.additional_source_files:
+        artifacts.extend(streamlit.additional_source_files)
+
+    identifier = {"name": streamlit.name}
+    if streamlit.schema_name:
+        identifier["schema"] = streamlit.schema_name
+    if streamlit.database:
+        identifier["database"] = streamlit.database
+
+    streamlit_name = (
+        "streamlit_entity_1"
+        if is_name_a_templated_one(streamlit.name)
+        else streamlit.name
+    )
+
+    data = {
+        "entities": {
+            streamlit_name: {
+                "type": "streamlit",
+                "identifier": identifier,
+                "title": streamlit.title,
+                "query_warehouse": streamlit.query_warehouse,
+                "main_file": str(streamlit.main_file),
+                "pages_dir": str(streamlit.pages_dir),
+                "stage": streamlit.stage,
+                "artifacts": artifacts,
+            }
+        }
+    }
+    return data
+
+
+def convert_native_app_to_v2_data(
+    project_root, native_app: NativeApp
+) -> Dict[str, Any]:
+    def _make_meta(obj: Application | Package):
+        meta = {}
+        if obj.role:
+            meta["role"] = obj.role
+        if obj.warehouse:
+            meta["warehouse"] = obj.warehouse
+        if obj.post_deploy:
+            meta["post_deploy"] = obj.post_deploy
+        return meta
+
+    def _find_manifest():
+        # We don't know which file in the project directory is the actual manifest,
+        # and we can't iterate through the artifacts property since the src can contain
+        # glob patterns. The simplest solution is to bundle the app and find the
+        # manifest file from the resultant BundleMap, since the bundle process ensures
+        # that only a single source path can map to the corresponding destination path
+        try:
+            bundle_map = build_bundle(
+                project_root, Path(native_app.deploy_root), native_app.artifacts
+            )
+        except Exception as e:
+            # The manifest field is required, so we can't gracefully handle bundle failures
+            raise ClickException(
+                f"{e}\nCould not bundle Native App artifacts, unable to perform migration"
+            ) from e
+
+        manifest_path = bundle_map.to_project_path(Path("manifest.yml"))
+        if not manifest_path:
+            # The manifest field is required, so we can't gracefully handle it being missing
+            raise ClickException(
+                "manifest.yml file not found in any Native App artifact sources, "
+                "unable to perform migration"
+            )
+
+        # Use a POSIX path to be consistent with other migrated fields
+        # which use POSIX paths as default values
+        return manifest_path.as_posix()
+
+    def _make_template(template: str) -> str:
+        return f"{PROJECT_TEMPLATE_VARIABLE_OPENING} {template} {PROJECT_TEMPLATE_VARIABLE_CLOSING}"
+
+    def _convert_package_script_files(package_scripts: list[str]):
+        # PDFv2 doesn't support package scripts, only post-deploy scripts, so we
+        # need to convert the Jinja syntax from {{ }} to <% %>
+        # Luckily, package scripts only support {{ package_name }}, so let's convert that tag
+        # to v2 template syntax by running it though the template process with a fake
+        # package name that's actually a valid v2 template, which will be evaluated
+        # when the script is used as a post-deploy script
+        fake_package_replacement_template = _make_template(
+            f"ctx.entities.{package_entity_name}.identifier"
+        )
+        jinja_context = dict(package_name=fake_package_replacement_template)
+        post_deploy_hooks = []
+        for script_file in package_scripts:
+            new_contents = render_script_template(
+                project_root, jinja_context, script_file, get_basic_jinja_env()
+            )
+            (project_root / script_file).write_text(new_contents)
+            post_deploy_hooks.append(SqlScriptHookType(sql_script=script_file))
+        return post_deploy_hooks
+
+    package_entity_name = "pkg"
+    if (
+        native_app.package
+        and native_app.package.name
+        and native_app.package.name != PackageV11.model_fields["name"].default
+    ):
+        package_identifier = native_app.package.name
+    else:
+        # Backport the PackageV11 default name template, updated for PDFv2
+        package_identifier = _make_template(
+            f"fn.concat_ids('{native_app.name}', '_pkg_', fn.sanitize_id(fn.get_username('unknown_user')) | lower)"
+        )
+    package = {
+        "type": "application package",
+        "identifier": package_identifier,
+        "manifest": _find_manifest(),
+        "artifacts": native_app.artifacts,
+        "bundle_root": native_app.bundle_root,
+        "generated_root": native_app.generated_root,
+        "deploy_root": native_app.deploy_root,
+        "stage": native_app.source_stage,
+        "scratch_stage": native_app.scratch_stage,
+    }
+    if native_app.package:
+        package["distribution"] = native_app.package.distribution
+        package_meta = _make_meta(native_app.package)
+        if native_app.package.scripts:
+            converted_post_deploy_hooks = _convert_package_script_files(
+                native_app.package.scripts
+            )
+            package_meta["post_deploy"] = (
+                package_meta.get("post_deploy", []) + converted_post_deploy_hooks
+            )
+        if package_meta:
+            package["meta"] = package_meta
+
+    app_entity_name = "app"
+    if (
+        native_app.application
+        and native_app.application.name
+        and native_app.application.name != ApplicationV11.model_fields["name"].default
+    ):
+        app_identifier = native_app.application.name
+    else:
+        # Backport the ApplicationV11 default name template, updated for PDFv2
+        app_identifier = _make_template(
+            f"fn.concat_ids('{native_app.name}', '_', fn.sanitize_id(fn.get_username('unknown_user')) | lower)"
+        )
+    app = {
+        "type": "application",
+        "identifier": app_identifier,
+        "from": {"target": package_entity_name},
+    }
+    if native_app.application:
+        if app_meta := _make_meta(native_app.application):
+            app["meta"] = app_meta
+
+    return {
+        "entities": {
+            package_entity_name: package,
+            app_entity_name: app,
+        }
+    }
+
+
+def convert_envs_to_v2(pd: ProjectDefinition):
+    if hasattr(pd, "env") and pd.env:
+        data = {k: v for k, v in pd.env.items()}
+        return data
+    return None
+
+
+def _check_if_project_definition_meets_requirements(
+    pd: ProjectDefinition, accept_templates: bool
+):
+    if pd.meets_version_requirement("2"):
+        raise ClickException("Project definition is already at version 2.")
+
+    if PROJECT_TEMPLATE_VARIABLE_OPENING in str(pd):
+        if not accept_templates:
+            raise ClickException(
+                "Project definition contains templates. They may not be migrated correctly, and require manual migration."
+                "You can try again with --accept-templates option, to attempt automatic migration."
+            )
+        log.warning(
+            "Your V1 definition contains templates. We cannot guarantee the correctness of the migration."
+        )
+
+
+def _process_streamlit_files(
+    file_name: Optional[str], file_type: Literal["pages", "environment"]
+):
+    default = DEFAULT_PAGES_DIR if file_type == "pages" else DEFAULT_ENV_FILE
+
+    if file_name and not Path(file_name).exists():
+        raise ClickException(f"Provided file {file_name} does not exist")
+    elif file_name is None and Path(default).exists():
+        file_name = default
+    return file_name
+
+
+def get_list_of_all_entities(
+    snowpark_entities: Dict[str, Any],
+    streamlit_entities: Dict[str, Any],
+    native_app_entities: Dict[str, Any],
+):
+    # Check all combinations of entity types for overlapping names
+    # (No need to use itertools here, PDFv1 only supports these three types)
+    for types, first, second in [
+        ("streamlit and snowpark", streamlit_entities, snowpark_entities),
+        ("streamlit and native app", streamlit_entities, native_app_entities),
+        ("native app and snowpark", native_app_entities, snowpark_entities),
+    ]:
+        if first.keys() & second.keys():
+            raise ClickException(
+                f"In your project, {types} entities share the same name. Please rename them and try again."
+            )
+    return snowpark_entities | streamlit_entities | native_app_entities
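
The new definition_conversion module is the core of the v1 to v2 migration: each v1 section (snowpark, streamlit, native_app) is mapped to v2 entities, entity-name collisions are rejected, and the result is validated as a ProjectDefinitionV2. A hedged sketch of driving it directly; how the v1 ProjectDefinition is loaded here is an assumption for illustration, only convert_project_definition_to_v2 itself comes from this diff:

    from pathlib import Path

    import yaml
    from snowflake.cli.api.project.definition_conversion import (
        convert_project_definition_to_v2,
    )
    from snowflake.cli.api.project.schemas.project_definition import ProjectDefinition

    project_root = Path(".")
    # Assumption: a v1 snowflake.yml can be parsed straight into the ProjectDefinition model.
    raw = yaml.safe_load((project_root / "snowflake.yml").read_text())
    pd_v1 = ProjectDefinition(**raw)

    pd_v2 = convert_project_definition_to_v2(project_root, pd_v1, accept_templates=True)
    # Dump the v2 model back to YAML (pydantic v2 API); mode="json" keeps values serializable.
    (project_root / "snowflake_v2.yml").write_text(
        yaml.safe_dump(pd_v2.model_dump(mode="json", exclude_none=True, by_alias=True))
    )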

snowflake/cli/api/project/schemas/entities/application_package_entity_model.py
@@ -75,3 +75,20 @@ class ApplicationPackageEntityModel(EntityModelBase):
         if isinstance(input_value, Identifier):
             return input_value.model_copy(update=dict(name=with_suffix))
         return with_suffix
+
+    @field_validator("artifacts")
+    @classmethod
+    def transform_artifacts(
+        cls, orig_artifacts: List[Union[PathMapping, str]]
+    ) -> List[PathMapping]:
+        transformed_artifacts = []
+        if orig_artifacts is None:
+            return transformed_artifacts
+
+        for artifact in orig_artifacts:
+            if isinstance(artifact, PathMapping):
+                transformed_artifacts.append(artifact)
+            else:
+                transformed_artifacts.append(PathMapping(src=artifact))
+
+        return transformed_artifacts
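
The validator lets the artifacts field of an application package entity be written either as plain strings or as full path mappings; both normalize to PathMapping objects during validation. A standalone paraphrase of that behavior (the PathMapping model here is a stand-in, not the CLI's class):

    from typing import List, Optional, Union

    from pydantic import BaseModel

    class PathMapping(BaseModel):  # stand-in for the CLI's PathMapping schema
        src: str
        dest: Optional[str] = None

    def normalize(artifacts: Optional[List[Union[PathMapping, str]]]) -> List[PathMapping]:
        # Mirrors transform_artifacts: strings become PathMapping(src=...), mappings pass through.
        if artifacts is None:
            return []
        return [a if isinstance(a, PathMapping) else PathMapping(src=a) for a in artifacts]

    print(normalize(["app/*", PathMapping(src="src/", dest="code/")]))
    # -> [PathMapping(src='app/*', dest=None), PathMapping(src='src/', dest='code/')]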

snowflake/cli/api/project/schemas/entities/common.py
@@ -61,18 +61,6 @@ class MetaField(UpdatableModel):
         return mixins


-class DefaultsField(UpdatableModel):
-    schema_: Optional[str] = Field(
-        title="Schema.",
-        alias="schema",
-        default=None,
-    )
-    stage: Optional[str] = Field(
-        title="Stage.",
-        default=None,
-    )
-
-
 class EntityModelBase(ABC, UpdatableModel):
     @classmethod
     def get_type(cls) -> str:

snowflake/cli/api/project/schemas/identifier_model.py
@@ -25,8 +25,8 @@ from snowflake.cli.api.project.schemas.updatable_model import (

 class Identifier(UpdatableModel):
     name: str = Field(title="Entity name")
-    schema_: str = Field(title="Entity schema", alias="schema", default=None)
-    database: str = Field(title="Entity database", default=None)
+    schema_: Optional[str] = Field(title="Entity schema", alias="schema", default=None)
+    database: Optional[str] = Field(title="Entity database", default=None)


 class ObjectIdentifierBaseModel:
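
The annotation change makes the optionality explicit: an Identifier with only a name validates, and the other parts come back as None. A small sketch against the module shown above:

    from snowflake.cli.api.project.schemas.identifier_model import Identifier

    ident = Identifier(name="my_object")
    assert ident.schema_ is None and ident.database is None

    # "schema" is the declared alias for the schema_ field.
    fq = Identifier(name="my_object", schema="PUBLIC", database="MY_DB")
    assert fq.schema_ == "PUBLIC" and fq.database == "MY_DB"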