sqlmesh 0.217.1.dev1__py3-none-any.whl → 0.227.2.dev20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlmesh/__init__.py +12 -2
- sqlmesh/_version.py +2 -2
- sqlmesh/cli/project_init.py +10 -2
- sqlmesh/core/_typing.py +1 -0
- sqlmesh/core/audit/definition.py +8 -2
- sqlmesh/core/config/__init__.py +1 -1
- sqlmesh/core/config/connection.py +20 -5
- sqlmesh/core/config/dbt.py +13 -0
- sqlmesh/core/config/janitor.py +12 -0
- sqlmesh/core/config/loader.py +7 -0
- sqlmesh/core/config/model.py +2 -0
- sqlmesh/core/config/root.py +3 -0
- sqlmesh/core/console.py +80 -2
- sqlmesh/core/constants.py +1 -1
- sqlmesh/core/context.py +112 -35
- sqlmesh/core/dialect.py +3 -0
- sqlmesh/core/engine_adapter/_typing.py +2 -0
- sqlmesh/core/engine_adapter/base.py +330 -23
- sqlmesh/core/engine_adapter/base_postgres.py +17 -1
- sqlmesh/core/engine_adapter/bigquery.py +146 -7
- sqlmesh/core/engine_adapter/clickhouse.py +17 -13
- sqlmesh/core/engine_adapter/databricks.py +50 -2
- sqlmesh/core/engine_adapter/fabric.py +110 -29
- sqlmesh/core/engine_adapter/mixins.py +142 -48
- sqlmesh/core/engine_adapter/mssql.py +15 -4
- sqlmesh/core/engine_adapter/mysql.py +2 -2
- sqlmesh/core/engine_adapter/postgres.py +9 -3
- sqlmesh/core/engine_adapter/redshift.py +4 -0
- sqlmesh/core/engine_adapter/risingwave.py +1 -0
- sqlmesh/core/engine_adapter/shared.py +6 -0
- sqlmesh/core/engine_adapter/snowflake.py +82 -11
- sqlmesh/core/engine_adapter/spark.py +14 -10
- sqlmesh/core/engine_adapter/trino.py +5 -2
- sqlmesh/core/janitor.py +181 -0
- sqlmesh/core/lineage.py +1 -0
- sqlmesh/core/linter/rules/builtin.py +15 -0
- sqlmesh/core/loader.py +17 -30
- sqlmesh/core/macros.py +35 -13
- sqlmesh/core/model/common.py +2 -0
- sqlmesh/core/model/definition.py +72 -4
- sqlmesh/core/model/kind.py +66 -2
- sqlmesh/core/model/meta.py +107 -2
- sqlmesh/core/node.py +101 -2
- sqlmesh/core/plan/builder.py +15 -10
- sqlmesh/core/plan/common.py +196 -2
- sqlmesh/core/plan/definition.py +21 -6
- sqlmesh/core/plan/evaluator.py +72 -113
- sqlmesh/core/plan/explainer.py +90 -8
- sqlmesh/core/plan/stages.py +42 -21
- sqlmesh/core/renderer.py +26 -18
- sqlmesh/core/scheduler.py +60 -19
- sqlmesh/core/selector.py +137 -9
- sqlmesh/core/signal.py +64 -1
- sqlmesh/core/snapshot/__init__.py +1 -0
- sqlmesh/core/snapshot/definition.py +109 -25
- sqlmesh/core/snapshot/evaluator.py +610 -50
- sqlmesh/core/state_sync/__init__.py +0 -1
- sqlmesh/core/state_sync/base.py +31 -27
- sqlmesh/core/state_sync/cache.py +12 -4
- sqlmesh/core/state_sync/common.py +216 -111
- sqlmesh/core/state_sync/db/facade.py +30 -15
- sqlmesh/core/state_sync/db/interval.py +27 -7
- sqlmesh/core/state_sync/db/migrator.py +14 -8
- sqlmesh/core/state_sync/db/snapshot.py +119 -87
- sqlmesh/core/table_diff.py +2 -2
- sqlmesh/core/test/definition.py +14 -9
- sqlmesh/core/test/discovery.py +4 -0
- sqlmesh/dbt/adapter.py +20 -11
- sqlmesh/dbt/basemodel.py +52 -41
- sqlmesh/dbt/builtin.py +27 -11
- sqlmesh/dbt/column.py +17 -5
- sqlmesh/dbt/common.py +4 -2
- sqlmesh/dbt/context.py +14 -1
- sqlmesh/dbt/loader.py +60 -8
- sqlmesh/dbt/manifest.py +136 -8
- sqlmesh/dbt/model.py +105 -25
- sqlmesh/dbt/package.py +16 -1
- sqlmesh/dbt/profile.py +3 -3
- sqlmesh/dbt/project.py +12 -7
- sqlmesh/dbt/seed.py +1 -1
- sqlmesh/dbt/source.py +6 -1
- sqlmesh/dbt/target.py +25 -6
- sqlmesh/dbt/test.py +31 -1
- sqlmesh/integrations/github/cicd/controller.py +6 -2
- sqlmesh/lsp/context.py +4 -2
- sqlmesh/magics.py +1 -1
- sqlmesh/migrations/v0000_baseline.py +3 -6
- sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +2 -5
- sqlmesh/migrations/v0062_add_model_gateway.py +2 -2
- sqlmesh/migrations/v0063_change_signals.py +2 -4
- sqlmesh/migrations/v0064_join_when_matched_strings.py +2 -4
- sqlmesh/migrations/v0065_add_model_optimize.py +2 -2
- sqlmesh/migrations/v0066_add_auto_restatements.py +2 -6
- sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +2 -2
- sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +2 -2
- sqlmesh/migrations/v0069_update_dev_table_suffix.py +2 -4
- sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +2 -2
- sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +2 -6
- sqlmesh/migrations/v0072_add_environment_statements.py +2 -4
- sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +2 -4
- sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +2 -2
- sqlmesh/migrations/v0075_remove_validate_query.py +2 -4
- sqlmesh/migrations/v0076_add_cron_tz.py +2 -2
- sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +2 -2
- sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +2 -4
- sqlmesh/migrations/v0079_add_gateway_managed_property.py +7 -9
- sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +2 -2
- sqlmesh/migrations/v0081_update_partitioned_by.py +2 -4
- sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +2 -4
- sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +2 -2
- sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +2 -2
- sqlmesh/migrations/v0085_deterministic_repr.py +2 -4
- sqlmesh/migrations/v0086_check_deterministic_bug.py +2 -4
- sqlmesh/migrations/v0087_normalize_blueprint_variables.py +2 -4
- sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +2 -4
- sqlmesh/migrations/v0089_add_virtual_environment_mode.py +2 -2
- sqlmesh/migrations/v0090_add_forward_only_column.py +2 -6
- sqlmesh/migrations/v0091_on_additive_change.py +2 -2
- sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +2 -4
- sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +2 -2
- sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +2 -6
- sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +2 -4
- sqlmesh/migrations/v0096_remove_plan_dags_table.py +2 -4
- sqlmesh/migrations/v0097_add_dbt_name_in_node.py +2 -2
- sqlmesh/migrations/v0098_add_dbt_node_info_in_node.py +103 -0
- sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
- sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
- sqlmesh/utils/__init__.py +8 -1
- sqlmesh/utils/cache.py +5 -1
- sqlmesh/utils/date.py +1 -1
- sqlmesh/utils/errors.py +4 -0
- sqlmesh/utils/git.py +3 -1
- sqlmesh/utils/jinja.py +25 -2
- sqlmesh/utils/pydantic.py +6 -6
- sqlmesh/utils/windows.py +13 -3
- {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/METADATA +5 -5
- {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/RECORD +188 -183
- sqlmesh_dbt/cli.py +70 -7
- sqlmesh_dbt/console.py +14 -6
- sqlmesh_dbt/operations.py +103 -24
- sqlmesh_dbt/selectors.py +39 -1
- web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
- web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
- web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
- web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
- web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
- web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
- web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
- web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
- web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
- web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
- web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
- web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
- web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
- web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
- web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
- web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
- web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
- web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
- web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
- web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
- web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
- web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
- web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
- web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
- web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
- web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
- web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
- web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
- web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
- web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
- web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
- web/client/dist/assets/context-BctCsyGb.js +71 -0
- web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
- web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
- web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
- web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
- web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
- web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
- web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
- web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
- web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
- web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
- web/client/dist/index.html +1 -1
- web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
- web/client/dist/assets/context-DgX0fp2E.js +0 -68
- {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/WHEEL +0 -0
- {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/entry_points.txt +0 -0
- {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/licenses/LICENSE +0 -0
- {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/top_level.txt +0 -0
sqlmesh/core/engine_adapter/trino.py
CHANGED

@@ -71,7 +71,7 @@ class TrinoEngineAdapter(
     MAX_TIMESTAMP_PRECISION = 3
 
     @property
-    def schema_location_mapping(self) -> t.Optional[
+    def schema_location_mapping(self) -> t.Optional[t.Dict[re.Pattern, str]]:
         return self._extra_config.get("schema_location_mapping")
 
     @property
@@ -86,6 +86,8 @@ class TrinoEngineAdapter(
     def get_catalog_type(self, catalog: t.Optional[str]) -> str:
         row: t.Tuple = tuple()
         if catalog:
+            if catalog_type_override := self._catalog_type_overrides.get(catalog):
+                return catalog_type_override
             row = (
                 self.fetchone(
                     f"select connector_name from system.metadata.catalogs where catalog_name='{catalog}'"
@@ -282,6 +284,7 @@ class TrinoEngineAdapter(
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         expressions: t.Optional[t.List[exp.PrimaryKey]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> exp.Schema:
         if "delta_lake" in self.get_catalog_type_from_table(table):
             target_columns_to_types = self._to_delta_ts(target_columns_to_types)
@@ -300,7 +303,7 @@ class TrinoEngineAdapter(
         execution_time: t.Union[TimeLike, exp.Column],
         invalidate_hard_deletes: bool = True,
         updated_at_col: t.Optional[exp.Column] = None,
-        check_columns: t.Optional[t.Union[exp.Star, t.Sequence[exp.
+        check_columns: t.Optional[t.Union[exp.Star, t.Sequence[exp.Expression]]] = None,
         updated_at_as_valid_from: bool = False,
         execution_time_as_valid_from: bool = False,
         target_columns_to_types: t.Optional[t.Dict[str, exp.DataType]] = None,
sqlmesh/core/janitor.py
ADDED

@@ -0,0 +1,181 @@
+from __future__ import annotations
+
+import typing as t
+
+from sqlglot import exp
+
+from sqlmesh.core.engine_adapter import EngineAdapter
+from sqlmesh.core.console import Console
+from sqlmesh.core.dialect import schema_
+from sqlmesh.core.environment import Environment
+from sqlmesh.core.snapshot import SnapshotEvaluator
+from sqlmesh.core.state_sync import StateSync
+from sqlmesh.core.state_sync.common import (
+    logger,
+    iter_expired_snapshot_batches,
+    RowBoundary,
+    ExpiredBatchRange,
+)
+from sqlmesh.utils.errors import SQLMeshError
+
+
+def cleanup_expired_views(
+    default_adapter: EngineAdapter,
+    engine_adapters: t.Dict[str, EngineAdapter],
+    environments: t.List[Environment],
+    warn_on_delete_failure: bool = False,
+    console: t.Optional[Console] = None,
+) -> None:
+    expired_schema_or_catalog_environments = [
+        environment
+        for environment in environments
+        if environment.suffix_target.is_schema or environment.suffix_target.is_catalog
+    ]
+    expired_table_environments = [
+        environment for environment in environments if environment.suffix_target.is_table
+    ]
+
+    # We have to use the corresponding adapter if the virtual layer is gateway managed
+    def get_adapter(gateway_managed: bool, gateway: t.Optional[str] = None) -> EngineAdapter:
+        if gateway_managed and gateway:
+            return engine_adapters.get(gateway, default_adapter)
+        return default_adapter
+
+    catalogs_to_drop: t.Set[t.Tuple[EngineAdapter, str]] = set()
+    schemas_to_drop: t.Set[t.Tuple[EngineAdapter, exp.Table]] = set()
+
+    # Collect schemas and catalogs to drop
+    for engine_adapter, expired_catalog, expired_schema, suffix_target in {
+        (
+            (engine_adapter := get_adapter(environment.gateway_managed, snapshot.model_gateway)),
+            snapshot.qualified_view_name.catalog_for_environment(
+                environment.naming_info, dialect=engine_adapter.dialect
+            ),
+            snapshot.qualified_view_name.schema_for_environment(
+                environment.naming_info, dialect=engine_adapter.dialect
+            ),
+            environment.suffix_target,
+        )
+        for environment in expired_schema_or_catalog_environments
+        for snapshot in environment.snapshots
+        if snapshot.is_model and not snapshot.is_symbolic
+    }:
+        if suffix_target.is_catalog:
+            if expired_catalog:
+                catalogs_to_drop.add((engine_adapter, expired_catalog))
+        else:
+            schema = schema_(expired_schema, expired_catalog)
+            schemas_to_drop.add((engine_adapter, schema))
+
+    # Drop the views for the expired environments
+    for engine_adapter, expired_view in {
+        (
+            (engine_adapter := get_adapter(environment.gateway_managed, snapshot.model_gateway)),
+            snapshot.qualified_view_name.for_environment(
+                environment.naming_info, dialect=engine_adapter.dialect
+            ),
+        )
+        for environment in expired_table_environments
+        for snapshot in environment.snapshots
+        if snapshot.is_model and not snapshot.is_symbolic
+    }:
+        try:
+            engine_adapter.drop_view(expired_view, ignore_if_not_exists=True)
+            if console:
+                console.update_cleanup_progress(expired_view)
+        except Exception as e:
+            message = f"Failed to drop the expired environment view '{expired_view}': {e}"
+            if warn_on_delete_failure:
+                logger.warning(message)
+            else:
+                raise SQLMeshError(message) from e
+
+    # Drop the schemas for the expired environments
+    for engine_adapter, schema in schemas_to_drop:
+        try:
+            engine_adapter.drop_schema(
+                schema,
+                ignore_if_not_exists=True,
+                cascade=True,
+            )
+            if console:
+                console.update_cleanup_progress(schema.sql(dialect=engine_adapter.dialect))
+        except Exception as e:
+            message = f"Failed to drop the expired environment schema '{schema}': {e}"
+            if warn_on_delete_failure:
+                logger.warning(message)
+            else:
+                raise SQLMeshError(message) from e
+
+    # Drop any catalogs that were associated with a snapshot where the engine adapter supports dropping catalogs
+    # catalogs_to_drop is only populated when environment_suffix_target is set to 'catalog'
+    for engine_adapter, catalog in catalogs_to_drop:
+        if engine_adapter.SUPPORTS_CREATE_DROP_CATALOG:
+            try:
+                engine_adapter.drop_catalog(catalog)
+                if console:
+                    console.update_cleanup_progress(catalog)
+            except Exception as e:
+                message = f"Failed to drop the expired environment catalog '{catalog}': {e}"
+                if warn_on_delete_failure:
+                    logger.warning(message)
+                else:
+                    raise SQLMeshError(message) from e
+
+
+def delete_expired_snapshots(
+    state_sync: StateSync,
+    snapshot_evaluator: SnapshotEvaluator,
+    *,
+    current_ts: int,
+    ignore_ttl: bool = False,
+    batch_size: t.Optional[int] = None,
+    console: t.Optional[Console] = None,
+) -> None:
+    """Delete all expired snapshots in batches.
+
+    This helper function encapsulates the logic for deleting expired snapshots in batches,
+    eliminating code duplication across different use cases.
+
+    Args:
+        state_sync: StateSync instance to query and delete expired snapshots from.
+        snapshot_evaluator: SnapshotEvaluator instance to clean up tables associated with snapshots.
+        current_ts: Timestamp used to evaluate expiration.
+        ignore_ttl: If True, include snapshots regardless of TTL (only checks if unreferenced).
+        batch_size: Maximum number of snapshots to fetch per batch.
+        console: Optional console for reporting progress.
+
+    Returns:
+        The total number of deleted expired snapshots.
+    """
+    num_expired_snapshots = 0
+    for batch in iter_expired_snapshot_batches(
+        state_reader=state_sync,
+        current_ts=current_ts,
+        ignore_ttl=ignore_ttl,
+        batch_size=batch_size,
+    ):
+        end_info = (
+            f"updated_ts={batch.batch_range.end.updated_ts}"
+            if isinstance(batch.batch_range.end, RowBoundary)
+            else f"limit={batch.batch_range.end.batch_size}"
+        )
+        logger.info(
+            "Processing batch of size %s with end %s",
+            len(batch.expired_snapshot_ids),
+            end_info,
+        )
+        snapshot_evaluator.cleanup(
+            target_snapshots=batch.cleanup_tasks,
+            on_complete=console.update_cleanup_progress if console else None,
+        )
+        state_sync.delete_expired_snapshots(
+            batch_range=ExpiredBatchRange(
+                start=RowBoundary.lowest_boundary(),
+                end=batch.batch_range.end,
+            ),
+            ignore_ttl=ignore_ttl,
+        )
+        logger.info("Cleaned up expired snapshots batch")
+        num_expired_snapshots += len(batch.expired_snapshot_ids)
+    logger.info("Cleaned up %s expired snapshots", num_expired_snapshots)
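
A hedged usage sketch of the two new helpers, wired the way a janitor run would plausibly call them; `state_sync`, `snapshot_evaluator`, `default_adapter`, `engine_adapters`, `expired_environments`, and `console` are assumed to already be in the caller's scope, and `now_timestamp` is assumed to be the epoch-millis helper from `sqlmesh.utils.date`:

    from sqlmesh.core.janitor import cleanup_expired_views, delete_expired_snapshots
    from sqlmesh.utils.date import now_timestamp  # assumed helper returning epoch millis

    # Drop snapshot tables and state rows for snapshots past their TTL, in batches.
    delete_expired_snapshots(
        state_sync,
        snapshot_evaluator,
        current_ts=now_timestamp(),
        batch_size=1000,
        console=console,
    )

    # Then remove the virtual-layer views/schemas/catalogs of expired environments,
    # warning instead of raising if an individual drop fails.
    cleanup_expired_views(
        default_adapter,
        engine_adapters,
        expired_environments,
        warn_on_delete_failure=True,
        console=console,
    )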
sqlmesh/core/lineage.py
CHANGED

sqlmesh/core/linter/rules/builtin.py
CHANGED

@@ -129,6 +129,21 @@ class NoMissingAudits(Rule):
         return self.violation()
 
 
+class NoMissingUnitTest(Rule):
+    """All models must have a unit test found in the tests/ directory yaml files"""
+
+    def check_model(self, model: Model) -> t.Optional[RuleViolation]:
+        # External models cannot have unit tests
+        if isinstance(model, ExternalModel):
+            return None
+
+        if model.name not in self.context.models_with_tests:
+            return self.violation(
+                violation_msg=f"Model {model.name} is missing unit test(s). Please add in the tests/ directory."
+            )
+        return None
+
+
 class NoMissingExternalModels(Rule):
     """All external models must be registered in the external_models.yaml file"""
 
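
Like the other built-in rules, `NoMissingUnitTest` is opt-in through the linter config. A sketch of enabling it, assuming the rule is referenced by its lowercased class name (the convention the existing built-in rules follow) and that `LinterConfig` lives in `sqlmesh.core.config.linter`:

    from sqlmesh.core.config import Config
    from sqlmesh.core.config.linter import LinterConfig  # assumed import path

    config = Config(
        linter=LinterConfig(
            enabled=True,
            # "nomissingunittest" is inferred from the class name above
            rules=["nomissingunittest"],
        ),
    )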
sqlmesh/core/loader.py
CHANGED

@@ -35,7 +35,7 @@ from sqlmesh.core.model import (
 from sqlmesh.core.model import model as model_registry
 from sqlmesh.core.model.common import make_python_env
 from sqlmesh.core.signal import signal
-from sqlmesh.core.test import ModelTestMetadata
+from sqlmesh.core.test import ModelTestMetadata
 from sqlmesh.utils import UniqueKeyDict, sys_path
 from sqlmesh.utils.errors import ConfigError
 from sqlmesh.utils.jinja import JinjaMacroRegistry, MacroExtractor
@@ -64,6 +64,7 @@ class LoadedProject:
     excluded_requirements: t.Set[str]
     environment_statements: t.List[EnvironmentStatements]
     user_rules: RuleSet
+    model_test_metadata: t.List[ModelTestMetadata]
 
 
 class CacheBase(abc.ABC):
@@ -243,6 +244,8 @@ class Loader(abc.ABC):
 
         user_rules = self._load_linting_rules()
 
+        model_test_metadata = self.load_model_tests()
+
         project = LoadedProject(
             macros=macros,
             jinja_macros=jinja_macros,
@@ -254,6 +257,7 @@ class Loader(abc.ABC):
             excluded_requirements=excluded_requirements,
             environment_statements=environment_statements,
             user_rules=user_rules,
+            model_test_metadata=model_test_metadata,
         )
         return project
 
@@ -423,9 +427,7 @@ class Loader(abc.ABC):
         """Loads user linting rules"""
         return RuleSet()
 
-    def load_model_tests(
-        self, tests: t.Optional[t.List[str]] = None, patterns: list[str] | None = None
-    ) -> t.List[ModelTestMetadata]:
+    def load_model_tests(self) -> t.List[ModelTestMetadata]:
         """Loads YAML-based model tests"""
         return []
 
@@ -864,38 +866,23 @@ class SqlMeshLoader(Loader):
 
         return model_test_metadata
 
-    def load_model_tests(
-        self, tests: t.Optional[t.List[str]] = None, patterns: list[str] | None = None
-    ) -> t.List[ModelTestMetadata]:
+    def load_model_tests(self) -> t.List[ModelTestMetadata]:
         """Loads YAML-based model tests"""
         test_meta_list: t.List[ModelTestMetadata] = []
 
-
-        for test in tests:
-            filename, test_name = test.split("::", maxsplit=1) if "::" in test else (test, "")
-
-            test_meta = self._load_model_test_file(Path(filename))
-            if test_name:
-                test_meta_list.append(test_meta[test_name])
-            else:
-                test_meta_list.extend(test_meta.values())
-        else:
-            search_path = Path(self.config_path) / c.TESTS
+        search_path = Path(self.config_path) / c.TESTS
 
-
-
-
+        for yaml_file in itertools.chain(
+            search_path.glob("**/test*.yaml"),
+            search_path.glob("**/test*.yml"),
+        ):
+            if any(
+                yaml_file.match(ignore_pattern)
+                for ignore_pattern in self.config.ignore_patterns or []
             ):
-
-                yaml_file.match(ignore_pattern)
-                for ignore_pattern in self.config.ignore_patterns or []
-            ):
-                continue
-
-            test_meta_list.extend(self._load_model_test_file(yaml_file).values())
+                continue
 
-
-        test_meta_list = filter_tests_by_patterns(test_meta_list, patterns)
+            test_meta_list.extend(self._load_model_test_file(yaml_file).values())
 
         return test_meta_list
 
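
The rewritten `load_model_tests` drops the `tests` and `patterns` parameters (the removed `filter_tests_by_patterns` call suggests selection now happens in the callers) and discovers files purely by glob, honoring `ignore_patterns`. A small standalone sketch of what the discovery matches:

    import itertools
    from pathlib import Path

    search_path = Path("tests")  # hypothetical project tests/ directory
    discovered = sorted(
        itertools.chain(
            search_path.glob("**/test*.yaml"),
            search_path.glob("**/test*.yml"),
        )
    )
    # tests/test_orders.yaml and tests/unit/test_customers.yml would match;
    # tests/orders_test.yaml would not, since the file name must start with "test".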
sqlmesh/core/macros.py
CHANGED

@@ -128,6 +128,17 @@ def _macro_str_replace(text: str) -> str:
     return f"self.template({text}, locals())"
 
 
+class CaseInsensitiveMapping(t.Dict[str, t.Any]):
+    def __init__(self, data: t.Dict[str, t.Any]) -> None:
+        super().__init__(data)
+
+    def __getitem__(self, key: str) -> t.Any:
+        return super().__getitem__(key.lower())
+
+    def get(self, key: str, default: t.Any = None, /) -> t.Any:
+        return super().get(key.lower(), default)
+
+
 class MacroDialect(Python):
     class Generator(Python.Generator):
         TRANSFORMS = {
@@ -256,14 +267,18 @@ class MacroEvaluator:
             changed = True
         variables = self.variables
 
-
+        # This makes all variables case-insensitive, e.g. @X is the same as @x. We do this
+        # for consistency, since `variables` and `blueprint_variables` are normalized.
+        var_name = node.name.lower()
+
+        if var_name not in self.locals and var_name not in variables:
             if not isinstance(node.parent, StagedFilePath):
                 raise SQLMeshError(f"Macro variable '{node.name}' is undefined.")
 
             return node
 
         # Precedence order is locals (e.g. @DEF) > blueprint variables > config variables
-        value = self.locals.get(
+        value = self.locals.get(var_name, variables.get(var_name))
         if isinstance(value, list):
             return exp.convert(
                 tuple(
@@ -313,11 +328,16 @@ class MacroEvaluator:
         """
         # We try to convert all variables into sqlglot expressions because they're going to be converted
        # into strings; in sql we don't convert strings because that would result in adding quotes
-
-            k: convert_sql(v, self.dialect)
+        base_mapping = {
+            k.lower(): convert_sql(v, self.dialect)
             for k, v in chain(self.variables.items(), self.locals.items(), local_variables.items())
+            if k.lower()
+            not in (
+                "engine_adapter",
+                "snapshot",
+            )
         }
-        return MacroStrTemplate(str(text)).safe_substitute(
+        return MacroStrTemplate(str(text)).safe_substitute(CaseInsensitiveMapping(base_mapping))
 
     def evaluate(self, node: MacroFunc) -> exp.Expression | t.List[exp.Expression] | None:
         if isinstance(node, MacroDef):
@@ -327,7 +347,9 @@ class MacroEvaluator:
                 args[0] if len(args) == 1 else exp.Tuple(expressions=list(args))
             )
         else:
-
+            # Make variables defined through `@DEF` case-insensitive
+            self.locals[node.name.lower()] = self.transform(node.expression)
+
             return node
 
         if isinstance(node, (MacroSQL, MacroStrReplace)):
@@ -630,7 +652,7 @@ def _norm_var_arg_lambda(
 ) -> exp.Expression | t.List[exp.Expression] | None:
     if isinstance(node, (exp.Identifier, exp.Var)):
         if not isinstance(node.parent, exp.Column):
-            name = node.name
+            name = node.name.lower()
             if name in args:
                 return args[name].copy()
             if name in evaluator.locals:
@@ -663,7 +685,7 @@ def _norm_var_arg_lambda(
     return expressions, lambda args: func.this.transform(
         substitute,
         {
-            expression.name: arg
+            expression.name.lower(): arg
             for expression, arg in zip(
                 func.expressions, args.expressions if isinstance(args, exp.Tuple) else [args]
             )
@@ -1128,7 +1150,7 @@ def haversine_distance(
 def pivot(
     evaluator: MacroEvaluator,
     column: SQL,
-    values: t.List[
+    values: t.List[exp.Expression],
     alias: bool = True,
     agg: exp.Expression = exp.Literal.string("SUM"),
     cmp: exp.Expression = exp.Literal.string("="),
@@ -1146,10 +1168,10 @@ def pivot(
     >>> from sqlmesh.core.macros import MacroEvaluator
     >>> sql = "SELECT date_day, @PIVOT(status, ['cancelled', 'completed']) FROM rides GROUP BY 1"
     >>> MacroEvaluator().transform(parse_one(sql)).sql()
-    'SELECT date_day, SUM(CASE WHEN status = \\'cancelled\\' THEN 1 ELSE 0 END) AS "
+    'SELECT date_day, SUM(CASE WHEN status = \\'cancelled\\' THEN 1 ELSE 0 END) AS "cancelled", SUM(CASE WHEN status = \\'completed\\' THEN 1 ELSE 0 END) AS "completed" FROM rides GROUP BY 1'
     >>> sql = "SELECT @PIVOT(a, ['v'], then_value := tv, suffix := '_sfx', quote := FALSE)"
     >>> MacroEvaluator(dialect="bigquery").transform(parse_one(sql)).sql("bigquery")
-    "SELECT SUM(CASE WHEN a = 'v' THEN tv ELSE 0 END) AS
+    "SELECT SUM(CASE WHEN a = 'v' THEN tv ELSE 0 END) AS v_sfx"
     """
     aggregates: t.List[exp.Expression] = []
     for value in values:
@@ -1157,12 +1179,12 @@ def pivot(
         if distinct:
             proj += "DISTINCT "
 
-        proj += f"CASE WHEN {column} {cmp.name} {value} THEN {then_value} ELSE {else_value} END) "
+        proj += f"CASE WHEN {column} {cmp.name} {value.sql(evaluator.dialect)} THEN {then_value} ELSE {else_value} END) "
         node = evaluator.parse_one(proj)
 
         if alias:
             node = node.as_(
-                f"{prefix.name}{value}{suffix.name}",
+                f"{prefix.name}{value.name}{suffix.name}",
                 quoted=quote,
                 copy=False,
                 dialect=evaluator.dialect,
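
Taken together, these hunks make macro variable resolution case-insensitive: `@DEF` lowercases the name it stores, `template()` lowercases the keys it collects, and `CaseInsensitiveMapping` lowercases lookups. A behavior sketch of the mapping itself, assuming keys are already lowercased (as `template()` now guarantees):

    m = CaseInsensitiveMapping({"start_ds": "2024-01-01"})
    assert m["START_DS"] == "2024-01-01"   # __getitem__ lowercases the lookup key
    assert m.get("Start_Ds") == "2024-01-01"
    # So @start_ds, @START_DS and @Start_Ds all substitute the same value in templates.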
sqlmesh/core/model/common.py
CHANGED

@@ -641,6 +641,7 @@ properties_validator: t.Callable = field_validator(
     "physical_properties_",
     "virtual_properties_",
     "materialization_properties_",
+    "grants_",
     mode="before",
     check_fields=False,
 )(parse_properties)
@@ -662,6 +663,7 @@ depends_on_validator: t.Callable = field_validator(
 
 class ParsableSql(PydanticModel):
     sql: str
+    transaction: t.Optional[bool] = None
 
     _parsed: t.Optional[exp.Expression] = None
     _parsed_dialect: t.Optional[str] = None
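
The new optional `transaction` field records whether a pre/post-statement should run inside (`True`) or outside (`False`) the evaluation transaction, with `None` preserving the previous behavior; that reading is inferred from the `inside_transaction` rendering changes in `definition.py` below. A minimal sketch using only what the diff shows:

    stmt = ParsableSql(sql="VACUUM my_table", transaction=False)
    assert stmt.transaction is False  # rendered only when inside_transaction=False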
sqlmesh/core/model/definition.py
CHANGED

@@ -67,6 +67,7 @@ if t.TYPE_CHECKING:
     from sqlmesh.core.context import ExecutionContext
     from sqlmesh.core.engine_adapter import EngineAdapter
     from sqlmesh.core.engine_adapter._typing import QueryOrDF
+    from sqlmesh.core.engine_adapter.shared import DataObjectType
     from sqlmesh.core.linter.rule import Rule
     from sqlmesh.core.snapshot import DeployabilityIndex, Node, Snapshot
     from sqlmesh.utils.jinja import MacroReference
@@ -362,6 +363,7 @@ class _Model(ModelMeta, frozen=True):
         expand: t.Iterable[str] = tuple(),
         deployability_index: t.Optional[DeployabilityIndex] = None,
         engine_adapter: t.Optional[EngineAdapter] = None,
+        inside_transaction: t.Optional[bool] = True,
         **kwargs: t.Any,
     ) -> t.List[exp.Expression]:
         """Renders pre-statements for a model.
@@ -383,7 +385,11 @@ class _Model(ModelMeta, frozen=True):
             The list of rendered expressions.
         """
         return self._render_statements(
-
+            [
+                stmt
+                for stmt in self.pre_statements
+                if stmt.args.get("transaction", True) == inside_transaction
+            ],
             start=start,
             end=end,
             execution_time=execution_time,
@@ -404,6 +410,7 @@ class _Model(ModelMeta, frozen=True):
         expand: t.Iterable[str] = tuple(),
         deployability_index: t.Optional[DeployabilityIndex] = None,
         engine_adapter: t.Optional[EngineAdapter] = None,
+        inside_transaction: t.Optional[bool] = True,
         **kwargs: t.Any,
     ) -> t.List[exp.Expression]:
         """Renders post-statements for a model.
@@ -419,13 +426,18 @@ class _Model(ModelMeta, frozen=True):
             that depend on materialized tables. Model definitions are inlined and can thus be run end to
             end on the fly.
             deployability_index: Determines snapshots that are deployable in the context of this render.
+            inside_transaction: Whether to render hooks with transaction=True (inside) or transaction=False (outside).
             kwargs: Additional kwargs to pass to the renderer.
 
         Returns:
             The list of rendered expressions.
         """
         return self._render_statements(
-
+            [
+                stmt
+                for stmt in self.post_statements
+                if stmt.args.get("transaction", True) == inside_transaction
+            ],
             start=start,
             end=end,
             execution_time=execution_time,
@@ -566,6 +578,8 @@ class _Model(ModelMeta, frozen=True):
         result = []
         for v in value:
             parsed = v.parse(self.dialect)
+            if getattr(v, "transaction", None) is not None:
+                parsed.set("transaction", v.transaction)
             if not isinstance(parsed, exp.Semicolon):
                 result.append(parsed)
         return result
@@ -1021,6 +1035,13 @@ class _Model(ModelMeta, frozen=True):
         # Will raise if the custom materialization points to an invalid class
         get_custom_materialization_type_or_raise(self.kind.materialization)
 
+        # Embedded model kind shouldn't have audits
+        if self.kind.name == ModelKindName.EMBEDDED and self.audits:
+            raise_config_error(
+                "Audits are not supported for embedded models",
+                self._path,
+            )
+
     def is_breaking_change(self, previous: Model) -> t.Optional[bool]:
         """Determines whether this model is a breaking change in relation to the `previous` model.
 
@@ -1186,6 +1207,8 @@ class _Model(ModelMeta, frozen=True):
             gen(self.session_properties_) if self.session_properties_ else None,
             *[gen(g) for g in self.grains],
             *self._audit_metadata_hash_values(),
+            json.dumps(self.grants, sort_keys=True) if self.grants else None,
+            self.grants_target_layer,
         ]
 
         for key, value in (self.virtual_properties or {}).items():
@@ -1197,6 +1220,9 @@ class _Model(ModelMeta, frozen=True):
         for k, v in sorted(args.items()):
             metadata.append(f"{k}:{gen(v)}")
 
+        if self.dbt_node_info:
+            metadata.append(self.dbt_node_info.json(sort_keys=True))
+
         metadata.extend(self._additional_metadata)
 
         self._metadata_hash = hash_data(metadata)
@@ -1207,6 +1233,24 @@ class _Model(ModelMeta, frozen=True):
         """Return True if this is a model node"""
         return True
 
+    @property
+    def grants_table_type(self) -> DataObjectType:
+        """Get the table type for grants application (TABLE, VIEW, MATERIALIZED_VIEW).
+
+        Returns:
+            The DataObjectType that should be used when applying grants to this model.
+        """
+        from sqlmesh.core.engine_adapter.shared import DataObjectType
+
+        if self.kind.is_view:
+            if hasattr(self.kind, "materialized") and getattr(self.kind, "materialized", False):
+                return DataObjectType.MATERIALIZED_VIEW
+            return DataObjectType.VIEW
+        if self.kind.is_managed:
+            return DataObjectType.MANAGED_TABLE
+        # All other materialized models are tables
+        return DataObjectType.TABLE
+
     @property
     def _additional_metadata(self) -> t.List[str]:
         additional_metadata = []
@@ -1820,6 +1864,12 @@ class SeedModel(_Model):
         for column_name, column_hash in self.column_hashes.items():
             data.append(column_name)
             data.append(column_hash)
+
+        # Include grants in data hash for seed models to force recreation on grant changes
+        # since seed models don't support migration
+        data.append(json.dumps(self.grants, sort_keys=True) if self.grants else "")
+        data.append(self.grants_target_layer)
+
         return data
 
 
@@ -2562,9 +2612,17 @@ def _create_model(
     if statement_field in kwargs:
         # Macros extracted from these statements need to be treated as metadata only
        is_metadata = statement_field == "on_virtual_update"
-
+        for stmt in kwargs[statement_field]:
+            # Extract the expression if it's ParsableSql already
+            expr = stmt.parse(dialect) if isinstance(stmt, ParsableSql) else stmt
+            statements.append((expr, is_metadata))
         kwargs[statement_field] = [
-
+            # this to retain the transaction information
+            stmt
+            if isinstance(stmt, ParsableSql)
+            else ParsableSql.from_parsed_expression(
+                stmt, dialect, use_meta_sql=use_original_sql
+            )
             for stmt in kwargs[statement_field]
         ]
 
@@ -2866,6 +2924,13 @@ def render_meta_fields(
         for key, value in field_value.items():
             if key in RUNTIME_RENDERED_MODEL_FIELDS:
                 rendered_dict[key] = parse_strings_with_macro_refs(value, dialect)
+            elif (
+                # don't parse kind auto_restatement_cron="@..." kwargs (e.g. @daily) into MacroVar
+                key == "auto_restatement_cron"
+                and isinstance(value, str)
+                and value.lower() in CRON_SHORTCUTS
+            ):
+                rendered_dict[key] = value
             elif (rendered := render_field_value(value)) is not None:
                 rendered_dict[key] = rendered
 
@@ -3012,6 +3077,9 @@ META_FIELD_CONVERTER: t.Dict[str, t.Callable] = {
     "formatting": str,
     "optimize_query": str,
     "virtual_environment_mode": lambda value: exp.Literal.string(value.value),
+    "dbt_node_info_": lambda value: value.to_expression(),
+    "grants_": lambda value: value,
+    "grants_target_layer": lambda value: exp.Literal.string(value.value),
 }
 
 
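
For reference, the pre/post-statement hunks above split statements on a `transaction` arg attached to each parsed expression, defaulting to `True` when the flag was never set. A self-contained sketch of that split, using plain `SELECT` statements as stand-ins for real pre/post-statements:

    from sqlglot import parse_one

    stmts = [parse_one("SELECT 1"), parse_one("SELECT 2")]
    stmts[1].set("transaction", False)  # mirrors how _Model attaches the flag above

    # Statements default to transaction=True, so only those explicitly tagged
    # transaction=False are rendered when inside_transaction=False.
    inside = [s for s in stmts if s.args.get("transaction", True) is True]
    outside = [s for s in stmts if s.args.get("transaction", True) is False]
    assert len(inside) == 1 and len(outside) == 1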