sqlmesh 0.217.1.dev1__py3-none-any.whl → 0.227.2.dev20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. sqlmesh/__init__.py +12 -2
  2. sqlmesh/_version.py +2 -2
  3. sqlmesh/cli/project_init.py +10 -2
  4. sqlmesh/core/_typing.py +1 -0
  5. sqlmesh/core/audit/definition.py +8 -2
  6. sqlmesh/core/config/__init__.py +1 -1
  7. sqlmesh/core/config/connection.py +20 -5
  8. sqlmesh/core/config/dbt.py +13 -0
  9. sqlmesh/core/config/janitor.py +12 -0
  10. sqlmesh/core/config/loader.py +7 -0
  11. sqlmesh/core/config/model.py +2 -0
  12. sqlmesh/core/config/root.py +3 -0
  13. sqlmesh/core/console.py +80 -2
  14. sqlmesh/core/constants.py +1 -1
  15. sqlmesh/core/context.py +112 -35
  16. sqlmesh/core/dialect.py +3 -0
  17. sqlmesh/core/engine_adapter/_typing.py +2 -0
  18. sqlmesh/core/engine_adapter/base.py +330 -23
  19. sqlmesh/core/engine_adapter/base_postgres.py +17 -1
  20. sqlmesh/core/engine_adapter/bigquery.py +146 -7
  21. sqlmesh/core/engine_adapter/clickhouse.py +17 -13
  22. sqlmesh/core/engine_adapter/databricks.py +50 -2
  23. sqlmesh/core/engine_adapter/fabric.py +110 -29
  24. sqlmesh/core/engine_adapter/mixins.py +142 -48
  25. sqlmesh/core/engine_adapter/mssql.py +15 -4
  26. sqlmesh/core/engine_adapter/mysql.py +2 -2
  27. sqlmesh/core/engine_adapter/postgres.py +9 -3
  28. sqlmesh/core/engine_adapter/redshift.py +4 -0
  29. sqlmesh/core/engine_adapter/risingwave.py +1 -0
  30. sqlmesh/core/engine_adapter/shared.py +6 -0
  31. sqlmesh/core/engine_adapter/snowflake.py +82 -11
  32. sqlmesh/core/engine_adapter/spark.py +14 -10
  33. sqlmesh/core/engine_adapter/trino.py +5 -2
  34. sqlmesh/core/janitor.py +181 -0
  35. sqlmesh/core/lineage.py +1 -0
  36. sqlmesh/core/linter/rules/builtin.py +15 -0
  37. sqlmesh/core/loader.py +17 -30
  38. sqlmesh/core/macros.py +35 -13
  39. sqlmesh/core/model/common.py +2 -0
  40. sqlmesh/core/model/definition.py +72 -4
  41. sqlmesh/core/model/kind.py +66 -2
  42. sqlmesh/core/model/meta.py +107 -2
  43. sqlmesh/core/node.py +101 -2
  44. sqlmesh/core/plan/builder.py +15 -10
  45. sqlmesh/core/plan/common.py +196 -2
  46. sqlmesh/core/plan/definition.py +21 -6
  47. sqlmesh/core/plan/evaluator.py +72 -113
  48. sqlmesh/core/plan/explainer.py +90 -8
  49. sqlmesh/core/plan/stages.py +42 -21
  50. sqlmesh/core/renderer.py +26 -18
  51. sqlmesh/core/scheduler.py +60 -19
  52. sqlmesh/core/selector.py +137 -9
  53. sqlmesh/core/signal.py +64 -1
  54. sqlmesh/core/snapshot/__init__.py +1 -0
  55. sqlmesh/core/snapshot/definition.py +109 -25
  56. sqlmesh/core/snapshot/evaluator.py +610 -50
  57. sqlmesh/core/state_sync/__init__.py +0 -1
  58. sqlmesh/core/state_sync/base.py +31 -27
  59. sqlmesh/core/state_sync/cache.py +12 -4
  60. sqlmesh/core/state_sync/common.py +216 -111
  61. sqlmesh/core/state_sync/db/facade.py +30 -15
  62. sqlmesh/core/state_sync/db/interval.py +27 -7
  63. sqlmesh/core/state_sync/db/migrator.py +14 -8
  64. sqlmesh/core/state_sync/db/snapshot.py +119 -87
  65. sqlmesh/core/table_diff.py +2 -2
  66. sqlmesh/core/test/definition.py +14 -9
  67. sqlmesh/core/test/discovery.py +4 -0
  68. sqlmesh/dbt/adapter.py +20 -11
  69. sqlmesh/dbt/basemodel.py +52 -41
  70. sqlmesh/dbt/builtin.py +27 -11
  71. sqlmesh/dbt/column.py +17 -5
  72. sqlmesh/dbt/common.py +4 -2
  73. sqlmesh/dbt/context.py +14 -1
  74. sqlmesh/dbt/loader.py +60 -8
  75. sqlmesh/dbt/manifest.py +136 -8
  76. sqlmesh/dbt/model.py +105 -25
  77. sqlmesh/dbt/package.py +16 -1
  78. sqlmesh/dbt/profile.py +3 -3
  79. sqlmesh/dbt/project.py +12 -7
  80. sqlmesh/dbt/seed.py +1 -1
  81. sqlmesh/dbt/source.py +6 -1
  82. sqlmesh/dbt/target.py +25 -6
  83. sqlmesh/dbt/test.py +31 -1
  84. sqlmesh/integrations/github/cicd/controller.py +6 -2
  85. sqlmesh/lsp/context.py +4 -2
  86. sqlmesh/magics.py +1 -1
  87. sqlmesh/migrations/v0000_baseline.py +3 -6
  88. sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +2 -5
  89. sqlmesh/migrations/v0062_add_model_gateway.py +2 -2
  90. sqlmesh/migrations/v0063_change_signals.py +2 -4
  91. sqlmesh/migrations/v0064_join_when_matched_strings.py +2 -4
  92. sqlmesh/migrations/v0065_add_model_optimize.py +2 -2
  93. sqlmesh/migrations/v0066_add_auto_restatements.py +2 -6
  94. sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +2 -2
  95. sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +2 -2
  96. sqlmesh/migrations/v0069_update_dev_table_suffix.py +2 -4
  97. sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +2 -2
  98. sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +2 -6
  99. sqlmesh/migrations/v0072_add_environment_statements.py +2 -4
  100. sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +2 -4
  101. sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +2 -2
  102. sqlmesh/migrations/v0075_remove_validate_query.py +2 -4
  103. sqlmesh/migrations/v0076_add_cron_tz.py +2 -2
  104. sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +2 -2
  105. sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +2 -4
  106. sqlmesh/migrations/v0079_add_gateway_managed_property.py +7 -9
  107. sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +2 -2
  108. sqlmesh/migrations/v0081_update_partitioned_by.py +2 -4
  109. sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +2 -4
  110. sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +2 -2
  111. sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +2 -2
  112. sqlmesh/migrations/v0085_deterministic_repr.py +2 -4
  113. sqlmesh/migrations/v0086_check_deterministic_bug.py +2 -4
  114. sqlmesh/migrations/v0087_normalize_blueprint_variables.py +2 -4
  115. sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +2 -4
  116. sqlmesh/migrations/v0089_add_virtual_environment_mode.py +2 -2
  117. sqlmesh/migrations/v0090_add_forward_only_column.py +2 -6
  118. sqlmesh/migrations/v0091_on_additive_change.py +2 -2
  119. sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +2 -4
  120. sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +2 -2
  121. sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +2 -6
  122. sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +2 -4
  123. sqlmesh/migrations/v0096_remove_plan_dags_table.py +2 -4
  124. sqlmesh/migrations/v0097_add_dbt_name_in_node.py +2 -2
  125. sqlmesh/migrations/v0098_add_dbt_node_info_in_node.py +103 -0
  126. sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
  127. sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
  128. sqlmesh/utils/__init__.py +8 -1
  129. sqlmesh/utils/cache.py +5 -1
  130. sqlmesh/utils/date.py +1 -1
  131. sqlmesh/utils/errors.py +4 -0
  132. sqlmesh/utils/git.py +3 -1
  133. sqlmesh/utils/jinja.py +25 -2
  134. sqlmesh/utils/pydantic.py +6 -6
  135. sqlmesh/utils/windows.py +13 -3
  136. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/METADATA +5 -5
  137. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/RECORD +188 -183
  138. sqlmesh_dbt/cli.py +70 -7
  139. sqlmesh_dbt/console.py +14 -6
  140. sqlmesh_dbt/operations.py +103 -24
  141. sqlmesh_dbt/selectors.py +39 -1
  142. web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
  143. web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
  144. web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
  145. web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
  146. web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
  147. web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
  148. web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
  149. web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
  150. web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
  151. web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
  152. web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
  153. web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
  154. web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
  155. web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
  156. web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
  157. web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
  158. web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
  159. web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
  160. web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
  161. web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
  162. web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
  163. web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
  164. web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
  165. web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
  166. web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
  167. web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
  168. web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
  169. web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
  170. web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
  171. web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
  172. web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
  173. web/client/dist/assets/context-BctCsyGb.js +71 -0
  174. web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
  175. web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
  176. web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
  177. web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
  178. web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
  179. web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
  180. web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
  181. web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
  182. web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
  183. web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
  184. web/client/dist/index.html +1 -1
  185. web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
  186. web/client/dist/assets/context-DgX0fp2E.js +0 -68
  187. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/WHEEL +0 -0
  188. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/entry_points.txt +0 -0
  189. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/licenses/LICENSE +0 -0
  190. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/top_level.txt +0 -0
sqlmesh/core/engine_adapter/bigquery.py

@@ -8,9 +8,10 @@ from sqlglot import exp, parse_one
 from sqlglot.transforms import remove_precision_parameterized_types

 from sqlmesh.core.dialect import to_schema
+from sqlmesh.core.engine_adapter.base import _get_data_object_cache_key
 from sqlmesh.core.engine_adapter.mixins import (
-    InsertOverwriteWithMergeMixin,
     ClusteredByMixin,
+    GrantsFromInfoSchemaMixin,
     RowDiffMixin,
     TableAlterClusterByOperation,
 )
@@ -20,6 +21,7 @@ from sqlmesh.core.engine_adapter.shared import (
     DataObjectType,
     SourceQuery,
     set_catalog,
+    InsertOverwriteStrategy,
 )
 from sqlmesh.core.node import IntervalUnit
 from sqlmesh.core.schema_diff import TableAlterOperation, NestedSupport
@@ -39,7 +41,7 @@ if t.TYPE_CHECKING:
     from google.cloud.bigquery.table import Table as BigQueryTable

     from sqlmesh.core._typing import SchemaName, SessionProperties, TableName
-    from sqlmesh.core.engine_adapter._typing import BigframeSession, DF, Query
+    from sqlmesh.core.engine_adapter._typing import BigframeSession, DCL, DF, GrantsConfig, Query
     from sqlmesh.core.engine_adapter.base import QueryOrDF


@@ -54,7 +56,7 @@ NestedFieldsDict = t.Dict[str, t.List[NestedField]]


 @set_catalog()
-class BigQueryEngineAdapter(InsertOverwriteWithMergeMixin, ClusteredByMixin, RowDiffMixin):
+class BigQueryEngineAdapter(ClusteredByMixin, RowDiffMixin, GrantsFromInfoSchemaMixin):
     """
     BigQuery Engine Adapter using the `google-cloud-bigquery` library's DB API.
     """
@@ -64,10 +66,16 @@ class BigQueryEngineAdapter(InsertOverwriteWithMergeMixin, ClusteredByMixin, Row
     SUPPORTS_TRANSACTIONS = False
     SUPPORTS_MATERIALIZED_VIEWS = True
     SUPPORTS_CLONING = True
+    SUPPORTS_GRANTS = True
+    CURRENT_USER_OR_ROLE_EXPRESSION: exp.Expression = exp.func("session_user")
+    SUPPORTS_MULTIPLE_GRANT_PRINCIPALS = True
+    USE_CATALOG_IN_GRANTS = True
+    GRANT_INFORMATION_SCHEMA_TABLE_NAME = "OBJECT_PRIVILEGES"
     MAX_TABLE_COMMENT_LENGTH = 1024
     MAX_COLUMN_COMMENT_LENGTH = 1024
     SUPPORTS_QUERY_EXECUTION_TRACKING = True
     SUPPORTED_DROP_CASCADE_OBJECT_KINDS = ["SCHEMA"]
+    INSERT_OVERWRITE_STRATEGY = InsertOverwriteStrategy.MERGE

     SCHEMA_DIFFER_KWARGS = {
         "compatible_types": {
@@ -168,17 +176,18 @@ class BigQueryEngineAdapter(InsertOverwriteWithMergeMixin, ClusteredByMixin, Row
         )

         def query_factory() -> Query:
-            if bigframes_pd and isinstance(df, bigframes_pd.DataFrame):
-                df.to_gbq(
+            ordered_df = df[list(source_columns_to_types)]
+            if bigframes_pd and isinstance(ordered_df, bigframes_pd.DataFrame):
+                ordered_df.to_gbq(
                     f"{temp_bq_table.project}.{temp_bq_table.dataset_id}.{temp_bq_table.table_id}",
                     if_exists="replace",
                 )
             elif not self.table_exists(temp_table):
                 # Make mypy happy
-                assert isinstance(df, pd.DataFrame)
+                assert isinstance(ordered_df, pd.DataFrame)
                 self._db_call(self.client.create_table, table=temp_bq_table, exists_ok=False)
                 result = self.__load_pandas_to_table(
-                    temp_bq_table, df, source_columns_to_types, replace=False
+                    temp_bq_table, ordered_df, source_columns_to_types, replace=False
                 )
                 if result.errors:
                     raise SQLMeshError(result.errors)
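
The `ordered_df = df[list(source_columns_to_types)]` change reindexes the DataFrame so its columns match the declared source column order before the upload, preventing positional mismatches. A minimal pandas sketch of the idiom (column names and types are illustrative):

import pandas as pd

df = pd.DataFrame({"b": [1], "a": [2]})                 # columns arrive in the "wrong" order
source_columns_to_types = {"a": "INT64", "b": "INT64"}  # insertion-ordered column -> type mapping
ordered_df = df[list(source_columns_to_types)]          # selects columns in key order: ["a", "b"]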
@@ -742,6 +751,12 @@ class BigQueryEngineAdapter(InsertOverwriteWithMergeMixin, ClusteredByMixin, Row
         )

     def table_exists(self, table_name: TableName) -> bool:
+        table = exp.to_table(table_name)
+        data_object_cache_key = _get_data_object_cache_key(table.catalog, table.db, table.name)
+        if data_object_cache_key in self._data_object_cache:
+            logger.debug("Table existence cache hit: %s", data_object_cache_key)
+            return self._data_object_cache[data_object_cache_key] is not None
+
         try:
             from google.cloud.exceptions import NotFound
         except ModuleNotFoundError:
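
The new fast path in `table_exists` consults the shared data-object cache before calling the BigQuery client. The contract assumed here (inferred from `_get_data_object_cache_key` and the `is not None` check, not spelled out in this hunk) is that the cache maps a catalog/schema/name key to a `DataObject`, or to `None` when the object is known not to exist:

# Hypothetical illustration of the assumed cache contract; "adapter" is a BigQueryEngineAdapter.
key = _get_data_object_cache_key("my-project", "my_dataset", "my_table")
if key in adapter._data_object_cache:                      # hit: question already answered
    exists = adapter._data_object_cache[key] is not None   # cached None means "known absent"
else:
    pass                                                   # miss: fall through to the API call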
@@ -753,6 +768,28 @@ class BigQueryEngineAdapter(InsertOverwriteWithMergeMixin, ClusteredByMixin, Row
         except NotFound:
             return False

+    def get_table_last_modified_ts(self, table_names: t.List[TableName]) -> t.List[int]:
+        from sqlmesh.utils.date import to_timestamp
+
+        datasets_to_tables: t.DefaultDict[str, t.List[str]] = defaultdict(list)
+        for table_name in table_names:
+            table = exp.to_table(table_name)
+            datasets_to_tables[table.db].append(table.name)
+
+        results = []
+
+        for dataset, tables in datasets_to_tables.items():
+            query = (
+                f"SELECT TIMESTAMP_MILLIS(last_modified_time) FROM `{dataset}.__TABLES__` WHERE "
+            )
+            for i, table_name in enumerate(tables):
+                query += f"TABLE_ID = '{table_name}'"
+                if i < len(tables) - 1:
+                    query += " OR "
+            results.extend(self.fetchall(query))
+
+        return [to_timestamp(row[0]) for row in results]
+
     def _get_table(self, table_name: TableName) -> BigQueryTable:
         """
         Returns a BigQueryTable object for the given table name.
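
For reference, `get_table_last_modified_ts` batches lookups per dataset and renders one query per dataset against BigQuery's `__TABLES__` metadata view. For `table_names` of `["proj.ds.a", "proj.ds.b"]`, the loop above produces (illustrative rendering):

query = (
    "SELECT TIMESTAMP_MILLIS(last_modified_time) FROM `ds.__TABLES__` "
    "WHERE TABLE_ID = 'a' OR TABLE_ID = 'b'"
)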
@@ -1295,6 +1332,108 @@ class BigQueryEngineAdapter(InsertOverwriteWithMergeMixin, ClusteredByMixin, Row
     def _session_id(self, value: t.Any) -> None:
         self._connection_pool.set_attribute("session_id", value)

+    def _get_current_schema(self) -> str:
+        raise NotImplementedError("BigQuery does not support current schema")
+
+    def _get_bq_dataset_location(self, project: str, dataset: str) -> str:
+        return self._db_call(self.client.get_dataset, dataset_ref=f"{project}.{dataset}").location
+
+    def _get_grant_expression(self, table: exp.Table) -> exp.Expression:
+        if not table.db:
+            raise ValueError(
+                f"Table {table.sql(dialect=self.dialect)} does not have a schema (dataset)"
+            )
+        project = table.catalog or self.get_current_catalog()
+        if not project:
+            raise ValueError(
+                f"Table {table.sql(dialect=self.dialect)} does not have a catalog (project)"
+            )
+
+        dataset = table.db
+        table_name = table.name
+        location = self._get_bq_dataset_location(project, dataset)
+
+        # https://cloud.google.com/bigquery/docs/information-schema-object-privileges
+        # OBJECT_PRIVILEGES is a project-level INFORMATION_SCHEMA view with regional qualifier
+        object_privileges_table = exp.to_table(
+            f"`{project}`.`region-{location}`.INFORMATION_SCHEMA.{self.GRANT_INFORMATION_SCHEMA_TABLE_NAME}",
+            dialect=self.dialect,
+        )
+        return (
+            exp.select("privilege_type", "grantee")
+            .from_(object_privileges_table)
+            .where(
+                exp.and_(
+                    exp.column("object_schema").eq(exp.Literal.string(dataset)),
+                    exp.column("object_name").eq(exp.Literal.string(table_name)),
+                    # Filter out current_user
+                    # BigQuery grantees format: "user:email" or "group:name"
+                    exp.func("split", exp.column("grantee"), exp.Literal.string(":"))[
+                        exp.func("OFFSET", exp.Literal.number("1"))
+                    ].neq(self.CURRENT_USER_OR_ROLE_EXPRESSION),
+                )
+            )
+        )
+
+    @staticmethod
+    def _grant_object_kind(table_type: DataObjectType) -> str:
+        if table_type == DataObjectType.VIEW:
+            return "VIEW"
+        if table_type == DataObjectType.MATERIALIZED_VIEW:
+            # We actually need to use "MATERIALIZED VIEW" here even though it's not listed
+            # as a supported resource_type in the BigQuery DCL doc:
+            # https://cloud.google.com/bigquery/docs/reference/standard-sql/data-control-language
+            return "MATERIALIZED VIEW"
+        return "TABLE"
+
+    def _dcl_grants_config_expr(
+        self,
+        dcl_cmd: t.Type[DCL],
+        table: exp.Table,
+        grants_config: GrantsConfig,
+        table_type: DataObjectType = DataObjectType.TABLE,
+    ) -> t.List[exp.Expression]:
+        expressions: t.List[exp.Expression] = []
+        if not grants_config:
+            return expressions
+
+        # https://cloud.google.com/bigquery/docs/reference/standard-sql/data-control-language
+
+        def normalize_principal(p: str) -> str:
+            if ":" not in p:
+                raise ValueError(f"Principal '{p}' missing a prefix label")
+
+            # allUsers and allAuthenticatedUsers are special groups that are case-sensitive and must start with "specialGroup:"
+            if p.endswith("allUsers") or p.endswith("allAuthenticatedUsers"):
+                if not p.startswith("specialGroup:"):
+                    raise ValueError(
+                        f"Special group principal '{p}' must start with 'specialGroup:' prefix label"
+                    )
+                return p
+
+            label, principal = p.split(":", 1)
+            # always lowercase principals
+            return f"{label}:{principal.lower()}"
+
+        object_kind = self._grant_object_kind(table_type)
+        for privilege, principals in grants_config.items():
+            if not principals:
+                continue
+
+            noramlized_principals = [exp.Literal.string(normalize_principal(p)) for p in principals]
+            args: t.Dict[str, t.Any] = {
+                "privileges": [exp.GrantPrivilege(this=exp.to_identifier(privilege, quoted=True))],
+                "securable": table.copy(),
+                "principals": noramlized_principals,
+            }
+
+            if object_kind:
+                args["kind"] = exp.Var(this=object_kind)
+
+            expressions.append(dcl_cmd(**args))  # type: ignore[arg-type]
+
+        return expressions
+

 class _ErrorCounter:
     """
sqlmesh/core/engine_adapter/clickhouse.py

@@ -112,8 +112,9 @@ class ClickhouseEngineAdapter(EngineAdapterWithIndexSupport, LogicalMergeMixin):
             storage_format=exp.var("MergeTree"),
             **kwargs,
         )
+        ordered_df = df[list(source_columns_to_types)]

-        self.cursor.client.insert_df(temp_table.sql(dialect=self.dialect), df=df)
+        self.cursor.client.insert_df(temp_table.sql(dialect=self.dialect), df=ordered_df)

         return exp.select(*self._casted_columns(target_columns_to_types, source_columns)).from_(
             temp_table
@@ -223,7 +224,7 @@ class ClickhouseEngineAdapter(EngineAdapterWithIndexSupport, LogicalMergeMixin):
         target_columns_to_types = target_columns_to_types or self.columns(target_table)

         temp_table = self._get_temp_table(target_table)
-        self._create_table_like(temp_table, target_table)
+        self.create_table_like(temp_table, target_table)

         # REPLACE BY KEY: extract kwargs if present
         dynamic_key = kwargs.get("dynamic_key")
@@ -455,7 +456,11 @@ class ClickhouseEngineAdapter(EngineAdapterWithIndexSupport, LogicalMergeMixin):
         )

     def _create_table_like(
-        self, target_table_name: TableName, source_table_name: TableName
+        self,
+        target_table_name: TableName,
+        source_table_name: TableName,
+        exists: bool,
+        **kwargs: t.Any,
     ) -> None:
         """Create table with identical structure as source table"""
         self.execute(
@@ -631,16 +636,15 @@ class ClickhouseEngineAdapter(EngineAdapterWithIndexSupport, LogicalMergeMixin):
             kind: What kind of object to drop. Defaults to TABLE
             **drop_args: Any extra arguments to set on the Drop expression
         """
-        self.execute(
-            exp.Drop(
-                this=exp.to_table(name),
-                kind=kind,
-                exists=exists,
-                cluster=exp.OnCluster(this=exp.to_identifier(self.cluster))
-                if self.engine_run_mode.is_cluster
-                else None,
-                **drop_args,
-            )
+        super()._drop_object(
+            name=name,
+            exists=exists,
+            kind=kind,
+            cascade=cascade,
+            cluster=exp.OnCluster(this=exp.to_identifier(self.cluster))
+            if self.engine_run_mode.is_cluster
+            else None,
+            **drop_args,
         )

     def _build_partitioned_by_exp(
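
Delegating to `super()._drop_object` keeps ClickHouse's `ON CLUSTER` clause while inheriting the base class's `cascade` handling. A standalone sqlglot sketch of the expression the cluster branch builds (cluster and table names are illustrative):

from sqlglot import exp

drop = exp.Drop(
    this=exp.to_table("db.events"),
    kind="TABLE",
    exists=True,
    cluster=exp.OnCluster(this=exp.to_identifier("my_cluster")),
)
print(drop.sql(dialect="clickhouse"))  # roughly: DROP TABLE IF EXISTS db.events ON CLUSTER my_cluster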
sqlmesh/core/engine_adapter/databricks.py

@@ -5,7 +5,9 @@ import typing as t
 from functools import partial

 from sqlglot import exp
+
 from sqlmesh.core.dialect import to_schema
+from sqlmesh.core.engine_adapter.mixins import GrantsFromInfoSchemaMixin
 from sqlmesh.core.engine_adapter.shared import (
     CatalogSupport,
     DataObject,
@@ -28,12 +30,16 @@ if t.TYPE_CHECKING:
 logger = logging.getLogger(__name__)


-class DatabricksEngineAdapter(SparkEngineAdapter):
+class DatabricksEngineAdapter(SparkEngineAdapter, GrantsFromInfoSchemaMixin):
     DIALECT = "databricks"
     INSERT_OVERWRITE_STRATEGY = InsertOverwriteStrategy.REPLACE_WHERE
     SUPPORTS_CLONING = True
     SUPPORTS_MATERIALIZED_VIEWS = True
     SUPPORTS_MATERIALIZED_VIEW_SCHEMA = True
+    SUPPORTS_GRANTS = True
+    USE_CATALOG_IN_GRANTS = True
+    # Spark has this set to false for compatibility when mixing with Trino but that isn't a concern with Databricks
+    QUOTE_IDENTIFIERS_IN_VIEWS = True
     SCHEMA_DIFFER_KWARGS = {
         "support_positional_add": True,
         "nested_support": NestedSupport.ALL,
@@ -149,6 +155,28 @@ class DatabricksEngineAdapter(SparkEngineAdapter):
     def catalog_support(self) -> CatalogSupport:
         return CatalogSupport.FULL_SUPPORT

+    @staticmethod
+    def _grant_object_kind(table_type: DataObjectType) -> str:
+        if table_type == DataObjectType.VIEW:
+            return "VIEW"
+        if table_type == DataObjectType.MATERIALIZED_VIEW:
+            return "MATERIALIZED VIEW"
+        return "TABLE"
+
+    def _get_grant_expression(self, table: exp.Table) -> exp.Expression:
+        # We only care about explicitly granted privileges and not inherited ones
+        # if this is removed you would see grants inherited from the catalog get returned
+        expression = super()._get_grant_expression(table)
+        expression.args["where"].set(
+            "this",
+            exp.and_(
+                expression.args["where"].this,
+                exp.column("inherited_from").eq(exp.Literal.string("NONE")),
+                wrap=False,
+            ),
+        )
+        return expression
+
     def _begin_session(self, properties: SessionProperties) -> t.Any:
         """Begin a new session."""
         # Align the different possible connectors to a single catalog
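
The `wrap=False` argument relies on sqlglot's `exp.and_` appending a conjunct without re-parenthesizing the existing predicate. A standalone sketch of the composition (schema and table literals are illustrative):

from sqlglot import exp

base = exp.condition("object_schema = 'analytics' AND object_name = 'orders'")
combined = exp.and_(
    base,
    exp.column("inherited_from").eq(exp.Literal.string("NONE")),
    wrap=False,  # append flat instead of wrapping the existing predicate in parentheses
)
print(combined.sql())  # object_schema = 'analytics' AND object_name = 'orders' AND inherited_from = 'NONE'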
@@ -266,7 +294,9 @@ class DatabricksEngineAdapter(SparkEngineAdapter):
             exp.column("table_catalog").as_("catalog"),
             exp.case(exp.column("table_type"))
             .when(exp.Literal.string("VIEW"), exp.Literal.string("view"))
-            .when(exp.Literal.string("MATERIALIZED_VIEW"), exp.Literal.string("view"))
+            .when(
+                exp.Literal.string("MATERIALIZED_VIEW"), exp.Literal.string("materialized_view")
+            )
             .else_(exp.Literal.string("table"))
             .as_("type"),
         )
@@ -297,6 +327,7 @@ class DatabricksEngineAdapter(SparkEngineAdapter):
         target_table_name: TableName,
         source_table_name: TableName,
         replace: bool = False,
+        exists: bool = True,
         clone_kwargs: t.Optional[t.Dict[str, t.Any]] = None,
         **kwargs: t.Any,
     ) -> None:
@@ -363,3 +394,20 @@ class DatabricksEngineAdapter(SparkEngineAdapter):
             expressions.append(clustered_by_exp)
         properties = exp.Properties(expressions=expressions)
         return properties
+
+    def _build_column_defs(
+        self,
+        target_columns_to_types: t.Dict[str, exp.DataType],
+        column_descriptions: t.Optional[t.Dict[str, str]] = None,
+        is_view: bool = False,
+        materialized: bool = False,
+    ) -> t.List[exp.ColumnDef]:
+        # Databricks requires column types to be specified when adding column comments
+        # in CREATE MATERIALIZED VIEW statements. Override is_view to False to force
+        # column types to be included when comments are present.
+        if is_view and materialized and column_descriptions:
+            is_view = False
+
+        return super()._build_column_defs(
+            target_columns_to_types, column_descriptions, is_view, materialized
+        )
sqlmesh/core/engine_adapter/fabric.py

@@ -10,23 +10,17 @@ from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_resul
 from sqlmesh.core.engine_adapter.mssql import MSSQLEngineAdapter
 from sqlmesh.core.engine_adapter.shared import (
     InsertOverwriteStrategy,
-    SourceQuery,
 )
-from sqlmesh.core.engine_adapter.base import EngineAdapter
 from sqlmesh.utils.errors import SQLMeshError
 from sqlmesh.utils.connection_pool import ConnectionPool
+from sqlmesh.core.schema_diff import TableAlterOperation
+from sqlmesh.utils import random_id


-if t.TYPE_CHECKING:
-    from sqlmesh.core._typing import TableName
-
-
-from sqlmesh.core.engine_adapter.mixins import LogicalMergeMixin
-
 logger = logging.getLogger(__name__)


-class FabricEngineAdapter(LogicalMergeMixin, MSSQLEngineAdapter):
+class FabricEngineAdapter(MSSQLEngineAdapter):
     """
     Adapter for Microsoft Fabric.
     """
@@ -58,26 +52,6 @@ class FabricEngineAdapter(LogicalMergeMixin, MSSQLEngineAdapter):
     def _target_catalog(self, value: t.Optional[str]) -> None:
         self._connection_pool.set_attribute("target_catalog", value)

-    def _insert_overwrite_by_condition(
-        self,
-        table_name: TableName,
-        source_queries: t.List[SourceQuery],
-        target_columns_to_types: t.Optional[t.Dict[str, exp.DataType]] = None,
-        where: t.Optional[exp.Condition] = None,
-        insert_overwrite_strategy_override: t.Optional[InsertOverwriteStrategy] = None,
-        **kwargs: t.Any,
-    ) -> None:
-        # Override to avoid MERGE statement which isn't fully supported in Fabric
-        return EngineAdapter._insert_overwrite_by_condition(
-            self,
-            table_name=table_name,
-            source_queries=source_queries,
-            target_columns_to_types=target_columns_to_types,
-            where=where,
-            insert_overwrite_strategy_override=InsertOverwriteStrategy.DELETE_INSERT,
-            **kwargs,
-        )
-
     @property
     def api_client(self) -> FabricHttpClient:
         # the requests Session is not guaranteed to be threadsafe
@@ -181,6 +155,113 @@ class FabricEngineAdapter(LogicalMergeMixin, MSSQLEngineAdapter):
             f"Unable to switch catalog to {catalog_name}, catalog ended up as {catalog_after_switch}"
         )

+    def alter_table(
+        self, alter_expressions: t.Union[t.List[exp.Alter], t.List[TableAlterOperation]]
+    ) -> None:
+        """
+        Applies alter expressions to a table. Fabric has limited support for ALTER TABLE,
+        so this method implements a workaround for column type changes.
+        This method is self-contained and sets its own catalog context.
+        """
+        if not alter_expressions:
+            return
+
+        # Get the target table from the first expression to determine the correct catalog.
+        first_op = alter_expressions[0]
+        expression = first_op.expression if isinstance(first_op, TableAlterOperation) else first_op
+        if not isinstance(expression, exp.Alter) or not expression.this.catalog:
+            # Fallback for unexpected scenarios
+            logger.warning(
+                "Could not determine catalog from alter expression, executing with current context."
+            )
+            super().alter_table(alter_expressions)
+            return
+
+        target_catalog = expression.this.catalog
+        self.set_current_catalog(target_catalog)
+
+        with self.transaction():
+            for op in alter_expressions:
+                expression = op.expression if isinstance(op, TableAlterOperation) else op
+
+                if not isinstance(expression, exp.Alter):
+                    self.execute(expression)
+                    continue
+
+                for action in expression.actions:
+                    table_name = expression.this
+
+                    table_name_without_catalog = table_name.copy()
+                    table_name_without_catalog.set("catalog", None)
+
+                    is_type_change = isinstance(action, exp.AlterColumn) and action.args.get(
+                        "dtype"
+                    )
+
+                    if is_type_change:
+                        column_to_alter = action.this
+                        new_type = action.args["dtype"]
+                        temp_column_name_str = f"{column_to_alter.name}__{random_id(short=True)}"
+                        temp_column_name = exp.to_identifier(temp_column_name_str)
+
+                        logger.info(
+                            "Applying workaround for column '%s' on table '%s' to change type to '%s'.",
+                            column_to_alter.sql(),
+                            table_name.sql(),
+                            new_type.sql(),
+                        )
+
+                        # Step 1: Add a temporary column.
+                        add_column_expr = exp.Alter(
+                            this=table_name_without_catalog.copy(),
+                            kind="TABLE",
+                            actions=[
+                                exp.ColumnDef(this=temp_column_name.copy(), kind=new_type.copy())
+                            ],
+                        )
+                        add_sql = self._to_sql(add_column_expr)
+                        self.execute(add_sql)
+
+                        # Step 2: Copy and cast data.
+                        update_sql = self._to_sql(
+                            exp.Update(
+                                this=table_name_without_catalog.copy(),
+                                expressions=[
+                                    exp.EQ(
+                                        this=temp_column_name.copy(),
+                                        expression=exp.Cast(
+                                            this=column_to_alter.copy(), to=new_type.copy()
+                                        ),
+                                    )
+                                ],
+                            )
+                        )
+                        self.execute(update_sql)
+
+                        # Step 3: Drop the original column.
+                        drop_sql = self._to_sql(
+                            exp.Alter(
+                                this=table_name_without_catalog.copy(),
+                                kind="TABLE",
+                                actions=[exp.Drop(this=column_to_alter.copy(), kind="COLUMN")],
+                            )
+                        )
+                        self.execute(drop_sql)
+
+                        # Step 4: Rename the temporary column.
+                        old_name_qualified = f"{table_name_without_catalog.sql(dialect=self.dialect)}.{temp_column_name.sql(dialect=self.dialect)}"
+                        new_name_unquoted = column_to_alter.sql(
+                            dialect=self.dialect, identify=False
+                        )
+                        rename_sql = f"EXEC sp_rename '{old_name_qualified}', '{new_name_unquoted}', 'COLUMN'"
+                        self.execute(rename_sql)
+                    else:
+                        # For other alterations, execute directly.
+                        direct_alter_expr = exp.Alter(
+                            this=table_name_without_catalog.copy(), kind="TABLE", actions=[action]
+                        )
+                        self.execute(direct_alter_expr)
+

 class FabricHttpClient:
     def __init__(self, tenant_id: str, workspace_id: str, client_id: str, client_secret: str):
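
Taken together, steps 1 through 4 of the type-change workaround emit a statement sequence along these lines for a hypothetical `amount INT -> BIGINT` change on `dbo.orders` (the temp-column suffix comes from `random_id` and is shown as a placeholder):

# Illustrative T-SQL sequence; exact quoting depends on the dialect renderer.
steps = [
    "ALTER TABLE dbo.orders ADD amount__abc123 BIGINT",
    "UPDATE dbo.orders SET amount__abc123 = CAST(amount AS BIGINT)",
    "ALTER TABLE dbo.orders DROP COLUMN amount",
    "EXEC sp_rename 'dbo.orders.amount__abc123', 'amount', 'COLUMN'",
]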