sqlmesh 0.217.1.dev1__py3-none-any.whl → 0.227.2.dev20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. sqlmesh/__init__.py +12 -2
  2. sqlmesh/_version.py +2 -2
  3. sqlmesh/cli/project_init.py +10 -2
  4. sqlmesh/core/_typing.py +1 -0
  5. sqlmesh/core/audit/definition.py +8 -2
  6. sqlmesh/core/config/__init__.py +1 -1
  7. sqlmesh/core/config/connection.py +20 -5
  8. sqlmesh/core/config/dbt.py +13 -0
  9. sqlmesh/core/config/janitor.py +12 -0
  10. sqlmesh/core/config/loader.py +7 -0
  11. sqlmesh/core/config/model.py +2 -0
  12. sqlmesh/core/config/root.py +3 -0
  13. sqlmesh/core/console.py +80 -2
  14. sqlmesh/core/constants.py +1 -1
  15. sqlmesh/core/context.py +112 -35
  16. sqlmesh/core/dialect.py +3 -0
  17. sqlmesh/core/engine_adapter/_typing.py +2 -0
  18. sqlmesh/core/engine_adapter/base.py +330 -23
  19. sqlmesh/core/engine_adapter/base_postgres.py +17 -1
  20. sqlmesh/core/engine_adapter/bigquery.py +146 -7
  21. sqlmesh/core/engine_adapter/clickhouse.py +17 -13
  22. sqlmesh/core/engine_adapter/databricks.py +50 -2
  23. sqlmesh/core/engine_adapter/fabric.py +110 -29
  24. sqlmesh/core/engine_adapter/mixins.py +142 -48
  25. sqlmesh/core/engine_adapter/mssql.py +15 -4
  26. sqlmesh/core/engine_adapter/mysql.py +2 -2
  27. sqlmesh/core/engine_adapter/postgres.py +9 -3
  28. sqlmesh/core/engine_adapter/redshift.py +4 -0
  29. sqlmesh/core/engine_adapter/risingwave.py +1 -0
  30. sqlmesh/core/engine_adapter/shared.py +6 -0
  31. sqlmesh/core/engine_adapter/snowflake.py +82 -11
  32. sqlmesh/core/engine_adapter/spark.py +14 -10
  33. sqlmesh/core/engine_adapter/trino.py +5 -2
  34. sqlmesh/core/janitor.py +181 -0
  35. sqlmesh/core/lineage.py +1 -0
  36. sqlmesh/core/linter/rules/builtin.py +15 -0
  37. sqlmesh/core/loader.py +17 -30
  38. sqlmesh/core/macros.py +35 -13
  39. sqlmesh/core/model/common.py +2 -0
  40. sqlmesh/core/model/definition.py +72 -4
  41. sqlmesh/core/model/kind.py +66 -2
  42. sqlmesh/core/model/meta.py +107 -2
  43. sqlmesh/core/node.py +101 -2
  44. sqlmesh/core/plan/builder.py +15 -10
  45. sqlmesh/core/plan/common.py +196 -2
  46. sqlmesh/core/plan/definition.py +21 -6
  47. sqlmesh/core/plan/evaluator.py +72 -113
  48. sqlmesh/core/plan/explainer.py +90 -8
  49. sqlmesh/core/plan/stages.py +42 -21
  50. sqlmesh/core/renderer.py +26 -18
  51. sqlmesh/core/scheduler.py +60 -19
  52. sqlmesh/core/selector.py +137 -9
  53. sqlmesh/core/signal.py +64 -1
  54. sqlmesh/core/snapshot/__init__.py +1 -0
  55. sqlmesh/core/snapshot/definition.py +109 -25
  56. sqlmesh/core/snapshot/evaluator.py +610 -50
  57. sqlmesh/core/state_sync/__init__.py +0 -1
  58. sqlmesh/core/state_sync/base.py +31 -27
  59. sqlmesh/core/state_sync/cache.py +12 -4
  60. sqlmesh/core/state_sync/common.py +216 -111
  61. sqlmesh/core/state_sync/db/facade.py +30 -15
  62. sqlmesh/core/state_sync/db/interval.py +27 -7
  63. sqlmesh/core/state_sync/db/migrator.py +14 -8
  64. sqlmesh/core/state_sync/db/snapshot.py +119 -87
  65. sqlmesh/core/table_diff.py +2 -2
  66. sqlmesh/core/test/definition.py +14 -9
  67. sqlmesh/core/test/discovery.py +4 -0
  68. sqlmesh/dbt/adapter.py +20 -11
  69. sqlmesh/dbt/basemodel.py +52 -41
  70. sqlmesh/dbt/builtin.py +27 -11
  71. sqlmesh/dbt/column.py +17 -5
  72. sqlmesh/dbt/common.py +4 -2
  73. sqlmesh/dbt/context.py +14 -1
  74. sqlmesh/dbt/loader.py +60 -8
  75. sqlmesh/dbt/manifest.py +136 -8
  76. sqlmesh/dbt/model.py +105 -25
  77. sqlmesh/dbt/package.py +16 -1
  78. sqlmesh/dbt/profile.py +3 -3
  79. sqlmesh/dbt/project.py +12 -7
  80. sqlmesh/dbt/seed.py +1 -1
  81. sqlmesh/dbt/source.py +6 -1
  82. sqlmesh/dbt/target.py +25 -6
  83. sqlmesh/dbt/test.py +31 -1
  84. sqlmesh/integrations/github/cicd/controller.py +6 -2
  85. sqlmesh/lsp/context.py +4 -2
  86. sqlmesh/magics.py +1 -1
  87. sqlmesh/migrations/v0000_baseline.py +3 -6
  88. sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +2 -5
  89. sqlmesh/migrations/v0062_add_model_gateway.py +2 -2
  90. sqlmesh/migrations/v0063_change_signals.py +2 -4
  91. sqlmesh/migrations/v0064_join_when_matched_strings.py +2 -4
  92. sqlmesh/migrations/v0065_add_model_optimize.py +2 -2
  93. sqlmesh/migrations/v0066_add_auto_restatements.py +2 -6
  94. sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +2 -2
  95. sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +2 -2
  96. sqlmesh/migrations/v0069_update_dev_table_suffix.py +2 -4
  97. sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +2 -2
  98. sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +2 -6
  99. sqlmesh/migrations/v0072_add_environment_statements.py +2 -4
  100. sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +2 -4
  101. sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +2 -2
  102. sqlmesh/migrations/v0075_remove_validate_query.py +2 -4
  103. sqlmesh/migrations/v0076_add_cron_tz.py +2 -2
  104. sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +2 -2
  105. sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +2 -4
  106. sqlmesh/migrations/v0079_add_gateway_managed_property.py +7 -9
  107. sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +2 -2
  108. sqlmesh/migrations/v0081_update_partitioned_by.py +2 -4
  109. sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +2 -4
  110. sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +2 -2
  111. sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +2 -2
  112. sqlmesh/migrations/v0085_deterministic_repr.py +2 -4
  113. sqlmesh/migrations/v0086_check_deterministic_bug.py +2 -4
  114. sqlmesh/migrations/v0087_normalize_blueprint_variables.py +2 -4
  115. sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +2 -4
  116. sqlmesh/migrations/v0089_add_virtual_environment_mode.py +2 -2
  117. sqlmesh/migrations/v0090_add_forward_only_column.py +2 -6
  118. sqlmesh/migrations/v0091_on_additive_change.py +2 -2
  119. sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +2 -4
  120. sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +2 -2
  121. sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +2 -6
  122. sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +2 -4
  123. sqlmesh/migrations/v0096_remove_plan_dags_table.py +2 -4
  124. sqlmesh/migrations/v0097_add_dbt_name_in_node.py +2 -2
  125. sqlmesh/migrations/v0098_add_dbt_node_info_in_node.py +103 -0
  126. sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
  127. sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
  128. sqlmesh/utils/__init__.py +8 -1
  129. sqlmesh/utils/cache.py +5 -1
  130. sqlmesh/utils/date.py +1 -1
  131. sqlmesh/utils/errors.py +4 -0
  132. sqlmesh/utils/git.py +3 -1
  133. sqlmesh/utils/jinja.py +25 -2
  134. sqlmesh/utils/pydantic.py +6 -6
  135. sqlmesh/utils/windows.py +13 -3
  136. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/METADATA +5 -5
  137. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/RECORD +188 -183
  138. sqlmesh_dbt/cli.py +70 -7
  139. sqlmesh_dbt/console.py +14 -6
  140. sqlmesh_dbt/operations.py +103 -24
  141. sqlmesh_dbt/selectors.py +39 -1
  142. web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
  143. web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
  144. web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
  145. web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
  146. web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
  147. web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
  148. web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
  149. web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
  150. web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
  151. web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
  152. web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
  153. web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
  154. web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
  155. web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
  156. web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
  157. web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
  158. web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
  159. web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
  160. web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
  161. web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
  162. web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
  163. web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
  164. web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
  165. web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
  166. web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
  167. web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
  168. web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
  169. web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
  170. web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
  171. web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
  172. web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
  173. web/client/dist/assets/context-BctCsyGb.js +71 -0
  174. web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
  175. web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
  176. web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
  177. web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
  178. web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
  179. web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
  180. web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
  181. web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
  182. web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
  183. web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
  184. web/client/dist/index.html +1 -1
  185. web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
  186. web/client/dist/assets/context-DgX0fp2E.js +0 -68
  187. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/WHEEL +0 -0
  188. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/entry_points.txt +0 -0
  189. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/licenses/LICENSE +0 -0
  190. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev20.dist-info}/top_level.txt +0 -0
sqlmesh/__init__.py CHANGED
@@ -126,6 +126,8 @@ def is_cicd_environment() -> bool:
 
 
 def is_interactive_environment() -> bool:
+    if sys.stdin is None or sys.stdout is None:
+        return False
     return sys.stdin.isatty() and sys.stdout.isatty()
 
 
@@ -186,6 +188,7 @@ def configure_logging(
     write_to_file: bool = True,
     log_file_dir: t.Optional[t.Union[str, Path]] = None,
     ignore_warnings: bool = False,
+    log_level: t.Optional[t.Union[str, int]] = None,
 ) -> None:
     # Remove noisy grpc logs that are not useful for users
     os.environ["GRPC_VERBOSITY"] = os.environ.get("GRPC_VERBOSITY", "NONE")
@@ -193,8 +196,15 @@ def configure_logging(
     logger = logging.getLogger()
     debug = force_debug or debug_mode_enabled()
 
-    # base logger needs to be the lowest level that we plan to log
-    level = logging.DEBUG if debug else logging.INFO
+    if log_level is not None:
+        if isinstance(log_level, str):
+            level = logging._nameToLevel.get(log_level.upper()) or logging.INFO
+        else:
+            level = log_level
+    else:
+        # base logger needs to be the lowest level that we plan to log
+        level = logging.DEBUG if debug else logging.INFO
+
     logger.setLevel(level)
 
     if debug:
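
Example (not part of the diff): a minimal usage sketch of the new `log_level` argument on `configure_logging`, assuming the 0.227.x wheel is installed; the level values shown are illustrative.

import logging

from sqlmesh import configure_logging

# Per the hunk above, a string is matched case-insensitively against logging's
# level names and falls back to INFO when unrecognized; an int is used as-is.
configure_logging(log_level="warning")  # equivalent to log_level=logging.WARNING
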
sqlmesh/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.217.1.dev1'
-__version_tuple__ = version_tuple = (0, 217, 1, 'dev1')
+__version__ = version = '0.227.2.dev20'
+__version_tuple__ = version_tuple = (0, 227, 2, 'dev20')
 
 __commit_id__ = commit_id = None
sqlmesh/cli/project_init.py CHANGED
@@ -116,7 +116,15 @@ linter:
     - invalidselectstarexpansion
     - noambiguousprojections
 """,
-    ProjectTemplate.DBT: f"""# --- Virtual Data Environment Mode ---
+    ProjectTemplate.DBT: f"""# --- DBT-specific options ---
+dbt:
+  # This configuration ensures that each dbt target gets its own isolated state.
+  # The inferred state schemas are named "sqlmesh_state_<profile name>_<target schema>", eg "sqlmesh_state_jaffle_shop_dev"
+  # If this is undesirable, you may manually configure the gateway to use a specific state schema name
+  # https://sqlmesh.readthedocs.io/en/stable/integrations/dbt/#selecting-a-different-state-connection
+  infer_state_schema_name: True
+
+# --- Virtual Data Environment Mode ---
 # Enable Virtual Data Environments (VDE) for *development* environments.
 # Note that the production environment in dbt projects is not virtual by default to maintain compatibility with existing tooling.
 # https://sqlmesh.readthedocs.io/en/stable/guides/configuration/#virtual-data-environment-modes
@@ -298,6 +306,7 @@ def init_example_project(
     dlt_path: t.Optional[str] = None,
     schema_name: str = "sqlmesh_example",
     cli_mode: InitCliMode = InitCliMode.DEFAULT,
+    start: t.Optional[str] = None,
 ) -> Path:
     root_path = Path(path)
 
@@ -336,7 +345,6 @@ def init_example_project(
 
     models: t.Set[t.Tuple[str, str]] = set()
     settings = None
-    start = None
     if engine_type and template == ProjectTemplate.DLT:
         project_dialect = dialect or DIALECT_TO_TYPE.get(engine_type)
         if pipeline and project_dialect:
sqlmesh/core/_typing.py CHANGED
@@ -11,6 +11,7 @@ if t.TYPE_CHECKING:
     SessionProperties = t.Dict[str, t.Union[exp.Expression, str, int, float, bool]]
     CustomMaterializationProperties = t.Dict[str, t.Union[exp.Expression, str, int, float, bool]]
 
+
 if sys.version_info >= (3, 11):
     from typing import Self as Self
 else:
sqlmesh/core/audit/definition.py CHANGED
@@ -19,7 +19,7 @@ from sqlmesh.core.model.common import (
     sorted_python_env_payloads,
 )
 from sqlmesh.core.model.common import make_python_env, single_value_or_tuple, ParsableSql
-from sqlmesh.core.node import _Node
+from sqlmesh.core.node import _Node, DbtInfoMixin, DbtNodeInfo
 from sqlmesh.core.renderer import QueryRenderer
 from sqlmesh.utils.date import TimeLike
 from sqlmesh.utils.errors import AuditConfigError, SQLMeshError, raise_config_error
@@ -120,7 +120,7 @@ def audit_map_validator(cls: t.Type, v: t.Any, values: t.Any) -> t.Dict[str, t.A
     return {}
 
 
-class ModelAudit(PydanticModel, AuditMixin, frozen=True):
+class ModelAudit(PydanticModel, AuditMixin, DbtInfoMixin, frozen=True):
     """
     Audit is an assertion made about your tables.
 
@@ -137,6 +137,7 @@ class ModelAudit(PydanticModel, AuditMixin, frozen=True):
     expressions_: t.Optional[t.List[ParsableSql]] = Field(default=None, alias="expressions")
     jinja_macros: JinjaMacroRegistry = JinjaMacroRegistry()
     formatting: t.Optional[bool] = Field(default=None, exclude=True)
+    dbt_node_info_: t.Optional[DbtNodeInfo] = Field(alias="dbt_node_info", default=None)
 
     _path: t.Optional[Path] = None
 
@@ -150,6 +151,10 @@ class ModelAudit(PydanticModel, AuditMixin, frozen=True):
         path = f": {self._path.name}" if self._path else ""
         return f"{self.__class__.__name__}<{self.name}{path}>"
 
+    @property
+    def dbt_node_info(self) -> t.Optional[DbtNodeInfo]:
+        return self.dbt_node_info_
+
 
 class StandaloneAudit(_Node, AuditMixin):
     """
@@ -552,4 +557,5 @@ META_FIELD_CONVERTER: t.Dict[str, t.Callable] = {
     "depends_on_": lambda value: exp.Tuple(expressions=sorted(value)),
     "tags": single_value_or_tuple,
     "default_catalog": exp.to_identifier,
+    "dbt_node_info_": lambda value: value.to_expression(),
 }
sqlmesh/core/config/__init__.py CHANGED
@@ -36,6 +36,6 @@ from sqlmesh.core.config.model import ModelDefaultsConfig as ModelDefaultsConfig
 from sqlmesh.core.config.naming import NameInferenceConfig as NameInferenceConfig
 from sqlmesh.core.config.linter import LinterConfig as LinterConfig
 from sqlmesh.core.config.plan import PlanConfig as PlanConfig
-from sqlmesh.core.config.root import Config as Config
+from sqlmesh.core.config.root import Config as Config, DbtConfig as DbtConfig
 from sqlmesh.core.config.run import RunConfig as RunConfig
 from sqlmesh.core.config.scheduler import BuiltInSchedulerConfig as BuiltInSchedulerConfig
sqlmesh/core/config/connection.py CHANGED
@@ -58,6 +58,7 @@ FORBIDDEN_STATE_SYNC_ENGINES = {
     "clickhouse",
 }
 MOTHERDUCK_TOKEN_REGEX = re.compile(r"(\?|\&)(motherduck_token=)(\S*)")
+PASSWORD_REGEX = re.compile(r"(password=)(\S+)")
 
 
 def _get_engine_import_validator(
@@ -101,6 +102,7 @@ class ConnectionConfig(abc.ABC, BaseConfig):
     pre_ping: bool
     pretty_sql: bool = False
     schema_differ_overrides: t.Optional[t.Dict[str, t.Any]] = None
+    catalog_type_overrides: t.Optional[t.Dict[str, str]] = None
 
     # Whether to share a single connection across threads or create a new connection per thread.
     shared_connection: t.ClassVar[bool] = False
@@ -176,6 +178,7 @@ class ConnectionConfig(abc.ABC, BaseConfig):
             pretty_sql=self.pretty_sql,
             shared_connection=self.shared_connection,
             schema_differ_overrides=self.schema_differ_overrides,
+            catalog_type_overrides=self.catalog_type_overrides,
             **self._extra_engine_config,
         )
 
@@ -235,6 +238,7 @@ class DuckDBAttachOptions(BaseConfig):
     data_path: t.Optional[str] = None
     encrypted: bool = False
     data_inlining_row_limit: t.Optional[int] = None
+    metadata_schema: t.Optional[str] = None
 
     def to_sql(self, alias: str) -> str:
         options = []
@@ -256,6 +260,8 @@ class DuckDBAttachOptions(BaseConfig):
             options.append("ENCRYPTED")
         if self.data_inlining_row_limit is not None:
             options.append(f"DATA_INLINING_ROW_LIMIT {self.data_inlining_row_limit}")
+        if self.metadata_schema is not None:
+            options.append(f"METADATA_SCHEMA '{self.metadata_schema}'")
 
         options_sql = f" ({', '.join(options)})" if options else ""
         alias_sql = ""
@@ -477,13 +483,13 @@ class BaseDuckDBConnectionConfig(ConnectionConfig):
         adapter = BaseDuckDBConnectionConfig._data_file_to_adapter.get(key)
         if adapter is not None:
             logger.info(
-                f"Using existing DuckDB adapter due to overlapping data file: {self._mask_motherduck_token(key)}"
+                f"Using existing DuckDB adapter due to overlapping data file: {self._mask_sensitive_data(key)}"
             )
             return adapter
 
         if data_files:
             masked_files = {
-                self._mask_motherduck_token(file if isinstance(file, str) else file.path)
+                self._mask_sensitive_data(file if isinstance(file, str) else file.path)
                 for file in data_files
             }
             logger.info(f"Creating new DuckDB adapter for data files: {masked_files}")
@@ -505,10 +511,14 @@ class BaseDuckDBConnectionConfig(ConnectionConfig):
             return list(self.catalogs)[0]
         return None
 
-    def _mask_motherduck_token(self, string: str) -> str:
-        return MOTHERDUCK_TOKEN_REGEX.sub(
-            lambda m: f"{m.group(1)}{m.group(2)}{'*' * len(m.group(3))}", string
+    def _mask_sensitive_data(self, string: str) -> str:
+        # Mask MotherDuck tokens with fixed number of asterisks
+        result = MOTHERDUCK_TOKEN_REGEX.sub(
+            lambda m: f"{m.group(1)}{m.group(2)}{'*' * 8 if m.group(3) else ''}", string
         )
+        # Mask PostgreSQL/MySQL passwords with fixed number of asterisks
+        result = PASSWORD_REGEX.sub(lambda m: f"{m.group(1)}{'*' * 8}", result)
+        return result
 
 
 class MotherDuckConnectionConfig(BaseDuckDBConnectionConfig):
@@ -1753,6 +1763,7 @@ class SparkConnectionConfig(ConnectionConfig):
     config_dir: t.Optional[str] = None
     catalog: t.Optional[str] = None
     config: t.Dict[str, t.Any] = {}
+    wap_enabled: bool = False
 
     concurrent_tasks: int = 4
     register_comments: bool = True
@@ -1799,6 +1810,10 @@ class SparkConnectionConfig(ConnectionConfig):
             .getOrCreate(),
         }
 
+    @property
+    def _extra_engine_config(self) -> t.Dict[str, t.Any]:
+        return {"wap_enabled": self.wap_enabled}
+
 
 class TrinoAuthenticationMethod(str, Enum):
     NO_AUTH = "no-auth"
sqlmesh/core/config/dbt.py ADDED
@@ -0,0 +1,13 @@
+from sqlmesh.core.config.base import BaseConfig
+
+
+class DbtConfig(BaseConfig):
+    """
+    Represents dbt-specific options on the SQLMesh root config.
+
+    These options are only taken into account for dbt projects and are ignored on native projects
+    """
+
+    infer_state_schema_name: bool = False
+    """If set, indicates to the dbt loader that the state schema should be inferred based on the profile/target
+    so that each target gets its own isolated state"""
sqlmesh/core/config/janitor.py CHANGED
@@ -1,7 +1,9 @@
 from __future__ import annotations
 
+import typing as t
 
 from sqlmesh.core.config.base import BaseConfig
+from sqlmesh.utils.pydantic import field_validator
 
 
 class JanitorConfig(BaseConfig):
@@ -9,6 +11,16 @@ class JanitorConfig(BaseConfig):
 
     Args:
         warn_on_delete_failure: Whether to warn instead of erroring if the janitor fails to delete the expired environment schema / views.
+        expired_snapshots_batch_size: Maximum number of expired snapshots to clean in a single batch.
     """
 
     warn_on_delete_failure: bool = False
+    expired_snapshots_batch_size: t.Optional[int] = None
+
+    @field_validator("expired_snapshots_batch_size", mode="before")
+    @classmethod
+    def _validate_batch_size(cls, value: int) -> int:
+        batch_size = int(value)
+        if batch_size <= 0:
+            raise ValueError("expired_snapshots_batch_size must be greater than 0")
+        return batch_size
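
Example (not part of the diff): the `expired_snapshots_batch_size` validator above follows the usual pydantic `mode="before"` pattern; the standalone sketch below mirrors it with plain pydantic v2 (the `JanitorSettings` name and the explicit `None` guard are illustrative, not SQLMesh's code).

import typing as t

from pydantic import BaseModel, ValidationError, field_validator


class JanitorSettings(BaseModel):
    warn_on_delete_failure: bool = False
    expired_snapshots_batch_size: t.Optional[int] = None

    @field_validator("expired_snapshots_batch_size", mode="before")
    @classmethod
    def _validate_batch_size(cls, value: t.Any) -> t.Optional[int]:
        if value is None:
            return None
        batch_size = int(value)
        if batch_size <= 0:
            raise ValueError("expired_snapshots_batch_size must be greater than 0")
        return batch_size


JanitorSettings(expired_snapshots_batch_size=500)  # accepted
try:
    JanitorSettings(expired_snapshots_batch_size=0)
except ValidationError as e:
    print(e)  # expired_snapshots_batch_size must be greater than 0
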
sqlmesh/core/config/loader.py CHANGED
@@ -172,11 +172,18 @@ def load_config_from_paths(
     if dbt_project_file:
         from sqlmesh.dbt.loader import sqlmesh_config
 
+        infer_state_schema_name = False
+        if dbt := non_python_config.dbt:
+            infer_state_schema_name = dbt.infer_state_schema_name
+
         dbt_python_config = sqlmesh_config(
             project_root=dbt_project_file.parent,
+            profiles_dir=kwargs.pop("profiles_dir", None),
             dbt_profile_name=kwargs.pop("profile", None),
             dbt_target_name=kwargs.pop("target", None),
             variables=variables,
+            threads=kwargs.pop("threads", None),
+            infer_state_schema_name=infer_state_schema_name,
         )
         if type(dbt_python_config) != config_type:
             dbt_python_config = convert_config_type(dbt_python_config, config_type)
sqlmesh/core/config/model.py CHANGED
@@ -45,6 +45,7 @@ class ModelDefaultsConfig(BaseConfig):
         allow_partials: Whether the models can process partial (incomplete) data intervals.
         enabled: Whether the models are enabled.
         interval_unit: The temporal granularity of the models data intervals. By default computed from cron.
+        batch_concurrency: The maximum number of batches that can run concurrently for an incremental model.
         pre_statements: The list of SQL statements that get executed before a model runs.
         post_statements: The list of SQL statements that get executed before a model runs.
         on_virtual_update: The list of SQL statements to be executed after the virtual update.
@@ -69,6 +70,7 @@ class ModelDefaultsConfig(BaseConfig):
     interval_unit: t.Optional[t.Union[str, IntervalUnit]] = None
     enabled: t.Optional[t.Union[str, bool]] = None
     formatting: t.Optional[t.Union[str, bool]] = None
+    batch_concurrency: t.Optional[int] = None
     pre_statements: t.Optional[t.List[t.Union[str, exp.Expression]]] = None
     post_statements: t.Optional[t.List[t.Union[str, exp.Expression]]] = None
     on_virtual_update: t.Optional[t.List[t.Union[str, exp.Expression]]] = None
sqlmesh/core/config/root.py CHANGED
@@ -36,6 +36,7 @@ from sqlmesh.core.config.naming import NameInferenceConfig as NameInferenceConfi
 from sqlmesh.core.config.linter import LinterConfig as LinterConfig
 from sqlmesh.core.config.plan import PlanConfig
 from sqlmesh.core.config.run import RunConfig
+from sqlmesh.core.config.dbt import DbtConfig
 from sqlmesh.core.config.scheduler import (
     BuiltInSchedulerConfig,
     SchedulerConfig,
@@ -173,6 +174,7 @@ class Config(BaseConfig):
     linter: LinterConfig = LinterConfig()
     janitor: JanitorConfig = JanitorConfig()
     cache_dir: t.Optional[str] = None
+    dbt: t.Optional[DbtConfig] = None
 
     _FIELD_UPDATE_STRATEGY: t.ClassVar[t.Dict[str, UpdateStrategy]] = {
         "gateways": UpdateStrategy.NESTED_UPDATE,
@@ -191,6 +193,7 @@ class Config(BaseConfig):
         "before_all": UpdateStrategy.EXTEND,
         "after_all": UpdateStrategy.EXTEND,
         "linter": UpdateStrategy.NESTED_UPDATE,
+        "dbt": UpdateStrategy.NESTED_UPDATE,
     }
 
     _connection_config_validator = connection_config_validator
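
Example (not part of the diff): a sketch of how the new configuration surface fits together programmatically, assuming the 0.227.x wheel and that the remaining `Config` defaults are acceptable for the project; in YAML this corresponds to top-level `dbt:` and `model_defaults:` blocks.

from sqlmesh.core.config import Config, DbtConfig, ModelDefaultsConfig

config = Config(
    # New in this diff: dbt-specific options nested under the root config.
    dbt=DbtConfig(infer_state_schema_name=True),
    # batch_concurrency can now also be set as a model default.
    model_defaults=ModelDefaultsConfig(dialect="duckdb", batch_concurrency=2),
)
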
sqlmesh/core/console.py CHANGED
@@ -551,6 +551,22 @@ class Console(
     def log_failed_models(self, errors: t.List[NodeExecutionFailedError]) -> None:
         """Display list of models that failed during evaluation to the user."""
 
+    @abc.abstractmethod
+    def log_models_updated_during_restatement(
+        self,
+        snapshots: t.List[t.Tuple[SnapshotTableInfo, SnapshotTableInfo]],
+        environment_naming_info: EnvironmentNamingInfo,
+        default_catalog: t.Optional[str],
+    ) -> None:
+        """Display a list of models where new versions got deployed to the specified :environment while we were restating data the old versions
+
+        Args:
+            snapshots: a list of (snapshot_we_restated, snapshot_it_got_replaced_with_during_restatement) tuples
+            environment: which environment got updated while we were restating models
+            environment_naming_info: how snapshots are named in that :environment (for display name purposes)
+            default_catalog: the configured default catalog (for display name purposes)
+        """
+
     @abc.abstractmethod
     def loading_start(self, message: t.Optional[str] = None) -> uuid.UUID:
         """Starts loading and returns a unique ID that can be used to stop the loading. Optionally can display a message."""
@@ -771,6 +787,14 @@ class NoopConsole(Console):
     def log_failed_models(self, errors: t.List[NodeExecutionFailedError]) -> None:
         pass
 
+    def log_models_updated_during_restatement(
+        self,
+        snapshots: t.List[t.Tuple[SnapshotTableInfo, SnapshotTableInfo]],
+        environment_naming_info: EnvironmentNamingInfo,
+        default_catalog: t.Optional[str],
+    ) -> None:
+        pass
+
     def log_destructive_change(
         self,
         snapshot_name: str,
@@ -1998,7 +2022,34 @@ class TerminalConsole(Console):
         plan = plan_builder.build()
 
         if plan.restatements:
-            self._print("\n[bold]Restating models\n")
+            # A plan can have restatements for the following reasons:
+            # - The user specifically called `sqlmesh plan` with --restate-model.
+            #   This creates a "restatement plan" which disallows all other changes and simply force-backfills
+            #   the selected models and their downstream dependencies using the versions of the models stored in state.
+            # - There are no specific restatements (so changes are allowed) AND dev previews need to be computed.
+            #   The "restatements" feature is currently reused for dev previews.
+            if plan.selected_models_to_restate:
+                # There were legitimate restatements, no dev previews
+                tree = Tree(
+                    "[bold]Models selected for restatement:[/bold]\n"
+                    "This causes backfill of the model itself as well as affected downstream models"
+                )
+                model_fqn_to_snapshot = {s.name: s for s in plan.snapshots.values()}
+                for model_fqn in plan.selected_models_to_restate:
+                    snapshot = model_fqn_to_snapshot[model_fqn]
+                    display_name = snapshot.display_name(
+                        plan.environment_naming_info,
+                        default_catalog if self.verbosity < Verbosity.VERY_VERBOSE else None,
+                        dialect=self.dialect,
+                    )
+                    tree.add(
+                        display_name
+                    )  # note: we deliberately dont show any intervals here; they get shown in the backfill section
+                self._print(tree)
+            else:
+                # We are computing dev previews, do not confuse the user by printing out something to do
+                # with restatements. Dev previews are already highlighted in the backfill step
+                pass
         else:
             self.show_environment_difference_summary(
                 plan.context_diff,
@@ -2225,6 +2276,30 @@ class TerminalConsole(Console):
         for node_name, msg in error_messages.items():
             self._print(f" [red]{node_name}[/red]\n\n{msg}")
 
+    def log_models_updated_during_restatement(
+        self,
+        snapshots: t.List[t.Tuple[SnapshotTableInfo, SnapshotTableInfo]],
+        environment_naming_info: EnvironmentNamingInfo,
+        default_catalog: t.Optional[str] = None,
+    ) -> None:
+        if snapshots:
+            tree = Tree(
+                f"[yellow]The following models had new versions deployed while data was being restated:[/yellow]"
+            )
+
+            for restated_snapshot, updated_snapshot in snapshots:
+                display_name = restated_snapshot.display_name(
+                    environment_naming_info,
+                    default_catalog if self.verbosity < Verbosity.VERY_VERBOSE else None,
+                    dialect=self.dialect,
+                )
+                current_branch = tree.add(display_name)
+                current_branch.add(f"restated version: '{restated_snapshot.version}'")
+                current_branch.add(f"currently active version: '{updated_snapshot.version}'")
+
+            self._print(tree)
+            self._print("")  # newline spacer
+
     def log_destructive_change(
         self,
         snapshot_name: str,
@@ -3566,7 +3641,10 @@ class MarkdownConsole(CaptureTerminalConsole):
         msg = f"\nLinter {severity} for `{model._path}`:\n{violations_msg}\n"
 
         self._print(msg)
-        self._errors.append(msg)
+        if is_error:
+            self._errors.append(msg)
+        else:
+            self._warnings.append(msg)
 
     @property
     def captured_warnings(self) -> str:
sqlmesh/core/constants.py CHANGED
@@ -8,7 +8,7 @@ from pathlib import Path
 
 SQLMESH = "sqlmesh"
 SQLMESH_MANAGED = "sqlmesh_managed"
-SQLMESH_PATH = Path.home() / ".sqlmesh"
+SQLMESH_PATH = Path(os.getenv("SQLMESH_HOME") or Path.home() / ".sqlmesh")
 
 PROD = "prod"
 """Prod"""