sqlmesh 0.225.1.dev26__py3-none-any.whl → 0.227.2.dev6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

sqlmesh/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID

-__version__ = version = '0.225.1.dev26'
-__version_tuple__ = version_tuple = (0, 225, 1, 'dev26')
+__version__ = version = '0.227.2.dev6'
+__version_tuple__ = version_tuple = (0, 227, 2, 'dev6')

 __commit_id__ = commit_id = None
sqlmesh/core/context.py CHANGED
@@ -115,6 +115,7 @@ from sqlmesh.core.test import (
     ModelTestMetadata,
     generate_test,
     run_tests,
+    filter_tests_by_patterns,
 )
 from sqlmesh.core.user import User
 from sqlmesh.utils import UniqueKeyDict, Verbosity
@@ -146,14 +147,16 @@ if t.TYPE_CHECKING:
     from typing_extensions import Literal

     from sqlmesh.core.engine_adapter._typing import (
-        BigframeSession,
         DF,
+        BigframeSession,
         PySparkDataFrame,
         PySparkSession,
         SnowparkSession,
     )
     from sqlmesh.core.snapshot import Node

+    from sqlmesh.core.snapshot.definition import Intervals
+
 ModelOrSnapshot = t.Union[str, Model, Snapshot]
 NodeOrSnapshot = t.Union[str, Model, StandaloneAudit, Snapshot]

@@ -276,6 +279,7 @@ class ExecutionContext(BaseContext):
         default_dialect: t.Optional[str] = None,
         default_catalog: t.Optional[str] = None,
         is_restatement: t.Optional[bool] = None,
+        parent_intervals: t.Optional[Intervals] = None,
         variables: t.Optional[t.Dict[str, t.Any]] = None,
         blueprint_variables: t.Optional[t.Dict[str, t.Any]] = None,
     ):
@@ -287,6 +291,7 @@ class ExecutionContext(BaseContext):
         self._variables = variables or {}
         self._blueprint_variables = blueprint_variables or {}
         self._is_restatement = is_restatement
+        self._parent_intervals = parent_intervals

     @property
     def default_dialect(self) -> t.Optional[str]:
@@ -315,6 +320,10 @@ class ExecutionContext(BaseContext):
     def is_restatement(self) -> t.Optional[bool]:
         return self._is_restatement

+    @property
+    def parent_intervals(self) -> t.Optional[Intervals]:
+        return self._parent_intervals
+
     def var(self, var_name: str, default: t.Optional[t.Any] = None) -> t.Optional[t.Any]:
         """Returns a variable value."""
         return self._variables.get(var_name.lower(), default)
@@ -390,6 +399,10 @@ class GenericContext(BaseContext, t.Generic[C]):
         self._standalone_audits: UniqueKeyDict[str, StandaloneAudit] = UniqueKeyDict(
             "standaloneaudits"
         )
+        self._model_test_metadata: t.List[ModelTestMetadata] = []
+        self._model_test_metadata_path_index: t.Dict[Path, t.List[ModelTestMetadata]] = {}
+        self._model_test_metadata_fully_qualified_name_index: t.Dict[str, ModelTestMetadata] = {}
+        self._models_with_tests: t.Set[str] = set()
         self._macros: UniqueKeyDict[str, ExecutableOrMacro] = UniqueKeyDict("macros")
         self._metrics: UniqueKeyDict[str, Metric] = UniqueKeyDict("metrics")
         self._jinja_macros = JinjaMacroRegistry()
@@ -628,6 +641,10 @@ class GenericContext(BaseContext, t.Generic[C]):
         self._excluded_requirements.clear()
         self._linters.clear()
         self._environment_statements = []
+        self._model_test_metadata.clear()
+        self._model_test_metadata_path_index.clear()
+        self._model_test_metadata_fully_qualified_name_index.clear()
+        self._models_with_tests.clear()

         for loader, project in zip(self._loaders, loaded_projects):
             self._jinja_macros = self._jinja_macros.merge(project.jinja_macros)
@@ -639,6 +656,15 @@ class GenericContext(BaseContext, t.Generic[C]):
             self._requirements.update(project.requirements)
             self._excluded_requirements.update(project.excluded_requirements)
             self._environment_statements.extend(project.environment_statements)
+            self._model_test_metadata.extend(project.model_test_metadata)
+            for metadata in project.model_test_metadata:
+                if metadata.path not in self._model_test_metadata_path_index:
+                    self._model_test_metadata_path_index[metadata.path] = []
+                self._model_test_metadata_path_index[metadata.path].append(metadata)
+                self._model_test_metadata_fully_qualified_name_index[
+                    metadata.fully_qualified_test_name
+                ] = metadata
+                self._models_with_tests.add(metadata.model_name)

             config = loader.config
             self._linters[config.project] = Linter.from_rules(
@@ -1041,6 +1067,11 @@ class GenericContext(BaseContext, t.Generic[C]):
         """Returns all registered standalone audits in this context."""
         return MappingProxyType(self._standalone_audits)

+    @property
+    def models_with_tests(self) -> t.Set[str]:
+        """Returns all models with tests in this context."""
+        return self._models_with_tests
+
     @property
     def snapshots(self) -> t.Dict[str, Snapshot]:
         """Generates and returns snapshots based on models registered in this context.
@@ -2212,7 +2243,9 @@ class GenericContext(BaseContext, t.Generic[C]):

         pd.set_option("display.max_columns", None)

-        test_meta = self.load_model_tests(tests=tests, patterns=match_patterns)
+        test_meta = self._select_tests(
+            test_meta=self._model_test_metadata, tests=tests, patterns=match_patterns
+        )

         result = run_tests(
             model_test_metadata=test_meta,
@@ -2271,6 +2304,7 @@ class GenericContext(BaseContext, t.Generic[C]):
             snapshot=snapshot,
             start=start,
             end=end,
+            execution_time=execution_time,
             snapshots=self.snapshots,
         ):
             audit_id = f"{audit_result.audit.name}"
@@ -2773,6 +2807,33 @@ class GenericContext(BaseContext, t.Generic[C]):
             raise SQLMeshError(f"Gateway '{gateway}' not found in the available engine adapters.")
         return self.engine_adapter

+    def _select_tests(
+        self,
+        test_meta: t.List[ModelTestMetadata],
+        tests: t.Optional[t.List[str]] = None,
+        patterns: t.Optional[t.List[str]] = None,
+    ) -> t.List[ModelTestMetadata]:
+        """Filter pre-loaded test metadata based on tests and patterns."""
+
+        if tests:
+            filtered_tests = []
+            for test in tests:
+                if "::" in test:
+                    if test in self._model_test_metadata_fully_qualified_name_index:
+                        filtered_tests.append(
+                            self._model_test_metadata_fully_qualified_name_index[test]
+                        )
+                else:
+                    test_path = Path(test)
+                    if test_path in self._model_test_metadata_path_index:
+                        filtered_tests.extend(self._model_test_metadata_path_index[test_path])
+            test_meta = filtered_tests
+
+        if patterns:
+            test_meta = filter_tests_by_patterns(test_meta, patterns)
+
+        return test_meta
+
     def _snapshots(
         self, models_override: t.Optional[UniqueKeyDict[str, Model]] = None
     ) -> t.Dict[str, Snapshot]:
sqlmesh/core/engine_adapter/fabric.py CHANGED
@@ -7,7 +7,6 @@ import time
 from functools import cached_property
 from sqlglot import exp
 from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_result
-from sqlmesh.core.engine_adapter.mixins import LogicalMergeMixin
 from sqlmesh.core.engine_adapter.mssql import MSSQLEngineAdapter
 from sqlmesh.core.engine_adapter.shared import (
     InsertOverwriteStrategy,
@@ -19,7 +18,7 @@ from sqlmesh.utils.connection_pool import ConnectionPool
 logger = logging.getLogger(__name__)


-class FabricEngineAdapter(LogicalMergeMixin, MSSQLEngineAdapter):
+class FabricEngineAdapter(MSSQLEngineAdapter):
     """
     Adapter for Microsoft Fabric.
     """
sqlmesh/core/lineage.py CHANGED
@@ -66,6 +66,7 @@ def lineage(
         scope=scope,
         trim_selects=trim_selects,
         dialect=model.dialect,
+        copy=False,
     )

sqlmesh/core/linter/rules/builtin.py CHANGED
@@ -129,6 +129,21 @@ class NoMissingAudits(Rule):
             return self.violation()


+class NoMissingUnitTest(Rule):
+    """All models must have a unit test found in the test/ directory yaml files"""
+
+    def check_model(self, model: Model) -> t.Optional[RuleViolation]:
+        # External models cannot have unit tests
+        if isinstance(model, ExternalModel):
+            return None
+
+        if model.name not in self.context.models_with_tests:
+            return self.violation(
+                violation_msg=f"Model {model.name} is missing unit test(s). Please add in the tests/ directory."
+            )
+        return None
+
+
 class NoMissingExternalModels(Rule):
     """All external models must be registered in the external_models.yaml file"""

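The built-in rule above reads the new models_with_tests property added to the context in this release. A user-defined rule could follow the same shape; as a minimal sketch (the Rule, RuleViolation, and Model import paths are assumptions based on the modules listed in this wheel, and the rule itself is purely illustrative):

import typing as t

from sqlmesh.core.linter.rule import Rule, RuleViolation
from sqlmesh.core.model import Model


class NoUppercaseModelNames(Rule):
    """Illustrative rule: model names should be lowercase."""

    def check_model(self, model: Model) -> t.Optional[RuleViolation]:
        # Flag any model whose name contains uppercase characters.
        if model.name != model.name.lower():
            return self.violation(
                violation_msg=f"Model {model.name} should use a lowercase name."
            )
        return None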
sqlmesh/core/loader.py CHANGED
@@ -64,6 +64,7 @@ class LoadedProject:
     excluded_requirements: t.Set[str]
     environment_statements: t.List[EnvironmentStatements]
     user_rules: RuleSet
+    model_test_metadata: t.List[ModelTestMetadata]


 class CacheBase(abc.ABC):
@@ -243,6 +244,8 @@ class Loader(abc.ABC):

         user_rules = self._load_linting_rules()

+        model_test_metadata = self.load_model_tests()
+
         project = LoadedProject(
             macros=macros,
             jinja_macros=jinja_macros,
@@ -254,6 +257,7 @@ class Loader(abc.ABC):
             excluded_requirements=excluded_requirements,
             environment_statements=environment_statements,
             user_rules=user_rules,
+            model_test_metadata=model_test_metadata,
         )
         return project

sqlmesh/core/plan/definition.py CHANGED
@@ -63,7 +63,7 @@
     restatements: t.Dict[SnapshotId, Interval]
     """
     All models being restated, which are typically the explicitly selected ones + their downstream dependencies.
-
+
     Note that dev previews are also considered restatements, so :selected_models_to_restate can be empty
     while :restatements is still populated with dev previews
     """
@@ -213,8 +213,8 @@ class Plan(PydanticModel, frozen=True):

         snapshots_by_name = self.context_diff.snapshots_by_name
         snapshots = [s.table_info for s in self.snapshots.values()]
-        promoted_snapshot_ids = None
-        if self.is_dev and not self.include_unmodified:
+        promotable_snapshot_ids = None
+        if self.is_dev:
             if self.selected_models_to_backfill is not None:
                 # Only promote models that have been explicitly selected for backfill.
                 promotable_snapshot_ids = {
@@ -225,12 +225,14 @@ class Plan(PydanticModel, frozen=True):
                         if m in snapshots_by_name
                     ],
                 }
-            else:
+            elif not self.include_unmodified:
                 promotable_snapshot_ids = self.context_diff.promotable_snapshot_ids.copy()

-        promoted_snapshot_ids = [
-            s.snapshot_id for s in snapshots if s.snapshot_id in promotable_snapshot_ids
-        ]
+        promoted_snapshot_ids = (
+            [s.snapshot_id for s in snapshots if s.snapshot_id in promotable_snapshot_ids]
+            if promotable_snapshot_ids is not None
+            else None
+        )

         previous_finalized_snapshots = (
             self.context_diff.environment_snapshots
sqlmesh/core/renderer.py CHANGED
@@ -196,7 +196,14 @@ class BaseExpressionRenderer:
             **kwargs,
         }

+        if this_model:
+            render_kwargs["this_model"] = this_model
+
+        macro_evaluator.locals.update(render_kwargs)
+
         variables = kwargs.pop("variables", {})
+        if variables:
+            macro_evaluator.locals.setdefault(c.SQLMESH_VARS, {}).update(variables)

         expressions = [self._expression]
         if isinstance(self._expression, d.Jinja):
@@ -268,14 +275,6 @@ class BaseExpressionRenderer:
                     f"Could not parse the rendered jinja at '{self._path}'.\n{ex}"
                 ) from ex

-        if this_model:
-            render_kwargs["this_model"] = this_model
-
-        macro_evaluator.locals.update(render_kwargs)
-
-        if variables:
-            macro_evaluator.locals.setdefault(c.SQLMESH_VARS, {}).update(variables)
-
         for definition in self._macro_definitions:
             try:
                 macro_evaluator.evaluate(definition)
sqlmesh/core/scheduler.py CHANGED
@@ -352,7 +352,7 @@ class Scheduler:
             )
             for snapshot, intervals in merged_intervals.items()
         }
-        snapshot_batches = {}
+        snapshot_batches: t.Dict[Snapshot, Intervals] = {}
         all_unready_intervals: t.Dict[str, set[Interval]] = {}
         for snapshot_id in dag:
             if snapshot_id not in snapshot_intervals:
@@ -364,6 +364,14 @@ class Scheduler:

             adapter = self.snapshot_evaluator.get_adapter(snapshot.model_gateway)

+            parent_intervals: Intervals = []
+            for parent_id in snapshot.parents:
+                parent_snapshot, _ = snapshot_intervals.get(parent_id, (None, []))
+                if not parent_snapshot or parent_snapshot.is_external:
+                    continue
+
+                parent_intervals.extend(snapshot_batches[parent_snapshot])
+
             context = ExecutionContext(
                 adapter,
                 self.snapshots_by_name,
@@ -371,6 +379,7 @@ class Scheduler:
                 default_dialect=adapter.dialect,
                 default_catalog=self.default_catalog,
                 is_restatement=is_restatement,
+                parent_intervals=parent_intervals,
             )

             intervals = self._check_ready_intervals(
@@ -538,6 +547,10 @@ class Scheduler:
                     execution_time=execution_time,
                 )
             else:
+                # If batch_index > 0, then the target table must exist since the first batch would have created it
+                target_table_exists = (
+                    snapshot.snapshot_id not in snapshots_to_create or node.batch_index > 0
+                )
                 audit_results = self.evaluate(
                     snapshot=snapshot,
                     environment_naming_info=environment_naming_info,
@@ -548,7 +561,7 @@ class Scheduler:
                     batch_index=node.batch_index,
                     allow_destructive_snapshots=allow_destructive_snapshots,
                     allow_additive_snapshots=allow_additive_snapshots,
-                    target_table_exists=snapshot.snapshot_id not in snapshots_to_create,
+                    target_table_exists=target_table_exists,
                     selected_models=selected_models,
                 )
@@ -646,6 +659,7 @@ class Scheduler:
         }
         snapshots_to_create = snapshots_to_create or set()
         original_snapshots_to_create = snapshots_to_create.copy()
+        upstream_dependencies_cache: t.Dict[SnapshotId, t.Set[SchedulingUnit]] = {}

         snapshot_dag = snapshot_dag or snapshots_to_dag(batches)
         dag = DAG[SchedulingUnit]()
@@ -657,12 +671,15 @@ class Scheduler:
             snapshot = self.snapshots_by_name[snapshot_id.name]
             intervals = intervals_per_snapshot.get(snapshot.name, [])

-            upstream_dependencies: t.List[SchedulingUnit] = []
+            upstream_dependencies: t.Set[SchedulingUnit] = set()

             for p_sid in snapshot.parents:
-                upstream_dependencies.extend(
+                upstream_dependencies.update(
                     self._find_upstream_dependencies(
-                        p_sid, intervals_per_snapshot, original_snapshots_to_create
+                        p_sid,
+                        intervals_per_snapshot,
+                        original_snapshots_to_create,
+                        upstream_dependencies_cache,
                     )
                 )
@@ -713,29 +730,42 @@ class Scheduler:
         parent_sid: SnapshotId,
         intervals_per_snapshot: t.Dict[str, Intervals],
         snapshots_to_create: t.Set[SnapshotId],
-    ) -> t.List[SchedulingUnit]:
+        cache: t.Dict[SnapshotId, t.Set[SchedulingUnit]],
+    ) -> t.Set[SchedulingUnit]:
         if parent_sid not in self.snapshots:
-            return []
+            return set()
+        if parent_sid in cache:
+            return cache[parent_sid]

         p_intervals = intervals_per_snapshot.get(parent_sid.name, [])

+        parent_node: t.Optional[SchedulingUnit] = None
         if p_intervals:
             if len(p_intervals) > 1:
-                return [DummyNode(snapshot_name=parent_sid.name)]
-            interval = p_intervals[0]
-            return [EvaluateNode(snapshot_name=parent_sid.name, interval=interval, batch_index=0)]
-        if parent_sid in snapshots_to_create:
-            return [CreateNode(snapshot_name=parent_sid.name)]
+                parent_node = DummyNode(snapshot_name=parent_sid.name)
+            else:
+                interval = p_intervals[0]
+                parent_node = EvaluateNode(
+                    snapshot_name=parent_sid.name, interval=interval, batch_index=0
+                )
+        elif parent_sid in snapshots_to_create:
+            parent_node = CreateNode(snapshot_name=parent_sid.name)
+
+        if parent_node is not None:
+            cache[parent_sid] = {parent_node}
+            return {parent_node}
+
         # This snapshot has no intervals and doesn't need creation which means
         # that it can be a transitive dependency
-        transitive_deps: t.List[SchedulingUnit] = []
+        transitive_deps: t.Set[SchedulingUnit] = set()
         parent_snapshot = self.snapshots[parent_sid]
         for grandparent_sid in parent_snapshot.parents:
-            transitive_deps.extend(
+            transitive_deps.update(
                 self._find_upstream_dependencies(
-                    grandparent_sid, intervals_per_snapshot, snapshots_to_create
+                    grandparent_sid, intervals_per_snapshot, snapshots_to_create, cache
                 )
             )
+        cache[parent_sid] = transitive_deps
         return transitive_deps

     def _run_or_audit(
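The cache threaded through _find_upstream_dependencies above means each parent snapshot is resolved at most once, so diamond-shaped DAGs no longer re-traverse shared ancestors. The pattern in isolation, as a minimal sketch with generic names (not SQLMesh API):

import typing as t


def transitive_deps(
    node: str,
    parents: t.Dict[str, t.List[str]],
    cache: t.Dict[str, t.Set[str]],
) -> t.Set[str]:
    # Memoized transitive-dependency walk: the cache entry for a node is
    # written once and reused on every later visit, giving O(V + E) overall.
    if node in cache:
        return cache[node]
    deps: t.Set[str] = set()
    for parent in parents.get(node, []):
        deps.add(parent)
        deps.update(transitive_deps(parent, parents, cache))
    cache[node] = deps
    return deps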
sqlmesh/core/signal.py CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations

 import typing as t
 from sqlmesh.utils import UniqueKeyDict, registry_decorator
+from sqlmesh.utils.errors import MissingSourceError

 if t.TYPE_CHECKING:
     from sqlmesh.core.context import ExecutionContext
@@ -42,7 +43,16 @@ SignalRegistry = UniqueKeyDict[str, signal]


 @signal()
-def freshness(batch: DatetimeRanges, snapshot: Snapshot, context: ExecutionContext) -> bool:
+def freshness(
+    batch: DatetimeRanges,
+    snapshot: Snapshot,
+    context: ExecutionContext,
+) -> bool:
+    """
+    Implements model freshness as a signal, i.e it considers this model to be fresh if:
+    - Any upstream SQLMesh model has available intervals to compute i.e is fresh
+    - Any upstream external model has been altered since the last time the model was evaluated
+    """
     adapter = context.engine_adapter
     if context.is_restatement or not adapter.SUPPORTS_METADATA_TABLE_LAST_MODIFIED_TS:
         return True
@@ -54,24 +64,35 @@ def freshness(batch: DatetimeRanges, snapshot: Snapshot, context: ExecutionConte
         if deployability_index.is_deployable(snapshot)
         else snapshot.dev_last_altered_ts
     )
+
     if not last_altered_ts:
         return True

     parent_snapshots = {context.snapshots[p.name] for p in snapshot.parents}
-    if len(parent_snapshots) != len(snapshot.node.depends_on) or not all(
-        p.is_external for p in parent_snapshots
-    ):
-        # The mismatch can happen if e.g an external model is not registered in the project
+
+    upstream_parent_snapshots = {p for p in parent_snapshots if not p.is_external}
+    external_parents = snapshot.node.depends_on - {p.name for p in upstream_parent_snapshots}
+
+    if context.parent_intervals:
+        # At least one upstream sqlmesh model has intervals to compute (i.e is fresh),
+        # so the current model is considered fresh too
         return True

-    # Finding new data means that the upstream depedencies have been altered
-    # since the last time the model was evaluated
-    upstream_dep_has_new_data = any(
-        upstream_last_altered_ts > last_altered_ts
-        for upstream_last_altered_ts in adapter.get_table_last_modified_ts(
-            [p.name for p in parent_snapshots]
+    if external_parents:
+        external_last_altered_timestamps = adapter.get_table_last_modified_ts(
+            list(external_parents)
+        )
+
+        if len(external_last_altered_timestamps) != len(external_parents):
+            raise MissingSourceError(
+                f"Expected {len(external_parents)} sources to be present, but got {len(external_last_altered_timestamps)}."
+            )
+
+        # Finding new data means that the upstream depedencies have been altered
+        # since the last time the model was evaluated
+        return any(
+            external_last_altered_ts > last_altered_ts
+            for external_last_altered_ts in external_last_altered_timestamps
         )
-    )

-    # Returning true is a no-op, returning False nullifies the batch so the model will not be evaluated.
-    return upstream_dep_has_new_data
+    return False
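Because ExecutionContext.parent_intervals is now populated by the scheduler (see scheduler.py above), user-defined signals can react to upstream work scheduled in the same run, just as the rewritten freshness signal does. A minimal sketch assuming the same decorator and argument names as this file; the type-only import paths are assumptions:

from __future__ import annotations

import typing as t

from sqlmesh.core.signal import signal

if t.TYPE_CHECKING:
    from sqlmesh.core.context import ExecutionContext
    from sqlmesh.core.snapshot import Snapshot
    from sqlmesh.utils.date import DatetimeRanges


@signal()
def upstream_has_work(
    batch: DatetimeRanges,
    snapshot: Snapshot,
    context: ExecutionContext,
) -> bool:
    # Treat the model as ready only when at least one non-external parent
    # has intervals scheduled in the same scheduler run.
    return bool(context.parent_intervals)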
sqlmesh/core/snapshot/definition.py CHANGED
@@ -2081,16 +2081,20 @@ def missing_intervals(
             continue
         snapshot_end_date = existing_interval_end

+        snapshot_start_date = max(
+            to_datetime(snapshot_start_date),
+            to_datetime(start_date(snapshot, snapshots, cache, relative_to=snapshot_end_date)),
+        )
+        if snapshot_start_date > to_datetime(snapshot_end_date):
+            continue
+
         missing_interval_end_date = snapshot_end_date
         node_end_date = snapshot.node.end
         if node_end_date and (to_datetime(node_end_date) < to_datetime(snapshot_end_date)):
             missing_interval_end_date = node_end_date

         intervals = snapshot.missing_intervals(
-            max(
-                to_datetime(snapshot_start_date),
-                to_datetime(start_date(snapshot, snapshots, cache, relative_to=snapshot_end_date)),
-            ),
+            snapshot_start_date,
             missing_interval_end_date,
             execution_time=execution_time,
             deployability_index=deployability_index,
@@ -2295,14 +2299,16 @@ def start_date(
     if not isinstance(snapshots, dict):
         snapshots = {snapshot.snapshot_id: snapshot for snapshot in snapshots}

-    earliest = snapshot.node.cron_prev(snapshot.node.cron_floor(relative_to or now()))
-
-    for parent in snapshot.parents:
-        if parent in snapshots:
-            earliest = min(
-                earliest,
-                start_date(snapshots[parent], snapshots, cache=cache, relative_to=relative_to),
-            )
+    parent_starts = [
+        start_date(snapshots[parent], snapshots, cache=cache, relative_to=relative_to)
+        for parent in snapshot.parents
+        if parent in snapshots
+    ]
+    earliest = (
+        min(parent_starts)
+        if parent_starts
+        else snapshot.node.cron_prev(snapshot.node.cron_floor(relative_to or now()))
+    )

     cache[key] = earliest
     return earliest
sqlmesh/core/snapshot/evaluator.py CHANGED
@@ -1021,6 +1021,11 @@ class SnapshotEvaluator:
         ):
             import pandas as pd

+            try:
+                first_query_or_df = next(queries_or_dfs)
+            except StopIteration:
+                return
+
             query_or_df = reduce(
                 lambda a, b: (
                     pd.concat([a, b], ignore_index=True)  # type: ignore
@@ -1028,6 +1033,7 @@ class SnapshotEvaluator:
                     else a.union_all(b)  # type: ignore
                 ),  # type: ignore
                 queries_or_dfs,
+                first_query_or_df,
             )
             apply(query_or_df, index=0)
         else:
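The early next() guards against a classic functools.reduce pitfall: without an initializer, reduce raises TypeError on an empty iterator, and the consumed first element doubles as the initial value. The pattern in isolation:

from functools import reduce


def concat_all(chunks):
    # reduce() with no initializer raises TypeError on an empty iterator,
    # so peek at the first element and pass it as the initial value.
    iterator = iter(chunks)
    try:
        first = next(iterator)
    except StopIteration:
        return None  # nothing to combine
    return reduce(lambda a, b: a + b, iterator, first)


assert concat_all([]) is None
assert concat_all([[1], [2, 3]]) == [1, 2, 3]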
@@ -1593,14 +1599,14 @@ class SnapshotEvaluator:
         tables_by_gateway_and_schema: t.Dict[t.Union[str, None], t.Dict[exp.Table, set[str]]] = (
             defaultdict(lambda: defaultdict(set))
         )
-        snapshots_by_table_name: t.Dict[str, Snapshot] = {}
+        snapshots_by_table_name: t.Dict[exp.Table, t.Dict[str, Snapshot]] = defaultdict(dict)
         for snapshot in target_snapshots:
             if not snapshot.is_model or snapshot.is_symbolic:
                 continue
             table = table_name_callable(snapshot)
             table_schema = d.schema_(table.db, catalog=table.catalog)
             tables_by_gateway_and_schema[snapshot.model_gateway][table_schema].add(table.name)
-            snapshots_by_table_name[table.name] = snapshot
+            snapshots_by_table_name[table_schema][table.name] = snapshot

         def _get_data_objects_in_schema(
             schema: exp.Table,
@@ -1613,23 +1619,25 @@ class SnapshotEvaluator:
         )

         with self.concurrent_context():
-            existing_objects: t.List[DataObject] = []
+            snapshot_id_to_obj: t.Dict[SnapshotId, DataObject] = {}
             # A schema can be shared across multiple engines, so we need to group tables by both gateway and schema
             for gateway, tables_by_schema in tables_by_gateway_and_schema.items():
-                objs_for_gateway = [
-                    obj
-                    for objs in concurrent_apply_to_values(
-                        list(tables_by_schema),
-                        lambda s: _get_data_objects_in_schema(
-                            schema=s, object_names=tables_by_schema.get(s), gateway=gateway
-                        ),
-                        self.ddl_concurrent_tasks,
-                    )
-                    for obj in objs
-                ]
-                existing_objects.extend(objs_for_gateway)
+                schema_list = list(tables_by_schema.keys())
+                results = concurrent_apply_to_values(
+                    schema_list,
+                    lambda s: _get_data_objects_in_schema(
+                        schema=s, object_names=tables_by_schema.get(s), gateway=gateway
+                    ),
+                    self.ddl_concurrent_tasks,
+                )
+
+                for schema, objs in zip(schema_list, results):
+                    snapshots_by_name = snapshots_by_table_name.get(schema, {})
+                    for obj in objs:
+                        if obj.name in snapshots_by_name:
+                            snapshot_id_to_obj[snapshots_by_name[obj.name].snapshot_id] = obj

-        return {snapshots_by_table_name[obj.name].snapshot_id: obj for obj in existing_objects}
+        return snapshot_id_to_obj


 def _evaluation_strategy(snapshot: SnapshotInfoLike, adapter: EngineAdapter) -> EvaluationStrategy:
sqlmesh/core/test/definition.py CHANGED
@@ -807,7 +807,7 @@ class PythonModelTest(ModelTest):
         actual_df.reset_index(drop=True, inplace=True)
         expected = self._create_df(values, columns=self.model.columns_to_types, partial=partial)

-        self.assert_equal(expected, actual_df, sort=False, partial=partial)
+        self.assert_equal(expected, actual_df, sort=True, partial=partial)

     def _execute_model(self) -> pd.DataFrame:
         """Executes the python model and returns a DataFrame."""
@@ -925,8 +925,7 @@ def generate_test(
         cte_output = test._execute(cte_query)
         ctes[cte.alias] = (
             pandas_timestamp_to_pydatetime(
-                cte_output.apply(lambda col: col.map(_normalize_df_value)),
-                cte_query.named_selects,
+                df=cte_output.apply(lambda col: col.map(_normalize_df_value)),
             )
             .replace({np.nan: None})
             .to_dict(orient="records")
sqlmesh/core/test/discovery.py CHANGED
@@ -20,6 +20,10 @@ class ModelTestMetadata(PydanticModel):
     def fully_qualified_test_name(self) -> str:
         return f"{self.path}::{self.test_name}"

+    @property
+    def model_name(self) -> str:
+        return self.body.get("model", "")
+
     def __hash__(self) -> int:
         return self.fully_qualified_test_name.__hash__()

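Together with fully_qualified_test_name, the new model_name property feeds the models_with_tests index built in context.py above. A small usage sketch, assuming ModelTestMetadata is constructed from the path, test_name, and body fields referenced in this diff (the example values are hypothetical):

from pathlib import Path

from sqlmesh.core.test import ModelTestMetadata

meta = ModelTestMetadata(
    path=Path("tests/test_orders.yaml"),
    test_name="test_full_model",
    body={"model": "sushi.orders"},
)

print(meta.fully_qualified_test_name)  # tests/test_orders.yaml::test_full_model
print(meta.model_name)                 # sushi.orders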
sqlmesh/dbt/common.py CHANGED
@@ -46,7 +46,9 @@ def load_yaml(source: str | Path) -> t.Dict:
         raise ConfigError(f"{source}: {ex}" if isinstance(source, Path) else f"{ex}")


-def parse_meta(v: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+def parse_meta(v: t.Optional[t.Dict[str, t.Any]]) -> t.Dict[str, t.Any]:
+    if v is None:
+        return {}
     for key, value in v.items():
         if isinstance(value, str):
             v[key] = try_str_to_bool(value)
@@ -115,7 +117,7 @@ class GeneralConfig(DbtConfig):

     @field_validator("meta", mode="before")
     @classmethod
-    def _validate_meta(cls, v: t.Dict[str, t.Union[str, t.Any]]) -> t.Dict[str, t.Any]:
+    def _validate_meta(cls, v: t.Optional[t.Dict[str, t.Union[str, t.Any]]]) -> t.Dict[str, t.Any]:
         return parse_meta(v)

     _FIELD_UPDATE_STRATEGY: t.ClassVar[t.Dict[str, UpdateStrategy]] = {
sqlmesh/dbt/manifest.py CHANGED
@@ -11,7 +11,7 @@ from collections import defaultdict
 from functools import cached_property
 from pathlib import Path

-from dbt import constants as dbt_constants, flags
+from dbt import flags

 from sqlmesh.dbt.util import DBT_VERSION
 from sqlmesh.utils.conversions import make_serializable
@@ -19,6 +19,8 @@ from sqlmesh.utils.conversions import make_serializable
 # Override the file name to prevent dbt commands from invalidating the cache.

 if DBT_VERSION >= (1, 6, 0):
+    from dbt import constants as dbt_constants
+
     dbt_constants.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack"  # type: ignore
 else:
     from dbt.parser import manifest as dbt_manifest  # type: ignore
sqlmesh/utils/date.py CHANGED
@@ -444,7 +444,7 @@ def to_time_column(


 def pandas_timestamp_to_pydatetime(
-    df: pd.DataFrame, columns_to_types: t.Optional[t.Dict[str, exp.DataType]]
+    df: pd.DataFrame, columns_to_types: t.Optional[t.Dict[str, exp.DataType]] = None
 ) -> pd.DataFrame:
     import pandas as pd
     from pandas.api.types import is_datetime64_any_dtype  # type: ignore
sqlmesh-0.225.1.dev26.dist-info/METADATA → sqlmesh-0.227.2.dev6.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlmesh
-Version: 0.225.1.dev26
+Version: 0.227.2.dev6
 Summary: Next-generation data transformation framework
 Author-email: "TobikoData Inc." <engineering@tobikodata.com>
 License: Apache License
@@ -235,7 +235,7 @@ Requires-Dist: python-dotenv
 Requires-Dist: requests
 Requires-Dist: rich[jupyter]
 Requires-Dist: ruamel.yaml
-Requires-Dist: sqlglot[rs]~=27.27.0
+Requires-Dist: sqlglot[rs]~=27.28.0
 Requires-Dist: tenacity
 Requires-Dist: time-machine
 Requires-Dist: json-stream
sqlmesh-0.225.1.dev26.dist-info/RECORD → sqlmesh-0.227.2.dev6.dist-info/RECORD RENAMED
@@ -1,5 +1,5 @@
 sqlmesh/__init__.py,sha256=v_spqQEhcnGaahp1yPvMqUIa6mhH3cs3Bc1CznxvCEA,7965
-sqlmesh/_version.py,sha256=V3C0DJOSEV4FynqTwn1KlC_CQzC0rQHXenalgKige3A,723
+sqlmesh/_version.py,sha256=YAFaI4FGLTdonGqqV9W5lAFyj9t7owRNsu0qHTS5QAc,721
 sqlmesh/magics.py,sha256=xLh3u4eqpVrKRVN5KF3X84RPRqjygAB9AJP1TXwH8hg,42086
 sqlmesh/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/cicd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -13,23 +13,23 @@ sqlmesh/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/core/_typing.py,sha256=PzXxMYnORq18JhblAOUttms3zPJZzZpIbfFA_jgKYPA,498
 sqlmesh/core/console.py,sha256=MYpVlciUY6rUuoqXyKfXTxD6a4-Bw4-ooATUTj_VHGg,172830
 sqlmesh/core/constants.py,sha256=BuQk43vluUm7LfP9nKp5o9qRhqIenWF_LiLXO_t_53c,2699
-sqlmesh/core/context.py,sha256=dgtgIabNtdhVPsQ-tUY5lwK-SnCQxqDdRNY6f3TVMgY,130941
+sqlmesh/core/context.py,sha256=PZcI06NldePYMDkvsMuZf8vGreDk7dzjnzX5rZ422AM,133656
 sqlmesh/core/context_diff.py,sha256=mxkJu0IthFMOlaQ_kcq5C09mlgkq2RQb-pG2rd-x_nA,21648
 sqlmesh/core/dialect.py,sha256=CnKcPj6BnREfu9Zn1OyS7hZ3ktnaX03ygOg91nADlTU,53029
 sqlmesh/core/environment.py,sha256=Kgs_gUEUI072mh0JJFWNRynrCxp1TzRHZhX_NWJRfXc,13142
 sqlmesh/core/janitor.py,sha256=zJRN48ENjKexeiqa1Kmwyj_HsEEEIAa8hsFD8gTCmfg,7194
-sqlmesh/core/lineage.py,sha256=zjB0Zfamo2Fja2r5SSZPMbrEKTXr1WjozZPVqvxdybI,3143
-sqlmesh/core/loader.py,sha256=2u91WKnXWHbAmyo9mluXHhSNwhe4r35QHv031S4OXjU,37291
+sqlmesh/core/lineage.py,sha256=LtiOztX1xIbFfWz-eb5dPZW4B0o2sI942_IM4YDbsso,3163
+sqlmesh/core/loader.py,sha256=sXGTeyDISd3Gtu9Ej9iEz1CMM4SXSwZMSBpbZNohE10,37458
 sqlmesh/core/macros.py,sha256=rkklwVnUEmEro4wpdel289mKhaS3x5_SPZrkYZt3Q9E,63173
 sqlmesh/core/node.py,sha256=2ejDwH1whl_ic1CRzX16Be-FQrosAf8pdyWb7oPzU6M,19895
 sqlmesh/core/notification_target.py,sha256=PPGoDrgbRKxr27vJEu03XqNTQLYTw0ZF_b0yAapxGeI,16158
 sqlmesh/core/reference.py,sha256=k7OSkLqTjPR8WJjNeFj0xAJ297nZUMgb_iTVwKRRKjc,4875
-sqlmesh/core/renderer.py,sha256=JZuoydTwK0voU5sH70jhULjo5_x41uTEFKydtT-JRCg,28855
-sqlmesh/core/scheduler.py,sha256=ELxPXCji11XZotcnkMaaCmBy-VNqlCy2Vdx4SVIJfYg,49305
+sqlmesh/core/renderer.py,sha256=z1WbRaNnBUZAWqc5gYurIgd4LocKQOexdjKQ0hhbLfE,28854
+sqlmesh/core/scheduler.py,sha256=seYDDtowupyyK_xgjqDLZ5CACyktKCojmAjj97Tg0ts,50629
 sqlmesh/core/schema_diff.py,sha256=qM4uxOBtrAqx8_5JU0ERicMT-byLD4xUUv4FrQw92js,33934
 sqlmesh/core/schema_loader.py,sha256=_Pq2RSw91uthqv1vNi_eHmLlzhtGz_APMJ0wAJZYuvk,3677
 sqlmesh/core/selector.py,sha256=gb8NpDXO-yxzxAB4Rl5yRkirWZyouV9V9d9AC1Lfzjg,18030
-sqlmesh/core/signal.py,sha256=adHUCx3m36XlRGFWSktFN8aLJU9AyrC-JCoFjH_fYZk,2991
+sqlmesh/core/signal.py,sha256=RPyQNSCLyr2sybRK3wj6iWwukpwF-R0w9divnPwjJlM,3692
 sqlmesh/core/table_diff.py,sha256=oKLVaBs5HhpWFQUHimcNB4jDPvFJCCM360N3yQqle5g,28872
 sqlmesh/core/user.py,sha256=EJ6R4R1iK67n80vBoCCsidF56IR7xEYqiCEO-nrVMso,1660
 sqlmesh/core/analytics/__init__.py,sha256=ou3ZXAJfQOXEifj-PzaXwMDSvJzsVaqaMkUopiI00kM,3247
@@ -66,7 +66,7 @@ sqlmesh/core/engine_adapter/bigquery.py,sha256=edBWbAbeXA4bOtVG-YNTQbt9qqwL9QFff
 sqlmesh/core/engine_adapter/clickhouse.py,sha256=GWGpwdxZd4RqLSAMlOHjtO8nPpSIo3zFeRWnj9eSOrM,36072
 sqlmesh/core/engine_adapter/databricks.py,sha256=452Og5LriNtvXk0DElUGmoR_pUFQvBgNZchpprTIJxA,15846
 sqlmesh/core/engine_adapter/duckdb.py,sha256=9AXeRhaYXBcYSmIavyFY9LUzfgh94qkTO98v0-suQ8I,7993
-sqlmesh/core/engine_adapter/fabric.py,sha256=V5Wx2Htt94nvXXVAKFjnLHqN0WIaPlS87mYYxQs0GGo,14256
+sqlmesh/core/engine_adapter/fabric.py,sha256=wky02p3UVu0FvEZwqqb5XBW--XCc1JLMLrvY6TVqCdM,14172
 sqlmesh/core/engine_adapter/mixins.py,sha256=3rB7B2PZSB920BODO7k_kKqu6z0N-zj1etiRCYzpUcQ,27096
 sqlmesh/core/engine_adapter/mssql.py,sha256=pqh6D_7eAeVCH6K4-81HPcNTLEPhTM_-Mou0QWBTOfA,18898
 sqlmesh/core/engine_adapter/mysql.py,sha256=anKxdklYY2kiuxaHsC7FPN-LKzo7BP0Hy6hinA_c5Hg,6953
@@ -82,7 +82,7 @@ sqlmesh/core/linter/definition.py,sha256=1EOhKdF16jmeqISfcrR-8fzMdgXuxpB7wb3Qaep
 sqlmesh/core/linter/helpers.py,sha256=cwKXP4sL6azRtNVGbMfJ5_6Hqq5Xx2M2rRLCgH3Y3ag,10743
 sqlmesh/core/linter/rule.py,sha256=nB3o1rHyN44ZOg5ImICP16SeUHimf-12ObdXJjkTGyM,3964
 sqlmesh/core/linter/rules/__init__.py,sha256=gevzfb67vFqckTCoVAe_TBGf6hQ-YtE1_YuGuXyh1L0,77
-sqlmesh/core/linter/rules/builtin.py,sha256=C6_cAeZEmZphl9SjGUFMsHGrDKNjxQ5Y6g66eO8HCZg,11177
+sqlmesh/core/linter/rules/builtin.py,sha256=mJjRrL97mTyqPgrUGk-1Ceml6ATCxElZVgiwi6WFqmU,11727
 sqlmesh/core/metric/__init__.py,sha256=H1HmoD5IwN4YWe9iJXyueLYNmTQFZwok5nSWNJcZIBQ,237
 sqlmesh/core/metric/definition.py,sha256=Yd5aVgsZCDPJ43aGP7WqtzZOuuSUtB8uJGVA6Jw9x9M,7201
 sqlmesh/core/metric/rewriter.py,sha256=GiSTHfn2kinqCfNPYgZPRk93JFLzVaaejHtHDQ0yXZI,7326
@@ -98,15 +98,15 @@ sqlmesh/core/model/seed.py,sha256=a0M-1zY1gOkN5ph2GQyataEdBSCtq50YjeFk2LyvInI,50
 sqlmesh/core/plan/__init__.py,sha256=NKSvM7ZBVjw9ho3J65M1wFvG3KURB8PJ0FHHLmtSF44,443
 sqlmesh/core/plan/builder.py,sha256=naiEWF_x3vUpcVjzaif1HMsKPtJLvabdnB4WsCskdf8,44245
 sqlmesh/core/plan/common.py,sha256=GEu7eXIfX7MM7d8-1znYlVnF1UbRZkBSDXLoMbjsieY,10716
-sqlmesh/core/plan/definition.py,sha256=YPpp7ABu77UznIHjhcPiDi7vxLyhj4II9CED_cQ19g4,15225
+sqlmesh/core/plan/definition.py,sha256=tfddMilgk4CZN2SljFotFlttsT9nBQ50kwiNDRonxXw,15282
 sqlmesh/core/plan/evaluator.py,sha256=twO9cHznTNAtPNC4IZcw9uhCxGl2yIywfePfmJKkymE,21114
 sqlmesh/core/plan/explainer.py,sha256=UITln7f4vxf6-nx7mV_IBbtIZ4f8ob4TyooqZMB9Pqg,15442
 sqlmesh/core/plan/stages.py,sha256=-Ju9yRQlEFmQoDIsDH_RO0EHdOlRZUVtVT9ag1gzLns,27491
 sqlmesh/core/snapshot/__init__.py,sha256=NUhvP-glftOWwxONK79Bud93yNQJv8ApBUjkV35RhMY,1465
 sqlmesh/core/snapshot/cache.py,sha256=bgqCR2hyf6r2A_8QP1EnXFK25gDX37-Zg0YeMuETWxg,3934
 sqlmesh/core/snapshot/categorizer.py,sha256=iNBEqK2KIyTAYURlB9KLfyKCpXN7vjxSqA7QjFa7e5c,2418
-sqlmesh/core/snapshot/definition.py,sha256=EHVRyXD58OxB-0fcx_ff9d-TVkdvoXD7NheewbpRQto,96542
-sqlmesh/core/snapshot/evaluator.py,sha256=IOqnV-0etMaIhYaQ0qZvYORyVr9mHdpAKDx8ru8HJhc,132807
+sqlmesh/core/snapshot/definition.py,sha256=ZjjeiFLglG6zOusjzgaKOWSr_X_77JlMmvHK0C8d6Ms,96692
+sqlmesh/core/snapshot/evaluator.py,sha256=ALO9bfzU9TxtNr1AdMCpnM1iJ_TJmpZKOJyO4UghRKc,133153
 sqlmesh/core/snapshot/execution_tracker.py,sha256=Ss1oYgH28Fy1mQ4HriX-luE9MG0eLdecrE1SssUveQI,3651
 sqlmesh/core/state_sync/__init__.py,sha256=vcm3p_e0scP_ZxOs3XPKPG3uPsaxrK_4pnNj0QueDwQ,779
 sqlmesh/core/state_sync/base.py,sha256=nK5tq5cIT5x5NrTaTurCRX18bSHnhSjEWG20tVqlkZc,19340
@@ -123,8 +123,8 @@ sqlmesh/core/state_sync/db/utils.py,sha256=8KjRmOjP5CLuSRkYBUE2k34V-UYB0iSyuO0rW
 sqlmesh/core/state_sync/db/version.py,sha256=q5VDIIvY-585vTbvqPalU0N4qjG6RKs4gr8a51R-_UE,2257
 sqlmesh/core/test/__init__.py,sha256=e83TJPwPRR_rAG29Y0OVbZb-5oWVBzz-_wrcd22Qk10,418
 sqlmesh/core/test/context.py,sha256=-TjUrhM3WLtVPBgOMTkvRrnuZq7mT7BeIIyuCbrPePU,2332
-sqlmesh/core/test/definition.py,sha256=MRyTs3WPSW6HVynfNcKDeu224EtI3Api3RLdqaJwx84,42266
-sqlmesh/core/test/discovery.py,sha256=WXIwPidLoacAGHoT_3TVFFEKamG89nHNtaMsZ01uGjs,1188
+sqlmesh/core/test/definition.py,sha256=Lfflu-qgkqkI7T977F4h4X7c5Co7i3uBt5Efsi4XaZE,42219
+sqlmesh/core/test/discovery.py,sha256=5duKXgH4Lms7rXhJ8tOLCmCtqHpv7c7a4VJf12VkGw8,1278
 sqlmesh/core/test/result.py,sha256=6gOKEsERciHhcrw9TedtNr7g1ynTO7UwA5-PPrzvYuM,4564
 sqlmesh/core/test/runner.py,sha256=8I-cL7Q9CggLvET_GPkrXB2YjlyCIHrvbFbbRDnSHRE,6169
 sqlmesh/dbt/__init__.py,sha256=KUv-lW5sG9D2ceXAIzA4MLcjyhzq3E-7qJP4P_PH2EU,144
@@ -132,10 +132,10 @@ sqlmesh/dbt/adapter.py,sha256=z-tFIj3rpVvdBr3y8l40FU531-TQ5H2ctLmjwzMBxwk,21321
 sqlmesh/dbt/basemodel.py,sha256=oUr_Em-TjQbpYZS5gtvMA65JRTdnZM46NO9MWvLBLzQ,14860
 sqlmesh/dbt/builtin.py,sha256=hJwLdVs3Qe_AFUIa0ZMnktblpdkGGaq20nFUJEf3B_I,19752
 sqlmesh/dbt/column.py,sha256=T5xEWNf0n1sZ3REWnc5D9RsXt5VrrZ1YlMWZUUuAUxo,2449
-sqlmesh/dbt/common.py,sha256=AezWUEoGDISQj9eOo1Z5kLsXz3oRFb0LikTMEaAX3J8,8593
+sqlmesh/dbt/common.py,sha256=RmabUrj2A25G1vy7iV-15NJ481L5qHAQnq-JVNYEQr0,8653
 sqlmesh/dbt/context.py,sha256=JDfSkVBBV2Xi4nDOwWipVHJRll3ioEmvh7gBglPVvqM,11074
 sqlmesh/dbt/loader.py,sha256=ZTpPFnXuf4hQ8Z7Z6oMzxqN2wMMxsQqhm2x-8a5R1AA,19269
-sqlmesh/dbt/manifest.py,sha256=aea8FaSnMbnjF1JfJx0hDRvg-cEwLCkS207dlf_NCvg,34613
+sqlmesh/dbt/manifest.py,sha256=uwXiXnhjoXVZeRa7eTp1eqUYrw_6VQNOqquozJy_FOo,34633
 sqlmesh/dbt/model.py,sha256=RcQw3Dz2o4zC8vBYPCkMB8MKkn3MEUS6Ns3uQmACkeQ,35435
 sqlmesh/dbt/package.py,sha256=8MOq_kHP2qjj24bpoC3GPnHlOVLYO4V9oVb9krk1Mdk,4759
 sqlmesh/dbt/profile.py,sha256=ilDiSqBqw6lsJLUu4MfJSrIkvtC3fbxlvawKn44lHjc,4009
@@ -224,7 +224,7 @@ sqlmesh/utils/connection_pool.py,sha256=pKiO3MLPM-EDAkKNDirLsVOTdmST9BnP15CTLVKA
 sqlmesh/utils/conversions.py,sha256=U1i9QRzcTc_rswt7N4KeAfeRM0MHEzDezNUD_A7BFJc,758
 sqlmesh/utils/cron.py,sha256=eGwn4iUeiRoQzwcd9eS2TZkut8nR4yWud77N7xQ9CQ0,1829
 sqlmesh/utils/dag.py,sha256=5Sec50yY-UBEpLU82_nzaL7Wlalwf7K8EvLL8sBs2Z8,9049
-sqlmesh/utils/date.py,sha256=vED-JXYXN9h36-c0_zkB7HZzfIkj7g1Vcc6uCgsv99w,16453
+sqlmesh/utils/date.py,sha256=m0NHAqSQYqZnvuNHVk9RNEktiE_LbyqcO_O0SVxcGrw,16460
 sqlmesh/utils/errors.py,sha256=rktXVSd4R3tii7_k_pnex05ZXS7QnlFx1np1u-pjSSU,8000
 sqlmesh/utils/git.py,sha256=v1MD4Zwn52UBn_tTfoKn8SAPJVGC6SEFrJ4WEJFbgF0,1868
 sqlmesh/utils/hashing.py,sha256=nZRKvLNQ83tLG4IoXshVJZf-MbDrXC1HOeNw8Ji-tMM,578
@@ -238,7 +238,7 @@ sqlmesh/utils/pydantic.py,sha256=-yppkVlw6iSBaSiKjbe7OChxL-u3urOS4-KCjJEgsRU,120
 sqlmesh/utils/rich.py,sha256=cwQ5nJ6sgz64xHtoh6_ec7ReV5YpsOGhMtUJnwoRfEI,3549
 sqlmesh/utils/windows.py,sha256=0F9RdpuuCoG5NiEDXvWlAGCiJ-59OjSAmgFF5wW05aY,1133
 sqlmesh/utils/yaml.py,sha256=KFBd7hsKNRTtRudGR7d410qUYffQv0EWRcDM8hVNNZg,3025
-sqlmesh-0.225.1.dev26.dist-info/licenses/LICENSE,sha256=OlMefUjgWJdULtf84BLW0AZZcY8DwdgQqb_1j2862j8,11346
+sqlmesh-0.227.2.dev6.dist-info/licenses/LICENSE,sha256=OlMefUjgWJdULtf84BLW0AZZcY8DwdgQqb_1j2862j8,11346
 sqlmesh_dbt/__init__.py,sha256=awYS5y5mz-1NUmx6i5h5NSTJ7tidRl9NC0FAnFWSF6U,350
 sqlmesh_dbt/cli.py,sha256=p9foHjAW9ni7BTOJ2loynk47M0Sf43QIJZRggOzF5tc,6351
 sqlmesh_dbt/console.py,sha256=RwWLYnEZHzn9Xp-e2gbZvkdKbWbBLN146geI84mJitg,1132
@@ -363,8 +363,8 @@ web/server/api/endpoints/models.py,sha256=kwj0s7uve3iZSMfmjkoPVMFMeY1sD0peTeyrWf
 web/server/api/endpoints/modules.py,sha256=8hqqgonGay_mJmpCw0IdbjsPhWlQH2VLdKAqha-myac,468
 web/server/api/endpoints/plan.py,sha256=bbbY50W_2MsZSTxOHWMKz0tbIm75nsRSlPy8GI2fg9Q,9306
 web/server/api/endpoints/table_diff.py,sha256=8XTwgOh6QBbNy_hTM1JuHgRjbnie-pGPrphiW-FNLjQ,6058
-sqlmesh-0.225.1.dev26.dist-info/METADATA,sha256=WOpjGHcFWeYdCXnTG36EY9U7IUKku7UJ6ckI6t05ndA,26686
-sqlmesh-0.225.1.dev26.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-sqlmesh-0.225.1.dev26.dist-info/entry_points.txt,sha256=sHAf6tQczIM8xZoduN4qaUjV7QEPVUUW_LCT8EDUMv4,155
-sqlmesh-0.225.1.dev26.dist-info/top_level.txt,sha256=RQ-33FPe2IgL0rgossAfJkCRtqslz9b7wFARqiWLC5Q,24
-sqlmesh-0.225.1.dev26.dist-info/RECORD,,
+sqlmesh-0.227.2.dev6.dist-info/METADATA,sha256=ROcHstdXOc9MFy57XBnmYS9iS9bhfpc7tLui43oGMMg,26685
+sqlmesh-0.227.2.dev6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+sqlmesh-0.227.2.dev6.dist-info/entry_points.txt,sha256=sHAf6tQczIM8xZoduN4qaUjV7QEPVUUW_LCT8EDUMv4,155
+sqlmesh-0.227.2.dev6.dist-info/top_level.txt,sha256=RQ-33FPe2IgL0rgossAfJkCRtqslz9b7wFARqiWLC5Q,24
+sqlmesh-0.227.2.dev6.dist-info/RECORD,,