sqlmesh 0.227.2.dev4__py3-none-any.whl → 0.227.2.dev20__py3-none-any.whl
This diff shows the changes between two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in those public registries.
- sqlmesh/_version.py +2 -2
- sqlmesh/core/config/connection.py +3 -0
- sqlmesh/core/context.py +51 -10
- sqlmesh/core/engine_adapter/base.py +8 -1
- sqlmesh/core/engine_adapter/databricks.py +17 -0
- sqlmesh/core/engine_adapter/fabric.py +109 -0
- sqlmesh/core/engine_adapter/trino.py +1 -0
- sqlmesh/core/linter/rules/builtin.py +15 -0
- sqlmesh/core/loader.py +17 -30
- sqlmesh/core/model/definition.py +7 -0
- sqlmesh/core/test/discovery.py +4 -0
- sqlmesh/integrations/github/cicd/controller.py +6 -2
- sqlmesh/lsp/context.py +4 -2
- sqlmesh/magics.py +1 -1
- sqlmesh/utils/git.py +3 -1
- {sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/METADATA +1 -1
- {sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/RECORD +21 -21
- {sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/WHEEL +0 -0
- {sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/entry_points.txt +0 -0
- {sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/licenses/LICENSE +0 -0
- {sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/top_level.txt +0 -0
sqlmesh/_version.py
CHANGED

@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.227.2.dev4'
-__version_tuple__ = version_tuple = (0, 227, 2, 'dev4')
+__version__ = version = '0.227.2.dev20'
+__version_tuple__ = version_tuple = (0, 227, 2, 'dev20')
 
 __commit_id__ = commit_id = None

sqlmesh/core/config/connection.py
CHANGED

@@ -238,6 +238,7 @@ class DuckDBAttachOptions(BaseConfig):
     data_path: t.Optional[str] = None
     encrypted: bool = False
     data_inlining_row_limit: t.Optional[int] = None
+    metadata_schema: t.Optional[str] = None
 
     def to_sql(self, alias: str) -> str:
         options = []
@@ -259,6 +260,8 @@ class DuckDBAttachOptions(BaseConfig):
             options.append("ENCRYPTED")
         if self.data_inlining_row_limit is not None:
            options.append(f"DATA_INLINING_ROW_LIMIT {self.data_inlining_row_limit}")
+        if self.metadata_schema is not None:
+            options.append(f"METADATA_SCHEMA '{self.metadata_schema}'")
 
         options_sql = f" ({', '.join(options)})" if options else ""
         alias_sql = ""

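The new metadata_schema option on DuckDBAttachOptions is rendered into the generated ATTACH clause. Below is a minimal, self-contained sketch of that rendering; the class and field names are simplified stand-ins, not the sqlmesh API.

from dataclasses import dataclass
from typing import Optional

@dataclass
class AttachOptionsSketch:
    # Simplified stand-in for DuckDBAttachOptions.
    path: str
    read_only: bool = False
    metadata_schema: Optional[str] = None

    def to_sql(self, alias: str) -> str:
        options = []
        if self.read_only:
            options.append("READ_ONLY")
        if self.metadata_schema is not None:
            options.append(f"METADATA_SCHEMA '{self.metadata_schema}'")
        options_sql = f" ({', '.join(options)})" if options else ""
        return f"ATTACH '{self.path}' AS {alias}{options_sql}"

print(AttachOptionsSketch("analytics.duckdb", metadata_schema="meta").to_sql("analytics"))
# ATTACH 'analytics.duckdb' AS analytics (METADATA_SCHEMA 'meta')
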
sqlmesh/core/context.py
CHANGED

@@ -115,6 +115,7 @@ from sqlmesh.core.test import (
     ModelTestMetadata,
     generate_test,
     run_tests,
+    filter_tests_by_patterns,
 )
 from sqlmesh.core.user import User
 from sqlmesh.utils import UniqueKeyDict, Verbosity
@@ -398,6 +399,11 @@ class GenericContext(BaseContext, t.Generic[C]):
         self._standalone_audits: UniqueKeyDict[str, StandaloneAudit] = UniqueKeyDict(
             "standaloneaudits"
         )
+        self._model_test_metadata: t.List[ModelTestMetadata] = []
+        self._model_test_metadata_path_index: t.Dict[Path, t.List[ModelTestMetadata]] = {}
+        self._model_test_metadata_fully_qualified_name_index: t.Dict[str, ModelTestMetadata] = {}
+        self._models_with_tests: t.Set[str] = set()
+
         self._macros: UniqueKeyDict[str, ExecutableOrMacro] = UniqueKeyDict("macros")
         self._metrics: UniqueKeyDict[str, Metric] = UniqueKeyDict("metrics")
         self._jinja_macros = JinjaMacroRegistry()
@@ -636,6 +642,10 @@ class GenericContext(BaseContext, t.Generic[C]):
         self._excluded_requirements.clear()
         self._linters.clear()
         self._environment_statements = []
+        self._model_test_metadata.clear()
+        self._model_test_metadata_path_index.clear()
+        self._model_test_metadata_fully_qualified_name_index.clear()
+        self._models_with_tests.clear()
 
         for loader, project in zip(self._loaders, loaded_projects):
             self._jinja_macros = self._jinja_macros.merge(project.jinja_macros)
@@ -648,6 +658,16 @@ class GenericContext(BaseContext, t.Generic[C]):
             self._excluded_requirements.update(project.excluded_requirements)
             self._environment_statements.extend(project.environment_statements)
 
+            self._model_test_metadata.extend(project.model_test_metadata)
+            for metadata in project.model_test_metadata:
+                if metadata.path not in self._model_test_metadata_path_index:
+                    self._model_test_metadata_path_index[metadata.path] = []
+                self._model_test_metadata_path_index[metadata.path].append(metadata)
+                self._model_test_metadata_fully_qualified_name_index[
+                    metadata.fully_qualified_test_name
+                ] = metadata
+                self._models_with_tests.add(metadata.model_name)
+
             config = loader.config
             self._linters[config.project] = Linter.from_rules(
                 BUILTIN_RULES.union(project.user_rules), config.linter
@@ -1049,6 +1069,11 @@ class GenericContext(BaseContext, t.Generic[C]):
         """Returns all registered standalone audits in this context."""
         return MappingProxyType(self._standalone_audits)
 
+    @property
+    def models_with_tests(self) -> t.Set[str]:
+        """Returns all models with tests in this context."""
+        return self._models_with_tests
+
     @property
     def snapshots(self) -> t.Dict[str, Snapshot]:
         """Generates and returns snapshots based on models registered in this context.
@@ -2220,7 +2245,7 @@ class GenericContext(BaseContext, t.Generic[C]):
 
         pd.set_option("display.max_columns", None)
 
-        test_meta = self.
+        test_meta = self.select_tests(tests=tests, patterns=match_patterns)
 
         result = run_tests(
             model_test_metadata=test_meta,
@@ -3193,18 +3218,34 @@ class GenericContext(BaseContext, t.Generic[C]):
 
         return all_violations
 
-    def
-        self,
+    def select_tests(
+        self,
+        tests: t.Optional[t.List[str]] = None,
+        patterns: t.Optional[t.List[str]] = None,
     ) -> t.List[ModelTestMetadata]:
-
-
+        """Filter pre-loaded test metadata based on tests and patterns."""
+
+        test_meta = self._model_test_metadata
+
+        if tests:
+            filtered_tests = []
+            for test in tests:
+                if "::" in test:
+                    if test in self._model_test_metadata_fully_qualified_name_index:
+                        filtered_tests.append(
+                            self._model_test_metadata_fully_qualified_name_index[test]
+                        )
+                else:
+                    test_path = Path(test)
+                    if test_path in self._model_test_metadata_path_index:
+                        filtered_tests.extend(self._model_test_metadata_path_index[test_path])
+
+            test_meta = filtered_tests
 
-
-            model_tests.extend(loader.load_model_tests(tests=tests, patterns=patterns))
+        if patterns:
+            test_meta = filter_tests_by_patterns(test_meta, patterns)
 
-        return
+        return test_meta
 
 
 class Context(GenericContext[Config]):

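The new Context.select_tests filters test metadata that was indexed at load time: a selector containing "::" is treated as a fully qualified test name (path::test_name), anything else as a test file path, and the result can be narrowed further by patterns. A rough standalone sketch of that selection logic, with plain dictionaries standing in for ModelTestMetadata:

import typing as t
from pathlib import Path

def select_tests_sketch(
    all_tests: t.List[dict],
    tests: t.Optional[t.List[str]] = None,
) -> t.List[dict]:
    # Indexes analogous to the ones built while loading the project.
    by_name = {f"{m['path']}::{m['test_name']}": m for m in all_tests}
    by_path: t.Dict[Path, t.List[dict]] = {}
    for m in all_tests:
        by_path.setdefault(Path(m["path"]), []).append(m)

    if not tests:
        return all_tests

    selected: t.List[dict] = []
    for selector in tests:
        if "::" in selector:
            if selector in by_name:
                selected.append(by_name[selector])
        elif Path(selector) in by_path:
            selected.extend(by_path[Path(selector)])
    return selected

meta = [{"path": "tests/test_orders.yaml", "test_name": "test_full_model"}]
print(select_tests_sketch(meta, ["tests/test_orders.yaml::test_full_model"]))
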
sqlmesh/core/engine_adapter/base.py
CHANGED

@@ -811,6 +811,7 @@ class EngineAdapter:
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         expressions: t.Optional[t.List[exp.PrimaryKey]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> exp.Schema:
         """
         Build a schema expression for a table, columns, column comments, and additional schema properties.
@@ -823,6 +824,7 @@ class EngineAdapter:
                 target_columns_to_types=target_columns_to_types,
                 column_descriptions=column_descriptions,
                 is_view=is_view,
+                materialized=materialized,
             )
             + expressions,
         )
@@ -832,6 +834,7 @@ class EngineAdapter:
         target_columns_to_types: t.Dict[str, exp.DataType],
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> t.List[exp.ColumnDef]:
         engine_supports_schema_comments = (
             self.COMMENT_CREATION_VIEW.supports_schema_def
@@ -1260,7 +1263,11 @@ class EngineAdapter:
         schema: t.Union[exp.Table, exp.Schema] = exp.to_table(view_name)
         if target_columns_to_types:
             schema = self._build_schema_exp(
-                exp.to_table(view_name),
+                exp.to_table(view_name),
+                target_columns_to_types,
+                column_descriptions,
+                is_view=True,
+                materialized=materialized,
             )
 
         properties = create_kwargs.pop("properties", None)

sqlmesh/core/engine_adapter/databricks.py
CHANGED

@@ -394,3 +394,20 @@ class DatabricksEngineAdapter(SparkEngineAdapter, GrantsFromInfoSchemaMixin):
             expressions.append(clustered_by_exp)
         properties = exp.Properties(expressions=expressions)
         return properties
+
+    def _build_column_defs(
+        self,
+        target_columns_to_types: t.Dict[str, exp.DataType],
+        column_descriptions: t.Optional[t.Dict[str, str]] = None,
+        is_view: bool = False,
+        materialized: bool = False,
+    ) -> t.List[exp.ColumnDef]:
+        # Databricks requires column types to be specified when adding column comments
+        # in CREATE MATERIALIZED VIEW statements. Override is_view to False to force
+        # column types to be included when comments are present.
+        if is_view and materialized and column_descriptions:
+            is_view = False
+
+        return super()._build_column_defs(
+            target_columns_to_types, column_descriptions, is_view, materialized
+        )

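The materialized flag threaded through _build_schema_exp and _build_column_defs in base.py exists so dialect adapters can special-case materialized views; the Databricks override above uses it to keep column types when column comments are attached. A simplified sketch of that dispatch, using plain strings instead of sqlglot expressions (class and method names here are illustrative stand-ins):

import typing as t

class BaseAdapterSketch:
    def _build_column_defs(
        self,
        columns_to_types: t.Dict[str, str],
        column_descriptions: t.Optional[t.Dict[str, str]] = None,
        is_view: bool = False,
        materialized: bool = False,
    ) -> t.List[str]:
        # Plain views usually omit column types; tables (and anything treated
        # like a table) include them. Comments are appended when provided.
        defs = []
        for name, dtype in columns_to_types.items():
            col = name if is_view else f"{name} {dtype}"
            comment = (column_descriptions or {}).get(name)
            defs.append(f"{col} COMMENT '{comment}'" if comment else col)
        return defs

class DatabricksAdapterSketch(BaseAdapterSketch):
    def _build_column_defs(self, columns_to_types, column_descriptions=None,
                           is_view=False, materialized=False):
        # Materialized views with column comments need explicit column types.
        if is_view and materialized and column_descriptions:
            is_view = False
        return super()._build_column_defs(
            columns_to_types, column_descriptions, is_view, materialized
        )

print(DatabricksAdapterSketch()._build_column_defs(
    {"id": "INT"}, {"id": "primary key"}, is_view=True, materialized=True
))
# ["id INT COMMENT 'primary key'"]
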
sqlmesh/core/engine_adapter/fabric.py
CHANGED

@@ -13,6 +13,8 @@ from sqlmesh.core.engine_adapter.shared import (
 )
 from sqlmesh.utils.errors import SQLMeshError
 from sqlmesh.utils.connection_pool import ConnectionPool
+from sqlmesh.core.schema_diff import TableAlterOperation
+from sqlmesh.utils import random_id
 
 
 logger = logging.getLogger(__name__)
@@ -153,6 +155,113 @@ class FabricEngineAdapter(MSSQLEngineAdapter):
                 f"Unable to switch catalog to {catalog_name}, catalog ended up as {catalog_after_switch}"
             )
 
+    def alter_table(
+        self, alter_expressions: t.Union[t.List[exp.Alter], t.List[TableAlterOperation]]
+    ) -> None:
+        """
+        Applies alter expressions to a table. Fabric has limited support for ALTER TABLE,
+        so this method implements a workaround for column type changes.
+        This method is self-contained and sets its own catalog context.
+        """
+        if not alter_expressions:
+            return
+
+        # Get the target table from the first expression to determine the correct catalog.
+        first_op = alter_expressions[0]
+        expression = first_op.expression if isinstance(first_op, TableAlterOperation) else first_op
+        if not isinstance(expression, exp.Alter) or not expression.this.catalog:
+            # Fallback for unexpected scenarios
+            logger.warning(
+                "Could not determine catalog from alter expression, executing with current context."
+            )
+            super().alter_table(alter_expressions)
+            return
+
+        target_catalog = expression.this.catalog
+        self.set_current_catalog(target_catalog)
+
+        with self.transaction():
+            for op in alter_expressions:
+                expression = op.expression if isinstance(op, TableAlterOperation) else op
+
+                if not isinstance(expression, exp.Alter):
+                    self.execute(expression)
+                    continue
+
+                for action in expression.actions:
+                    table_name = expression.this
+
+                    table_name_without_catalog = table_name.copy()
+                    table_name_without_catalog.set("catalog", None)
+
+                    is_type_change = isinstance(action, exp.AlterColumn) and action.args.get(
+                        "dtype"
+                    )
+
+                    if is_type_change:
+                        column_to_alter = action.this
+                        new_type = action.args["dtype"]
+                        temp_column_name_str = f"{column_to_alter.name}__{random_id(short=True)}"
+                        temp_column_name = exp.to_identifier(temp_column_name_str)
+
+                        logger.info(
+                            "Applying workaround for column '%s' on table '%s' to change type to '%s'.",
+                            column_to_alter.sql(),
+                            table_name.sql(),
+                            new_type.sql(),
+                        )
+
+                        # Step 1: Add a temporary column.
+                        add_column_expr = exp.Alter(
+                            this=table_name_without_catalog.copy(),
+                            kind="TABLE",
+                            actions=[
+                                exp.ColumnDef(this=temp_column_name.copy(), kind=new_type.copy())
+                            ],
+                        )
+                        add_sql = self._to_sql(add_column_expr)
+                        self.execute(add_sql)
+
+                        # Step 2: Copy and cast data.
+                        update_sql = self._to_sql(
+                            exp.Update(
+                                this=table_name_without_catalog.copy(),
+                                expressions=[
+                                    exp.EQ(
+                                        this=temp_column_name.copy(),
+                                        expression=exp.Cast(
+                                            this=column_to_alter.copy(), to=new_type.copy()
+                                        ),
+                                    )
+                                ],
+                            )
+                        )
+                        self.execute(update_sql)
+
+                        # Step 3: Drop the original column.
+                        drop_sql = self._to_sql(
+                            exp.Alter(
+                                this=table_name_without_catalog.copy(),
+                                kind="TABLE",
+                                actions=[exp.Drop(this=column_to_alter.copy(), kind="COLUMN")],
+                            )
+                        )
+                        self.execute(drop_sql)
+
+                        # Step 4: Rename the temporary column.
+                        old_name_qualified = f"{table_name_without_catalog.sql(dialect=self.dialect)}.{temp_column_name.sql(dialect=self.dialect)}"
+                        new_name_unquoted = column_to_alter.sql(
+                            dialect=self.dialect, identify=False
+                        )
+                        rename_sql = f"EXEC sp_rename '{old_name_qualified}', '{new_name_unquoted}', 'COLUMN'"
+                        self.execute(rename_sql)
+                    else:
+                        # For other alterations, execute directly.
+                        direct_alter_expr = exp.Alter(
+                            this=table_name_without_catalog.copy(), kind="TABLE", actions=[action]
+                        )
+                        self.execute(direct_alter_expr)
+
 
 class FabricHttpClient:
     def __init__(self, tenant_id: str, workspace_id: str, client_id: str, client_secret: str):

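For engines like Fabric that cannot change a column's type in place, the workaround above reduces to a four-statement sequence per column. A hedged sketch of the SQL it would emit when changing an INT column to DECIMAL(18, 2); the table, column, and temp names below are made up, and the exact SQL the adapter generates may differ:

from typing import List

# Illustrative only: the four-step type-change sequence from the
# FabricEngineAdapter.alter_table workaround, rendered as raw T-SQL strings.
def type_change_sql(table: str, column: str, new_type: str, temp: str) -> List[str]:
    return [
        # Step 1: add a temporary column with the target type.
        f"ALTER TABLE {table} ADD {temp} {new_type}",
        # Step 2: copy and cast the existing data.
        f"UPDATE {table} SET {temp} = CAST({column} AS {new_type})",
        # Step 3: drop the original column.
        f"ALTER TABLE {table} DROP COLUMN {column}",
        # Step 4: rename the temporary column back to the original name.
        f"EXEC sp_rename '{table}.{temp}', '{column}', 'COLUMN'",
    ]

for stmt in type_change_sql("dbo.items", "price", "DECIMAL(18, 2)", "price__tmp"):
    print(stmt)
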
sqlmesh/core/engine_adapter/trino.py
CHANGED

@@ -284,6 +284,7 @@ class TrinoEngineAdapter(
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         expressions: t.Optional[t.List[exp.PrimaryKey]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> exp.Schema:
         if "delta_lake" in self.get_catalog_type_from_table(table):
             target_columns_to_types = self._to_delta_ts(target_columns_to_types)

sqlmesh/core/linter/rules/builtin.py
CHANGED

@@ -129,6 +129,21 @@ class NoMissingAudits(Rule):
         return self.violation()
 
 
+class NoMissingUnitTest(Rule):
+    """All models must have a unit test found in the tests/ directory yaml files"""
+
+    def check_model(self, model: Model) -> t.Optional[RuleViolation]:
+        # External models cannot have unit tests
+        if isinstance(model, ExternalModel):
+            return None
+
+        if model.name not in self.context.models_with_tests:
+            return self.violation(
+                violation_msg=f"Model {model.name} is missing unit test(s). Please add in the tests/ directory."
+            )
+        return None
+
+
 class NoMissingExternalModels(Rule):
     """All external models must be registered in the external_models.yaml file"""
 
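The new NoMissingUnitTest linter rule relies on the models_with_tests set that the context now builds from loaded test metadata. A standalone sketch of the check it performs, with simplified types rather than the sqlmesh Rule API:

import typing as t

def check_missing_unit_test(
    model_name: str,
    is_external: bool,
    models_with_tests: t.Set[str],
) -> t.Optional[str]:
    # External models cannot have unit tests, so they are never flagged.
    if is_external:
        return None
    if model_name not in models_with_tests:
        return f"Model {model_name} is missing unit test(s). Please add in the tests/ directory."
    return None

print(check_missing_unit_test('"db"."orders"', False, {'"db"."customers"'}))
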
sqlmesh/core/loader.py
CHANGED

@@ -35,7 +35,7 @@ from sqlmesh.core.model import (
 from sqlmesh.core.model import model as model_registry
 from sqlmesh.core.model.common import make_python_env
 from sqlmesh.core.signal import signal
-from sqlmesh.core.test import ModelTestMetadata
+from sqlmesh.core.test import ModelTestMetadata
 from sqlmesh.utils import UniqueKeyDict, sys_path
 from sqlmesh.utils.errors import ConfigError
 from sqlmesh.utils.jinja import JinjaMacroRegistry, MacroExtractor
@@ -64,6 +64,7 @@ class LoadedProject:
     excluded_requirements: t.Set[str]
     environment_statements: t.List[EnvironmentStatements]
     user_rules: RuleSet
+    model_test_metadata: t.List[ModelTestMetadata]
 
 
 class CacheBase(abc.ABC):
@@ -243,6 +244,8 @@ class Loader(abc.ABC):
 
         user_rules = self._load_linting_rules()
 
+        model_test_metadata = self.load_model_tests()
+
         project = LoadedProject(
             macros=macros,
             jinja_macros=jinja_macros,
@@ -254,6 +257,7 @@ class Loader(abc.ABC):
             excluded_requirements=excluded_requirements,
             environment_statements=environment_statements,
             user_rules=user_rules,
+            model_test_metadata=model_test_metadata,
         )
         return project
 
@@ -423,9 +427,7 @@ class Loader(abc.ABC):
         """Loads user linting rules"""
         return RuleSet()
 
-    def load_model_tests(
-        self, tests: t.Optional[t.List[str]] = None, patterns: list[str] | None = None
-    ) -> t.List[ModelTestMetadata]:
+    def load_model_tests(self) -> t.List[ModelTestMetadata]:
         """Loads YAML-based model tests"""
         return []
 
@@ -864,38 +866,23 @@ class SqlMeshLoader(Loader):
 
         return model_test_metadata
 
-    def load_model_tests(
-        self, tests: t.Optional[t.List[str]] = None, patterns: list[str] | None = None
-    ) -> t.List[ModelTestMetadata]:
+    def load_model_tests(self) -> t.List[ModelTestMetadata]:
         """Loads YAML-based model tests"""
         test_meta_list: t.List[ModelTestMetadata] = []
 
-
-        for test in tests:
-            filename, test_name = test.split("::", maxsplit=1) if "::" in test else (test, "")
-
-            test_meta = self._load_model_test_file(Path(filename))
-            if test_name:
-                test_meta_list.append(test_meta[test_name])
-            else:
-                test_meta_list.extend(test_meta.values())
-        else:
-            search_path = Path(self.config_path) / c.TESTS
+        search_path = Path(self.config_path) / c.TESTS
 
-
-
-
+        for yaml_file in itertools.chain(
+            search_path.glob("**/test*.yaml"),
+            search_path.glob("**/test*.yml"),
+        ):
+            if any(
+                yaml_file.match(ignore_pattern)
+                for ignore_pattern in self.config.ignore_patterns or []
             ):
-
-                yaml_file.match(ignore_pattern)
-                for ignore_pattern in self.config.ignore_patterns or []
-            ):
-                continue
-
-                test_meta_list.extend(self._load_model_test_file(yaml_file).values())
+                continue
 
-
-        test_meta_list = filter_tests_by_patterns(test_meta_list, patterns)
+            test_meta_list.extend(self._load_model_test_file(yaml_file).values())
 
         return test_meta_list
 
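With selector handling moved into Context.select_tests, the loader's load_model_tests now just discovers every test*.yaml / test*.yml file under the project's tests directory while honoring ignore_patterns. A minimal discovery sketch along the same lines; the directory layout and function name are hypothetical:

import itertools
import typing as t
from pathlib import Path

def discover_test_files(tests_dir: Path, ignore_patterns: t.Optional[t.List[str]] = None) -> t.List[Path]:
    found = []
    for yaml_file in itertools.chain(
        tests_dir.glob("**/test*.yaml"),
        tests_dir.glob("**/test*.yml"),
    ):
        # Skip anything matching an ignore pattern, mirroring the loader.
        if any(yaml_file.match(p) for p in ignore_patterns or []):
            continue
        found.append(yaml_file)
    return found

# discover_test_files(Path("my_project/tests"), ignore_patterns=["*_wip.yaml"])
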
sqlmesh/core/model/definition.py
CHANGED

@@ -1035,6 +1035,13 @@ class _Model(ModelMeta, frozen=True):
             # Will raise if the custom materialization points to an invalid class
             get_custom_materialization_type_or_raise(self.kind.materialization)
 
+        # Embedded model kind shouldn't have audits
+        if self.kind.name == ModelKindName.EMBEDDED and self.audits:
+            raise_config_error(
+                "Audits are not supported for embedded models",
+                self._path,
+            )
+
     def is_breaking_change(self, previous: Model) -> t.Optional[bool]:
         """Determines whether this model is a breaking change in relation to the `previous` model.
 
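The definition.py change adds a load-time validation: a model of EMBEDDED kind that declares audits is now rejected, since embedded models are inlined into downstream queries rather than materialized. A toy sketch of the check, outside the real _Model validator and with simplified types:

import typing as t

def validate_embedded_audits(kind_name: str, audits: t.Sequence[str]) -> None:
    # Embedded model kind shouldn't have audits.
    if kind_name == "EMBEDDED" and audits:
        raise ValueError("Audits are not supported for embedded models")

validate_embedded_audits("FULL", ["assert_positive_ids"])        # ok
# validate_embedded_audits("EMBEDDED", ["assert_positive_ids"])  # would raise ValueError
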
sqlmesh/core/test/discovery.py
CHANGED

@@ -20,6 +20,10 @@ class ModelTestMetadata(PydanticModel):
     def fully_qualified_test_name(self) -> str:
         return f"{self.path}::{self.test_name}"
 
+    @property
+    def model_name(self) -> str:
+        return self.body.get("model", "")
+
     def __hash__(self) -> int:
         return self.fully_qualified_test_name.__hash__()
 
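The new ModelTestMetadata.model_name property reads the "model" key of the parsed test body, which is what lets the context map tests back to models for models_with_tests. A sketch of where that value comes from; the test body below is illustrative, not taken from a real project:

# Illustrative parsed body of one entry in a tests/*.yaml file; model_name would
# return the value of the "model" key, or "" if it is missing.
body = {
    "model": "db.orders",
    "inputs": {"db.raw_orders": {"rows": [{"id": 1}]}},
    "outputs": {"query": {"rows": [{"id": 1}]}},
}
model_name = body.get("model", "")
print(model_name)  # db.orders
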
sqlmesh/integrations/github/cicd/controller.py
CHANGED

@@ -448,10 +448,9 @@ class GithubController:
             c.PROD,
             # this is required to highlight any data gaps between this PR environment and prod (since PR environments may only contain a subset of data)
             no_gaps=False,
-            # this works because the snapshots were already categorized when applying self.pr_plan so there are no uncategorized local snapshots to trigger a plan error
-            no_auto_categorization=True,
             skip_tests=True,
             skip_linter=True,
+            categorizer_config=self.bot_config.auto_categorize_changes,
             run=self.bot_config.run_on_deploy_to_prod,
             forward_only=self.forward_only_plan,
         )
@@ -773,6 +772,11 @@ class GithubController:
                 "PR is already merged and this event was triggered prior to the merge."
             )
         merge_status = self._get_merge_state_status()
+        if merge_status.is_blocked:
+            raise CICDBotError(
+                "Branch protection or ruleset requirement is likely not satisfied, e.g. missing CODEOWNERS approval. "
+                "Please check PR and resolve any issues."
+            )
         if merge_status.is_dirty:
             raise CICDBotError(
                 "Merge commit cannot be cleanly created. Likely from a merge conflict. "
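The controller change surfaces a clearer error when the PR's merge state is blocked (for example, a missing CODEOWNERS approval) before the existing dirty-state check. A rough sketch of that ordering; the status strings mirror GitHub's merge state statuses, and the class and function below are stand-ins rather than the controller API:

class CICDBotErrorSketch(Exception):
    pass

def check_merge_state(status: str) -> None:
    # Hypothetical stand-in for the GithubController merge-state handling.
    if status == "BLOCKED":
        raise CICDBotErrorSketch(
            "Branch protection or ruleset requirement is likely not satisfied, "
            "e.g. missing CODEOWNERS approval. Please check PR and resolve any issues."
        )
    if status == "DIRTY":
        raise CICDBotErrorSketch(
            "Merge commit cannot be cleanly created. Likely from a merge conflict."
        )

# check_merge_state("BLOCKED")  # raises CICDBotErrorSketch
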
sqlmesh/lsp/context.py
CHANGED

@@ -72,7 +72,7 @@ class LSPContext:
 
     def list_workspace_tests(self) -> t.List[TestEntry]:
         """List all tests in the workspace."""
-        tests = self.context.
+        tests = self.context.select_tests()
 
         # Use a set to ensure unique URIs
         unique_test_uris = {URI.from_path(test.path).value for test in tests}
@@ -81,7 +81,9 @@ class LSPContext:
             test_ranges = get_test_ranges(URI(uri).to_path())
             if uri not in test_uris:
                 test_uris[uri] = {}
+
             test_uris[uri].update(test_ranges)
+
         return [
             TestEntry(
                 name=test.test_name,
@@ -100,7 +102,7 @@ class LSPContext:
         Returns:
             List of TestEntry objects for the specified document.
         """
-        tests = self.context.
+        tests = self.context.select_tests(tests=[str(uri.to_path())])
         test_ranges = get_test_ranges(uri.to_path())
         return [
             TestEntry(
sqlmesh/magics.py
CHANGED

@@ -337,7 +337,7 @@ class SQLMeshMagics(Magics):
         if not args.test_name and not args.ls:
             raise MagicError("Must provide either test name or `--ls` to list tests")
 
-        test_meta = context.
+        test_meta = context.select_tests()
 
         tests: t.Dict[str, t.Dict[str, ModelTestMetadata]] = defaultdict(dict)
         for model_test_metadata in test_meta:
sqlmesh/utils/git.py
CHANGED

@@ -16,7 +16,9 @@ class GitClient:
         )
 
     def list_uncommitted_changed_files(self) -> t.List[Path]:
-        return self._execute_list_output(
+        return self._execute_list_output(
+            ["diff", "--name-only", "--diff-filter=d", "HEAD"], self._git_root
+        )
 
     def list_committed_changed_files(self, target_branch: str = "main") -> t.List[Path]:
         return self._execute_list_output(
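The git.py change makes list_uncommitted_changed_files diff against HEAD with --diff-filter=d, i.e. deleted files are excluded from the result. For illustration, the equivalent command run via subprocess; the repository path and helper name are assumed, not part of sqlmesh:

import subprocess
from pathlib import Path
from typing import List

def uncommitted_changed_files(repo_root: Path) -> List[Path]:
    # Mirrors the git invocation in GitClient.list_uncommitted_changed_files:
    # uncommitted changes relative to HEAD, excluding deletions (--diff-filter=d).
    output = subprocess.run(
        ["git", "diff", "--name-only", "--diff-filter=d", "HEAD"],
        cwd=repo_root,
        capture_output=True,
        text=True,
        check=True,
    ).stdout
    return [repo_root / line for line in output.splitlines() if line]

# uncommitted_changed_files(Path("."))
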
{sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 sqlmesh/__init__.py,sha256=v_spqQEhcnGaahp1yPvMqUIa6mhH3cs3Bc1CznxvCEA,7965
-sqlmesh/_version.py,sha256=
-sqlmesh/magics.py,sha256=
+sqlmesh/_version.py,sha256=pCK7kVeQ25MLuoPBFNNpuNwWYW_MZoMLpHLd151s4qs,723
+sqlmesh/magics.py,sha256=7Q1_lXSD_PgYH40Hsx6-OkfSQC3UJZgF043RVFRnw1s,42082
 sqlmesh/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/cicd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/cicd/bot.py,sha256=2zlbn-DXkqQzr3lA0__IGU4XaIfXBXBKLWXNI2DRJX8,759
@@ -13,13 +13,13 @@ sqlmesh/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/core/_typing.py,sha256=PzXxMYnORq18JhblAOUttms3zPJZzZpIbfFA_jgKYPA,498
 sqlmesh/core/console.py,sha256=MYpVlciUY6rUuoqXyKfXTxD6a4-Bw4-ooATUTj_VHGg,172830
 sqlmesh/core/constants.py,sha256=BuQk43vluUm7LfP9nKp5o9qRhqIenWF_LiLXO_t_53c,2699
-sqlmesh/core/context.py,sha256=
+sqlmesh/core/context.py,sha256=Ig2FKOLecp0tZ3jnx4952gQ52KukFATMLNfRK4vHqlk,133051
 sqlmesh/core/context_diff.py,sha256=mxkJu0IthFMOlaQ_kcq5C09mlgkq2RQb-pG2rd-x_nA,21648
 sqlmesh/core/dialect.py,sha256=CnKcPj6BnREfu9Zn1OyS7hZ3ktnaX03ygOg91nADlTU,53029
 sqlmesh/core/environment.py,sha256=Kgs_gUEUI072mh0JJFWNRynrCxp1TzRHZhX_NWJRfXc,13142
 sqlmesh/core/janitor.py,sha256=zJRN48ENjKexeiqa1Kmwyj_HsEEEIAa8hsFD8gTCmfg,7194
 sqlmesh/core/lineage.py,sha256=LtiOztX1xIbFfWz-eb5dPZW4B0o2sI942_IM4YDbsso,3163
-sqlmesh/core/loader.py,sha256=
+sqlmesh/core/loader.py,sha256=YbdDekoeIwu1zg0xFsiQUWsxgupZTqpHAziwxV-53Hs,36698
 sqlmesh/core/macros.py,sha256=rkklwVnUEmEro4wpdel289mKhaS3x5_SPZrkYZt3Q9E,63173
 sqlmesh/core/node.py,sha256=2ejDwH1whl_ic1CRzX16Be-FQrosAf8pdyWb7oPzU6M,19895
 sqlmesh/core/notification_target.py,sha256=PPGoDrgbRKxr27vJEu03XqNTQLYTw0ZF_b0yAapxGeI,16158
@@ -42,7 +42,7 @@ sqlmesh/core/config/__init__.py,sha256=tnEakbd8FAgSLYmjzuYAAgHIpJ00lwMKAhD_Cfs2O
 sqlmesh/core/config/base.py,sha256=t8NQmsgQoZSc-k0dlDiCb8t1jj0AMYdGZ-6se9q_Pks,4898
 sqlmesh/core/config/categorizer.py,sha256=6vzUoNLjR6GOEb_2mYVz2TwmMv2BfldgHX2u-Le5HZs,1975
 sqlmesh/core/config/common.py,sha256=9V6PltBAjYeWLOU5dAbqL55BSFfpg8z8t2Op1x_PLhU,6418
-sqlmesh/core/config/connection.py,sha256=
+sqlmesh/core/config/connection.py,sha256=qynbsjRT_KOI6InKdCIXdqGXExF43EuawQp0NxTHPG4,91168
 sqlmesh/core/config/dbt.py,sha256=xSQ4NEVWhZj_aRYpyy4MWcRJ8Qa0o28w2ZBLI4bs3_I,468
 sqlmesh/core/config/format.py,sha256=6CXFbvnor56xbldKE-Vrm9k_ABRoY4v6vgIb3mCihiQ,1355
 sqlmesh/core/config/gateway.py,sha256=tYngyqwd_4Qr9lhcv2hlvLvb_2pgYYtKu6hdGsTr-4I,1931
@@ -60,13 +60,13 @@ sqlmesh/core/config/ui.py,sha256=jsO-S6_d9NkLZGG5pT4mgKgxMF34KzkDociZAMvCX3U,278
 sqlmesh/core/engine_adapter/__init__.py,sha256=y9jZAFdMBkkkRrf0ymfsJJn6s_7Ya6OpDgR4Bf1OG_U,2383
 sqlmesh/core/engine_adapter/_typing.py,sha256=PCXQVpNbUTI3rJQyH_VTx57mDR5emh8b8cAfme6hTW4,1104
 sqlmesh/core/engine_adapter/athena.py,sha256=5BhMaQcpiBkGt_tdT4Dw67t5pCOh-UN9-bQtayFRL3Q,26867
-sqlmesh/core/engine_adapter/base.py,sha256=
+sqlmesh/core/engine_adapter/base.py,sha256=GN05HN4E_Yrw38ps7gwKnes-bput3uIAbTFXpttqBi8,130196
 sqlmesh/core/engine_adapter/base_postgres.py,sha256=WTU0QingaTNM7n-mTVxS-sg4f6jFZGOSryK5IYacveY,7734
 sqlmesh/core/engine_adapter/bigquery.py,sha256=edBWbAbeXA4bOtVG-YNTQbt9qqwL9QFffZti8Ozv-Cw,60923
 sqlmesh/core/engine_adapter/clickhouse.py,sha256=GWGpwdxZd4RqLSAMlOHjtO8nPpSIo3zFeRWnj9eSOrM,36072
-sqlmesh/core/engine_adapter/databricks.py,sha256=
+sqlmesh/core/engine_adapter/databricks.py,sha256=ZZ8y69rSOP9XDmyCoKbzRIn-IxoJ9ooCRiADlLgOpM0,16574
 sqlmesh/core/engine_adapter/duckdb.py,sha256=9AXeRhaYXBcYSmIavyFY9LUzfgh94qkTO98v0-suQ8I,7993
-sqlmesh/core/engine_adapter/fabric.py,sha256=
+sqlmesh/core/engine_adapter/fabric.py,sha256=jY1bejscEcL5r-WdGjsSGr-dWDa1awavCikrAyhDFpk,19299
 sqlmesh/core/engine_adapter/mixins.py,sha256=3rB7B2PZSB920BODO7k_kKqu6z0N-zj1etiRCYzpUcQ,27096
 sqlmesh/core/engine_adapter/mssql.py,sha256=pqh6D_7eAeVCH6K4-81HPcNTLEPhTM_-Mou0QWBTOfA,18898
 sqlmesh/core/engine_adapter/mysql.py,sha256=anKxdklYY2kiuxaHsC7FPN-LKzo7BP0Hy6hinA_c5Hg,6953
@@ -76,13 +76,13 @@ sqlmesh/core/engine_adapter/risingwave.py,sha256=d_1MxpXNONyyLnuELa7bILkJlLquf4j
 sqlmesh/core/engine_adapter/shared.py,sha256=bM4GJSAR0dU3wCqsTl2SIcy2j_8BGusQvnme99l6wnE,13701
 sqlmesh/core/engine_adapter/snowflake.py,sha256=6rMuhuhp2K-UH8dVnmiieucfOevxmK8vR3N5-dj4MDA,33453
 sqlmesh/core/engine_adapter/spark.py,sha256=ZDEg4rx_cvPcLG83PSWu5nkXzChaCbmb7ka2J2ngEEU,23068
-sqlmesh/core/engine_adapter/trino.py,sha256=
+sqlmesh/core/engine_adapter/trino.py,sha256=dy5DXCbhPB9nuTDytiNuDXe-wGgxv5b9CjhEVpbQCq4,18180
 sqlmesh/core/linter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/core/linter/definition.py,sha256=1EOhKdF16jmeqISfcrR-8fzMdgXuxpB7wb3QaepBPeU,5564
 sqlmesh/core/linter/helpers.py,sha256=cwKXP4sL6azRtNVGbMfJ5_6Hqq5Xx2M2rRLCgH3Y3ag,10743
 sqlmesh/core/linter/rule.py,sha256=nB3o1rHyN44ZOg5ImICP16SeUHimf-12ObdXJjkTGyM,3964
 sqlmesh/core/linter/rules/__init__.py,sha256=gevzfb67vFqckTCoVAe_TBGf6hQ-YtE1_YuGuXyh1L0,77
-sqlmesh/core/linter/rules/builtin.py,sha256=
+sqlmesh/core/linter/rules/builtin.py,sha256=6j22W_5EOBN979Bi2_mvmCNq4yqZVsJ9oqEukunj4Ws,11728
 sqlmesh/core/metric/__init__.py,sha256=H1HmoD5IwN4YWe9iJXyueLYNmTQFZwok5nSWNJcZIBQ,237
 sqlmesh/core/metric/definition.py,sha256=Yd5aVgsZCDPJ43aGP7WqtzZOuuSUtB8uJGVA6Jw9x9M,7201
 sqlmesh/core/metric/rewriter.py,sha256=GiSTHfn2kinqCfNPYgZPRk93JFLzVaaejHtHDQ0yXZI,7326
@@ -90,7 +90,7 @@ sqlmesh/core/model/__init__.py,sha256=C8GRZ53xuXEA9hQv3BQS9pNNyd9rZ06R_B96UYGhDu
 sqlmesh/core/model/cache.py,sha256=csun0RJguHzKX6-qITcOs4fVP4f8_Ts8qiUVV4sHY6Q,7869
 sqlmesh/core/model/common.py,sha256=UqOmtbsrl4MYDUOigde2CwME-qdPgRf91QExX4yhAA0,27741
 sqlmesh/core/model/decorator.py,sha256=bL-JuNrdBAikZSjVxnXqeB9i0e9qC7jm7yLjwiZ38aU,9470
-sqlmesh/core/model/definition.py,sha256=
+sqlmesh/core/model/definition.py,sha256=SAnrNgHUNPO2wMMRqAUYixOJJ5izJyc56adIFIUHIW0,117690
 sqlmesh/core/model/kind.py,sha256=qJdiin09Q0neRFudNnLsDNCvbqD3EHAoK-WCvX-eUJs,40071
 sqlmesh/core/model/meta.py,sha256=ELjprp6rl7dW9a7rs9eyQXScbDImInq35SyasiAriIk,24128
 sqlmesh/core/model/schema.py,sha256=_HMYfzK9wWXh7_CQDIIGnuQUD4aiX3o5D2cRp2sERzc,3387
@@ -124,7 +124,7 @@ sqlmesh/core/state_sync/db/version.py,sha256=q5VDIIvY-585vTbvqPalU0N4qjG6RKs4gr8
 sqlmesh/core/test/__init__.py,sha256=e83TJPwPRR_rAG29Y0OVbZb-5oWVBzz-_wrcd22Qk10,418
 sqlmesh/core/test/context.py,sha256=-TjUrhM3WLtVPBgOMTkvRrnuZq7mT7BeIIyuCbrPePU,2332
 sqlmesh/core/test/definition.py,sha256=Lfflu-qgkqkI7T977F4h4X7c5Co7i3uBt5Efsi4XaZE,42219
-sqlmesh/core/test/discovery.py,sha256=
+sqlmesh/core/test/discovery.py,sha256=5duKXgH4Lms7rXhJ8tOLCmCtqHpv7c7a4VJf12VkGw8,1278
 sqlmesh/core/test/result.py,sha256=6gOKEsERciHhcrw9TedtNr7g1ynTO7UwA5-PPrzvYuM,4564
 sqlmesh/core/test/runner.py,sha256=8I-cL7Q9CggLvET_GPkrXB2YjlyCIHrvbFbbRDnSHRE,6169
 sqlmesh/dbt/__init__.py,sha256=KUv-lW5sG9D2ceXAIzA4MLcjyhzq3E-7qJP4P_PH2EU,144
@@ -158,11 +158,11 @@ sqlmesh/integrations/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 sqlmesh/integrations/github/cicd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/integrations/github/cicd/command.py,sha256=w3oWhvR4utEurB5UABXj4iVBIojkCnZ0wTazIFmVmnc,12246
 sqlmesh/integrations/github/cicd/config.py,sha256=n4KyNv65y1gpys82iLAQZUb6g2pnzywyV_7cOrQXsnI,3669
-sqlmesh/integrations/github/cicd/controller.py,sha256=
+sqlmesh/integrations/github/cicd/controller.py,sha256=pWRiUgBm878zO6h6RB4_nnu6L6r8szDp5h_g4Xp4SjQ,55044
 sqlmesh/lsp/api.py,sha256=Z_8Op6CWqdbmEeidCQgMcVmRooQujqaynn-0EOw4478,2505
 sqlmesh/lsp/commands.py,sha256=7tZPePSH-IwBYmXJPIlqGM7pi4rOCLEtc3fKJglAxZs,72
 sqlmesh/lsp/completions.py,sha256=7Lhboh6xyoMJ3kkHG3aZz1xVbDwKiXeQKdIRj5xlUOA,6674
-sqlmesh/lsp/context.py,sha256=
+sqlmesh/lsp/context.py,sha256=7S17A1oE9WZtOU1dYvQeoExqvXGwmNxZdbk9uF4Xllw,20886
 sqlmesh/lsp/custom.py,sha256=npzNznpUJ3ELY_WU4n_4I73lAjuTapI0_HKCFsoMcOk,5132
 sqlmesh/lsp/errors.py,sha256=3NMim_5J00Eypz7t8b7XbkBfy8gIsRkeq-VcjD4COtc,1489
 sqlmesh/lsp/helpers.py,sha256=EFc1u3-b7kSv5-tNwmKUDxId72RCLDBnN2lLTgRSzzQ,1020
@@ -226,7 +226,7 @@ sqlmesh/utils/cron.py,sha256=eGwn4iUeiRoQzwcd9eS2TZkut8nR4yWud77N7xQ9CQ0,1829
 sqlmesh/utils/dag.py,sha256=5Sec50yY-UBEpLU82_nzaL7Wlalwf7K8EvLL8sBs2Z8,9049
 sqlmesh/utils/date.py,sha256=m0NHAqSQYqZnvuNHVk9RNEktiE_LbyqcO_O0SVxcGrw,16460
 sqlmesh/utils/errors.py,sha256=rktXVSd4R3tii7_k_pnex05ZXS7QnlFx1np1u-pjSSU,8000
-sqlmesh/utils/git.py,sha256=
+sqlmesh/utils/git.py,sha256=hrzhAH9XkxKoxNAI5ASOOm-d0-UyGi8YB37-neOcKe4,1898
 sqlmesh/utils/hashing.py,sha256=nZRKvLNQ83tLG4IoXshVJZf-MbDrXC1HOeNw8Ji-tMM,578
 sqlmesh/utils/jinja.py,sha256=474yuVZmS1pppBoEZqCJeugW9CQWniWBeuV4x6RGbEA,26380
 sqlmesh/utils/lineage.py,sha256=zz9BPc6MShRy9LEXmAp02x6oKt4ubVNUPdapFVFKkac,16019
@@ -238,7 +238,7 @@ sqlmesh/utils/pydantic.py,sha256=-yppkVlw6iSBaSiKjbe7OChxL-u3urOS4-KCjJEgsRU,120
 sqlmesh/utils/rich.py,sha256=cwQ5nJ6sgz64xHtoh6_ec7ReV5YpsOGhMtUJnwoRfEI,3549
 sqlmesh/utils/windows.py,sha256=0F9RdpuuCoG5NiEDXvWlAGCiJ-59OjSAmgFF5wW05aY,1133
 sqlmesh/utils/yaml.py,sha256=KFBd7hsKNRTtRudGR7d410qUYffQv0EWRcDM8hVNNZg,3025
-sqlmesh-0.227.2.dev4.dist-info/licenses/LICENSE,sha256=
+sqlmesh-0.227.2.dev20.dist-info/licenses/LICENSE,sha256=OlMefUjgWJdULtf84BLW0AZZcY8DwdgQqb_1j2862j8,11346
 sqlmesh_dbt/__init__.py,sha256=awYS5y5mz-1NUmx6i5h5NSTJ7tidRl9NC0FAnFWSF6U,350
 sqlmesh_dbt/cli.py,sha256=p9foHjAW9ni7BTOJ2loynk47M0Sf43QIJZRggOzF5tc,6351
 sqlmesh_dbt/console.py,sha256=RwWLYnEZHzn9Xp-e2gbZvkdKbWbBLN146geI84mJitg,1132
@@ -363,8 +363,8 @@ web/server/api/endpoints/models.py,sha256=kwj0s7uve3iZSMfmjkoPVMFMeY1sD0peTeyrWf
 web/server/api/endpoints/modules.py,sha256=8hqqgonGay_mJmpCw0IdbjsPhWlQH2VLdKAqha-myac,468
 web/server/api/endpoints/plan.py,sha256=bbbY50W_2MsZSTxOHWMKz0tbIm75nsRSlPy8GI2fg9Q,9306
 web/server/api/endpoints/table_diff.py,sha256=8XTwgOh6QBbNy_hTM1JuHgRjbnie-pGPrphiW-FNLjQ,6058
-sqlmesh-0.227.2.dev4.dist-info/METADATA,sha256=
-sqlmesh-0.227.2.dev4.dist-info/WHEEL,sha256=
-sqlmesh-0.227.2.dev4.dist-info/entry_points.txt,sha256=
-sqlmesh-0.227.2.dev4.dist-info/top_level.txt,sha256=
-sqlmesh-0.227.2.dev4.dist-info/RECORD,,
+sqlmesh-0.227.2.dev20.dist-info/METADATA,sha256=QVlVvouYOl0lxtudYAbwqMunp3PMFvhJDIEVzqK2sNo,26686
+sqlmesh-0.227.2.dev20.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+sqlmesh-0.227.2.dev20.dist-info/entry_points.txt,sha256=sHAf6tQczIM8xZoduN4qaUjV7QEPVUUW_LCT8EDUMv4,155
+sqlmesh-0.227.2.dev20.dist-info/top_level.txt,sha256=RQ-33FPe2IgL0rgossAfJkCRtqslz9b7wFARqiWLC5Q,24
+sqlmesh-0.227.2.dev20.dist-info/RECORD,,

{sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/WHEEL
File without changes

{sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/entry_points.txt
File without changes

{sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/licenses/LICENSE
File without changes

{sqlmesh-0.227.2.dev4.dist-info → sqlmesh-0.227.2.dev20.dist-info}/top_level.txt
File without changes