sqlmesh 0.227.2.dev6__py3-none-any.whl → 0.228.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlmesh/_version.py +2 -2
- sqlmesh/core/config/connection.py +37 -1
- sqlmesh/core/context.py +29 -40
- sqlmesh/core/dialect.py +10 -2
- sqlmesh/core/engine_adapter/base.py +8 -1
- sqlmesh/core/engine_adapter/databricks.py +33 -16
- sqlmesh/core/engine_adapter/fabric.py +109 -0
- sqlmesh/core/engine_adapter/trino.py +44 -6
- sqlmesh/core/linter/rules/builtin.py +1 -1
- sqlmesh/core/loader.py +13 -30
- sqlmesh/core/model/definition.py +9 -0
- sqlmesh/core/test/definition.py +3 -2
- sqlmesh/integrations/github/cicd/command.py +11 -2
- sqlmesh/integrations/github/cicd/controller.py +6 -2
- sqlmesh/lsp/context.py +4 -2
- sqlmesh/magics.py +1 -1
- sqlmesh/utils/git.py +3 -1
- {sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/METADATA +2 -2
- {sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/RECORD +23 -23
- {sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/WHEEL +0 -0
- {sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/entry_points.txt +0 -0
- {sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/licenses/LICENSE +0 -0
- {sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/top_level.txt +0 -0
sqlmesh/_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.227.2.dev6'
-__version_tuple__ = version_tuple = (0, 227, 2, 'dev6')
+__version__ = version = '0.228.2'
+__version_tuple__ = version_tuple = (0, 228, 2)
 
 __commit_id__ = commit_id = None
sqlmesh/core/config/connection.py
CHANGED

@@ -17,6 +17,7 @@ from pydantic_core import from_json
 from packaging import version
 from sqlglot import exp
 from sqlglot.helper import subclasses
+from sqlglot.errors import ParseError
 
 from sqlmesh.core import engine_adapter
 from sqlmesh.core.config.base import BaseConfig
@@ -238,6 +239,7 @@ class DuckDBAttachOptions(BaseConfig):
     data_path: t.Optional[str] = None
     encrypted: bool = False
     data_inlining_row_limit: t.Optional[int] = None
+    metadata_schema: t.Optional[str] = None
 
     def to_sql(self, alias: str) -> str:
         options = []
@@ -259,6 +261,8 @@ class DuckDBAttachOptions(BaseConfig):
             options.append("ENCRYPTED")
         if self.data_inlining_row_limit is not None:
             options.append(f"DATA_INLINING_ROW_LIMIT {self.data_inlining_row_limit}")
+        if self.metadata_schema is not None:
+            options.append(f"METADATA_SCHEMA '{self.metadata_schema}'")
 
         options_sql = f" ({', '.join(options)})" if options else ""
         alias_sql = ""
@@ -1887,6 +1891,7 @@ class TrinoConnectionConfig(ConnectionConfig):
 
     # SQLMesh options
     schema_location_mapping: t.Optional[dict[re.Pattern, str]] = None
+    timestamp_mapping: t.Optional[dict[exp.DataType, exp.DataType]] = None
     concurrent_tasks: int = 4
     register_comments: bool = True
     pre_ping: t.Literal[False] = False
@@ -1911,6 +1916,34 @@ class TrinoConnectionConfig(ConnectionConfig):
         )
         return compiled
 
+    @field_validator("timestamp_mapping", mode="before")
+    @classmethod
+    def _validate_timestamp_mapping(
+        cls, value: t.Optional[dict[str, str]]
+    ) -> t.Optional[dict[exp.DataType, exp.DataType]]:
+        if value is None:
+            return value
+
+        result: dict[exp.DataType, exp.DataType] = {}
+        for source_type, target_type in value.items():
+            try:
+                source_datatype = exp.DataType.build(source_type)
+            except ParseError:
+                raise ConfigError(
+                    f"Invalid SQL type string in timestamp_mapping: "
+                    f"'{source_type}' is not a valid SQL data type."
+                )
+            try:
+                target_datatype = exp.DataType.build(target_type)
+            except ParseError:
+                raise ConfigError(
+                    f"Invalid SQL type string in timestamp_mapping: "
+                    f"'{target_type}' is not a valid SQL data type."
+                )
+            result[source_datatype] = target_datatype
+
+        return result
+
     @model_validator(mode="after")
     def _root_validator(self) -> Self:
         port = self.port
@@ -2013,7 +2046,10 @@ class TrinoConnectionConfig(ConnectionConfig):
 
     @property
     def _extra_engine_config(self) -> t.Dict[str, t.Any]:
-        return {"schema_location_mapping": self.schema_location_mapping}
+        return {
+            "schema_location_mapping": self.schema_location_mapping,
+            "timestamp_mapping": self.timestamp_mapping,
+        }
 
 
 class ClickhouseConnectionConfig(ConnectionConfig):
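A minimal sketch of the normalization the new validator performs, using only sqlglot (the mapping below is a hypothetical user config, not a recommended setting):

from sqlglot import exp

# Hypothetical user-supplied mapping of SQL type strings.
raw_mapping = {"timestamp": "timestamp(6)"}

# The validator parses each string with exp.DataType.build so adapters can
# compare column types structurally; an unparsable string raises ConfigError.
normalized = {
    exp.DataType.build(source): exp.DataType.build(target)
    for source, target in raw_mapping.items()
}
print({k.sql("trino"): v.sql("trino") for k, v in normalized.items()})
# {'TIMESTAMP': 'TIMESTAMP(6)'}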
sqlmesh/core/context.py
CHANGED
@@ -147,8 +147,8 @@ if t.TYPE_CHECKING:
     from typing_extensions import Literal
 
     from sqlmesh.core.engine_adapter._typing import (
-        DF,
         BigframeSession,
+        DF,
         PySparkDataFrame,
         PySparkSession,
         SnowparkSession,
@@ -403,6 +403,7 @@ class GenericContext(BaseContext, t.Generic[C]):
         self._model_test_metadata_path_index: t.Dict[Path, t.List[ModelTestMetadata]] = {}
         self._model_test_metadata_fully_qualified_name_index: t.Dict[str, ModelTestMetadata] = {}
         self._models_with_tests: t.Set[str] = set()
+
         self._macros: UniqueKeyDict[str, ExecutableOrMacro] = UniqueKeyDict("macros")
         self._metrics: UniqueKeyDict[str, Metric] = UniqueKeyDict("metrics")
         self._jinja_macros = JinjaMacroRegistry()
@@ -656,6 +657,7 @@ class GenericContext(BaseContext, t.Generic[C]):
         self._requirements.update(project.requirements)
         self._excluded_requirements.update(project.excluded_requirements)
         self._environment_statements.extend(project.environment_statements)
+
         self._model_test_metadata.extend(project.model_test_metadata)
         for metadata in project.model_test_metadata:
             if metadata.path not in self._model_test_metadata_path_index:
@@ -2243,9 +2245,7 @@ class GenericContext(BaseContext, t.Generic[C]):
 
         pd.set_option("display.max_columns", None)
 
-        test_meta = self._select_tests(
-            test_meta=self._model_test_metadata, tests=tests, patterns=match_patterns
-        )
+        test_meta = self.select_tests(tests=tests, patterns=match_patterns)
 
         result = run_tests(
             model_test_metadata=test_meta,
@@ -2807,33 +2807,6 @@ class GenericContext(BaseContext, t.Generic[C]):
             raise SQLMeshError(f"Gateway '{gateway}' not found in the available engine adapters.")
         return self.engine_adapter
 
-    def _select_tests(
-        self,
-        test_meta: t.List[ModelTestMetadata],
-        tests: t.Optional[t.List[str]] = None,
-        patterns: t.Optional[t.List[str]] = None,
-    ) -> t.List[ModelTestMetadata]:
-        """Filter pre-loaded test metadata based on tests and patterns."""
-
-        if tests:
-            filtered_tests = []
-            for test in tests:
-                if "::" in test:
-                    if test in self._model_test_metadata_fully_qualified_name_index:
-                        filtered_tests.append(
-                            self._model_test_metadata_fully_qualified_name_index[test]
-                        )
-                else:
-                    test_path = Path(test)
-                    if test_path in self._model_test_metadata_path_index:
-                        filtered_tests.extend(self._model_test_metadata_path_index[test_path])
-            test_meta = filtered_tests
-
-        if patterns:
-            test_meta = filter_tests_by_patterns(test_meta, patterns)
-
-        return test_meta
-
     def _snapshots(
         self, models_override: t.Optional[UniqueKeyDict[str, Model]] = None
     ) -> t.Dict[str, Snapshot]:
@@ -3245,18 +3218,34 @@ class GenericContext(BaseContext, t.Generic[C]):
 
         return all_violations
 
-    def …
-        self,
+    def select_tests(
+        self,
+        tests: t.Optional[t.List[str]] = None,
+        patterns: t.Optional[t.List[str]] = None,
     ) -> t.List[ModelTestMetadata]:
-        …
+        """Filter pre-loaded test metadata based on tests and patterns."""
+
+        test_meta = self._model_test_metadata
+
+        if tests:
+            filtered_tests = []
+            for test in tests:
+                if "::" in test:
+                    if test in self._model_test_metadata_fully_qualified_name_index:
+                        filtered_tests.append(
+                            self._model_test_metadata_fully_qualified_name_index[test]
+                        )
+                else:
+                    test_path = Path(test)
+                    if test_path in self._model_test_metadata_path_index:
+                        filtered_tests.extend(self._model_test_metadata_path_index[test_path])
+
+            test_meta = filtered_tests
 
-        …
-        model_tests.extend(loader.load_model_tests(tests=tests, patterns=patterns))
+        if patterns:
+            test_meta = filter_tests_by_patterns(test_meta, patterns)
 
-        return …
+        return test_meta
 
 
 class Context(GenericContext[Config]):
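The renamed `select_tests` is now the single public entry point for test selection; the LSP, magics, and `run_tests` call sites below all go through it. A usage sketch (project path and selector names invented):

from sqlmesh import Context

context = Context(paths=["examples/sushi"])  # hypothetical project path

all_tests = context.select_tests()  # every pre-loaded ModelTestMetadata

# Select by file path or by "<file>::<test_name>", then narrow with patterns:
some_tests = context.select_tests(
    tests=["tests/test_orders.yaml::test_customer_revenue"],
    patterns=["*revenue*"],
)
print(len(all_tests), len(some_tests))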
sqlmesh/core/dialect.py
CHANGED
@@ -803,8 +803,15 @@ def text_diff(
     return "\n".join(unified_diff(a_sql, b_sql))
 
 
+WS_OR_COMMENT = r"(?:\s|--[^\n]*\n|/\*.*?\*/)"
+HEADER = r"\b(?:model|audit)\b(?=\s*\()"
+KEY_BOUNDARY = r"(?:\(|,)"  # key is preceded by either '(' or ','
+DIALECT_VALUE = r"['\"]?(?P<dialect>[a-z][a-z0-9]*)['\"]?"
+VALUE_BOUNDARY = r"(?=,|\))"  # value is followed by comma or closing paren
+
 DIALECT_PATTERN = re.compile(
-    …
+    rf"{HEADER}.*?{KEY_BOUNDARY}{WS_OR_COMMENT}*dialect{WS_OR_COMMENT}+{DIALECT_VALUE}{WS_OR_COMMENT}*{VALUE_BOUNDARY}",
+    re.IGNORECASE | re.DOTALL,
 )
@@ -895,7 +902,8 @@ def parse(
     A list of the parsed expressions: [Model, *Statements, Query, *Statements]
     """
     match = match_dialect and DIALECT_PATTERN.search(sql[:MAX_MODEL_DEFINITION_SIZE])
-    …
+    dialect_str = match.group("dialect") if match else None
+    dialect = Dialect.get_or_raise(dialect_str or default_dialect)
 
     tokens = dialect.tokenize(sql)
     chunks: t.List[t.Tuple[t.List[Token], ChunkType]] = [([], ChunkType.SQL)]
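The rebuilt pattern tolerates comments and quoting around the `dialect` property. A quick, runnable check of the same regex against a made-up model header:

import re

WS_OR_COMMENT = r"(?:\s|--[^\n]*\n|/\*.*?\*/)"
HEADER = r"\b(?:model|audit)\b(?=\s*\()"
KEY_BOUNDARY = r"(?:\(|,)"
DIALECT_VALUE = r"['\"]?(?P<dialect>[a-z][a-z0-9]*)['\"]?"
VALUE_BOUNDARY = r"(?=,|\))"

DIALECT_PATTERN = re.compile(
    rf"{HEADER}.*?{KEY_BOUNDARY}{WS_OR_COMMENT}*dialect{WS_OR_COMMENT}+{DIALECT_VALUE}{WS_OR_COMMENT}*{VALUE_BOUNDARY}",
    re.IGNORECASE | re.DOTALL,
)

sql = """
MODEL (
  name demo.orders,
  -- engine-specific SQL below
  dialect 'snowflake',
);
SELECT 1 AS id
"""
match = DIALECT_PATTERN.search(sql)
print(match.group("dialect") if match else None)  # snowflake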
sqlmesh/core/engine_adapter/base.py
CHANGED

@@ -811,6 +811,7 @@ class EngineAdapter:
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         expressions: t.Optional[t.List[exp.PrimaryKey]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> exp.Schema:
         """
         Build a schema expression for a table, columns, column comments, and additional schema properties.
@@ -823,6 +824,7 @@ class EngineAdapter:
                 target_columns_to_types=target_columns_to_types,
                 column_descriptions=column_descriptions,
                 is_view=is_view,
+                materialized=materialized,
             )
             + expressions,
         )
@@ -832,6 +834,7 @@ class EngineAdapter:
         target_columns_to_types: t.Dict[str, exp.DataType],
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> t.List[exp.ColumnDef]:
         engine_supports_schema_comments = (
             self.COMMENT_CREATION_VIEW.supports_schema_def
@@ -1260,7 +1263,11 @@ class EngineAdapter:
         schema: t.Union[exp.Table, exp.Schema] = exp.to_table(view_name)
         if target_columns_to_types:
             schema = self._build_schema_exp(
-                exp.to_table(view_name), …
+                exp.to_table(view_name),
+                target_columns_to_types,
+                column_descriptions,
+                is_view=True,
+                materialized=materialized,
             )
 
         properties = create_kwargs.pop("properties", None)
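For reference, a small sqlglot-only sketch of the kind of expression `_build_schema_exp` assembles — a `Schema` node over `ColumnDef` children (table and column names invented):

from sqlglot import exp

schema = exp.Schema(
    this=exp.to_table("db.mv_orders"),
    expressions=[
        exp.ColumnDef(this=exp.to_identifier("id"), kind=exp.DataType.build("int")),
        exp.ColumnDef(this=exp.to_identifier("created_at"), kind=exp.DataType.build("timestamp")),
    ],
)
print(schema.sql())  # db.mv_orders (id INT, created_at TIMESTAMP)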
sqlmesh/core/engine_adapter/databricks.py
CHANGED

@@ -78,21 +78,21 @@ class DatabricksEngineAdapter(SparkEngineAdapter, GrantsFromInfoSchemaMixin):
     def _use_spark_session(self) -> bool:
         if self.can_access_spark_session(bool(self._extra_config.get("disable_spark_session"))):
             return True
-        …
+
+        if self.can_access_databricks_connect(
+            bool(self._extra_config.get("disable_databricks_connect"))
+        ):
+            if self._extra_config.get("databricks_connect_use_serverless"):
+                return True
+
+            if {
+                "databricks_connect_cluster_id",
+                "databricks_connect_server_hostname",
+                "databricks_connect_access_token",
+            }.issubset(self._extra_config):
+                return True
+
+        return False
 
     @property
     def is_spark_session_connection(self) -> bool:
@@ -108,7 +108,7 @@ class DatabricksEngineAdapter(SparkEngineAdapter, GrantsFromInfoSchemaMixin):
 
         connect_kwargs = dict(
             host=self._extra_config["databricks_connect_server_hostname"],
-            token=self._extra_config["databricks_connect_access_token"],
+            token=self._extra_config.get("databricks_connect_access_token"),
         )
         if "databricks_connect_use_serverless" in self._extra_config:
             connect_kwargs["serverless"] = True
@@ -394,3 +394,20 @@ class DatabricksEngineAdapter(SparkEngineAdapter, GrantsFromInfoSchemaMixin):
             expressions.append(clustered_by_exp)
         properties = exp.Properties(expressions=expressions)
         return properties
+
+    def _build_column_defs(
+        self,
+        target_columns_to_types: t.Dict[str, exp.DataType],
+        column_descriptions: t.Optional[t.Dict[str, str]] = None,
+        is_view: bool = False,
+        materialized: bool = False,
+    ) -> t.List[exp.ColumnDef]:
+        # Databricks requires column types to be specified when adding column comments
+        # in CREATE MATERIALIZED VIEW statements. Override is_view to False to force
+        # column types to be included when comments are present.
+        if is_view and materialized and column_descriptions:
+            is_view = False
+
+        return super()._build_column_defs(
+            target_columns_to_types, column_descriptions, is_view, materialized
+        )
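The constraint being worked around, sketched as DDL (object names invented; based on the comment in the diff, not verified against Databricks docs):

# Plain view: Databricks accepts column comments without column types.
plain_view = "CREATE VIEW cat.db.v (id COMMENT 'pk') AS SELECT id FROM cat.db.t"
# Materialized view: the column list must carry types, so the override drops
# the is_view shortcut whenever comments are present.
mat_view = "CREATE MATERIALIZED VIEW cat.db.mv (id INT COMMENT 'pk') AS SELECT id FROM cat.db.t"
print(plain_view)
print(mat_view)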
sqlmesh/core/engine_adapter/fabric.py
CHANGED

@@ -13,6 +13,8 @@ from sqlmesh.core.engine_adapter.shared import (
 )
 from sqlmesh.utils.errors import SQLMeshError
 from sqlmesh.utils.connection_pool import ConnectionPool
+from sqlmesh.core.schema_diff import TableAlterOperation
+from sqlmesh.utils import random_id
 
 
 logger = logging.getLogger(__name__)
@@ -153,6 +155,113 @@ class FabricEngineAdapter(MSSQLEngineAdapter):
                 f"Unable to switch catalog to {catalog_name}, catalog ended up as {catalog_after_switch}"
             )
 
+    def alter_table(
+        self, alter_expressions: t.Union[t.List[exp.Alter], t.List[TableAlterOperation]]
+    ) -> None:
+        """
+        Applies alter expressions to a table. Fabric has limited support for ALTER TABLE,
+        so this method implements a workaround for column type changes.
+        This method is self-contained and sets its own catalog context.
+        """
+        if not alter_expressions:
+            return
+
+        # Get the target table from the first expression to determine the correct catalog.
+        first_op = alter_expressions[0]
+        expression = first_op.expression if isinstance(first_op, TableAlterOperation) else first_op
+        if not isinstance(expression, exp.Alter) or not expression.this.catalog:
+            # Fallback for unexpected scenarios
+            logger.warning(
+                "Could not determine catalog from alter expression, executing with current context."
+            )
+            super().alter_table(alter_expressions)
+            return
+
+        target_catalog = expression.this.catalog
+        self.set_current_catalog(target_catalog)
+
+        with self.transaction():
+            for op in alter_expressions:
+                expression = op.expression if isinstance(op, TableAlterOperation) else op
+
+                if not isinstance(expression, exp.Alter):
+                    self.execute(expression)
+                    continue
+
+                for action in expression.actions:
+                    table_name = expression.this
+
+                    table_name_without_catalog = table_name.copy()
+                    table_name_without_catalog.set("catalog", None)
+
+                    is_type_change = isinstance(action, exp.AlterColumn) and action.args.get(
+                        "dtype"
+                    )
+
+                    if is_type_change:
+                        column_to_alter = action.this
+                        new_type = action.args["dtype"]
+                        temp_column_name_str = f"{column_to_alter.name}__{random_id(short=True)}"
+                        temp_column_name = exp.to_identifier(temp_column_name_str)
+
+                        logger.info(
+                            "Applying workaround for column '%s' on table '%s' to change type to '%s'.",
+                            column_to_alter.sql(),
+                            table_name.sql(),
+                            new_type.sql(),
+                        )
+
+                        # Step 1: Add a temporary column.
+                        add_column_expr = exp.Alter(
+                            this=table_name_without_catalog.copy(),
+                            kind="TABLE",
+                            actions=[
+                                exp.ColumnDef(this=temp_column_name.copy(), kind=new_type.copy())
+                            ],
+                        )
+                        add_sql = self._to_sql(add_column_expr)
+                        self.execute(add_sql)
+
+                        # Step 2: Copy and cast data.
+                        update_sql = self._to_sql(
+                            exp.Update(
+                                this=table_name_without_catalog.copy(),
+                                expressions=[
+                                    exp.EQ(
+                                        this=temp_column_name.copy(),
+                                        expression=exp.Cast(
+                                            this=column_to_alter.copy(), to=new_type.copy()
+                                        ),
+                                    )
+                                ],
+                            )
+                        )
+                        self.execute(update_sql)
+
+                        # Step 3: Drop the original column.
+                        drop_sql = self._to_sql(
+                            exp.Alter(
+                                this=table_name_without_catalog.copy(),
+                                kind="TABLE",
+                                actions=[exp.Drop(this=column_to_alter.copy(), kind="COLUMN")],
+                            )
+                        )
+                        self.execute(drop_sql)
+
+                        # Step 4: Rename the temporary column.
+                        old_name_qualified = f"{table_name_without_catalog.sql(dialect=self.dialect)}.{temp_column_name.sql(dialect=self.dialect)}"
+                        new_name_unquoted = column_to_alter.sql(
+                            dialect=self.dialect, identify=False
+                        )
+                        rename_sql = f"EXEC sp_rename '{old_name_qualified}', '{new_name_unquoted}', 'COLUMN'"
+                        self.execute(rename_sql)
+                    else:
+                        # For other alterations, execute directly.
+                        direct_alter_expr = exp.Alter(
+                            this=table_name_without_catalog.copy(), kind="TABLE", actions=[action]
+                        )
+                        self.execute(direct_alter_expr)
+
 
 class FabricHttpClient:
     def __init__(self, tenant_id: str, workspace_id: str, client_id: str, client_secret: str):
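Concretely, for a type change the new `alter_table` issues a four-statement sequence rather than a single unsupported `ALTER COLUMN`. A hedged illustration with invented names (the temp-column suffix is random at runtime):

statements = [
    "ALTER TABLE dbo.orders ADD amount__a1b2 BIGINT",                # 1. add temp column
    "UPDATE dbo.orders SET amount__a1b2 = CAST(amount AS BIGINT)",   # 2. copy + cast data
    "ALTER TABLE dbo.orders DROP COLUMN amount",                     # 3. drop original
    "EXEC sp_rename 'dbo.orders.amount__a1b2', 'amount', 'COLUMN'",  # 4. rename temp back
]
for sql in statements:
    print(sql)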
sqlmesh/core/engine_adapter/trino.py
CHANGED

@@ -74,6 +74,32 @@ class TrinoEngineAdapter(
     def schema_location_mapping(self) -> t.Optional[t.Dict[re.Pattern, str]]:
         return self._extra_config.get("schema_location_mapping")
 
+    @property
+    def timestamp_mapping(self) -> t.Optional[t.Dict[exp.DataType, exp.DataType]]:
+        return self._extra_config.get("timestamp_mapping")
+
+    def _apply_timestamp_mapping(
+        self, columns_to_types: t.Dict[str, exp.DataType]
+    ) -> t.Tuple[t.Dict[str, exp.DataType], t.Set[str]]:
+        """Apply custom timestamp mapping to column types.
+
+        Returns:
+            A tuple of (mapped_columns_to_types, mapped_column_names) where mapped_column_names
+            contains the names of columns that were found in the mapping.
+        """
+        if not self.timestamp_mapping:
+            return columns_to_types, set()
+
+        result = {}
+        mapped_columns: t.Set[str] = set()
+        for column, column_type in columns_to_types.items():
+            if column_type in self.timestamp_mapping:
+                result[column] = self.timestamp_mapping[column_type]
+                mapped_columns.add(column)
+            else:
+                result[column] = column_type
+        return result, mapped_columns
+
     @property
     def catalog_support(self) -> CatalogSupport:
         return CatalogSupport.FULL_SUPPORT
@@ -117,7 +143,7 @@ class TrinoEngineAdapter(
         try:
             yield
         finally:
-            self.execute(…)
+            self.execute("RESET SESSION AUTHORIZATION")
 
     def replace_query(
         self,
@@ -284,9 +310,13 @@ class TrinoEngineAdapter(
         column_descriptions: t.Optional[t.Dict[str, str]] = None,
         expressions: t.Optional[t.List[exp.PrimaryKey]] = None,
         is_view: bool = False,
+        materialized: bool = False,
     ) -> exp.Schema:
+        target_columns_to_types, mapped_columns = self._apply_timestamp_mapping(
+            target_columns_to_types
+        )
         if "delta_lake" in self.get_catalog_type_from_table(table):
-            target_columns_to_types = self._to_delta_ts(target_columns_to_types)
+            target_columns_to_types = self._to_delta_ts(target_columns_to_types, mapped_columns)
 
         return super()._build_schema_exp(
             table, target_columns_to_types, column_descriptions, expressions, is_view
@@ -312,10 +342,15 @@ class TrinoEngineAdapter(
         source_columns: t.Optional[t.List[str]] = None,
         **kwargs: t.Any,
     ) -> None:
+        mapped_columns: t.Set[str] = set()
+        if target_columns_to_types:
+            target_columns_to_types, mapped_columns = self._apply_timestamp_mapping(
+                target_columns_to_types
+            )
         if target_columns_to_types and "delta_lake" in self.get_catalog_type_from_table(
             target_table
         ):
-            target_columns_to_types = self._to_delta_ts(target_columns_to_types)
+            target_columns_to_types = self._to_delta_ts(target_columns_to_types, mapped_columns)
 
         return super()._scd_type_2(
             target_table,
@@ -345,18 +380,21 @@ class TrinoEngineAdapter(
     # - `timestamp(3) with time zone` for timezone-aware
     # https://trino.io/docs/current/connector/delta-lake.html#delta-lake-to-trino-type-mapping
     def _to_delta_ts(
-        self, columns_to_types: t.Dict[str, exp.DataType]
+        self,
+        columns_to_types: t.Dict[str, exp.DataType],
+        skip_columns: t.Optional[t.Set[str]] = None,
     ) -> t.Dict[str, exp.DataType]:
         ts6 = exp.DataType.build("timestamp(6)")
         ts3_tz = exp.DataType.build("timestamp(3) with time zone")
+        skip = skip_columns or set()
 
         delta_columns_to_types = {
-            k: ts6 if v.is_type(exp.DataType.Type.TIMESTAMP) else v
+            k: ts6 if k not in skip and v.is_type(exp.DataType.Type.TIMESTAMP) else v
            for k, v in columns_to_types.items()
         }
 
         delta_columns_to_types = {
-            k: ts3_tz if v.is_type(exp.DataType.Type.TIMESTAMPTZ) else v
+            k: ts3_tz if k not in skip and v.is_type(exp.DataType.Type.TIMESTAMPTZ) else v
             for k, v in delta_columns_to_types.items()
         }
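Putting the two pieces together: a column remapped by `timestamp_mapping` is recorded in `mapped_columns` and then skipped by `_to_delta_ts`, so the user's choice wins over the default Delta Lake coercion. A standalone sketch of that interaction (column names invented):

from sqlglot import exp

timestamp_mapping = {
    exp.DataType.build("timestamp"): exp.DataType.build("timestamp(3) with time zone"),
}
columns = {
    "created_at": exp.DataType.build("timestamp"),  # remapped, later skipped by _to_delta_ts
    "id": exp.DataType.build("int"),                # untouched
}

mapped, mapped_names = {}, set()
for name, dtype in columns.items():
    if dtype in timestamp_mapping:
        mapped[name] = timestamp_mapping[dtype]
        mapped_names.add(name)  # would be passed as skip_columns
    else:
        mapped[name] = dtype
print({k: v.sql("trino") for k, v in mapped.items()}, mapped_names)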
sqlmesh/core/linter/rules/builtin.py
CHANGED

@@ -130,7 +130,7 @@ class NoMissingAudits(Rule):
 
 
 class NoMissingUnitTest(Rule):
-    """All models must have a unit test found in the …"""
+    """All models must have a unit test found in the tests/ directory yaml files"""
 
     def check_model(self, model: Model) -> t.Optional[RuleViolation]:
         # External models cannot have unit tests
sqlmesh/core/loader.py
CHANGED
@@ -35,7 +35,7 @@ from sqlmesh.core.model import (
 from sqlmesh.core.model import model as model_registry
 from sqlmesh.core.model.common import make_python_env
 from sqlmesh.core.signal import signal
-from sqlmesh.core.test import ModelTestMetadata, filter_tests_by_patterns
+from sqlmesh.core.test import ModelTestMetadata
 from sqlmesh.utils import UniqueKeyDict, sys_path
 from sqlmesh.utils.errors import ConfigError
 from sqlmesh.utils.jinja import JinjaMacroRegistry, MacroExtractor
@@ -427,9 +427,7 @@ class Loader(abc.ABC):
         """Loads user linting rules"""
         return RuleSet()
 
-    def load_model_tests(
-        self, tests: t.Optional[t.List[str]] = None, patterns: list[str] | None = None
-    ) -> t.List[ModelTestMetadata]:
+    def load_model_tests(self) -> t.List[ModelTestMetadata]:
         """Loads YAML-based model tests"""
         return []
@@ -868,38 +866,23 @@ class SqlMeshLoader(Loader):
 
         return model_test_metadata
 
-    def load_model_tests(
-        self, tests: t.Optional[t.List[str]] = None, patterns: list[str] | None = None
-    ) -> t.List[ModelTestMetadata]:
+    def load_model_tests(self) -> t.List[ModelTestMetadata]:
         """Loads YAML-based model tests"""
         test_meta_list: t.List[ModelTestMetadata] = []
 
-        for test in tests:
-            filename, test_name = test.split("::", maxsplit=1) if "::" in test else (test, "")
-            …
-        for yaml_file in itertools.chain(
-            search_path.glob("**/test*.yaml"),
-            search_path.glob("**/test*.yml"),
-        ):
-            if any(
-                yaml_file.match(ignore_pattern)
-                for ignore_pattern in self.config.ignore_patterns or []
-            ):
-                continue
-
-            test_meta_list.extend(self._load_model_test_file(yaml_file).values())
-
-        test_meta_list = filter_tests_by_patterns(test_meta_list, patterns)
+        search_path = Path(self.config_path) / c.TESTS
+
+        for yaml_file in itertools.chain(
+            search_path.glob("**/test*.yaml"),
+            search_path.glob("**/test*.yml"),
+        ):
+            if any(
+                yaml_file.match(ignore_pattern)
+                for ignore_pattern in self.config.ignore_patterns or []
+            ):
+                continue
+
+            test_meta_list.extend(self._load_model_test_file(yaml_file).values())
 
         return test_meta_list
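The simplified loader now always walks the project's tests/ directory; selection by name or pattern moved entirely into `Context.select_tests`. A runnable sketch of the discovery step (paths invented):

import itertools
from pathlib import Path

search_path = Path("project/tests")  # hypothetical project layout
ignore_patterns = ["**/fixtures/*"]  # mirrors config.ignore_patterns

test_files = [
    f
    for f in itertools.chain(
        search_path.glob("**/test*.yaml"), search_path.glob("**/test*.yml")
    )
    if not any(f.match(p) for p in ignore_patterns)
]
print(test_files)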
sqlmesh/core/model/definition.py
CHANGED
@@ -34,6 +34,7 @@ from sqlmesh.core.model.common import (
 )
 from sqlmesh.core.model.meta import ModelMeta
 from sqlmesh.core.model.kind import (
+    ExternalKind,
     ModelKindName,
     SeedKind,
     ModelKind,
@@ -1035,6 +1036,13 @@ class _Model(ModelMeta, frozen=True):
             # Will raise if the custom materialization points to an invalid class
             get_custom_materialization_type_or_raise(self.kind.materialization)
 
+        # Embedded model kind shouldn't have audits
+        if self.kind.name == ModelKindName.EMBEDDED and self.audits:
+            raise_config_error(
+                "Audits are not supported for embedded models",
+                self._path,
+            )
+
     def is_breaking_change(self, previous: Model) -> t.Optional[bool]:
         """Determines whether this model is a breaking change in relation to the `previous` model.
@@ -1962,6 +1970,7 @@ class PythonModel(_Model):
 class ExternalModel(_Model):
     """The model definition which represents an external source/table."""
 
+    kind: ModelKind = ExternalKind()
     source_type: t.Literal["external"] = "external"
 
     def is_breaking_change(self, previous: Model) -> t.Optional[bool]:
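The new check turns a silent misconfiguration into a load-time error: audits attached to an EMBEDDED model never run, because embedded models are inlined into their callers. A definition the validation now rejects (names invented):

model_sql = """
MODEL (
  name demo.lookup,
  kind EMBEDDED,
  audits (not_null(columns := (id)))  -- now raises: audits unsupported for EMBEDDED
);
SELECT 1 AS id
"""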
sqlmesh/core/test/definition.py
CHANGED
@@ -355,11 +355,12 @@ class ModelTest(unittest.TestCase):
                 for df in _split_df_by_column_pairs(diff)
             )
         else:
-            from pandas import MultiIndex
+            from pandas import DataFrame, MultiIndex
 
             levels = t.cast(MultiIndex, diff.columns).levels[0]
             for col in levels:
-                col_diff = diff[col]
+                # diff[col] returns a DataFrame when columns is a MultiIndex
+                col_diff = t.cast(DataFrame, diff[col])
                 if not col_diff.empty:
                     table = df_to_table(
                         f"[bold red]Column '{col}' mismatch{failed_subtest}[/bold red]",
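Why the cast is sound: with MultiIndex columns, indexing by a top-level key yields a sub-DataFrame, not a Series. A runnable check:

import pandas as pd

diff = pd.DataFrame(
    [[1, 2, 3, 3]],
    columns=pd.MultiIndex.from_tuples([("a", "exp"), ("a", "act"), ("b", "exp"), ("b", "act")]),
)
col_diff = diff["a"]            # sub-DataFrame with columns ('exp', 'act')
print(type(col_diff).__name__)  # DataFrame
print(col_diff.empty)           # False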
sqlmesh/integrations/github/cicd/command.py
CHANGED

@@ -25,12 +25,21 @@ logger = logging.getLogger(__name__)
     envvar="GITHUB_TOKEN",
     help="The Github Token to be used. Pass in `${{ secrets.GITHUB_TOKEN }}` if you want to use the one created by Github actions",
 )
+@click.option(
+    "--full-logs",
+    is_flag=True,
+    help="Whether to print all logs in the Github Actions output or only in their relevant GA check",
+)
 @click.pass_context
-def github(ctx: click.Context, token: str) -> None:
+def github(ctx: click.Context, token: str, full_logs: bool = False) -> None:
     """Github Action CI/CD Bot. See https://sqlmesh.readthedocs.io/en/stable/integrations/github/ for details"""
     # set a larger width because if none is specified, it auto-detects 80 characters when running in GitHub Actions
     # which can result in surprise newlines when outputting dates to backfill
-    set_console(MarkdownConsole(width=1000))
+    set_console(
+        MarkdownConsole(
+            width=1000, warning_capture_only=not full_logs, error_capture_only=not full_logs
+        )
+    )
     ctx.obj["github"] = GithubController(
         paths=ctx.obj["paths"],
         token=token,
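With the flag set, warnings and errors are echoed to the workflow's console log in addition to the per-check summaries, e.g. `sqlmesh_cicd -p . github --token "$GITHUB_TOKEN" --full-logs run-all` (invocation shape assumed from the SQLMesh CICD docs; adjust the path and subcommand to your workflow).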
sqlmesh/integrations/github/cicd/controller.py
CHANGED

@@ -448,10 +448,9 @@ class GithubController:
             c.PROD,
             # this is required to highlight any data gaps between this PR environment and prod (since PR environments may only contain a subset of data)
             no_gaps=False,
-            # this works because the snapshots were already categorized when applying self.pr_plan so there are no uncategorized local snapshots to trigger a plan error
-            no_auto_categorization=True,
             skip_tests=True,
             skip_linter=True,
+            categorizer_config=self.bot_config.auto_categorize_changes,
             run=self.bot_config.run_on_deploy_to_prod,
             forward_only=self.forward_only_plan,
         )
@@ -773,6 +772,11 @@ class GithubController:
                 "PR is already merged and this event was triggered prior to the merge."
             )
         merge_status = self._get_merge_state_status()
+        if merge_status.is_blocked:
+            raise CICDBotError(
+                "Branch protection or ruleset requirement is likely not satisfied, e.g. missing CODEOWNERS approval. "
+                "Please check PR and resolve any issues."
+            )
         if merge_status.is_dirty:
             raise CICDBotError(
                 "Merge commit cannot be cleanly created. Likely from a merge conflict. "
sqlmesh/lsp/context.py
CHANGED
@@ -72,7 +72,7 @@ class LSPContext:
 
     def list_workspace_tests(self) -> t.List[TestEntry]:
         """List all tests in the workspace."""
-        tests = self.context.…
+        tests = self.context.select_tests()
 
         # Use a set to ensure unique URIs
         unique_test_uris = {URI.from_path(test.path).value for test in tests}
@@ -81,7 +81,9 @@ class LSPContext:
             test_ranges = get_test_ranges(URI(uri).to_path())
             if uri not in test_uris:
                 test_uris[uri] = {}
+
             test_uris[uri].update(test_ranges)
+
         return [
             TestEntry(
                 name=test.test_name,
@@ -100,7 +102,7 @@ class LSPContext:
         Returns:
             List of TestEntry objects for the specified document.
         """
-        tests = self.context.…
+        tests = self.context.select_tests(tests=[str(uri.to_path())])
         test_ranges = get_test_ranges(uri.to_path())
         return [
             TestEntry(
sqlmesh/magics.py
CHANGED
@@ -337,7 +337,7 @@ class SQLMeshMagics(Magics):
         if not args.test_name and not args.ls:
             raise MagicError("Must provide either test name or `--ls` to list tests")
 
-        test_meta = context.…
+        test_meta = context.select_tests()
 
         tests: t.Dict[str, t.Dict[str, ModelTestMetadata]] = defaultdict(dict)
         for model_test_metadata in test_meta:
sqlmesh/utils/git.py
CHANGED
@@ -16,7 +16,9 @@ class GitClient:
         )
 
     def list_uncommitted_changed_files(self) -> t.List[Path]:
-        return self._execute_list_output(…)
+        return self._execute_list_output(
+            ["diff", "--name-only", "--diff-filter=d", "HEAD"], self._git_root
+        )
 
     def list_committed_changed_files(self, target_branch: str = "main") -> t.List[Path]:
         return self._execute_list_output(
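Equivalent git invocation, for reference — `--diff-filter=d` (lowercase) excludes deleted files, so every returned path still exists on disk. A standalone sketch (assumes it runs inside a git repository):

import subprocess

changed = subprocess.run(
    ["git", "diff", "--name-only", "--diff-filter=d", "HEAD"],
    capture_output=True, text=True, check=True,
).stdout.splitlines()
print(changed)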
{sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlmesh
-Version: 0.227.2.dev6
+Version: 0.228.2
 Summary: Next-generation data transformation framework
 Author-email: "TobikoData Inc." <engineering@tobikodata.com>
 License: Apache License
@@ -315,7 +315,7 @@ Requires-Dist: cloud-sql-python-connector[pg8000]>=1.8.0; extra == "gcppostgres"
 Provides-Extra: github
 Requires-Dist: PyGithub>=2.6.0; extra == "github"
 Provides-Extra: motherduck
-Requires-Dist: duckdb>=1.2…
+Requires-Dist: duckdb>=1.3.2; extra == "motherduck"
 Provides-Extra: mssql
 Requires-Dist: pymssql; extra == "mssql"
 Provides-Extra: mssql-odbc
{sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 sqlmesh/__init__.py,sha256=v_spqQEhcnGaahp1yPvMqUIa6mhH3cs3Bc1CznxvCEA,7965
-sqlmesh/_version.py,sha256=…
-sqlmesh/magics.py,sha256=…
+sqlmesh/_version.py,sha256=ACVrSdNg2UPVOn2UpogTOsRK3tNSbFTR7bH-jNSw-tE,708
+sqlmesh/magics.py,sha256=7Q1_lXSD_PgYH40Hsx6-OkfSQC3UJZgF043RVFRnw1s,42082
 sqlmesh/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/cicd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/cicd/bot.py,sha256=2zlbn-DXkqQzr3lA0__IGU4XaIfXBXBKLWXNI2DRJX8,759
@@ -13,13 +13,13 @@ sqlmesh/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/core/_typing.py,sha256=PzXxMYnORq18JhblAOUttms3zPJZzZpIbfFA_jgKYPA,498
 sqlmesh/core/console.py,sha256=MYpVlciUY6rUuoqXyKfXTxD6a4-Bw4-ooATUTj_VHGg,172830
 sqlmesh/core/constants.py,sha256=BuQk43vluUm7LfP9nKp5o9qRhqIenWF_LiLXO_t_53c,2699
-sqlmesh/core/context.py,sha256=…
+sqlmesh/core/context.py,sha256=Ig2FKOLecp0tZ3jnx4952gQ52KukFATMLNfRK4vHqlk,133051
 sqlmesh/core/context_diff.py,sha256=mxkJu0IthFMOlaQ_kcq5C09mlgkq2RQb-pG2rd-x_nA,21648
-sqlmesh/core/dialect.py,sha256=…
+sqlmesh/core/dialect.py,sha256=mxdzQjU0KNloidEPo4tk2poAPTAYRcH_6AdRbDYN-zI,53442
 sqlmesh/core/environment.py,sha256=Kgs_gUEUI072mh0JJFWNRynrCxp1TzRHZhX_NWJRfXc,13142
 sqlmesh/core/janitor.py,sha256=zJRN48ENjKexeiqa1Kmwyj_HsEEEIAa8hsFD8gTCmfg,7194
 sqlmesh/core/lineage.py,sha256=LtiOztX1xIbFfWz-eb5dPZW4B0o2sI942_IM4YDbsso,3163
-sqlmesh/core/loader.py,sha256=…
+sqlmesh/core/loader.py,sha256=YbdDekoeIwu1zg0xFsiQUWsxgupZTqpHAziwxV-53Hs,36698
 sqlmesh/core/macros.py,sha256=rkklwVnUEmEro4wpdel289mKhaS3x5_SPZrkYZt3Q9E,63173
 sqlmesh/core/node.py,sha256=2ejDwH1whl_ic1CRzX16Be-FQrosAf8pdyWb7oPzU6M,19895
 sqlmesh/core/notification_target.py,sha256=PPGoDrgbRKxr27vJEu03XqNTQLYTw0ZF_b0yAapxGeI,16158
@@ -42,7 +42,7 @@ sqlmesh/core/config/__init__.py,sha256=tnEakbd8FAgSLYmjzuYAAgHIpJ00lwMKAhD_Cfs2O
 sqlmesh/core/config/base.py,sha256=t8NQmsgQoZSc-k0dlDiCb8t1jj0AMYdGZ-6se9q_Pks,4898
 sqlmesh/core/config/categorizer.py,sha256=6vzUoNLjR6GOEb_2mYVz2TwmMv2BfldgHX2u-Le5HZs,1975
 sqlmesh/core/config/common.py,sha256=9V6PltBAjYeWLOU5dAbqL55BSFfpg8z8t2Op1x_PLhU,6418
-sqlmesh/core/config/connection.py,sha256=…
+sqlmesh/core/config/connection.py,sha256=l2GUpZtCJyVk94JiTcvZbrrH9dpOfZChCwyM6Z20Efs,92425
 sqlmesh/core/config/dbt.py,sha256=xSQ4NEVWhZj_aRYpyy4MWcRJ8Qa0o28w2ZBLI4bs3_I,468
 sqlmesh/core/config/format.py,sha256=6CXFbvnor56xbldKE-Vrm9k_ABRoY4v6vgIb3mCihiQ,1355
 sqlmesh/core/config/gateway.py,sha256=tYngyqwd_4Qr9lhcv2hlvLvb_2pgYYtKu6hdGsTr-4I,1931
@@ -60,13 +60,13 @@ sqlmesh/core/config/ui.py,sha256=jsO-S6_d9NkLZGG5pT4mgKgxMF34KzkDociZAMvCX3U,278
 sqlmesh/core/engine_adapter/__init__.py,sha256=y9jZAFdMBkkkRrf0ymfsJJn6s_7Ya6OpDgR4Bf1OG_U,2383
 sqlmesh/core/engine_adapter/_typing.py,sha256=PCXQVpNbUTI3rJQyH_VTx57mDR5emh8b8cAfme6hTW4,1104
 sqlmesh/core/engine_adapter/athena.py,sha256=5BhMaQcpiBkGt_tdT4Dw67t5pCOh-UN9-bQtayFRL3Q,26867
-sqlmesh/core/engine_adapter/base.py,sha256=…
+sqlmesh/core/engine_adapter/base.py,sha256=GN05HN4E_Yrw38ps7gwKnes-bput3uIAbTFXpttqBi8,130196
 sqlmesh/core/engine_adapter/base_postgres.py,sha256=WTU0QingaTNM7n-mTVxS-sg4f6jFZGOSryK5IYacveY,7734
 sqlmesh/core/engine_adapter/bigquery.py,sha256=edBWbAbeXA4bOtVG-YNTQbt9qqwL9QFffZti8Ozv-Cw,60923
 sqlmesh/core/engine_adapter/clickhouse.py,sha256=GWGpwdxZd4RqLSAMlOHjtO8nPpSIo3zFeRWnj9eSOrM,36072
-sqlmesh/core/engine_adapter/databricks.py,sha256=…
+sqlmesh/core/engine_adapter/databricks.py,sha256=VrZMgrL7PQiipaI_inIMcLudLqg2nX5JLdALjB8DamY,16525
 sqlmesh/core/engine_adapter/duckdb.py,sha256=9AXeRhaYXBcYSmIavyFY9LUzfgh94qkTO98v0-suQ8I,7993
-sqlmesh/core/engine_adapter/fabric.py,sha256=…
+sqlmesh/core/engine_adapter/fabric.py,sha256=jY1bejscEcL5r-WdGjsSGr-dWDa1awavCikrAyhDFpk,19299
 sqlmesh/core/engine_adapter/mixins.py,sha256=3rB7B2PZSB920BODO7k_kKqu6z0N-zj1etiRCYzpUcQ,27096
 sqlmesh/core/engine_adapter/mssql.py,sha256=pqh6D_7eAeVCH6K4-81HPcNTLEPhTM_-Mou0QWBTOfA,18898
 sqlmesh/core/engine_adapter/mysql.py,sha256=anKxdklYY2kiuxaHsC7FPN-LKzo7BP0Hy6hinA_c5Hg,6953
@@ -76,13 +76,13 @@ sqlmesh/core/engine_adapter/risingwave.py,sha256=d_1MxpXNONyyLnuELa7bILkJlLquf4j
 sqlmesh/core/engine_adapter/shared.py,sha256=bM4GJSAR0dU3wCqsTl2SIcy2j_8BGusQvnme99l6wnE,13701
 sqlmesh/core/engine_adapter/snowflake.py,sha256=6rMuhuhp2K-UH8dVnmiieucfOevxmK8vR3N5-dj4MDA,33453
 sqlmesh/core/engine_adapter/spark.py,sha256=ZDEg4rx_cvPcLG83PSWu5nkXzChaCbmb7ka2J2ngEEU,23068
-sqlmesh/core/engine_adapter/trino.py,sha256=…
+sqlmesh/core/engine_adapter/trino.py,sha256=F6Cs0RxbMnk5tOYxp3GdMNhK3nnBrtnU8gldMD3POh0,19731
 sqlmesh/core/linter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/core/linter/definition.py,sha256=1EOhKdF16jmeqISfcrR-8fzMdgXuxpB7wb3QaepBPeU,5564
 sqlmesh/core/linter/helpers.py,sha256=cwKXP4sL6azRtNVGbMfJ5_6Hqq5Xx2M2rRLCgH3Y3ag,10743
 sqlmesh/core/linter/rule.py,sha256=nB3o1rHyN44ZOg5ImICP16SeUHimf-12ObdXJjkTGyM,3964
 sqlmesh/core/linter/rules/__init__.py,sha256=gevzfb67vFqckTCoVAe_TBGf6hQ-YtE1_YuGuXyh1L0,77
-sqlmesh/core/linter/rules/builtin.py,sha256=…
+sqlmesh/core/linter/rules/builtin.py,sha256=6j22W_5EOBN979Bi2_mvmCNq4yqZVsJ9oqEukunj4Ws,11728
 sqlmesh/core/metric/__init__.py,sha256=H1HmoD5IwN4YWe9iJXyueLYNmTQFZwok5nSWNJcZIBQ,237
 sqlmesh/core/metric/definition.py,sha256=Yd5aVgsZCDPJ43aGP7WqtzZOuuSUtB8uJGVA6Jw9x9M,7201
 sqlmesh/core/metric/rewriter.py,sha256=GiSTHfn2kinqCfNPYgZPRk93JFLzVaaejHtHDQ0yXZI,7326
@@ -90,7 +90,7 @@ sqlmesh/core/model/__init__.py,sha256=C8GRZ53xuXEA9hQv3BQS9pNNyd9rZ06R_B96UYGhDu
 sqlmesh/core/model/cache.py,sha256=csun0RJguHzKX6-qITcOs4fVP4f8_Ts8qiUVV4sHY6Q,7869
 sqlmesh/core/model/common.py,sha256=UqOmtbsrl4MYDUOigde2CwME-qdPgRf91QExX4yhAA0,27741
 sqlmesh/core/model/decorator.py,sha256=bL-JuNrdBAikZSjVxnXqeB9i0e9qC7jm7yLjwiZ38aU,9470
-sqlmesh/core/model/definition.py,sha256=…
+sqlmesh/core/model/definition.py,sha256=6avH5we43psgNCsN2aDFnSjL3UNJ85qmMLKwxvERqPA,117745
 sqlmesh/core/model/kind.py,sha256=qJdiin09Q0neRFudNnLsDNCvbqD3EHAoK-WCvX-eUJs,40071
 sqlmesh/core/model/meta.py,sha256=ELjprp6rl7dW9a7rs9eyQXScbDImInq35SyasiAriIk,24128
 sqlmesh/core/model/schema.py,sha256=_HMYfzK9wWXh7_CQDIIGnuQUD4aiX3o5D2cRp2sERzc,3387
@@ -123,7 +123,7 @@ sqlmesh/core/state_sync/db/utils.py,sha256=8KjRmOjP5CLuSRkYBUE2k34V-UYB0iSyuO0rW
 sqlmesh/core/state_sync/db/version.py,sha256=q5VDIIvY-585vTbvqPalU0N4qjG6RKs4gr8a51R-_UE,2257
 sqlmesh/core/test/__init__.py,sha256=e83TJPwPRR_rAG29Y0OVbZb-5oWVBzz-_wrcd22Qk10,418
 sqlmesh/core/test/context.py,sha256=-TjUrhM3WLtVPBgOMTkvRrnuZq7mT7BeIIyuCbrPePU,2332
-sqlmesh/core/test/definition.py,sha256=…
+sqlmesh/core/test/definition.py,sha256=vktajrCX1Yf50ZTYwh5wqnBry4qEgxSilqTGeuF3sec,42334
 sqlmesh/core/test/discovery.py,sha256=5duKXgH4Lms7rXhJ8tOLCmCtqHpv7c7a4VJf12VkGw8,1278
 sqlmesh/core/test/result.py,sha256=6gOKEsERciHhcrw9TedtNr7g1ynTO7UwA5-PPrzvYuM,4564
 sqlmesh/core/test/runner.py,sha256=8I-cL7Q9CggLvET_GPkrXB2YjlyCIHrvbFbbRDnSHRE,6169
@@ -156,13 +156,13 @@ sqlmesh/integrations/dlt.py,sha256=mA9ym16gAN2O8RYOzSPeTX5xsXPuXVRPdiX7dkGGZlo,7
 sqlmesh/integrations/slack.py,sha256=nxLxu5WztGbZH3JdqnzyPqmJUMfRdJ_49LQ7zR-u39Q,6801
 sqlmesh/integrations/github/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlmesh/integrations/github/cicd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sqlmesh/integrations/github/cicd/command.py,sha256=…
+sqlmesh/integrations/github/cicd/command.py,sha256=CyYpoFjZHkOpE2CFEWAMxOQhNrhbds8643PHaNmBLxo,12481
 sqlmesh/integrations/github/cicd/config.py,sha256=n4KyNv65y1gpys82iLAQZUb6g2pnzywyV_7cOrQXsnI,3669
-sqlmesh/integrations/github/cicd/controller.py,sha256=…
+sqlmesh/integrations/github/cicd/controller.py,sha256=pWRiUgBm878zO6h6RB4_nnu6L6r8szDp5h_g4Xp4SjQ,55044
 sqlmesh/lsp/api.py,sha256=Z_8Op6CWqdbmEeidCQgMcVmRooQujqaynn-0EOw4478,2505
 sqlmesh/lsp/commands.py,sha256=7tZPePSH-IwBYmXJPIlqGM7pi4rOCLEtc3fKJglAxZs,72
 sqlmesh/lsp/completions.py,sha256=7Lhboh6xyoMJ3kkHG3aZz1xVbDwKiXeQKdIRj5xlUOA,6674
-sqlmesh/lsp/context.py,sha256=…
+sqlmesh/lsp/context.py,sha256=7S17A1oE9WZtOU1dYvQeoExqvXGwmNxZdbk9uF4Xllw,20886
 sqlmesh/lsp/custom.py,sha256=npzNznpUJ3ELY_WU4n_4I73lAjuTapI0_HKCFsoMcOk,5132
 sqlmesh/lsp/errors.py,sha256=3NMim_5J00Eypz7t8b7XbkBfy8gIsRkeq-VcjD4COtc,1489
 sqlmesh/lsp/helpers.py,sha256=EFc1u3-b7kSv5-tNwmKUDxId72RCLDBnN2lLTgRSzzQ,1020
@@ -226,7 +226,7 @@ sqlmesh/utils/cron.py,sha256=eGwn4iUeiRoQzwcd9eS2TZkut8nR4yWud77N7xQ9CQ0,1829
 sqlmesh/utils/dag.py,sha256=5Sec50yY-UBEpLU82_nzaL7Wlalwf7K8EvLL8sBs2Z8,9049
 sqlmesh/utils/date.py,sha256=m0NHAqSQYqZnvuNHVk9RNEktiE_LbyqcO_O0SVxcGrw,16460
 sqlmesh/utils/errors.py,sha256=rktXVSd4R3tii7_k_pnex05ZXS7QnlFx1np1u-pjSSU,8000
-sqlmesh/utils/git.py,sha256=…
+sqlmesh/utils/git.py,sha256=hrzhAH9XkxKoxNAI5ASOOm-d0-UyGi8YB37-neOcKe4,1898
 sqlmesh/utils/hashing.py,sha256=nZRKvLNQ83tLG4IoXshVJZf-MbDrXC1HOeNw8Ji-tMM,578
 sqlmesh/utils/jinja.py,sha256=474yuVZmS1pppBoEZqCJeugW9CQWniWBeuV4x6RGbEA,26380
 sqlmesh/utils/lineage.py,sha256=zz9BPc6MShRy9LEXmAp02x6oKt4ubVNUPdapFVFKkac,16019
@@ -238,7 +238,7 @@ sqlmesh/utils/pydantic.py,sha256=-yppkVlw6iSBaSiKjbe7OChxL-u3urOS4-KCjJEgsRU,120
 sqlmesh/utils/rich.py,sha256=cwQ5nJ6sgz64xHtoh6_ec7ReV5YpsOGhMtUJnwoRfEI,3549
 sqlmesh/utils/windows.py,sha256=0F9RdpuuCoG5NiEDXvWlAGCiJ-59OjSAmgFF5wW05aY,1133
 sqlmesh/utils/yaml.py,sha256=KFBd7hsKNRTtRudGR7d410qUYffQv0EWRcDM8hVNNZg,3025
-sqlmesh-0.227.2.dev6.dist-info/licenses/LICENSE,sha256=…
+sqlmesh-0.228.2.dist-info/licenses/LICENSE,sha256=OlMefUjgWJdULtf84BLW0AZZcY8DwdgQqb_1j2862j8,11346
 sqlmesh_dbt/__init__.py,sha256=awYS5y5mz-1NUmx6i5h5NSTJ7tidRl9NC0FAnFWSF6U,350
 sqlmesh_dbt/cli.py,sha256=p9foHjAW9ni7BTOJ2loynk47M0Sf43QIJZRggOzF5tc,6351
 sqlmesh_dbt/console.py,sha256=RwWLYnEZHzn9Xp-e2gbZvkdKbWbBLN146geI84mJitg,1132
@@ -363,8 +363,8 @@ web/server/api/endpoints/models.py,sha256=kwj0s7uve3iZSMfmjkoPVMFMeY1sD0peTeyrWf
 web/server/api/endpoints/modules.py,sha256=8hqqgonGay_mJmpCw0IdbjsPhWlQH2VLdKAqha-myac,468
 web/server/api/endpoints/plan.py,sha256=bbbY50W_2MsZSTxOHWMKz0tbIm75nsRSlPy8GI2fg9Q,9306
 web/server/api/endpoints/table_diff.py,sha256=8XTwgOh6QBbNy_hTM1JuHgRjbnie-pGPrphiW-FNLjQ,6058
-sqlmesh-0.227.2.dev6.dist-info/METADATA,sha256=…
-sqlmesh-0.227.2.dev6.dist-info/WHEEL,sha256=…
-sqlmesh-0.227.2.dev6.dist-info/entry_points.txt,sha256=…
-sqlmesh-0.227.2.dev6.dist-info/top_level.txt,sha256=…
-sqlmesh-0.227.2.dev6.dist-info/RECORD,,
+sqlmesh-0.228.2.dist-info/METADATA,sha256=MzCP4RQrktGJl31ZWjoCyCLFuWa7fGr7y_vo-ZuoGpA,26680
+sqlmesh-0.228.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+sqlmesh-0.228.2.dist-info/entry_points.txt,sha256=sHAf6tQczIM8xZoduN4qaUjV7QEPVUUW_LCT8EDUMv4,155
+sqlmesh-0.228.2.dist-info/top_level.txt,sha256=RQ-33FPe2IgL0rgossAfJkCRtqslz9b7wFARqiWLC5Q,24
+sqlmesh-0.228.2.dist-info/RECORD,,
{sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/WHEEL
File without changes
{sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/entry_points.txt
File without changes
{sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/licenses/LICENSE
File without changes
{sqlmesh-0.227.2.dev6.dist-info → sqlmesh-0.228.2.dist-info}/top_level.txt
File without changes