sqlmesh 0.213.1.dev1__py3-none-any.whl → 0.227.2.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlmesh/__init__.py +12 -2
- sqlmesh/_version.py +2 -2
- sqlmesh/cli/main.py +0 -44
- sqlmesh/cli/project_init.py +11 -2
- sqlmesh/core/_typing.py +1 -0
- sqlmesh/core/audit/definition.py +8 -2
- sqlmesh/core/config/__init__.py +1 -1
- sqlmesh/core/config/connection.py +17 -5
- sqlmesh/core/config/dbt.py +13 -0
- sqlmesh/core/config/janitor.py +12 -0
- sqlmesh/core/config/loader.py +7 -0
- sqlmesh/core/config/model.py +2 -0
- sqlmesh/core/config/root.py +3 -0
- sqlmesh/core/console.py +81 -3
- sqlmesh/core/constants.py +1 -1
- sqlmesh/core/context.py +69 -26
- sqlmesh/core/dialect.py +3 -0
- sqlmesh/core/engine_adapter/_typing.py +2 -0
- sqlmesh/core/engine_adapter/base.py +322 -22
- sqlmesh/core/engine_adapter/base_postgres.py +17 -1
- sqlmesh/core/engine_adapter/bigquery.py +146 -7
- sqlmesh/core/engine_adapter/clickhouse.py +17 -13
- sqlmesh/core/engine_adapter/databricks.py +33 -2
- sqlmesh/core/engine_adapter/fabric.py +10 -29
- sqlmesh/core/engine_adapter/mixins.py +142 -48
- sqlmesh/core/engine_adapter/mssql.py +15 -4
- sqlmesh/core/engine_adapter/mysql.py +2 -2
- sqlmesh/core/engine_adapter/postgres.py +9 -3
- sqlmesh/core/engine_adapter/redshift.py +4 -0
- sqlmesh/core/engine_adapter/risingwave.py +1 -0
- sqlmesh/core/engine_adapter/shared.py +6 -0
- sqlmesh/core/engine_adapter/snowflake.py +82 -11
- sqlmesh/core/engine_adapter/spark.py +14 -10
- sqlmesh/core/engine_adapter/trino.py +4 -2
- sqlmesh/core/environment.py +2 -0
- sqlmesh/core/janitor.py +181 -0
- sqlmesh/core/lineage.py +1 -0
- sqlmesh/core/linter/definition.py +13 -13
- sqlmesh/core/linter/rules/builtin.py +29 -0
- sqlmesh/core/macros.py +35 -13
- sqlmesh/core/model/common.py +2 -0
- sqlmesh/core/model/definition.py +82 -28
- sqlmesh/core/model/kind.py +66 -2
- sqlmesh/core/model/meta.py +108 -4
- sqlmesh/core/node.py +101 -1
- sqlmesh/core/plan/builder.py +18 -10
- sqlmesh/core/plan/common.py +199 -2
- sqlmesh/core/plan/definition.py +25 -6
- sqlmesh/core/plan/evaluator.py +75 -113
- sqlmesh/core/plan/explainer.py +90 -8
- sqlmesh/core/plan/stages.py +42 -21
- sqlmesh/core/renderer.py +78 -32
- sqlmesh/core/scheduler.py +102 -22
- sqlmesh/core/selector.py +137 -9
- sqlmesh/core/signal.py +64 -1
- sqlmesh/core/snapshot/__init__.py +2 -0
- sqlmesh/core/snapshot/definition.py +146 -34
- sqlmesh/core/snapshot/evaluator.py +689 -124
- sqlmesh/core/state_sync/__init__.py +0 -1
- sqlmesh/core/state_sync/base.py +55 -33
- sqlmesh/core/state_sync/cache.py +12 -7
- sqlmesh/core/state_sync/common.py +216 -111
- sqlmesh/core/state_sync/db/environment.py +6 -4
- sqlmesh/core/state_sync/db/facade.py +42 -24
- sqlmesh/core/state_sync/db/interval.py +27 -7
- sqlmesh/core/state_sync/db/migrator.py +34 -16
- sqlmesh/core/state_sync/db/snapshot.py +177 -169
- sqlmesh/core/table_diff.py +2 -2
- sqlmesh/core/test/context.py +2 -0
- sqlmesh/core/test/definition.py +14 -9
- sqlmesh/dbt/adapter.py +22 -16
- sqlmesh/dbt/basemodel.py +75 -56
- sqlmesh/dbt/builtin.py +116 -12
- sqlmesh/dbt/column.py +17 -5
- sqlmesh/dbt/common.py +19 -5
- sqlmesh/dbt/context.py +14 -1
- sqlmesh/dbt/loader.py +61 -9
- sqlmesh/dbt/manifest.py +174 -16
- sqlmesh/dbt/model.py +183 -85
- sqlmesh/dbt/package.py +16 -1
- sqlmesh/dbt/profile.py +3 -3
- sqlmesh/dbt/project.py +12 -7
- sqlmesh/dbt/seed.py +6 -1
- sqlmesh/dbt/source.py +13 -1
- sqlmesh/dbt/target.py +25 -6
- sqlmesh/dbt/test.py +36 -5
- sqlmesh/migrations/v0000_baseline.py +95 -0
- sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +5 -7
- sqlmesh/migrations/v0062_add_model_gateway.py +5 -1
- sqlmesh/migrations/v0063_change_signals.py +5 -3
- sqlmesh/migrations/v0064_join_when_matched_strings.py +5 -3
- sqlmesh/migrations/v0065_add_model_optimize.py +5 -1
- sqlmesh/migrations/v0066_add_auto_restatements.py +8 -3
- sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +5 -1
- sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +5 -1
- sqlmesh/migrations/v0069_update_dev_table_suffix.py +5 -3
- sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +5 -1
- sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +9 -5
- sqlmesh/migrations/v0072_add_environment_statements.py +5 -3
- sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +5 -3
- sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +5 -1
- sqlmesh/migrations/v0075_remove_validate_query.py +5 -3
- sqlmesh/migrations/v0076_add_cron_tz.py +5 -1
- sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +5 -1
- sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +5 -3
- sqlmesh/migrations/v0079_add_gateway_managed_property.py +10 -5
- sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +5 -1
- sqlmesh/migrations/v0081_update_partitioned_by.py +5 -3
- sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +5 -3
- sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +5 -1
- sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +5 -1
- sqlmesh/migrations/v0085_deterministic_repr.py +5 -3
- sqlmesh/migrations/v0086_check_deterministic_bug.py +5 -3
- sqlmesh/migrations/v0087_normalize_blueprint_variables.py +5 -3
- sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +5 -3
- sqlmesh/migrations/v0089_add_virtual_environment_mode.py +5 -1
- sqlmesh/migrations/v0090_add_forward_only_column.py +9 -5
- sqlmesh/migrations/v0091_on_additive_change.py +5 -1
- sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +5 -3
- sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +5 -1
- sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +123 -0
- sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +49 -0
- sqlmesh/migrations/v0096_remove_plan_dags_table.py +13 -0
- sqlmesh/migrations/v0097_add_dbt_name_in_node.py +9 -0
- sqlmesh/migrations/{v0060_move_audits_to_model.py → v0098_add_dbt_node_info_in_node.py} +33 -16
- sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
- sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
- sqlmesh/utils/__init__.py +8 -1
- sqlmesh/utils/cache.py +5 -1
- sqlmesh/utils/connection_pool.py +2 -1
- sqlmesh/utils/dag.py +65 -10
- sqlmesh/utils/date.py +8 -1
- sqlmesh/utils/errors.py +8 -0
- sqlmesh/utils/jinja.py +54 -4
- sqlmesh/utils/pydantic.py +6 -6
- sqlmesh/utils/windows.py +13 -3
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/METADATA +7 -10
- sqlmesh-0.227.2.dev4.dist-info/RECORD +370 -0
- sqlmesh_dbt/cli.py +70 -7
- sqlmesh_dbt/console.py +14 -6
- sqlmesh_dbt/operations.py +103 -24
- sqlmesh_dbt/selectors.py +39 -1
- web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
- web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
- web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
- web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
- web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
- web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
- web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
- web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
- web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
- web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
- web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
- web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
- web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
- web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
- web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
- web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
- web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
- web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
- web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
- web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
- web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
- web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
- web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
- web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
- web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
- web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
- web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
- web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
- web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
- web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
- web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
- web/client/dist/assets/context-BctCsyGb.js +71 -0
- web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
- web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
- web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
- web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
- web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
- web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
- web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
- web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
- web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
- web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
- web/client/dist/index.html +1 -1
- sqlmesh/integrations/llm.py +0 -56
- sqlmesh/migrations/v0001_init.py +0 -60
- sqlmesh/migrations/v0002_remove_identify.py +0 -5
- sqlmesh/migrations/v0003_move_batch_size.py +0 -34
- sqlmesh/migrations/v0004_environmnent_add_finalized_at.py +0 -23
- sqlmesh/migrations/v0005_create_seed_table.py +0 -24
- sqlmesh/migrations/v0006_change_seed_hash.py +0 -5
- sqlmesh/migrations/v0007_env_table_info_to_kind.py +0 -99
- sqlmesh/migrations/v0008_create_intervals_table.py +0 -38
- sqlmesh/migrations/v0009_remove_pre_post_hooks.py +0 -62
- sqlmesh/migrations/v0010_seed_hash_batch_size.py +0 -5
- sqlmesh/migrations/v0011_add_model_kind_name.py +0 -63
- sqlmesh/migrations/v0012_update_jinja_expressions.py +0 -86
- sqlmesh/migrations/v0013_serde_using_model_dialects.py +0 -87
- sqlmesh/migrations/v0014_fix_dev_intervals.py +0 -14
- sqlmesh/migrations/v0015_environment_add_promoted_snapshot_ids.py +0 -26
- sqlmesh/migrations/v0016_fix_windows_path.py +0 -59
- sqlmesh/migrations/v0017_fix_windows_seed_path.py +0 -55
- sqlmesh/migrations/v0018_rename_snapshot_model_to_node.py +0 -53
- sqlmesh/migrations/v0019_add_env_suffix_target.py +0 -28
- sqlmesh/migrations/v0020_remove_redundant_attributes_from_dbt_models.py +0 -80
- sqlmesh/migrations/v0021_fix_table_properties.py +0 -62
- sqlmesh/migrations/v0022_move_project_to_model.py +0 -54
- sqlmesh/migrations/v0023_fix_added_models_with_forward_only_parents.py +0 -65
- sqlmesh/migrations/v0024_replace_model_kind_name_enum_with_value.py +0 -55
- sqlmesh/migrations/v0025_fix_intervals_and_missing_change_category.py +0 -117
- sqlmesh/migrations/v0026_remove_dialect_from_seed.py +0 -55
- sqlmesh/migrations/v0027_minute_interval_to_five.py +0 -57
- sqlmesh/migrations/v0028_add_plan_dags_table.py +0 -29
- sqlmesh/migrations/v0029_generate_schema_types_using_dialect.py +0 -69
- sqlmesh/migrations/v0030_update_unrestorable_snapshots.py +0 -65
- sqlmesh/migrations/v0031_remove_dbt_target_fields.py +0 -65
- sqlmesh/migrations/v0032_add_sqlmesh_version.py +0 -25
- sqlmesh/migrations/v0033_mysql_fix_blob_text_type.py +0 -45
- sqlmesh/migrations/v0034_add_default_catalog.py +0 -367
- sqlmesh/migrations/v0035_add_catalog_name_override.py +0 -22
- sqlmesh/migrations/v0036_delete_plan_dags_bug_fix.py +0 -14
- sqlmesh/migrations/v0037_remove_dbt_is_incremental_macro.py +0 -61
- sqlmesh/migrations/v0038_add_expiration_ts_to_snapshot.py +0 -73
- sqlmesh/migrations/v0039_include_environment_in_plan_dag_spec.py +0 -68
- sqlmesh/migrations/v0040_add_previous_finalized_snapshots.py +0 -26
- sqlmesh/migrations/v0041_remove_hash_raw_query_attribute.py +0 -59
- sqlmesh/migrations/v0042_trim_indirect_versions.py +0 -66
- sqlmesh/migrations/v0043_fix_remove_obsolete_attributes_in_plan_dags.py +0 -61
- sqlmesh/migrations/v0044_quote_identifiers_in_model_attributes.py +0 -5
- sqlmesh/migrations/v0045_move_gateway_variable.py +0 -70
- sqlmesh/migrations/v0046_add_batch_concurrency.py +0 -8
- sqlmesh/migrations/v0047_change_scd_string_to_column.py +0 -5
- sqlmesh/migrations/v0048_drop_indirect_versions.py +0 -59
- sqlmesh/migrations/v0049_replace_identifier_with_version_in_seeds_table.py +0 -57
- sqlmesh/migrations/v0050_drop_seeds_table.py +0 -11
- sqlmesh/migrations/v0051_rename_column_descriptions.py +0 -65
- sqlmesh/migrations/v0052_add_normalize_name_in_environment_naming_info.py +0 -28
- sqlmesh/migrations/v0053_custom_model_kind_extra_attributes.py +0 -5
- sqlmesh/migrations/v0054_fix_trailing_comments.py +0 -5
- sqlmesh/migrations/v0055_add_updated_ts_unpaused_ts_ttl_ms_unrestorable_to_snapshot.py +0 -132
- sqlmesh/migrations/v0056_restore_table_indexes.py +0 -118
- sqlmesh/migrations/v0057_add_table_format.py +0 -5
- sqlmesh/migrations/v0058_add_requirements.py +0 -26
- sqlmesh/migrations/v0059_add_physical_version.py +0 -5
- sqlmesh-0.213.1.dev1.dist-info/RECORD +0 -421
- web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
- web/client/dist/assets/context-DgX0fp2E.js +0 -68
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/WHEEL +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/entry_points.txt +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/licenses/LICENSE +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/top_level.txt +0 -0

@@ -35,9 +35,11 @@ SQLMESH_BLUEPRINT_VARS = "__sqlmesh__blueprint__vars__"
 METADATA_HASH_EXPRESSIONS = {"on_virtual_update", "audits", "signals", "audit_definitions"}
 
 
-def
-
-
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
     snapshots_table = "_snapshots"
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"

@@ -1,5 +1,9 @@
 """Add virtual_environment_mode to the model definition."""
 
 
-def
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
     pass

@@ -7,11 +7,7 @@ from sqlglot import exp
 from sqlmesh.utils.migration import index_text_type, blob_text_type
 
 
-def
-    import pandas as pd
-
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
     snapshots_table = "_snapshots"
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"
@@ -28,6 +24,14 @@ def migrate(state_sync, **kwargs): # type: ignore
     )
     engine_adapter.execute(alter_table_exp)
 
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
+    import pandas as pd
+
+    snapshots_table = "_snapshots"
+    if schema:
+        snapshots_table = f"{schema}.{snapshots_table}"
+
     new_snapshots = []
 
     for (

@@ -1,5 +1,9 @@
 """Add on_additive_change to incremental model metadata hash."""
 
 
-def
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
     pass

@@ -17,9 +17,11 @@ from sqlmesh.core.console import get_console
 SQLMESH_DBT_PACKAGE = "sqlmesh.dbt"
 
 
-def
-
-
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
     snapshots_table = "_snapshots"
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"

@@ -1,5 +1,9 @@
 """Use the raw SQL when computing the model fingerprint."""
 
 
-def
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
     pass

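Every hunk above applies the same mechanical change: the old single migrate(state_sync, **kwargs) entry point is split into a migrate_schemas/migrate_rows pair that receives the engine adapter and state schema directly. A minimal sketch of the resulting module shape (the table name and bodies are illustrative, not taken from any specific migration):

# Sketch of the new two-phase migration interface used throughout this release.
def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
    """DDL phase: alter state tables; no row data is touched here."""
    snapshots_table = "_snapshots"
    if schema:
        snapshots_table = f"{schema}.{snapshots_table}"
    # e.g. engine_adapter.execute(<some ALTER TABLE expression>)


def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
    """Data phase: read, transform, and rewrite existing state rows."""
    pass
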
@@ -0,0 +1,123 @@
+"""Add dev_version and fingerprint columns to the snapshots table."""
+
+import json
+
+from sqlglot import exp
+
+from sqlmesh.utils.migration import index_text_type, blob_text_type
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    snapshots_table = "_snapshots"
+    if schema:
+        snapshots_table = f"{schema}.{snapshots_table}"
+
+    index_type = index_text_type(engine_adapter.dialect)
+    blob_type = blob_text_type(engine_adapter.dialect)
+
+    add_dev_version_exp = exp.Alter(
+        this=exp.to_table(snapshots_table),
+        kind="TABLE",
+        actions=[
+            exp.ColumnDef(
+                this=exp.to_column("dev_version"),
+                kind=exp.DataType.build(index_type),
+            )
+        ],
+    )
+    engine_adapter.execute(add_dev_version_exp)
+
+    add_fingerprint_exp = exp.Alter(
+        this=exp.to_table(snapshots_table),
+        kind="TABLE",
+        actions=[
+            exp.ColumnDef(
+                this=exp.to_column("fingerprint"),
+                kind=exp.DataType.build(blob_type),
+            )
+        ],
+    )
+    engine_adapter.execute(add_fingerprint_exp)
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
+    import pandas as pd
+
+    snapshots_table = "_snapshots"
+    if schema:
+        snapshots_table = f"{schema}.{snapshots_table}"
+
+    index_type = index_text_type(engine_adapter.dialect)
+    blob_type = blob_text_type(engine_adapter.dialect)
+
+    new_snapshots = []
+
+    for (
+        name,
+        identifier,
+        version,
+        snapshot,
+        kind_name,
+        updated_ts,
+        unpaused_ts,
+        ttl_ms,
+        unrestorable,
+        forward_only,
+        _,
+        _,
+    ) in engine_adapter.fetchall(
+        exp.select(
+            "name",
+            "identifier",
+            "version",
+            "snapshot",
+            "kind_name",
+            "updated_ts",
+            "unpaused_ts",
+            "ttl_ms",
+            "unrestorable",
+            "forward_only",
+            "dev_version",
+            "fingerprint",
+        ).from_(snapshots_table),
+        quote_identifiers=True,
+    ):
+        parsed_snapshot = json.loads(snapshot)
+        new_snapshots.append(
+            {
+                "name": name,
+                "identifier": identifier,
+                "version": version,
+                "snapshot": snapshot,
+                "kind_name": kind_name,
+                "updated_ts": updated_ts,
+                "unpaused_ts": unpaused_ts,
+                "ttl_ms": ttl_ms,
+                "unrestorable": unrestorable,
+                "forward_only": forward_only,
+                "dev_version": parsed_snapshot.get("dev_version"),
+                "fingerprint": json.dumps(parsed_snapshot.get("fingerprint")),
+            }
+        )
+
+    if new_snapshots:
+        engine_adapter.delete_from(snapshots_table, "TRUE")
+
+        engine_adapter.insert_append(
+            snapshots_table,
+            pd.DataFrame(new_snapshots),
+            target_columns_to_types={
+                "name": exp.DataType.build(index_type),
+                "identifier": exp.DataType.build(index_type),
+                "version": exp.DataType.build(index_type),
+                "snapshot": exp.DataType.build(blob_type),
+                "kind_name": exp.DataType.build(index_type),
+                "updated_ts": exp.DataType.build("bigint"),
+                "unpaused_ts": exp.DataType.build("bigint"),
+                "ttl_ms": exp.DataType.build("bigint"),
+                "unrestorable": exp.DataType.build("boolean"),
+                "forward_only": exp.DataType.build("boolean"),
+                "dev_version": exp.DataType.build(index_type),
+                "fingerprint": exp.DataType.build(blob_type),
+            },
+        )

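For readers less familiar with sqlglot, the exp.Alter expressions built in migrate_schemas above are programmatic DDL. A small standalone sketch (illustrative only, hard-coding a TEXT type instead of the dialect-aware index_text_type/blob_text_type helpers) shows roughly what they render to:

from sqlglot import exp

# Build the same kind of ALTER expression as the migration above,
# with a hard-coded TEXT type instead of the dialect-aware helpers.
alter_exp = exp.Alter(
    this=exp.to_table("_snapshots"),
    kind="TABLE",
    actions=[
        exp.ColumnDef(
            this=exp.to_column("dev_version"),
            kind=exp.DataType.build("TEXT"),
        )
    ],
)

# Should print something like: ALTER TABLE _snapshots ADD COLUMN dev_version TEXT
print(alter_exp.sql(dialect="duckdb"))
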
@@ -0,0 +1,49 @@
+"""
+Warns dbt users about potential diffs due to inclusion of {{ config(...) }} blocks in model SQL.
+
+Prior to this fix, SQLMesh wasn't including the {{ config(...) }} block in the model's SQL payload
+when processing dbt models. Now these config blocks are properly included in the raw SQL, which
+may cause diffs to appear for existing dbt models even though the actual SQL logic hasn't changed.
+
+This is a one-time diff that will appear after upgrading, and applying a plan will resolve it.
+"""
+
+import json
+
+from sqlglot import exp
+
+from sqlmesh.core.console import get_console
+
+SQLMESH_DBT_PACKAGE = "sqlmesh.dbt"
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
+    snapshots_table = "_snapshots"
+    if schema:
+        snapshots_table = f"{schema}.{snapshots_table}"
+
+    warning = (
+        "SQLMesh detected that it may not be able to fully migrate the state database. This should not impact "
+        "the migration process, but may result in unexpected changes being reported by the next `sqlmesh plan` "
+        "command. Please run `sqlmesh diff prod` after the migration has completed, before making any new "
+        "changes. If any unexpected changes are reported, consider running a forward-only plan to apply these "
+        "changes and avoid unnecessary backfills: sqlmesh plan prod --forward-only. "
+        "See https://sqlmesh.readthedocs.io/en/stable/concepts/plans/#forward-only-plans for more details.\n"
+    )
+
+    for (snapshot,) in engine_adapter.fetchall(
+        exp.select("snapshot").from_(snapshots_table), quote_identifiers=True
+    ):
+        parsed_snapshot = json.loads(snapshot)
+        node = parsed_snapshot["node"]
+
+        jinja_macros = node.get("jinja_macros") or {}
+        create_builtins_module = jinja_macros.get("create_builtins_module") or ""
+
+        if create_builtins_module == SQLMESH_DBT_PACKAGE:
+            get_console().log_warning(warning)
+            return

@@ -0,0 +1,13 @@
+"""Remove the obsolete _plan_dags table."""
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    plan_dags_table = "_plan_dags"
+    if schema:
+        plan_dags_table = f"{schema}.{plan_dags_table}"
+
+    engine_adapter.drop_table(plan_dags_table)
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
+    pass

@@ -1,23 +1,26 @@
-"""
+"""Replace 'dbt_name' with 'dbt_node_info' in the snapshot definition"""
 
 import json
-
 from sqlglot import exp
+from sqlmesh.utils.migration import index_text_type, blob_text_type
+
 
-
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    pass
 
 
-def
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
     import pandas as pd
 
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
     snapshots_table = "_snapshots"
-    index_type = index_text_type(engine_adapter.dialect)
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"
 
+    index_type = index_text_type(engine_adapter.dialect)
+    blob_type = blob_text_type(engine_adapter.dialect)
+
     new_snapshots = []
+    migration_needed = False
 
     for (
         name,
@@ -29,6 +32,9 @@ def migrate(state_sync, **kwargs): # type: ignore
         unpaused_ts,
         ttl_ms,
         unrestorable,
+        forward_only,
+        dev_version,
+        fingerprint,
     ) in engine_adapter.fetchall(
         exp.select(
             "name",
@@ -40,17 +46,22 @@ def migrate(state_sync, **kwargs): # type: ignore
             "unpaused_ts",
             "ttl_ms",
             "unrestorable",
+            "forward_only",
+            "dev_version",
+            "fingerprint",
         ).from_(snapshots_table),
         quote_identifiers=True,
     ):
         parsed_snapshot = json.loads(snapshot)
-
-
-
-
-
-
-
+        if dbt_name := parsed_snapshot["node"].get("dbt_name"):
+            parsed_snapshot["node"].pop("dbt_name")
+            parsed_snapshot["node"]["dbt_node_info"] = {
+                "unique_id": dbt_name,
+                # these will get populated as metadata-only changes on the next plan
+                "name": "",
+                "fqn": "",
+            }
+            migration_needed = True
 
         new_snapshots.append(
             {
@@ -63,10 +74,13 @@ def migrate(state_sync, **kwargs): # type: ignore
                 "unpaused_ts": unpaused_ts,
                 "ttl_ms": ttl_ms,
                 "unrestorable": unrestorable,
+                "forward_only": forward_only,
+                "dev_version": dev_version,
+                "fingerprint": fingerprint,
             }
         )
 
-    if new_snapshots:
+    if migration_needed and new_snapshots:
         engine_adapter.delete_from(snapshots_table, "TRUE")
 
         engine_adapter.insert_append(
@@ -76,11 +90,14 @@ def migrate(state_sync, **kwargs): # type: ignore
                 "name": exp.DataType.build(index_type),
                 "identifier": exp.DataType.build(index_type),
                 "version": exp.DataType.build(index_type),
-                "snapshot": exp.DataType.build(
+                "snapshot": exp.DataType.build(blob_type),
                 "kind_name": exp.DataType.build(index_type),
                 "updated_ts": exp.DataType.build("bigint"),
                 "unpaused_ts": exp.DataType.build("bigint"),
                 "ttl_ms": exp.DataType.build("bigint"),
                 "unrestorable": exp.DataType.build("boolean"),
+                "forward_only": exp.DataType.build("boolean"),
+                "dev_version": exp.DataType.build(index_type),
+                "fingerprint": exp.DataType.build(blob_type),
             },
         )

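The row rewrite above is a pure JSON transformation of the stored snapshot payload: any node that still carries the old dbt_name field gets it replaced by a dbt_node_info object. A self-contained illustration (the payload shape is simplified; real snapshot payloads contain many more fields):

import json

# Simplified snapshot payload for illustration only.
snapshot = json.dumps({"node": {"dbt_name": "model.my_project.orders"}})

parsed_snapshot = json.loads(snapshot)
if dbt_name := parsed_snapshot["node"].get("dbt_name"):
    parsed_snapshot["node"].pop("dbt_name")
    parsed_snapshot["node"]["dbt_node_info"] = {
        "unique_id": dbt_name,
        # name and fqn get populated as metadata-only changes on the next plan
        "name": "",
        "fqn": "",
    }

print(parsed_snapshot)
# {'node': {'dbt_node_info': {'unique_id': 'model.my_project.orders', 'name': '', 'fqn': ''}}}
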
@@ -0,0 +1,25 @@
+"""Add dev version to the intervals table."""
+
+from sqlglot import exp
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs): # type: ignore
+    intervals_table = "_intervals"
+    if schema:
+        intervals_table = f"{schema}.{intervals_table}"
+
+    alter_table_exp = exp.Alter(
+        this=exp.to_table(intervals_table),
+        kind="TABLE",
+        actions=[
+            exp.ColumnDef(
+                this=exp.to_column("last_altered_ts"),
+                kind=exp.DataType.build("BIGINT", dialect=engine_adapter.dialect),
+            )
+        ],
+    )
+    engine_adapter.execute(alter_table_exp)
+
+
+def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore
+    pass

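Note that none of the new-style migration modules receive a state_sync object any more; each exposes the same two hooks. Purely as a hypothetical illustration of how such modules could be driven (this is not the actual logic in sqlmesh/core/state_sync/db/migrator.py, which this diff also changes):

import importlib
import typing as t


def apply_migrations(engine_adapter: t.Any, schema: t.Optional[str], module_names: t.List[str]) -> None:
    """Hypothetical driver: each migration gets a DDL pass followed by a data pass."""
    for module_name in module_names:
        module = importlib.import_module(f"sqlmesh.migrations.{module_name}")
        # Schema changes run first so the row rewrite can rely on the new columns
        # (e.g. v0094 selects dev_version/fingerprint right after adding them).
        module.migrate_schemas(engine_adapter, schema)
        module.migrate_rows(engine_adapter, schema)
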
sqlmesh/utils/__init__.py
CHANGED
@@ -21,6 +21,7 @@ from enum import IntEnum, Enum
 from functools import lru_cache, reduce, wraps
 from pathlib import Path
 
+import unicodedata
 from sqlglot import exp
 from sqlglot.dialects.dialect import Dialects
 
@@ -291,8 +292,14 @@ def sqlglot_dialects() -> str:
 
 NON_ALNUM = re.compile(r"[^a-zA-Z0-9_]")
 
+NON_ALUM_INCLUDE_UNICODE = re.compile(r"\W", flags=re.UNICODE)
 
-
+
+def sanitize_name(name: str, *, include_unicode: bool = False) -> str:
+    if include_unicode:
+        s = unicodedata.normalize("NFC", name)
+        s = NON_ALUM_INCLUDE_UNICODE.sub("_", s)
+        return s
     return NON_ALNUM.sub("_", name)
 
 
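The new include_unicode flag switches sanitize_name from the ASCII-only [^a-zA-Z0-9_] pattern to a Unicode-aware \W pattern applied after NFC normalization. A quick illustration of the difference, assuming the behaviour shown in the hunk above:

import re
import unicodedata

NON_ALNUM = re.compile(r"[^a-zA-Z0-9_]")
NON_ALUM_INCLUDE_UNICODE = re.compile(r"\W", flags=re.UNICODE)


def sanitize_name(name: str, *, include_unicode: bool = False) -> str:
    # Mirrors the implementation added in the hunk above.
    if include_unicode:
        s = unicodedata.normalize("NFC", name)
        s = NON_ALUM_INCLUDE_UNICODE.sub("_", s)
        return s
    return NON_ALNUM.sub("_", name)


# ASCII-only mode also replaces non-ASCII word characters:
assert sanitize_name("naïve model.sql") == "na_ve_model_sql"
# Unicode mode keeps them and only replaces separators and punctuation:
assert sanitize_name("naïve model.sql", include_unicode=True) == "naïve_model_sql"
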
sqlmesh/utils/cache.py
CHANGED
@@ -59,6 +59,10 @@ class FileCache(t.Generic[T]):
         threshold = to_datetime("1 week ago").timestamp()
         # delete all old cache files
         for file in self._path.glob("*"):
+            if IS_WINDOWS:
+                # the file.stat() call below will fail on windows if the :file name is longer than 260 chars
+                file = fix_windows_path(file)
+
             if not file.stem.startswith(self._cache_version) or file.stat().st_atime < threshold:
                 file.unlink(missing_ok=True)
 
@@ -133,7 +137,7 @@ class FileCache(t.Generic[T]):
 
     def _cache_entry_path(self, name: str, entry_id: str = "") -> Path:
         entry_file_name = "__".join(p for p in (self._cache_version, name, entry_id) if p)
-        full_path = self._path / sanitize_name(entry_file_name)
+        full_path = self._path / sanitize_name(entry_file_name, include_unicode=True)
         if IS_WINDOWS:
             # handle paths longer than 260 chars
             full_path = fix_windows_path(full_path)

sqlmesh/utils/connection_pool.py
CHANGED
@@ -227,7 +227,8 @@ class ThreadLocalConnectionPool(_ThreadLocalBase):
                 self._thread_connections.pop(thread_id)
                 self._thread_cursors.pop(thread_id, None)
                 self._discard_transaction(thread_id)
-
+
+        self._thread_attributes.clear()
 
 
 class ThreadLocalSharedConnectionPool(_ThreadLocalBase):

sqlmesh/utils/dag.py
CHANGED
@@ -99,6 +99,53 @@ class DAG(t.Generic[T]):
 
         return self._upstream[node]
 
+    def _find_cycle_path(self, nodes_in_cycle: t.Dict[T, t.Set[T]]) -> t.Optional[t.List[T]]:
+        """Find the exact cycle path using DFS when a cycle is detected.
+
+        Args:
+            nodes_in_cycle: Dictionary of nodes that are part of the cycle and their dependencies
+
+        Returns:
+            List of nodes forming the cycle path, or None if no cycle found
+        """
+        if not nodes_in_cycle:
+            return None
+
+        # Use DFS to find a cycle path
+        visited: t.Set[T] = set()
+        path: t.List[T] = []
+
+        def dfs(node: T) -> t.Optional[t.List[T]]:
+            if node in path:
+                # Found a cycle - extract the cycle path
+                cycle_start = path.index(node)
+                return path[cycle_start:] + [node]
+
+            if node in visited:
+                return None
+
+            visited.add(node)
+            path.append(node)
+
+            # Only follow edges to nodes that are still in the unprocessed set
+            for neighbor in nodes_in_cycle.get(node, set()):
+                if neighbor in nodes_in_cycle:
+                    cycle = dfs(neighbor)
+                    if cycle:
+                        return cycle
+
+            path.pop()
+            return None
+
+        # Try starting DFS from each unvisited node
+        for start_node in nodes_in_cycle:
+            if start_node not in visited:
+                cycle = dfs(start_node)
+                if cycle:
+                    return cycle[:-1]  # Remove the duplicate node at the end
+
+        return None
+
     @property
     def roots(self) -> t.Set[T]:
         """Returns all nodes in the graph without any upstream dependencies."""
@@ -125,23 +172,31 @@
             next_nodes = {node for node, deps in unprocessed_nodes.items() if not deps}
 
             if not next_nodes:
-                #
-
-                    "\nPossible candidates to check for circular references: "
-                    + ", ".join(str(node) for node in sorted(cycle_candidates))
-                )
+                # A cycle was detected - find the exact cycle path
+                cycle_path = self._find_cycle_path(unprocessed_nodes)
 
-
-
-
+                last_processed_msg = ""
+                if cycle_path:
+                    node_output = " ->\n".join(
+                        str(node) for node in (cycle_path + [cycle_path[0]])
                     )
+                    cycle_msg = f"\nCycle:\n{node_output}"
                 else:
-
+                    # Fallback message in case a cycle can't be found
+                    cycle_candidates_msg = (
+                        "\nPossible candidates to check for circular references: "
+                        + ", ".join(str(node) for node in sorted(cycle_candidates))
+                    )
+                    cycle_msg = cycle_candidates_msg
+                if last_processed_nodes:
+                    last_processed_msg = "\nLast nodes added to the DAG: " + ", ".join(
+                        str(node) for node in last_processed_nodes
+                    )
 
                 raise SQLMeshError(
                     "Detected a cycle in the DAG. "
                     "Please make sure there are no circular references between nodes."
-                    f"{last_processed_msg}{
+                    f"{last_processed_msg}{cycle_msg}"
                 )
 
             for node in next_nodes:

sqlmesh/utils/date.py
CHANGED
@@ -343,6 +343,13 @@ def make_exclusive(time: TimeLike) -> datetime:
     return dt
 
 
+def make_ts_exclusive(time: TimeLike, dialect: DialectType) -> datetime:
+    ts = to_datetime(time)
+    if dialect == "tsql":
+        return to_utc_timestamp(ts) - pd.Timedelta(1, unit="ns")
+    return ts + timedelta(microseconds=1)
+
+
 def to_utc_timestamp(time: datetime) -> pd.Timestamp:
     import pandas as pd
 
@@ -437,7 +444,7 @@ def to_time_column(
 
 
 def pandas_timestamp_to_pydatetime(
-    df: pd.DataFrame, columns_to_types: t.Optional[t.Dict[str, exp.DataType]]
+    df: pd.DataFrame, columns_to_types: t.Optional[t.Dict[str, exp.DataType]] = None
 ) -> pd.DataFrame:
     import pandas as pd
     from pandas.api.types import is_datetime64_any_dtype  # type: ignore

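The new make_ts_exclusive helper subtracts one nanosecond from the UTC timestamp for tsql and otherwise nudges the datetime forward by one microsecond. A rough usage sketch with to_datetime/to_utc_timestamp inlined so it is self-contained:

from datetime import datetime, timedelta

import pandas as pd


def make_ts_exclusive(time: datetime, dialect: str) -> datetime:
    # Simplified restatement of the helper above.
    if dialect == "tsql":
        return pd.Timestamp(time, tz="utc") - pd.Timedelta(1, unit="ns")
    return time + timedelta(microseconds=1)


print(make_ts_exclusive(datetime(2024, 1, 2), "tsql"))    # 2024-01-01 23:59:59.999999999+00:00
print(make_ts_exclusive(datetime(2024, 1, 2), "duckdb"))  # 2024-01-02 00:00:00.000001
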
sqlmesh/utils/errors.py
CHANGED
@@ -86,6 +86,10 @@ class AuditConfigError(ConfigError):
     pass
 
 
+class StateMigrationError(SQLMeshError):
+    pass
+
+
 class AuditError(SQLMeshError):
     def __init__(
         self,
@@ -147,6 +151,10 @@ class AdditiveChangeError(SQLMeshError):
     pass
 
 
+class MigrationNotSupportedError(SQLMeshError):
+    pass
+
+
 class NotificationTargetError(SQLMeshError):
     pass
 