sqlmesh 0.213.1.dev1__py3-none-any.whl → 0.227.2.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlmesh/__init__.py +12 -2
- sqlmesh/_version.py +2 -2
- sqlmesh/cli/main.py +0 -44
- sqlmesh/cli/project_init.py +11 -2
- sqlmesh/core/_typing.py +1 -0
- sqlmesh/core/audit/definition.py +8 -2
- sqlmesh/core/config/__init__.py +1 -1
- sqlmesh/core/config/connection.py +17 -5
- sqlmesh/core/config/dbt.py +13 -0
- sqlmesh/core/config/janitor.py +12 -0
- sqlmesh/core/config/loader.py +7 -0
- sqlmesh/core/config/model.py +2 -0
- sqlmesh/core/config/root.py +3 -0
- sqlmesh/core/console.py +81 -3
- sqlmesh/core/constants.py +1 -1
- sqlmesh/core/context.py +69 -26
- sqlmesh/core/dialect.py +3 -0
- sqlmesh/core/engine_adapter/_typing.py +2 -0
- sqlmesh/core/engine_adapter/base.py +322 -22
- sqlmesh/core/engine_adapter/base_postgres.py +17 -1
- sqlmesh/core/engine_adapter/bigquery.py +146 -7
- sqlmesh/core/engine_adapter/clickhouse.py +17 -13
- sqlmesh/core/engine_adapter/databricks.py +33 -2
- sqlmesh/core/engine_adapter/fabric.py +10 -29
- sqlmesh/core/engine_adapter/mixins.py +142 -48
- sqlmesh/core/engine_adapter/mssql.py +15 -4
- sqlmesh/core/engine_adapter/mysql.py +2 -2
- sqlmesh/core/engine_adapter/postgres.py +9 -3
- sqlmesh/core/engine_adapter/redshift.py +4 -0
- sqlmesh/core/engine_adapter/risingwave.py +1 -0
- sqlmesh/core/engine_adapter/shared.py +6 -0
- sqlmesh/core/engine_adapter/snowflake.py +82 -11
- sqlmesh/core/engine_adapter/spark.py +14 -10
- sqlmesh/core/engine_adapter/trino.py +4 -2
- sqlmesh/core/environment.py +2 -0
- sqlmesh/core/janitor.py +181 -0
- sqlmesh/core/lineage.py +1 -0
- sqlmesh/core/linter/definition.py +13 -13
- sqlmesh/core/linter/rules/builtin.py +29 -0
- sqlmesh/core/macros.py +35 -13
- sqlmesh/core/model/common.py +2 -0
- sqlmesh/core/model/definition.py +82 -28
- sqlmesh/core/model/kind.py +66 -2
- sqlmesh/core/model/meta.py +108 -4
- sqlmesh/core/node.py +101 -1
- sqlmesh/core/plan/builder.py +18 -10
- sqlmesh/core/plan/common.py +199 -2
- sqlmesh/core/plan/definition.py +25 -6
- sqlmesh/core/plan/evaluator.py +75 -113
- sqlmesh/core/plan/explainer.py +90 -8
- sqlmesh/core/plan/stages.py +42 -21
- sqlmesh/core/renderer.py +78 -32
- sqlmesh/core/scheduler.py +102 -22
- sqlmesh/core/selector.py +137 -9
- sqlmesh/core/signal.py +64 -1
- sqlmesh/core/snapshot/__init__.py +2 -0
- sqlmesh/core/snapshot/definition.py +146 -34
- sqlmesh/core/snapshot/evaluator.py +689 -124
- sqlmesh/core/state_sync/__init__.py +0 -1
- sqlmesh/core/state_sync/base.py +55 -33
- sqlmesh/core/state_sync/cache.py +12 -7
- sqlmesh/core/state_sync/common.py +216 -111
- sqlmesh/core/state_sync/db/environment.py +6 -4
- sqlmesh/core/state_sync/db/facade.py +42 -24
- sqlmesh/core/state_sync/db/interval.py +27 -7
- sqlmesh/core/state_sync/db/migrator.py +34 -16
- sqlmesh/core/state_sync/db/snapshot.py +177 -169
- sqlmesh/core/table_diff.py +2 -2
- sqlmesh/core/test/context.py +2 -0
- sqlmesh/core/test/definition.py +14 -9
- sqlmesh/dbt/adapter.py +22 -16
- sqlmesh/dbt/basemodel.py +75 -56
- sqlmesh/dbt/builtin.py +116 -12
- sqlmesh/dbt/column.py +17 -5
- sqlmesh/dbt/common.py +19 -5
- sqlmesh/dbt/context.py +14 -1
- sqlmesh/dbt/loader.py +61 -9
- sqlmesh/dbt/manifest.py +174 -16
- sqlmesh/dbt/model.py +183 -85
- sqlmesh/dbt/package.py +16 -1
- sqlmesh/dbt/profile.py +3 -3
- sqlmesh/dbt/project.py +12 -7
- sqlmesh/dbt/seed.py +6 -1
- sqlmesh/dbt/source.py +13 -1
- sqlmesh/dbt/target.py +25 -6
- sqlmesh/dbt/test.py +36 -5
- sqlmesh/migrations/v0000_baseline.py +95 -0
- sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +5 -7
- sqlmesh/migrations/v0062_add_model_gateway.py +5 -1
- sqlmesh/migrations/v0063_change_signals.py +5 -3
- sqlmesh/migrations/v0064_join_when_matched_strings.py +5 -3
- sqlmesh/migrations/v0065_add_model_optimize.py +5 -1
- sqlmesh/migrations/v0066_add_auto_restatements.py +8 -3
- sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +5 -1
- sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +5 -1
- sqlmesh/migrations/v0069_update_dev_table_suffix.py +5 -3
- sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +5 -1
- sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +9 -5
- sqlmesh/migrations/v0072_add_environment_statements.py +5 -3
- sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +5 -3
- sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +5 -1
- sqlmesh/migrations/v0075_remove_validate_query.py +5 -3
- sqlmesh/migrations/v0076_add_cron_tz.py +5 -1
- sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +5 -1
- sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +5 -3
- sqlmesh/migrations/v0079_add_gateway_managed_property.py +10 -5
- sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +5 -1
- sqlmesh/migrations/v0081_update_partitioned_by.py +5 -3
- sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +5 -3
- sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +5 -1
- sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +5 -1
- sqlmesh/migrations/v0085_deterministic_repr.py +5 -3
- sqlmesh/migrations/v0086_check_deterministic_bug.py +5 -3
- sqlmesh/migrations/v0087_normalize_blueprint_variables.py +5 -3
- sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +5 -3
- sqlmesh/migrations/v0089_add_virtual_environment_mode.py +5 -1
- sqlmesh/migrations/v0090_add_forward_only_column.py +9 -5
- sqlmesh/migrations/v0091_on_additive_change.py +5 -1
- sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +5 -3
- sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +5 -1
- sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +123 -0
- sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +49 -0
- sqlmesh/migrations/v0096_remove_plan_dags_table.py +13 -0
- sqlmesh/migrations/v0097_add_dbt_name_in_node.py +9 -0
- sqlmesh/migrations/{v0060_move_audits_to_model.py → v0098_add_dbt_node_info_in_node.py} +33 -16
- sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
- sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
- sqlmesh/utils/__init__.py +8 -1
- sqlmesh/utils/cache.py +5 -1
- sqlmesh/utils/connection_pool.py +2 -1
- sqlmesh/utils/dag.py +65 -10
- sqlmesh/utils/date.py +8 -1
- sqlmesh/utils/errors.py +8 -0
- sqlmesh/utils/jinja.py +54 -4
- sqlmesh/utils/pydantic.py +6 -6
- sqlmesh/utils/windows.py +13 -3
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/METADATA +7 -10
- sqlmesh-0.227.2.dev4.dist-info/RECORD +370 -0
- sqlmesh_dbt/cli.py +70 -7
- sqlmesh_dbt/console.py +14 -6
- sqlmesh_dbt/operations.py +103 -24
- sqlmesh_dbt/selectors.py +39 -1
- web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
- web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
- web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
- web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
- web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
- web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
- web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
- web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
- web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
- web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
- web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
- web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
- web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
- web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
- web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
- web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
- web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
- web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
- web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
- web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
- web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
- web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
- web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
- web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
- web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
- web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
- web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
- web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
- web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
- web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
- web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
- web/client/dist/assets/context-BctCsyGb.js +71 -0
- web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
- web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
- web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
- web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
- web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
- web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
- web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
- web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
- web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
- web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
- web/client/dist/index.html +1 -1
- sqlmesh/integrations/llm.py +0 -56
- sqlmesh/migrations/v0001_init.py +0 -60
- sqlmesh/migrations/v0002_remove_identify.py +0 -5
- sqlmesh/migrations/v0003_move_batch_size.py +0 -34
- sqlmesh/migrations/v0004_environmnent_add_finalized_at.py +0 -23
- sqlmesh/migrations/v0005_create_seed_table.py +0 -24
- sqlmesh/migrations/v0006_change_seed_hash.py +0 -5
- sqlmesh/migrations/v0007_env_table_info_to_kind.py +0 -99
- sqlmesh/migrations/v0008_create_intervals_table.py +0 -38
- sqlmesh/migrations/v0009_remove_pre_post_hooks.py +0 -62
- sqlmesh/migrations/v0010_seed_hash_batch_size.py +0 -5
- sqlmesh/migrations/v0011_add_model_kind_name.py +0 -63
- sqlmesh/migrations/v0012_update_jinja_expressions.py +0 -86
- sqlmesh/migrations/v0013_serde_using_model_dialects.py +0 -87
- sqlmesh/migrations/v0014_fix_dev_intervals.py +0 -14
- sqlmesh/migrations/v0015_environment_add_promoted_snapshot_ids.py +0 -26
- sqlmesh/migrations/v0016_fix_windows_path.py +0 -59
- sqlmesh/migrations/v0017_fix_windows_seed_path.py +0 -55
- sqlmesh/migrations/v0018_rename_snapshot_model_to_node.py +0 -53
- sqlmesh/migrations/v0019_add_env_suffix_target.py +0 -28
- sqlmesh/migrations/v0020_remove_redundant_attributes_from_dbt_models.py +0 -80
- sqlmesh/migrations/v0021_fix_table_properties.py +0 -62
- sqlmesh/migrations/v0022_move_project_to_model.py +0 -54
- sqlmesh/migrations/v0023_fix_added_models_with_forward_only_parents.py +0 -65
- sqlmesh/migrations/v0024_replace_model_kind_name_enum_with_value.py +0 -55
- sqlmesh/migrations/v0025_fix_intervals_and_missing_change_category.py +0 -117
- sqlmesh/migrations/v0026_remove_dialect_from_seed.py +0 -55
- sqlmesh/migrations/v0027_minute_interval_to_five.py +0 -57
- sqlmesh/migrations/v0028_add_plan_dags_table.py +0 -29
- sqlmesh/migrations/v0029_generate_schema_types_using_dialect.py +0 -69
- sqlmesh/migrations/v0030_update_unrestorable_snapshots.py +0 -65
- sqlmesh/migrations/v0031_remove_dbt_target_fields.py +0 -65
- sqlmesh/migrations/v0032_add_sqlmesh_version.py +0 -25
- sqlmesh/migrations/v0033_mysql_fix_blob_text_type.py +0 -45
- sqlmesh/migrations/v0034_add_default_catalog.py +0 -367
- sqlmesh/migrations/v0035_add_catalog_name_override.py +0 -22
- sqlmesh/migrations/v0036_delete_plan_dags_bug_fix.py +0 -14
- sqlmesh/migrations/v0037_remove_dbt_is_incremental_macro.py +0 -61
- sqlmesh/migrations/v0038_add_expiration_ts_to_snapshot.py +0 -73
- sqlmesh/migrations/v0039_include_environment_in_plan_dag_spec.py +0 -68
- sqlmesh/migrations/v0040_add_previous_finalized_snapshots.py +0 -26
- sqlmesh/migrations/v0041_remove_hash_raw_query_attribute.py +0 -59
- sqlmesh/migrations/v0042_trim_indirect_versions.py +0 -66
- sqlmesh/migrations/v0043_fix_remove_obsolete_attributes_in_plan_dags.py +0 -61
- sqlmesh/migrations/v0044_quote_identifiers_in_model_attributes.py +0 -5
- sqlmesh/migrations/v0045_move_gateway_variable.py +0 -70
- sqlmesh/migrations/v0046_add_batch_concurrency.py +0 -8
- sqlmesh/migrations/v0047_change_scd_string_to_column.py +0 -5
- sqlmesh/migrations/v0048_drop_indirect_versions.py +0 -59
- sqlmesh/migrations/v0049_replace_identifier_with_version_in_seeds_table.py +0 -57
- sqlmesh/migrations/v0050_drop_seeds_table.py +0 -11
- sqlmesh/migrations/v0051_rename_column_descriptions.py +0 -65
- sqlmesh/migrations/v0052_add_normalize_name_in_environment_naming_info.py +0 -28
- sqlmesh/migrations/v0053_custom_model_kind_extra_attributes.py +0 -5
- sqlmesh/migrations/v0054_fix_trailing_comments.py +0 -5
- sqlmesh/migrations/v0055_add_updated_ts_unpaused_ts_ttl_ms_unrestorable_to_snapshot.py +0 -132
- sqlmesh/migrations/v0056_restore_table_indexes.py +0 -118
- sqlmesh/migrations/v0057_add_table_format.py +0 -5
- sqlmesh/migrations/v0058_add_requirements.py +0 -26
- sqlmesh/migrations/v0059_add_physical_version.py +0 -5
- sqlmesh-0.213.1.dev1.dist-info/RECORD +0 -421
- web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
- web/client/dist/assets/context-DgX0fp2E.js +0 -68
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/WHEEL +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/entry_points.txt +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/licenses/LICENSE +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/top_level.txt +0 -0
|
@@ -1,65 +0,0 @@
|
|
|
1
|
-
"""Fix snapshots of added models with forward only parents."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
import typing as t
|
|
5
|
-
|
|
6
|
-
from sqlglot import exp
|
|
7
|
-
|
|
8
|
-
from sqlmesh.utils.dag import DAG
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
def migrate(state_sync: t.Any, **kwargs) -> None: # type: ignore
|
|
12
|
-
engine_adapter = state_sync.engine_adapter
|
|
13
|
-
schema = state_sync.schema
|
|
14
|
-
snapshots_table = "_snapshots"
|
|
15
|
-
environments_table = "_environments"
|
|
16
|
-
if schema:
|
|
17
|
-
snapshots_table = f"{schema}.{snapshots_table}"
|
|
18
|
-
environments_table = f"{schema}.{environments_table}"
|
|
19
|
-
|
|
20
|
-
dag: DAG[t.Tuple[str, str]] = DAG()
|
|
21
|
-
snapshot_mapping: t.Dict[t.Tuple[str, str], t.Dict[str, t.Any]] = {}
|
|
22
|
-
|
|
23
|
-
for identifier, snapshot in engine_adapter.fetchall(
|
|
24
|
-
exp.select("identifier", "snapshot").from_(snapshots_table),
|
|
25
|
-
quote_identifiers=True,
|
|
26
|
-
):
|
|
27
|
-
parsed_snapshot = json.loads(snapshot)
|
|
28
|
-
|
|
29
|
-
snapshot_id = (parsed_snapshot["name"], identifier)
|
|
30
|
-
snapshot_mapping[snapshot_id] = parsed_snapshot
|
|
31
|
-
|
|
32
|
-
parent_ids = [
|
|
33
|
-
(parent["name"], parent["identifier"]) for parent in parsed_snapshot["parents"]
|
|
34
|
-
]
|
|
35
|
-
dag.add(snapshot_id, parent_ids)
|
|
36
|
-
|
|
37
|
-
snapshots_to_delete = set()
|
|
38
|
-
|
|
39
|
-
for snapshot_id in dag:
|
|
40
|
-
if snapshot_id not in snapshot_mapping:
|
|
41
|
-
continue
|
|
42
|
-
parsed_snapshot = snapshot_mapping[snapshot_id]
|
|
43
|
-
is_breaking = parsed_snapshot.get("change_category") == 1
|
|
44
|
-
has_previous_versions = bool(parsed_snapshot.get("previous_versions", []))
|
|
45
|
-
|
|
46
|
-
has_paused_forward_only_parent = False
|
|
47
|
-
if is_breaking and not has_previous_versions:
|
|
48
|
-
for upstream_id in dag.upstream(snapshot_id):
|
|
49
|
-
if upstream_id not in snapshot_mapping:
|
|
50
|
-
continue
|
|
51
|
-
upstream_snapshot = snapshot_mapping[upstream_id]
|
|
52
|
-
upstream_change_category = upstream_snapshot.get("change_category")
|
|
53
|
-
is_forward_only_upstream = upstream_change_category == 3
|
|
54
|
-
if is_forward_only_upstream and not upstream_snapshot.get("unpaused_ts"):
|
|
55
|
-
has_paused_forward_only_parent = True
|
|
56
|
-
break
|
|
57
|
-
|
|
58
|
-
if has_paused_forward_only_parent:
|
|
59
|
-
snapshots_to_delete.add(snapshot_id)
|
|
60
|
-
|
|
61
|
-
if snapshots_to_delete:
|
|
62
|
-
where = t.cast(exp.Tuple, exp.convert((exp.column("name"), exp.column("identifier")))).isin(
|
|
63
|
-
*snapshots_to_delete
|
|
64
|
-
)
|
|
65
|
-
engine_adapter.delete_from(snapshots_table, where)
|
|
@@ -1,55 +0,0 @@
|
|
|
1
|
-
"""Replace snapshot model_kind_name enum with value."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
|
|
5
|
-
from sqlglot import exp
|
|
6
|
-
|
|
7
|
-
from sqlmesh.utils.migration import index_text_type
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def migrate(state_sync, **kwargs): # type: ignore
|
|
11
|
-
import pandas as pd
|
|
12
|
-
|
|
13
|
-
engine_adapter = state_sync.engine_adapter
|
|
14
|
-
schema = state_sync.schema
|
|
15
|
-
snapshots_table = "_snapshots"
|
|
16
|
-
if schema:
|
|
17
|
-
snapshots_table = f"{schema}.{snapshots_table}"
|
|
18
|
-
|
|
19
|
-
new_snapshots = []
|
|
20
|
-
|
|
21
|
-
for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
|
|
22
|
-
exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
|
|
23
|
-
quote_identifiers=True,
|
|
24
|
-
):
|
|
25
|
-
corrected_kind_name = None
|
|
26
|
-
parsed_snapshot = json.loads(snapshot)
|
|
27
|
-
if "kind" in parsed_snapshot["node"]:
|
|
28
|
-
corrected_kind_name = parsed_snapshot["node"]["kind"].get("name")
|
|
29
|
-
|
|
30
|
-
new_snapshots.append(
|
|
31
|
-
{
|
|
32
|
-
"name": name,
|
|
33
|
-
"identifier": identifier,
|
|
34
|
-
"version": version,
|
|
35
|
-
"snapshot": snapshot,
|
|
36
|
-
"kind_name": corrected_kind_name,
|
|
37
|
-
}
|
|
38
|
-
)
|
|
39
|
-
|
|
40
|
-
if new_snapshots:
|
|
41
|
-
engine_adapter.delete_from(snapshots_table, "TRUE")
|
|
42
|
-
|
|
43
|
-
index_type = index_text_type(engine_adapter.dialect)
|
|
44
|
-
|
|
45
|
-
engine_adapter.insert_append(
|
|
46
|
-
snapshots_table,
|
|
47
|
-
pd.DataFrame(new_snapshots),
|
|
48
|
-
target_columns_to_types={
|
|
49
|
-
"name": exp.DataType.build(index_type),
|
|
50
|
-
"identifier": exp.DataType.build(index_type),
|
|
51
|
-
"version": exp.DataType.build(index_type),
|
|
52
|
-
"snapshot": exp.DataType.build("text"),
|
|
53
|
-
"kind_name": exp.DataType.build(index_type),
|
|
54
|
-
},
|
|
55
|
-
)
|
|
@@ -1,117 +0,0 @@
|
|
|
1
|
-
"""Normalize intervals and fix missing change category."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
import zlib
|
|
5
|
-
|
|
6
|
-
from sqlglot import exp
|
|
7
|
-
|
|
8
|
-
from sqlmesh.utils import random_id
|
|
9
|
-
from sqlmesh.utils.date import now_timestamp
|
|
10
|
-
from sqlmesh.utils.migration import index_text_type
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
def migrate(state_sync, **kwargs): # type: ignore
|
|
14
|
-
import pandas as pd
|
|
15
|
-
|
|
16
|
-
engine_adapter = state_sync.engine_adapter
|
|
17
|
-
schema = state_sync.schema
|
|
18
|
-
snapshots_table = "_snapshots"
|
|
19
|
-
intervals_table = "_intervals"
|
|
20
|
-
if schema:
|
|
21
|
-
snapshots_table = f"{schema}.{snapshots_table}"
|
|
22
|
-
intervals_table = f"{schema}.{intervals_table}"
|
|
23
|
-
|
|
24
|
-
migration_required = False
|
|
25
|
-
new_snapshots = []
|
|
26
|
-
new_intervals = []
|
|
27
|
-
|
|
28
|
-
for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
|
|
29
|
-
exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
|
|
30
|
-
quote_identifiers=True,
|
|
31
|
-
):
|
|
32
|
-
parsed_snapshot = json.loads(snapshot)
|
|
33
|
-
|
|
34
|
-
if not parsed_snapshot.get("change_category"):
|
|
35
|
-
fingerprint = parsed_snapshot.get("fingerprint")
|
|
36
|
-
version = _hash(
|
|
37
|
-
[
|
|
38
|
-
fingerprint["data_hash"],
|
|
39
|
-
fingerprint["parent_data_hash"],
|
|
40
|
-
]
|
|
41
|
-
)
|
|
42
|
-
parsed_snapshot["change_category"] = (
|
|
43
|
-
4 if version == parsed_snapshot.get("version") else 5
|
|
44
|
-
)
|
|
45
|
-
migration_required = True
|
|
46
|
-
|
|
47
|
-
def _add_interval(start_ts: int, end_ts: int, is_dev: bool) -> None:
|
|
48
|
-
new_intervals.append(
|
|
49
|
-
{
|
|
50
|
-
"id": random_id(),
|
|
51
|
-
"created_ts": now_timestamp(),
|
|
52
|
-
"name": name,
|
|
53
|
-
"identifier": identifier,
|
|
54
|
-
"version": version,
|
|
55
|
-
"start_ts": start_ts,
|
|
56
|
-
"end_ts": end_ts,
|
|
57
|
-
"is_dev": is_dev,
|
|
58
|
-
"is_removed": False,
|
|
59
|
-
"is_compacted": True,
|
|
60
|
-
}
|
|
61
|
-
)
|
|
62
|
-
|
|
63
|
-
for interval in parsed_snapshot.pop("intervals", []):
|
|
64
|
-
_add_interval(interval[0], interval[1], False)
|
|
65
|
-
migration_required = True
|
|
66
|
-
|
|
67
|
-
for interval in parsed_snapshot.pop("dev_intervals", []):
|
|
68
|
-
_add_interval(interval[0], interval[1], True)
|
|
69
|
-
migration_required = True
|
|
70
|
-
|
|
71
|
-
new_snapshots.append(
|
|
72
|
-
{
|
|
73
|
-
"name": name,
|
|
74
|
-
"identifier": identifier,
|
|
75
|
-
"version": version,
|
|
76
|
-
"snapshot": json.dumps(parsed_snapshot),
|
|
77
|
-
"kind_name": kind_name,
|
|
78
|
-
}
|
|
79
|
-
)
|
|
80
|
-
|
|
81
|
-
if migration_required:
|
|
82
|
-
index_type = index_text_type(engine_adapter.dialect)
|
|
83
|
-
|
|
84
|
-
engine_adapter.delete_from(snapshots_table, "TRUE")
|
|
85
|
-
engine_adapter.insert_append(
|
|
86
|
-
snapshots_table,
|
|
87
|
-
pd.DataFrame(new_snapshots),
|
|
88
|
-
target_columns_to_types={
|
|
89
|
-
"name": exp.DataType.build(index_type),
|
|
90
|
-
"identifier": exp.DataType.build(index_type),
|
|
91
|
-
"version": exp.DataType.build(index_type),
|
|
92
|
-
"snapshot": exp.DataType.build("text"),
|
|
93
|
-
"kind_name": exp.DataType.build(index_type),
|
|
94
|
-
},
|
|
95
|
-
)
|
|
96
|
-
|
|
97
|
-
if new_intervals:
|
|
98
|
-
engine_adapter.insert_append(
|
|
99
|
-
intervals_table,
|
|
100
|
-
pd.DataFrame(new_intervals),
|
|
101
|
-
target_columns_to_types={
|
|
102
|
-
"id": exp.DataType.build(index_type),
|
|
103
|
-
"created_ts": exp.DataType.build("bigint"),
|
|
104
|
-
"name": exp.DataType.build(index_type),
|
|
105
|
-
"identifier": exp.DataType.build(index_type),
|
|
106
|
-
"version": exp.DataType.build(index_type),
|
|
107
|
-
"start_ts": exp.DataType.build("bigint"),
|
|
108
|
-
"end_ts": exp.DataType.build("bigint"),
|
|
109
|
-
"is_dev": exp.DataType.build("boolean"),
|
|
110
|
-
"is_removed": exp.DataType.build("boolean"),
|
|
111
|
-
"is_compacted": exp.DataType.build("boolean"),
|
|
112
|
-
},
|
|
113
|
-
)
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
def _hash(data): # type: ignore
|
|
117
|
-
return str(zlib.crc32(";".join("" if d is None else d for d in data).encode("utf-8")))
|
|
@@ -1,55 +0,0 @@
|
|
|
1
|
-
"""Remove dialect from seeds."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
|
|
5
|
-
from sqlglot import exp
|
|
6
|
-
|
|
7
|
-
from sqlmesh.utils.migration import index_text_type
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def migrate(state_sync, **kwargs): # type: ignore
|
|
11
|
-
import pandas as pd
|
|
12
|
-
|
|
13
|
-
engine_adapter = state_sync.engine_adapter
|
|
14
|
-
schema = state_sync.schema
|
|
15
|
-
snapshots_table = "_snapshots"
|
|
16
|
-
if schema:
|
|
17
|
-
snapshots_table = f"{schema}.{snapshots_table}"
|
|
18
|
-
|
|
19
|
-
new_snapshots = []
|
|
20
|
-
|
|
21
|
-
for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
|
|
22
|
-
exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
|
|
23
|
-
quote_identifiers=True,
|
|
24
|
-
):
|
|
25
|
-
parsed_snapshot = json.loads(snapshot)
|
|
26
|
-
node = parsed_snapshot["node"]
|
|
27
|
-
if "seed" in node:
|
|
28
|
-
node["seed"].pop("dialect", None)
|
|
29
|
-
|
|
30
|
-
new_snapshots.append(
|
|
31
|
-
{
|
|
32
|
-
"name": name,
|
|
33
|
-
"identifier": identifier,
|
|
34
|
-
"version": version,
|
|
35
|
-
"snapshot": json.dumps(parsed_snapshot),
|
|
36
|
-
"kind_name": kind_name,
|
|
37
|
-
}
|
|
38
|
-
)
|
|
39
|
-
|
|
40
|
-
if new_snapshots:
|
|
41
|
-
engine_adapter.delete_from(snapshots_table, "TRUE")
|
|
42
|
-
|
|
43
|
-
index_type = index_text_type(engine_adapter.dialect)
|
|
44
|
-
|
|
45
|
-
engine_adapter.insert_append(
|
|
46
|
-
snapshots_table,
|
|
47
|
-
pd.DataFrame(new_snapshots),
|
|
48
|
-
target_columns_to_types={
|
|
49
|
-
"name": exp.DataType.build(index_type),
|
|
50
|
-
"identifier": exp.DataType.build(index_type),
|
|
51
|
-
"version": exp.DataType.build(index_type),
|
|
52
|
-
"snapshot": exp.DataType.build("text"),
|
|
53
|
-
"kind_name": exp.DataType.build(index_type),
|
|
54
|
-
},
|
|
55
|
-
)
|
|
@@ -1,57 +0,0 @@
|
|
|
1
|
-
"""Change any interval unit of minute to five_minute."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
|
|
5
|
-
from sqlglot import exp
|
|
6
|
-
|
|
7
|
-
from sqlmesh.utils.migration import index_text_type
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def migrate(state_sync, **kwargs): # type: ignore
|
|
11
|
-
import pandas as pd
|
|
12
|
-
|
|
13
|
-
engine_adapter = state_sync.engine_adapter
|
|
14
|
-
schema = state_sync.schema
|
|
15
|
-
snapshots_table = "_snapshots"
|
|
16
|
-
if schema:
|
|
17
|
-
snapshots_table = f"{schema}.{snapshots_table}"
|
|
18
|
-
|
|
19
|
-
new_snapshots = []
|
|
20
|
-
|
|
21
|
-
for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
|
|
22
|
-
exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
|
|
23
|
-
quote_identifiers=True,
|
|
24
|
-
):
|
|
25
|
-
parsed_snapshot = json.loads(snapshot)
|
|
26
|
-
|
|
27
|
-
node = parsed_snapshot["node"]
|
|
28
|
-
|
|
29
|
-
if node.get("interval_unit") == "minute":
|
|
30
|
-
node["interval_unit"] = "five_minute"
|
|
31
|
-
|
|
32
|
-
new_snapshots.append(
|
|
33
|
-
{
|
|
34
|
-
"name": name,
|
|
35
|
-
"identifier": identifier,
|
|
36
|
-
"version": version,
|
|
37
|
-
"snapshot": json.dumps(parsed_snapshot),
|
|
38
|
-
"kind_name": kind_name,
|
|
39
|
-
}
|
|
40
|
-
)
|
|
41
|
-
|
|
42
|
-
if new_snapshots:
|
|
43
|
-
engine_adapter.delete_from(snapshots_table, "TRUE")
|
|
44
|
-
|
|
45
|
-
index_type = index_text_type(engine_adapter.dialect)
|
|
46
|
-
|
|
47
|
-
engine_adapter.insert_append(
|
|
48
|
-
snapshots_table,
|
|
49
|
-
pd.DataFrame(new_snapshots),
|
|
50
|
-
target_columns_to_types={
|
|
51
|
-
"name": exp.DataType.build(index_type),
|
|
52
|
-
"identifier": exp.DataType.build(index_type),
|
|
53
|
-
"version": exp.DataType.build(index_type),
|
|
54
|
-
"snapshot": exp.DataType.build("text"),
|
|
55
|
-
"kind_name": exp.DataType.build(index_type),
|
|
56
|
-
},
|
|
57
|
-
)
|
|
@@ -1,29 +0,0 @@
|
|
|
1
|
-
"""Creates the '_plan_dags' table if Airflow is used."""
|
|
2
|
-
|
|
3
|
-
from sqlglot import exp
|
|
4
|
-
|
|
5
|
-
from sqlmesh.utils.migration import index_text_type
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
def migrate(state_sync, **kwargs): # type: ignore
|
|
9
|
-
engine_adapter = state_sync.engine_adapter
|
|
10
|
-
schema = state_sync.schema
|
|
11
|
-
plan_dags_table = "_plan_dags"
|
|
12
|
-
|
|
13
|
-
if schema:
|
|
14
|
-
engine_adapter.create_schema(schema)
|
|
15
|
-
plan_dags_table = f"{schema}.{plan_dags_table}"
|
|
16
|
-
|
|
17
|
-
index_type = index_text_type(engine_adapter.dialect)
|
|
18
|
-
|
|
19
|
-
engine_adapter.create_state_table(
|
|
20
|
-
plan_dags_table,
|
|
21
|
-
{
|
|
22
|
-
"request_id": exp.DataType.build(index_type),
|
|
23
|
-
"dag_id": exp.DataType.build(index_type),
|
|
24
|
-
"dag_spec": exp.DataType.build("text"),
|
|
25
|
-
},
|
|
26
|
-
primary_key=("request_id",),
|
|
27
|
-
)
|
|
28
|
-
|
|
29
|
-
engine_adapter.create_index(plan_dags_table, "dag_id_idx", ("dag_id",))
|
|
@@ -1,69 +0,0 @@
|
|
|
1
|
-
"""Generate mapping schema data types using the corresponding model's dialect."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
|
|
5
|
-
from sqlglot import exp, parse_one
|
|
6
|
-
|
|
7
|
-
from sqlmesh.utils.migration import index_text_type
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def migrate(state_sync, **kwargs): # type: ignore
|
|
11
|
-
import pandas as pd
|
|
12
|
-
|
|
13
|
-
engine_adapter = state_sync.engine_adapter
|
|
14
|
-
schema = state_sync.schema
|
|
15
|
-
snapshots_table = "_snapshots"
|
|
16
|
-
if schema:
|
|
17
|
-
snapshots_table = f"{schema}.{snapshots_table}"
|
|
18
|
-
|
|
19
|
-
new_snapshots = []
|
|
20
|
-
for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
|
|
21
|
-
exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
|
|
22
|
-
quote_identifiers=True,
|
|
23
|
-
):
|
|
24
|
-
parsed_snapshot = json.loads(snapshot)
|
|
25
|
-
node = parsed_snapshot["node"]
|
|
26
|
-
|
|
27
|
-
mapping_schema = node.get("mapping_schema")
|
|
28
|
-
if mapping_schema:
|
|
29
|
-
node["mapping_schema"] = _convert_schema_types(mapping_schema, node["dialect"])
|
|
30
|
-
|
|
31
|
-
new_snapshots.append(
|
|
32
|
-
{
|
|
33
|
-
"name": name,
|
|
34
|
-
"identifier": identifier,
|
|
35
|
-
"version": version,
|
|
36
|
-
"snapshot": json.dumps(parsed_snapshot),
|
|
37
|
-
"kind_name": kind_name,
|
|
38
|
-
}
|
|
39
|
-
)
|
|
40
|
-
|
|
41
|
-
if new_snapshots:
|
|
42
|
-
engine_adapter.delete_from(snapshots_table, "TRUE")
|
|
43
|
-
|
|
44
|
-
index_type = index_text_type(engine_adapter.dialect)
|
|
45
|
-
|
|
46
|
-
engine_adapter.insert_append(
|
|
47
|
-
snapshots_table,
|
|
48
|
-
pd.DataFrame(new_snapshots),
|
|
49
|
-
target_columns_to_types={
|
|
50
|
-
"name": exp.DataType.build(index_type),
|
|
51
|
-
"identifier": exp.DataType.build(index_type),
|
|
52
|
-
"version": exp.DataType.build(index_type),
|
|
53
|
-
"snapshot": exp.DataType.build("text"),
|
|
54
|
-
"kind_name": exp.DataType.build(index_type),
|
|
55
|
-
},
|
|
56
|
-
)
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
def _convert_schema_types(schema, dialect): # type: ignore
|
|
60
|
-
if not schema:
|
|
61
|
-
return schema
|
|
62
|
-
|
|
63
|
-
for k, v in schema.items():
|
|
64
|
-
if isinstance(v, dict):
|
|
65
|
-
_convert_schema_types(v, dialect)
|
|
66
|
-
else:
|
|
67
|
-
schema[k] = parse_one(v).sql(dialect=dialect)
|
|
68
|
-
|
|
69
|
-
return schema
|
|
@@ -1,65 +0,0 @@
|
|
|
1
|
-
"""Update unrestorable snapshots."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
import typing as t
|
|
5
|
-
from collections import defaultdict
|
|
6
|
-
|
|
7
|
-
from sqlglot import exp
|
|
8
|
-
|
|
9
|
-
from sqlmesh.utils.migration import index_text_type
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
def migrate(state_sync: t.Any, **kwargs: t.Any) -> None:  # type: ignore
    """Mark snapshots as unrestorable when a forward-only sibling shares their version.

    A snapshot gets ``unrestorable = True`` if another snapshot with the same
    (name, version) has ``change_category == 3`` (forward-only) while the
    snapshot itself is not forward-only and has no ``unpaused_ts``. When any
    rows were read, the snapshots table is wiped and rewritten in full.
    """
    import pandas as pd

    engine_adapter = state_sync.engine_adapter
    snapshots_table = "_snapshots"
    if state_sync.schema:
        snapshots_table = f"{state_sync.schema}.{snapshots_table}"

    # Group parsed snapshot payloads by (name, version) so siblings of the
    # same version can be inspected together.
    grouped = defaultdict(list)
    rows = engine_adapter.fetchall(
        exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(
            snapshots_table
        ),
        quote_identifiers=True,
    )
    for name, identifier, version, raw_snapshot, kind_name in rows:
        grouped[(name, version)].append((identifier, kind_name, json.loads(raw_snapshot)))

    new_rows = []
    for (name, version), group in grouped.items():
        # change_category == 3 denotes a forward-only snapshot.
        forward_only_present = any(payload["change_category"] == 3 for _, _, payload in group)
        for identifier, kind_name, payload in group:
            if (
                forward_only_present
                and payload["change_category"] != 3
                and not payload.get("unpaused_ts")
            ):
                payload["unrestorable"] = True
            # Every row is re-inserted, whether or not it was flagged.
            new_rows.append(
                {
                    "name": name,
                    "identifier": identifier,
                    "version": version,
                    "snapshot": json.dumps(payload),
                    "kind_name": kind_name,
                }
            )

    if new_rows:
        # Full rewrite: clear the table, then re-insert every row.
        engine_adapter.delete_from(snapshots_table, "TRUE")

        index_type = index_text_type(engine_adapter.dialect)
        engine_adapter.insert_append(
            snapshots_table,
            pd.DataFrame(new_rows),
            target_columns_to_types={
                "name": exp.DataType.build(index_type),
                "identifier": exp.DataType.build(index_type),
                "version": exp.DataType.build(index_type),
                "snapshot": exp.DataType.build("text"),
                "kind_name": exp.DataType.build(index_type),
            },
        )
|
|
@@ -1,65 +0,0 @@
|
|
|
1
|
-
"""Remove dbt target fields from snapshots outside of limited list of approved fields"""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
|
|
5
|
-
from sqlglot import exp
|
|
6
|
-
|
|
7
|
-
from sqlmesh.utils.migration import index_text_type
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def migrate(state_sync, **kwargs):  # type: ignore
    """Strip stored dbt target configs down to an approved field set.

    For each snapshot whose node carries a dbt ``target`` object under
    ``jinja_macros.global_objs``, that object is replaced with only
    type/name/schema/database/target_name. The table is rewritten only
    when at least one dbt target was found.
    """
    import pandas as pd

    engine_adapter = state_sync.engine_adapter
    snapshots_table = "_snapshots"
    if state_sync.schema:
        snapshots_table = f"{state_sync.schema}.{snapshots_table}"

    rewritten_rows = []
    saw_dbt_target = False
    rows = engine_adapter.fetchall(
        exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(
            snapshots_table
        ),
        quote_identifiers=True,
    )
    for name, identifier, version, raw_snapshot, kind_name in rows:
        payload = json.loads(raw_snapshot)
        node = payload["node"]
        target = node.get("jinja_macros", {}).get("global_objs", {}).get("target", {})
        # Only treat the object as a dbt target when `target_name` is present:
        # every dbt target has it, while an unrelated user-defined `target`
        # macro would not.
        if target and target.get("target_name"):
            saw_dbt_target = True
            node["jinja_macros"]["global_objs"]["target"] = {
                "type": target.get("type", "None"),
                "name": target.get("name", "None"),
                "schema": target.get("schema", "None"),
                "database": target.get("database", "None"),
                "target_name": target["target_name"],
            }

        rewritten_rows.append(
            {
                "name": name,
                "identifier": identifier,
                "version": version,
                "snapshot": json.dumps(payload),
                "kind_name": kind_name,
            }
        )

    if saw_dbt_target:
        # Rewrite the whole table with the sanitized payloads.
        engine_adapter.delete_from(snapshots_table, "TRUE")

        index_type = index_text_type(engine_adapter.dialect)
        engine_adapter.insert_append(
            snapshots_table,
            pd.DataFrame(rewritten_rows),
            target_columns_to_types={
                "name": exp.DataType.build(index_type),
                "identifier": exp.DataType.build(index_type),
                "version": exp.DataType.build(index_type),
                "snapshot": exp.DataType.build("text"),
                "kind_name": exp.DataType.build(index_type),
            },
        )
|
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
"""Add new 'sqlmesh_version' column to the version state table."""
|
|
2
|
-
|
|
3
|
-
from sqlglot import exp
|
|
4
|
-
|
|
5
|
-
from sqlmesh.utils.migration import index_text_type
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
def migrate(state_sync, **kwargs):  # type: ignore
    """Add the 'sqlmesh_version' column to the version state table."""
    engine_adapter = state_sync.engine_adapter

    versions_table = "_versions"
    if state_sync.schema:
        versions_table = f"{state_sync.schema}.{versions_table}"

    # The new column uses the dialect's indexable text type, matching the
    # other text columns in the state tables.
    column_type = exp.DataType.build(index_text_type(engine_adapter.dialect))
    engine_adapter.execute(
        exp.Alter(
            this=exp.to_table(versions_table),
            kind="TABLE",
            actions=[
                exp.ColumnDef(
                    this=exp.to_column("sqlmesh_version"),
                    kind=column_type,
                )
            ],
        )
    )
|
|
@@ -1,45 +0,0 @@
|
|
|
1
|
-
"""Use LONGTEXT type for blob fields in MySQL."""
|
|
2
|
-
|
|
3
|
-
from sqlglot import exp
|
|
4
|
-
|
|
5
|
-
from sqlmesh.utils.migration import blob_text_type
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
def migrate(state_sync, **kwargs):  # type: ignore
    """Alter blob columns in the MySQL state tables to the dialect's blob text type.

    No-op for any dialect other than MySQL. For each known (table, column)
    pair holding serialized blobs, issues an ALTER TABLE ... ALTER COLUMN to
    the type returned by ``blob_text_type``.
    """
    engine_adapter = state_sync.engine_adapter
    if engine_adapter.dialect != "mysql":
        return

    schema = state_sync.schema
    environments_table = "_environments"
    snapshots_table = "_snapshots"
    seeds_table = "_seeds"
    plan_dags_table = "_plan_dags"

    if schema:
        environments_table = f"{schema}.{environments_table}"
        snapshots_table = f"{schema}.{snapshots_table}"
        # Use the local `schema` alias like the other tables (previously this
        # line alone referenced state_sync.schema — same value, now consistent).
        seeds_table = f"{schema}.{seeds_table}"
        plan_dags_table = f"{schema}.{plan_dags_table}"

    targets = [
        (environments_table, "snapshots"),
        (snapshots_table, "snapshot"),
        (seeds_table, "content"),
        (plan_dags_table, "dag_spec"),
    ]

    # The blob type depends only on the dialect, so compute it once instead
    # of once per table.
    blob_type = blob_text_type(engine_adapter.dialect)

    for table_name, column_name in targets:
        alter_table_exp = exp.Alter(
            this=exp.to_table(table_name),
            kind="TABLE",
            actions=[
                exp.AlterColumn(
                    this=exp.to_column(column_name),
                    dtype=exp.DataType.build(blob_type),
                )
            ],
        )

        engine_adapter.execute(alter_table_exp)
|