sqlmesh 0.213.1.dev1__py3-none-any.whl → 0.227.2.dev4__py3-none-any.whl
This diff compares the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- sqlmesh/__init__.py +12 -2
- sqlmesh/_version.py +2 -2
- sqlmesh/cli/main.py +0 -44
- sqlmesh/cli/project_init.py +11 -2
- sqlmesh/core/_typing.py +1 -0
- sqlmesh/core/audit/definition.py +8 -2
- sqlmesh/core/config/__init__.py +1 -1
- sqlmesh/core/config/connection.py +17 -5
- sqlmesh/core/config/dbt.py +13 -0
- sqlmesh/core/config/janitor.py +12 -0
- sqlmesh/core/config/loader.py +7 -0
- sqlmesh/core/config/model.py +2 -0
- sqlmesh/core/config/root.py +3 -0
- sqlmesh/core/console.py +81 -3
- sqlmesh/core/constants.py +1 -1
- sqlmesh/core/context.py +69 -26
- sqlmesh/core/dialect.py +3 -0
- sqlmesh/core/engine_adapter/_typing.py +2 -0
- sqlmesh/core/engine_adapter/base.py +322 -22
- sqlmesh/core/engine_adapter/base_postgres.py +17 -1
- sqlmesh/core/engine_adapter/bigquery.py +146 -7
- sqlmesh/core/engine_adapter/clickhouse.py +17 -13
- sqlmesh/core/engine_adapter/databricks.py +33 -2
- sqlmesh/core/engine_adapter/fabric.py +10 -29
- sqlmesh/core/engine_adapter/mixins.py +142 -48
- sqlmesh/core/engine_adapter/mssql.py +15 -4
- sqlmesh/core/engine_adapter/mysql.py +2 -2
- sqlmesh/core/engine_adapter/postgres.py +9 -3
- sqlmesh/core/engine_adapter/redshift.py +4 -0
- sqlmesh/core/engine_adapter/risingwave.py +1 -0
- sqlmesh/core/engine_adapter/shared.py +6 -0
- sqlmesh/core/engine_adapter/snowflake.py +82 -11
- sqlmesh/core/engine_adapter/spark.py +14 -10
- sqlmesh/core/engine_adapter/trino.py +4 -2
- sqlmesh/core/environment.py +2 -0
- sqlmesh/core/janitor.py +181 -0
- sqlmesh/core/lineage.py +1 -0
- sqlmesh/core/linter/definition.py +13 -13
- sqlmesh/core/linter/rules/builtin.py +29 -0
- sqlmesh/core/macros.py +35 -13
- sqlmesh/core/model/common.py +2 -0
- sqlmesh/core/model/definition.py +82 -28
- sqlmesh/core/model/kind.py +66 -2
- sqlmesh/core/model/meta.py +108 -4
- sqlmesh/core/node.py +101 -1
- sqlmesh/core/plan/builder.py +18 -10
- sqlmesh/core/plan/common.py +199 -2
- sqlmesh/core/plan/definition.py +25 -6
- sqlmesh/core/plan/evaluator.py +75 -113
- sqlmesh/core/plan/explainer.py +90 -8
- sqlmesh/core/plan/stages.py +42 -21
- sqlmesh/core/renderer.py +78 -32
- sqlmesh/core/scheduler.py +102 -22
- sqlmesh/core/selector.py +137 -9
- sqlmesh/core/signal.py +64 -1
- sqlmesh/core/snapshot/__init__.py +2 -0
- sqlmesh/core/snapshot/definition.py +146 -34
- sqlmesh/core/snapshot/evaluator.py +689 -124
- sqlmesh/core/state_sync/__init__.py +0 -1
- sqlmesh/core/state_sync/base.py +55 -33
- sqlmesh/core/state_sync/cache.py +12 -7
- sqlmesh/core/state_sync/common.py +216 -111
- sqlmesh/core/state_sync/db/environment.py +6 -4
- sqlmesh/core/state_sync/db/facade.py +42 -24
- sqlmesh/core/state_sync/db/interval.py +27 -7
- sqlmesh/core/state_sync/db/migrator.py +34 -16
- sqlmesh/core/state_sync/db/snapshot.py +177 -169
- sqlmesh/core/table_diff.py +2 -2
- sqlmesh/core/test/context.py +2 -0
- sqlmesh/core/test/definition.py +14 -9
- sqlmesh/dbt/adapter.py +22 -16
- sqlmesh/dbt/basemodel.py +75 -56
- sqlmesh/dbt/builtin.py +116 -12
- sqlmesh/dbt/column.py +17 -5
- sqlmesh/dbt/common.py +19 -5
- sqlmesh/dbt/context.py +14 -1
- sqlmesh/dbt/loader.py +61 -9
- sqlmesh/dbt/manifest.py +174 -16
- sqlmesh/dbt/model.py +183 -85
- sqlmesh/dbt/package.py +16 -1
- sqlmesh/dbt/profile.py +3 -3
- sqlmesh/dbt/project.py +12 -7
- sqlmesh/dbt/seed.py +6 -1
- sqlmesh/dbt/source.py +13 -1
- sqlmesh/dbt/target.py +25 -6
- sqlmesh/dbt/test.py +36 -5
- sqlmesh/migrations/v0000_baseline.py +95 -0
- sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +5 -7
- sqlmesh/migrations/v0062_add_model_gateway.py +5 -1
- sqlmesh/migrations/v0063_change_signals.py +5 -3
- sqlmesh/migrations/v0064_join_when_matched_strings.py +5 -3
- sqlmesh/migrations/v0065_add_model_optimize.py +5 -1
- sqlmesh/migrations/v0066_add_auto_restatements.py +8 -3
- sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +5 -1
- sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +5 -1
- sqlmesh/migrations/v0069_update_dev_table_suffix.py +5 -3
- sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +5 -1
- sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +9 -5
- sqlmesh/migrations/v0072_add_environment_statements.py +5 -3
- sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +5 -3
- sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +5 -1
- sqlmesh/migrations/v0075_remove_validate_query.py +5 -3
- sqlmesh/migrations/v0076_add_cron_tz.py +5 -1
- sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +5 -1
- sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +5 -3
- sqlmesh/migrations/v0079_add_gateway_managed_property.py +10 -5
- sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +5 -1
- sqlmesh/migrations/v0081_update_partitioned_by.py +5 -3
- sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +5 -3
- sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +5 -1
- sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +5 -1
- sqlmesh/migrations/v0085_deterministic_repr.py +5 -3
- sqlmesh/migrations/v0086_check_deterministic_bug.py +5 -3
- sqlmesh/migrations/v0087_normalize_blueprint_variables.py +5 -3
- sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +5 -3
- sqlmesh/migrations/v0089_add_virtual_environment_mode.py +5 -1
- sqlmesh/migrations/v0090_add_forward_only_column.py +9 -5
- sqlmesh/migrations/v0091_on_additive_change.py +5 -1
- sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +5 -3
- sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +5 -1
- sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +123 -0
- sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +49 -0
- sqlmesh/migrations/v0096_remove_plan_dags_table.py +13 -0
- sqlmesh/migrations/v0097_add_dbt_name_in_node.py +9 -0
- sqlmesh/migrations/{v0060_move_audits_to_model.py → v0098_add_dbt_node_info_in_node.py} +33 -16
- sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
- sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
- sqlmesh/utils/__init__.py +8 -1
- sqlmesh/utils/cache.py +5 -1
- sqlmesh/utils/connection_pool.py +2 -1
- sqlmesh/utils/dag.py +65 -10
- sqlmesh/utils/date.py +8 -1
- sqlmesh/utils/errors.py +8 -0
- sqlmesh/utils/jinja.py +54 -4
- sqlmesh/utils/pydantic.py +6 -6
- sqlmesh/utils/windows.py +13 -3
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/METADATA +7 -10
- sqlmesh-0.227.2.dev4.dist-info/RECORD +370 -0
- sqlmesh_dbt/cli.py +70 -7
- sqlmesh_dbt/console.py +14 -6
- sqlmesh_dbt/operations.py +103 -24
- sqlmesh_dbt/selectors.py +39 -1
- web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
- web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
- web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
- web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
- web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
- web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
- web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
- web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
- web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
- web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
- web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
- web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
- web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
- web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
- web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
- web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
- web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
- web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
- web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
- web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
- web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
- web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
- web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
- web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
- web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
- web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
- web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
- web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
- web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
- web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
- web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
- web/client/dist/assets/context-BctCsyGb.js +71 -0
- web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
- web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
- web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
- web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
- web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
- web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
- web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
- web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
- web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
- web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
- web/client/dist/index.html +1 -1
- sqlmesh/integrations/llm.py +0 -56
- sqlmesh/migrations/v0001_init.py +0 -60
- sqlmesh/migrations/v0002_remove_identify.py +0 -5
- sqlmesh/migrations/v0003_move_batch_size.py +0 -34
- sqlmesh/migrations/v0004_environmnent_add_finalized_at.py +0 -23
- sqlmesh/migrations/v0005_create_seed_table.py +0 -24
- sqlmesh/migrations/v0006_change_seed_hash.py +0 -5
- sqlmesh/migrations/v0007_env_table_info_to_kind.py +0 -99
- sqlmesh/migrations/v0008_create_intervals_table.py +0 -38
- sqlmesh/migrations/v0009_remove_pre_post_hooks.py +0 -62
- sqlmesh/migrations/v0010_seed_hash_batch_size.py +0 -5
- sqlmesh/migrations/v0011_add_model_kind_name.py +0 -63
- sqlmesh/migrations/v0012_update_jinja_expressions.py +0 -86
- sqlmesh/migrations/v0013_serde_using_model_dialects.py +0 -87
- sqlmesh/migrations/v0014_fix_dev_intervals.py +0 -14
- sqlmesh/migrations/v0015_environment_add_promoted_snapshot_ids.py +0 -26
- sqlmesh/migrations/v0016_fix_windows_path.py +0 -59
- sqlmesh/migrations/v0017_fix_windows_seed_path.py +0 -55
- sqlmesh/migrations/v0018_rename_snapshot_model_to_node.py +0 -53
- sqlmesh/migrations/v0019_add_env_suffix_target.py +0 -28
- sqlmesh/migrations/v0020_remove_redundant_attributes_from_dbt_models.py +0 -80
- sqlmesh/migrations/v0021_fix_table_properties.py +0 -62
- sqlmesh/migrations/v0022_move_project_to_model.py +0 -54
- sqlmesh/migrations/v0023_fix_added_models_with_forward_only_parents.py +0 -65
- sqlmesh/migrations/v0024_replace_model_kind_name_enum_with_value.py +0 -55
- sqlmesh/migrations/v0025_fix_intervals_and_missing_change_category.py +0 -117
- sqlmesh/migrations/v0026_remove_dialect_from_seed.py +0 -55
- sqlmesh/migrations/v0027_minute_interval_to_five.py +0 -57
- sqlmesh/migrations/v0028_add_plan_dags_table.py +0 -29
- sqlmesh/migrations/v0029_generate_schema_types_using_dialect.py +0 -69
- sqlmesh/migrations/v0030_update_unrestorable_snapshots.py +0 -65
- sqlmesh/migrations/v0031_remove_dbt_target_fields.py +0 -65
- sqlmesh/migrations/v0032_add_sqlmesh_version.py +0 -25
- sqlmesh/migrations/v0033_mysql_fix_blob_text_type.py +0 -45
- sqlmesh/migrations/v0034_add_default_catalog.py +0 -367
- sqlmesh/migrations/v0035_add_catalog_name_override.py +0 -22
- sqlmesh/migrations/v0036_delete_plan_dags_bug_fix.py +0 -14
- sqlmesh/migrations/v0037_remove_dbt_is_incremental_macro.py +0 -61
- sqlmesh/migrations/v0038_add_expiration_ts_to_snapshot.py +0 -73
- sqlmesh/migrations/v0039_include_environment_in_plan_dag_spec.py +0 -68
- sqlmesh/migrations/v0040_add_previous_finalized_snapshots.py +0 -26
- sqlmesh/migrations/v0041_remove_hash_raw_query_attribute.py +0 -59
- sqlmesh/migrations/v0042_trim_indirect_versions.py +0 -66
- sqlmesh/migrations/v0043_fix_remove_obsolete_attributes_in_plan_dags.py +0 -61
- sqlmesh/migrations/v0044_quote_identifiers_in_model_attributes.py +0 -5
- sqlmesh/migrations/v0045_move_gateway_variable.py +0 -70
- sqlmesh/migrations/v0046_add_batch_concurrency.py +0 -8
- sqlmesh/migrations/v0047_change_scd_string_to_column.py +0 -5
- sqlmesh/migrations/v0048_drop_indirect_versions.py +0 -59
- sqlmesh/migrations/v0049_replace_identifier_with_version_in_seeds_table.py +0 -57
- sqlmesh/migrations/v0050_drop_seeds_table.py +0 -11
- sqlmesh/migrations/v0051_rename_column_descriptions.py +0 -65
- sqlmesh/migrations/v0052_add_normalize_name_in_environment_naming_info.py +0 -28
- sqlmesh/migrations/v0053_custom_model_kind_extra_attributes.py +0 -5
- sqlmesh/migrations/v0054_fix_trailing_comments.py +0 -5
- sqlmesh/migrations/v0055_add_updated_ts_unpaused_ts_ttl_ms_unrestorable_to_snapshot.py +0 -132
- sqlmesh/migrations/v0056_restore_table_indexes.py +0 -118
- sqlmesh/migrations/v0057_add_table_format.py +0 -5
- sqlmesh/migrations/v0058_add_requirements.py +0 -26
- sqlmesh/migrations/v0059_add_physical_version.py +0 -5
- sqlmesh-0.213.1.dev1.dist-info/RECORD +0 -421
- web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
- web/client/dist/assets/context-DgX0fp2E.js +0 -68
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/WHEEL +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/entry_points.txt +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/licenses/LICENSE +0 -0
- {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/top_level.txt +0 -0

sqlmesh/migrations/v0034_add_default_catalog.py
@@ -1,367 +0,0 @@
-"""Add default catalog to snapshots and update names to match new normalization rules."""
-
-from __future__ import annotations
-
-import json
-import typing as t
-
-from sqlglot import exp
-from sqlglot.dialects.dialect import DialectType
-from sqlglot.helper import dict_depth, seq_get
-from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
-
-from sqlmesh.utils.migration import index_text_type
-from sqlmesh.utils.migration import blob_text_type
-
-
-def set_default_catalog(
-    table: exp.Table,
-    default_catalog: t.Optional[str],
-) -> exp.Table:
-    if default_catalog and not table.catalog and table.db:
-        table.set("catalog", exp.parse_identifier(default_catalog))
-
-    return table
-
-
-def normalize_model_name(
-    table: str | exp.Table,
-    default_catalog: t.Optional[str],
-    dialect: DialectType = None,
-) -> str:
-    table = exp.to_table(table, dialect=dialect)
-
-    table = set_default_catalog(table, default_catalog)
-    return exp.table_name(normalize_identifiers(table, dialect=dialect), identify=True)
-
-
-def normalize_mapping_schema(mapping_schema: t.Dict, dialect: str) -> t.Dict:
-    # Example input: {'"catalog"': {'schema': {'table': {'column': 'INT'}}}}
-    # Example output: {'"catalog"': {'"schema"': {'"table"': {'column': 'INT'}}}}
-    normalized_mapping_schema = {}
-    for key, value in mapping_schema.items():
-        if isinstance(value, dict):
-            normalized_mapping_schema[normalize_model_name(key, None, dialect)] = (
-                normalize_mapping_schema(value, dialect)
-            )
-        else:
-            normalized_mapping_schema[key] = value
-    return normalized_mapping_schema
-
-
-def update_dbt_relations(
-    source: t.Optional[t.Dict], keys: t.List[str], default_catalog: t.Optional[str]
-) -> None:
-    if not default_catalog or not source:
-        return
-    for key in keys:
-        relations = source.get(key)
-        if relations:
-            relations = [relations] if "database" in relations else relations.values()
-            for relation in relations:
-                if not relation["database"]:
-                    relation["database"] = default_catalog
-
-
-def migrate(state_sync, default_catalog: t.Optional[str], **kwargs):  # type: ignore
-    import pandas as pd
-
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
-    snapshots_table = "_snapshots"
-    environments_table = "_environments"
-    intervals_table = "_intervals"
-    seeds_table = "_seeds"
-
-    if schema:
-        snapshots_table = f"{schema}.{snapshots_table}"
-        environments_table = f"{schema}.{environments_table}"
-        intervals_table = f"{schema}.{intervals_table}"
-        seeds_table = f"{schema}.{seeds_table}"
-
-    new_snapshots = []
-    snapshot_to_dialect = {}
-    index_type = index_text_type(engine_adapter.dialect)
-    blob_type = blob_text_type(engine_adapter.dialect)
-
-    for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-        exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-        quote_identifiers=True,
-    ):
-        parsed_snapshot = json.loads(snapshot)
-        # This handles the case where the user originally had the catalog in the model name, and
-        # therefore we would previously have created the table with the catalog in its name. The new
-        # logic removes the catalog, so we need to make sure the table name stays the same as the
-        # original table name, which is why we include this override.
-        parsed_snapshot["base_table_name_override"] = parsed_snapshot["name"]
-        node = parsed_snapshot["node"]
-        dialect = node.get("dialect")
-        normalized_name = (
-            normalize_model_name(name, default_catalog=default_catalog, dialect=dialect)
-            if node["source_type"] != "audit"
-            else name
-        )
-        parsed_snapshot["name"] = normalized_name
-        # At the time of this migration all nodes had a default catalog, so we don't need to check the node type
-        node["default_catalog"] = default_catalog
-        snapshot_to_dialect[name] = dialect
-        mapping_schema = node.get("mapping_schema", {})
-        if mapping_schema:
-            normalized_default_catalog = (
-                normalize_model_name(default_catalog, default_catalog=None, dialect=dialect)
-                if default_catalog
-                else None
-            )
-            mapping_schema_depth = dict_depth(mapping_schema)
-            if mapping_schema_depth == 3 and normalized_default_catalog:
-                mapping_schema = {normalized_default_catalog: mapping_schema}
-            node["mapping_schema"] = normalize_mapping_schema(mapping_schema, dialect)
-        depends_on = node.get("depends_on", [])
-        if depends_on:
-            node["depends_on"] = [
-                normalize_model_name(dep, default_catalog, dialect) for dep in depends_on
-            ]
-        if parsed_snapshot["parents"]:
-            parsed_snapshot["parents"] = [
-                {
-                    "name": normalize_model_name(parent["name"], default_catalog, dialect),
-                    "identifier": parent["identifier"],
-                }
-                for parent in parsed_snapshot["parents"]
-            ]
-        if parsed_snapshot["indirect_versions"]:
-            parsed_snapshot["indirect_versions"] = {
-                normalize_model_name(name, default_catalog, dialect): snapshot_data_versions
-                for name, snapshot_data_versions in parsed_snapshot["indirect_versions"].items()
-            }
-        # dbt specific migration
-        jinja_macros = node.get("jinja_macros")
-        if (
-            default_catalog
-            and jinja_macros
-            and jinja_macros.get("create_builtins_module") == "sqlmesh.dbt"
-        ):
-            update_dbt_relations(
-                jinja_macros.get("global_objs"), ["refs", "sources", "this"], default_catalog
-            )
-
-        new_snapshots.append(
-            {
-                "name": normalized_name,
-                "identifier": identifier,
-                "version": version,
-                "snapshot": json.dumps(parsed_snapshot),
-                "kind_name": kind_name,
-            }
-        )
-
-    if new_snapshots:
-        engine_adapter.delete_from(snapshots_table, "TRUE")
-
-        engine_adapter.insert_append(
-            snapshots_table,
-            pd.DataFrame(new_snapshots),
-            target_columns_to_types={
-                "name": exp.DataType.build(index_type),
-                "identifier": exp.DataType.build(index_type),
-                "version": exp.DataType.build(index_type),
-                "snapshot": exp.DataType.build(blob_type),
-                "kind_name": exp.DataType.build(index_type),
-            },
-        )
-
-    new_environments = []
-    default_dialect = seq_get(list(snapshot_to_dialect.values()), 0)
-    for (
-        name,
-        snapshots,
-        start_at,
-        end_at,
-        plan_id,
-        previous_plan_id,
-        expiration_ts,
-        finalized_ts,
-        promoted_snapshot_ids,
-        suffix_target,
-    ) in engine_adapter.fetchall(
-        exp.select(
-            "name",
-            "snapshots",
-            "start_at",
-            "end_at",
-            "plan_id",
-            "previous_plan_id",
-            "expiration_ts",
-            "finalized_ts",
-            "promoted_snapshot_ids",
-            "suffix_target",
-        ).from_(environments_table),
-        quote_identifiers=True,
-    ):
-        new_snapshots = []
-        for snapshot in json.loads(snapshots):
-            snapshot_name = snapshot["name"]
-            snapshot["base_table_name_override"] = snapshot_name
-            dialect = snapshot_to_dialect.get(snapshot_name, default_dialect)
-            node_type = snapshot.get("node_type")
-            normalized_name = (
-                normalize_model_name(snapshot_name, default_catalog, dialect)
-                if node_type is None or node_type == "model"
-                else snapshot_name
-            )
-            snapshot["name"] = normalized_name
-            if snapshot["parents"]:
-                snapshot["parents"] = [
-                    {
-                        "name": normalize_model_name(parent["name"], default_catalog, dialect),
-                        "identifier": parent["identifier"],
-                    }
-                    for parent in snapshot["parents"]
-                ]
-            new_snapshots.append(snapshot)
-
-        new_environments.append(
-            {
-                "name": name,
-                "snapshots": json.dumps(new_snapshots),
-                "start_at": start_at,
-                "end_at": end_at,
-                "plan_id": plan_id,
-                "previous_plan_id": previous_plan_id,
-                "expiration_ts": expiration_ts,
-                "finalized_ts": finalized_ts,
-                "promoted_snapshot_ids": promoted_snapshot_ids,
-                "suffix_target": suffix_target,
-            }
-        )
-
-    if new_environments:
-        engine_adapter.delete_from(environments_table, "TRUE")
-
-        engine_adapter.insert_append(
-            environments_table,
-            pd.DataFrame(new_environments),
-            target_columns_to_types={
-                "name": exp.DataType.build(index_type),
-                "snapshots": exp.DataType.build(blob_type),
-                "start_at": exp.DataType.build("text"),
-                "end_at": exp.DataType.build("text"),
-                "plan_id": exp.DataType.build("text"),
-                "previous_plan_id": exp.DataType.build("text"),
-                "expiration_ts": exp.DataType.build("bigint"),
-                "finalized_ts": exp.DataType.build("bigint"),
-                "promoted_snapshot_ids": exp.DataType.build(blob_type),
-                "suffix_target": exp.DataType.build("text"),
-            },
-        )
-
-    # We update environments to not be finalized in order to force them to update their views,
-    # which ensures the views now use the fully qualified names.
-    # We only do this if a default catalog was applied; otherwise the current views are fine.
-    # We do this after creating the new environments in order to avoid having to find a way to
-    # express a null timestamp value in pandas that works across all engines.
-    if default_catalog:
-        engine_adapter.execute(
-            exp.update(environments_table, {"finalized_ts": None}, where="1=1"),
-            quote_identifiers=True,
-        )
-
-    new_intervals = []
-    for (
-        id,
-        created_ts,
-        name,
-        identifier,
-        version,
-        start_ts,
-        end_ts,
-        is_dev,
-        is_removed,
-        is_compacted,
-    ) in engine_adapter.fetchall(
-        exp.select(
-            "id",
-            "created_ts",
-            "name",
-            "identifier",
-            "version",
-            "start_ts",
-            "end_ts",
-            "is_dev",
-            "is_removed",
-            "is_compacted",
-        ).from_(intervals_table),
-        quote_identifiers=True,
-    ):
-        dialect = snapshot_to_dialect.get(name, default_dialect)
-        normalized_name = normalize_model_name(name, default_catalog, dialect)
-        new_intervals.append(
-            {
-                "id": id,
-                "created_ts": created_ts,
-                "name": normalized_name,
-                "identifier": identifier,
-                "version": version,
-                "start_ts": start_ts,
-                "end_ts": end_ts,
-                "is_dev": is_dev,
-                "is_removed": is_removed,
-                "is_compacted": is_compacted,
-            }
-        )
-
-    if new_intervals:
-        engine_adapter.delete_from(intervals_table, "TRUE")
-
-        engine_adapter.insert_append(
-            intervals_table,
-            pd.DataFrame(new_intervals),
-            target_columns_to_types={
-                "id": exp.DataType.build(index_type),
-                "created_ts": exp.DataType.build("bigint"),
-                "name": exp.DataType.build(index_type),
-                "identifier": exp.DataType.build(index_type),
-                "version": exp.DataType.build(index_type),
-                "start_ts": exp.DataType.build("bigint"),
-                "end_ts": exp.DataType.build("bigint"),
-                "is_dev": exp.DataType.build("boolean"),
-                "is_removed": exp.DataType.build("boolean"),
-                "is_compacted": exp.DataType.build("boolean"),
-            },
-        )
-
-    new_seeds = []
-    for (
-        name,
-        identifier,
-        content,
-    ) in engine_adapter.fetchall(
-        exp.select(
-            "name",
-            "identifier",
-            "content",
-        ).from_(seeds_table),
-        quote_identifiers=True,
-    ):
-        dialect = snapshot_to_dialect.get(name, default_dialect)
-        normalized_name = normalize_model_name(name, default_catalog, dialect)
-        new_seeds.append(
-            {
-                "name": normalized_name,
-                "identifier": identifier,
-                "content": content,
-            }
-        )
-
-    if new_seeds:
-        engine_adapter.delete_from(seeds_table, "TRUE")
-
-        engine_adapter.insert_append(
-            seeds_table,
-            pd.DataFrame(new_seeds),
-            target_columns_to_types={
-                "name": exp.DataType.build(index_type),
-                "identifier": exp.DataType.build(index_type),
-                "content": exp.DataType.build("text"),
-            },
-        )
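
Note: `normalize_model_name` above is the helper that turns every stored model name into the fully qualified, quoted form used as a key across the state tables. A minimal standalone sketch of the same logic (the catalog, table name, and dialect here are illustrative):

from sqlglot import exp
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

# Mirrors set_default_catalog + normalize_model_name from the deleted file above.
table = exp.to_table("Db.My_Table", dialect="duckdb")
if not table.catalog and table.db:  # only schema-qualified names get the default catalog
    table.set("catalog", exp.parse_identifier("analytics"))

# Identifiers are normalized per dialect (lowercased for DuckDB) and quoted,
# printing '"analytics"."db"."my_table"', the key format the migration writes.
print(exp.table_name(normalize_identifiers(table, dialect="duckdb"), identify=True))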
sqlmesh/migrations/v0035_add_catalog_name_override.py
@@ -1,22 +0,0 @@
-"""Add support for environment catalog name override."""
-
-from sqlglot import exp
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    engine_adapter = state_sync.engine_adapter
-    environments_table = "_environments"
-    if state_sync.schema:
-        environments_table = f"{state_sync.schema}.{environments_table}"
-
-    alter_table_exp = exp.Alter(
-        this=exp.to_table(environments_table),
-        kind="TABLE",
-        actions=[
-            exp.ColumnDef(
-                this=exp.to_column("catalog_name_override"),
-                kind=exp.DataType.build("text"),
-            )
-        ],
-    )
-    engine_adapter.execute(alter_table_exp)
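
Note: the `exp.Alter` construction above is the additive-column pattern shared by several of these migrations. Rendered through sqlglot it yields a plain ALTER statement; a sketch (the schema qualifier and dialect are illustrative, and the exact type keyword varies by dialect):

from sqlglot import exp

alter_table_exp = exp.Alter(
    this=exp.to_table("sqlmesh._environments"),
    kind="TABLE",
    actions=[
        exp.ColumnDef(
            this=exp.to_column("catalog_name_override"),
            kind=exp.DataType.build("text"),
        )
    ],
)
# Roughly: ALTER TABLE sqlmesh._environments ADD COLUMN catalog_name_override TEXT
print(alter_table_exp.sql(dialect="postgres"))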
sqlmesh/migrations/v0036_delete_plan_dags_bug_fix.py
@@ -1,14 +0,0 @@
-"""Add missing delete from migration #34."""
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
-    plan_dags_table = "_plan_dags"
-    if state_sync.schema:
-        plan_dags_table = f"{schema}.{plan_dags_table}"
-
-    # At the time of this migration the plan_dags table is only needed for in-flight DAGs, so we can
-    # safely just delete its contents instead of migrating them.
-    # If reusing this code, verify that this is still the case.
-    engine_adapter.delete_from(plan_dags_table, "TRUE")
sqlmesh/migrations/v0037_remove_dbt_is_incremental_macro.py
@@ -1,61 +0,0 @@
-"""Remove dbt is_incremental macro"""
-
-import json
-
-from sqlglot import exp
-
-from sqlmesh.utils.migration import index_text_type
-from sqlmesh.utils.migration import blob_text_type
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    import pandas as pd
-
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
-    snapshots_table = "_snapshots"
-    if schema:
-        snapshots_table = f"{schema}.{snapshots_table}"
-
-    blob_type = blob_text_type(engine_adapter.dialect)
-    new_snapshots = []
-    found_dbt_package = False
-    for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-        exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-        quote_identifiers=True,
-    ):
-        parsed_snapshot = json.loads(snapshot)
-        node = parsed_snapshot["node"]
-        dbt_package = node.get("jinja_macros", {}).get("packages", {}).get("dbt", {})
-
-        if dbt_package:
-            found_dbt_package = True
-            dbt_package.pop("is_incremental", None)
-            dbt_package.pop("should_full_refresh", None)
-
-        new_snapshots.append(
-            {
-                "name": name,
-                "identifier": identifier,
-                "version": version,
-                "snapshot": json.dumps(parsed_snapshot),
-                "kind_name": kind_name,
-            }
-        )
-
-    if found_dbt_package:
-        engine_adapter.delete_from(snapshots_table, "TRUE")
-
-        index_type = index_text_type(engine_adapter.dialect)
-
-        engine_adapter.insert_append(
-            snapshots_table,
-            pd.DataFrame(new_snapshots),
-            target_columns_to_types={
-                "name": exp.DataType.build(index_type),
-                "identifier": exp.DataType.build(index_type),
-                "version": exp.DataType.build(index_type),
-                "snapshot": exp.DataType.build(blob_type),
-                "kind_name": exp.DataType.build(index_type),
-            },
-        )
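
Note: in the loop above, `dbt_package` aliases a dict nested inside `parsed_snapshot`, so the two `pop` calls mutate the payload in place before it is re-serialized with `json.dumps`. A tiny self-contained illustration (payload shape abbreviated, values hypothetical):

node = {"jinja_macros": {"packages": {"dbt": {"is_incremental": "<macro>", "ref": "<macro>"}}}}
dbt_package = node.get("jinja_macros", {}).get("packages", {}).get("dbt", {})
dbt_package.pop("is_incremental", None)
dbt_package.pop("should_full_refresh", None)  # absent here, so this pop is a no-op
# The change is visible through `node`, which is what gets serialized back:
assert node["jinja_macros"]["packages"]["dbt"] == {"ref": "<macro>"}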
sqlmesh/migrations/v0038_add_expiration_ts_to_snapshot.py
@@ -1,73 +0,0 @@
-"""Add the expiration_ts column to the snapshots table."""
-
-import json
-
-from sqlglot import exp
-
-from sqlmesh.utils.date import to_datetime, to_timestamp
-from sqlmesh.utils.migration import index_text_type
-from sqlmesh.utils.migration import blob_text_type
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    import pandas as pd
-
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
-    snapshots_table = "_snapshots"
-    if schema:
-        snapshots_table = f"{schema}.{snapshots_table}"
-
-    index_type = index_text_type(engine_adapter.dialect)
-    blob_type = blob_text_type(engine_adapter.dialect)
-
-    alter_table_exp = exp.Alter(
-        this=exp.to_table(snapshots_table),
-        kind="TABLE",
-        actions=[
-            exp.ColumnDef(
-                this=exp.to_column("expiration_ts"),
-                kind=exp.DataType.build("bigint"),
-            )
-        ],
-    )
-    engine_adapter.execute(alter_table_exp)
-
-    new_snapshots = []
-
-    for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-        exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-        quote_identifiers=True,
-    ):
-        parsed_snapshot = json.loads(snapshot)
-
-        updated_ts = parsed_snapshot["updated_ts"]
-        ttl = parsed_snapshot["ttl"]
-        expiration_ts = to_timestamp(ttl, relative_base=to_datetime(updated_ts))
-
-        new_snapshots.append(
-            {
-                "name": name,
-                "identifier": identifier,
-                "version": version,
-                "snapshot": snapshot,
-                "kind_name": kind_name,
-                "expiration_ts": expiration_ts,
-            }
-        )
-
-    if new_snapshots:
-        engine_adapter.delete_from(snapshots_table, "TRUE")
-
-        engine_adapter.insert_append(
-            snapshots_table,
-            pd.DataFrame(new_snapshots),
-            target_columns_to_types={
-                "name": exp.DataType.build(index_type),
-                "identifier": exp.DataType.build(index_type),
-                "version": exp.DataType.build(index_type),
-                "snapshot": exp.DataType.build(blob_type),
-                "kind_name": exp.DataType.build(index_type),
-                "expiration_ts": exp.DataType.build("bigint"),
-            },
-        )
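
Note: the core of the migration above is the `expiration_ts` computation, which resolves the snapshot's relative `ttl` expression (e.g. "in 1 week") against its `updated_ts`. A plain-datetime equivalent of that arithmetic, assuming a one-week TTL and a hypothetical timestamp:

from datetime import datetime, timedelta, timezone

updated_ts = 1_700_000_000_000  # epoch milliseconds, as stored on the snapshot
updated_at = datetime.fromtimestamp(updated_ts / 1000, tz=timezone.utc)

# to_timestamp("in 1 week", relative_base=updated_at) boils down to:
expiration_ts = int((updated_at + timedelta(weeks=1)).timestamp() * 1000)
print(expiration_ts)  # 1700604800000, i.e. updated_ts + 604_800_000 ms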
sqlmesh/migrations/v0039_include_environment_in_plan_dag_spec.py
@@ -1,68 +0,0 @@
-"""Include environment in plan dag spec."""
-
-import json
-
-from sqlglot import exp
-
-from sqlmesh.utils.migration import index_text_type
-from sqlmesh.utils.migration import blob_text_type
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    import pandas as pd
-
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
-    plan_dags_table = "_plan_dags"
-    if state_sync.schema:
-        plan_dags_table = f"{schema}.{plan_dags_table}"
-
-    new_specs = []
-
-    for request_id, dag_id, dag_spec in engine_adapter.fetchall(
-        exp.select("request_id", "dag_id", "dag_spec").from_(plan_dags_table),
-        quote_identifiers=True,
-    ):
-        parsed_dag_spec = json.loads(dag_spec)
-
-        environment_naming_info = parsed_dag_spec.pop("environment_naming_info")
-        promoted_snapshots = parsed_dag_spec.pop("promoted_snapshots", [])
-        start = parsed_dag_spec.pop("start")
-        parsed_dag_spec.pop("end", None)
-        plan_id = parsed_dag_spec.pop("plan_id")
-        previous_plan_id = parsed_dag_spec.pop("previous_plan_id", None)
-        expiration_ts = parsed_dag_spec.pop("environment_expiration_ts", None)
-
-        parsed_dag_spec["environment"] = {
-            **environment_naming_info,
-            "snapshots": promoted_snapshots,
-            "start_at": start,
-            "end_at": start,
-            "plan_id": plan_id,
-            "previous_plan_id": previous_plan_id,
-            "expiration_ts": expiration_ts,
-        }
-
-        new_specs.append(
-            {
-                "request_id": request_id,
-                "dag_id": dag_id,
-                "dag_spec": json.dumps(parsed_dag_spec),
-            }
-        )
-
-    if new_specs:
-        engine_adapter.delete_from(plan_dags_table, "TRUE")
-
-        index_type = index_text_type(engine_adapter.dialect)
-        blob_type = blob_text_type(engine_adapter.dialect)
-
-        engine_adapter.insert_append(
-            plan_dags_table,
-            pd.DataFrame(new_specs),
-            target_columns_to_types={
-                "request_id": exp.DataType.build(index_type),
-                "dag_id": exp.DataType.build(index_type),
-                "dag_spec": exp.DataType.build(blob_type),
-            },
-        )
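
Note: the net effect of the pops above is a reshape of each stored `dag_spec`: flat environment fields collapse into a nested `environment` object, with `end_at` deliberately set to `start` since the old `end` value is discarded. A sketch of the transformation with hypothetical values:

before = {
    "environment_naming_info": {"name": "prod", "suffix_target": "schema"},
    "promoted_snapshots": [{"name": '"db"."model"'}],
    "start": "2023-01-01",
    "end": "2023-01-02",  # dropped entirely
    "plan_id": "plan_1",
    "previous_plan_id": None,
    # ...any remaining spec fields are left untouched
}
after = {
    "environment": {
        "name": "prod",
        "suffix_target": "schema",
        "snapshots": [{"name": '"db"."model"'}],
        "start_at": "2023-01-01",
        "end_at": "2023-01-01",  # start is reused as end_at
        "plan_id": "plan_1",
        "previous_plan_id": None,
        "expiration_ts": None,
    },
}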
sqlmesh/migrations/v0040_add_previous_finalized_snapshots.py
@@ -1,26 +0,0 @@
-"""Add support for environment previous finalized snapshots."""
-
-from sqlglot import exp
-
-from sqlmesh.utils.migration import blob_text_type
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    engine_adapter = state_sync.engine_adapter
-    environments_table = "_environments"
-    if state_sync.schema:
-        environments_table = f"{state_sync.schema}.{environments_table}"
-
-    blob_type = blob_text_type(engine_adapter.dialect)
-
-    alter_table_exp = exp.Alter(
-        this=exp.to_table(environments_table),
-        kind="TABLE",
-        actions=[
-            exp.ColumnDef(
-                this=exp.to_column("previous_finalized_snapshots"),
-                kind=exp.DataType.build(blob_type),
-            )
-        ],
-    )
-    engine_adapter.execute(alter_table_exp)
sqlmesh/migrations/v0041_remove_hash_raw_query_attribute.py
@@ -1,59 +0,0 @@
-"""Remove hash_raw_query from existing snapshots."""
-
-import json
-
-from sqlglot import exp
-
-from sqlmesh.utils.migration import index_text_type
-from sqlmesh.utils.migration import blob_text_type
-
-
-def migrate(state_sync, **kwargs):  # type: ignore
-    import pandas as pd
-
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
-    snapshots_table = "_snapshots"
-    if schema:
-        snapshots_table = f"{schema}.{snapshots_table}"
-
-    new_snapshots = []
-
-    for name, identifier, version, snapshot, kind_name, expiration_ts in engine_adapter.fetchall(
-        exp.select("name", "identifier", "version", "snapshot", "kind_name", "expiration_ts").from_(
-            snapshots_table
-        ),
-        quote_identifiers=True,
-    ):
-        parsed_snapshot = json.loads(snapshot)
-        parsed_snapshot["node"].pop("hash_raw_query", None)
-
-        new_snapshots.append(
-            {
-                "name": name,
-                "identifier": identifier,
-                "version": version,
-                "snapshot": json.dumps(parsed_snapshot),
-                "kind_name": kind_name,
-                "expiration_ts": expiration_ts,
-            }
-        )
-
-    if new_snapshots:
-        engine_adapter.delete_from(snapshots_table, "TRUE")
-
-        index_type = index_text_type(engine_adapter.dialect)
-        blob_type = blob_text_type(engine_adapter.dialect)
-
-        engine_adapter.insert_append(
-            snapshots_table,
-            pd.DataFrame(new_snapshots),
-            target_columns_to_types={
-                "name": exp.DataType.build(index_type),
-                "identifier": exp.DataType.build(index_type),
-                "version": exp.DataType.build(index_type),
-                "snapshot": exp.DataType.build(blob_type),
-                "kind_name": exp.DataType.build(index_type),
-                "expiration_ts": exp.DataType.build("bigint"),
-            },
-        )