sqlmesh 0.213.1.dev1__py3-none-any.whl → 0.227.2.dev4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their public registries.
Files changed (252)
  1. sqlmesh/__init__.py +12 -2
  2. sqlmesh/_version.py +2 -2
  3. sqlmesh/cli/main.py +0 -44
  4. sqlmesh/cli/project_init.py +11 -2
  5. sqlmesh/core/_typing.py +1 -0
  6. sqlmesh/core/audit/definition.py +8 -2
  7. sqlmesh/core/config/__init__.py +1 -1
  8. sqlmesh/core/config/connection.py +17 -5
  9. sqlmesh/core/config/dbt.py +13 -0
  10. sqlmesh/core/config/janitor.py +12 -0
  11. sqlmesh/core/config/loader.py +7 -0
  12. sqlmesh/core/config/model.py +2 -0
  13. sqlmesh/core/config/root.py +3 -0
  14. sqlmesh/core/console.py +81 -3
  15. sqlmesh/core/constants.py +1 -1
  16. sqlmesh/core/context.py +69 -26
  17. sqlmesh/core/dialect.py +3 -0
  18. sqlmesh/core/engine_adapter/_typing.py +2 -0
  19. sqlmesh/core/engine_adapter/base.py +322 -22
  20. sqlmesh/core/engine_adapter/base_postgres.py +17 -1
  21. sqlmesh/core/engine_adapter/bigquery.py +146 -7
  22. sqlmesh/core/engine_adapter/clickhouse.py +17 -13
  23. sqlmesh/core/engine_adapter/databricks.py +33 -2
  24. sqlmesh/core/engine_adapter/fabric.py +10 -29
  25. sqlmesh/core/engine_adapter/mixins.py +142 -48
  26. sqlmesh/core/engine_adapter/mssql.py +15 -4
  27. sqlmesh/core/engine_adapter/mysql.py +2 -2
  28. sqlmesh/core/engine_adapter/postgres.py +9 -3
  29. sqlmesh/core/engine_adapter/redshift.py +4 -0
  30. sqlmesh/core/engine_adapter/risingwave.py +1 -0
  31. sqlmesh/core/engine_adapter/shared.py +6 -0
  32. sqlmesh/core/engine_adapter/snowflake.py +82 -11
  33. sqlmesh/core/engine_adapter/spark.py +14 -10
  34. sqlmesh/core/engine_adapter/trino.py +4 -2
  35. sqlmesh/core/environment.py +2 -0
  36. sqlmesh/core/janitor.py +181 -0
  37. sqlmesh/core/lineage.py +1 -0
  38. sqlmesh/core/linter/definition.py +13 -13
  39. sqlmesh/core/linter/rules/builtin.py +29 -0
  40. sqlmesh/core/macros.py +35 -13
  41. sqlmesh/core/model/common.py +2 -0
  42. sqlmesh/core/model/definition.py +82 -28
  43. sqlmesh/core/model/kind.py +66 -2
  44. sqlmesh/core/model/meta.py +108 -4
  45. sqlmesh/core/node.py +101 -1
  46. sqlmesh/core/plan/builder.py +18 -10
  47. sqlmesh/core/plan/common.py +199 -2
  48. sqlmesh/core/plan/definition.py +25 -6
  49. sqlmesh/core/plan/evaluator.py +75 -113
  50. sqlmesh/core/plan/explainer.py +90 -8
  51. sqlmesh/core/plan/stages.py +42 -21
  52. sqlmesh/core/renderer.py +78 -32
  53. sqlmesh/core/scheduler.py +102 -22
  54. sqlmesh/core/selector.py +137 -9
  55. sqlmesh/core/signal.py +64 -1
  56. sqlmesh/core/snapshot/__init__.py +2 -0
  57. sqlmesh/core/snapshot/definition.py +146 -34
  58. sqlmesh/core/snapshot/evaluator.py +689 -124
  59. sqlmesh/core/state_sync/__init__.py +0 -1
  60. sqlmesh/core/state_sync/base.py +55 -33
  61. sqlmesh/core/state_sync/cache.py +12 -7
  62. sqlmesh/core/state_sync/common.py +216 -111
  63. sqlmesh/core/state_sync/db/environment.py +6 -4
  64. sqlmesh/core/state_sync/db/facade.py +42 -24
  65. sqlmesh/core/state_sync/db/interval.py +27 -7
  66. sqlmesh/core/state_sync/db/migrator.py +34 -16
  67. sqlmesh/core/state_sync/db/snapshot.py +177 -169
  68. sqlmesh/core/table_diff.py +2 -2
  69. sqlmesh/core/test/context.py +2 -0
  70. sqlmesh/core/test/definition.py +14 -9
  71. sqlmesh/dbt/adapter.py +22 -16
  72. sqlmesh/dbt/basemodel.py +75 -56
  73. sqlmesh/dbt/builtin.py +116 -12
  74. sqlmesh/dbt/column.py +17 -5
  75. sqlmesh/dbt/common.py +19 -5
  76. sqlmesh/dbt/context.py +14 -1
  77. sqlmesh/dbt/loader.py +61 -9
  78. sqlmesh/dbt/manifest.py +174 -16
  79. sqlmesh/dbt/model.py +183 -85
  80. sqlmesh/dbt/package.py +16 -1
  81. sqlmesh/dbt/profile.py +3 -3
  82. sqlmesh/dbt/project.py +12 -7
  83. sqlmesh/dbt/seed.py +6 -1
  84. sqlmesh/dbt/source.py +13 -1
  85. sqlmesh/dbt/target.py +25 -6
  86. sqlmesh/dbt/test.py +36 -5
  87. sqlmesh/migrations/v0000_baseline.py +95 -0
  88. sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +5 -7
  89. sqlmesh/migrations/v0062_add_model_gateway.py +5 -1
  90. sqlmesh/migrations/v0063_change_signals.py +5 -3
  91. sqlmesh/migrations/v0064_join_when_matched_strings.py +5 -3
  92. sqlmesh/migrations/v0065_add_model_optimize.py +5 -1
  93. sqlmesh/migrations/v0066_add_auto_restatements.py +8 -3
  94. sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +5 -1
  95. sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +5 -1
  96. sqlmesh/migrations/v0069_update_dev_table_suffix.py +5 -3
  97. sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +5 -1
  98. sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +9 -5
  99. sqlmesh/migrations/v0072_add_environment_statements.py +5 -3
  100. sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +5 -3
  101. sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +5 -1
  102. sqlmesh/migrations/v0075_remove_validate_query.py +5 -3
  103. sqlmesh/migrations/v0076_add_cron_tz.py +5 -1
  104. sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +5 -1
  105. sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +5 -3
  106. sqlmesh/migrations/v0079_add_gateway_managed_property.py +10 -5
  107. sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +5 -1
  108. sqlmesh/migrations/v0081_update_partitioned_by.py +5 -3
  109. sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +5 -3
  110. sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +5 -1
  111. sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +5 -1
  112. sqlmesh/migrations/v0085_deterministic_repr.py +5 -3
  113. sqlmesh/migrations/v0086_check_deterministic_bug.py +5 -3
  114. sqlmesh/migrations/v0087_normalize_blueprint_variables.py +5 -3
  115. sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +5 -3
  116. sqlmesh/migrations/v0089_add_virtual_environment_mode.py +5 -1
  117. sqlmesh/migrations/v0090_add_forward_only_column.py +9 -5
  118. sqlmesh/migrations/v0091_on_additive_change.py +5 -1
  119. sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +5 -3
  120. sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +5 -1
  121. sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +123 -0
  122. sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +49 -0
  123. sqlmesh/migrations/v0096_remove_plan_dags_table.py +13 -0
  124. sqlmesh/migrations/v0097_add_dbt_name_in_node.py +9 -0
  125. sqlmesh/migrations/{v0060_move_audits_to_model.py → v0098_add_dbt_node_info_in_node.py} +33 -16
  126. sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
  127. sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
  128. sqlmesh/utils/__init__.py +8 -1
  129. sqlmesh/utils/cache.py +5 -1
  130. sqlmesh/utils/connection_pool.py +2 -1
  131. sqlmesh/utils/dag.py +65 -10
  132. sqlmesh/utils/date.py +8 -1
  133. sqlmesh/utils/errors.py +8 -0
  134. sqlmesh/utils/jinja.py +54 -4
  135. sqlmesh/utils/pydantic.py +6 -6
  136. sqlmesh/utils/windows.py +13 -3
  137. {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/METADATA +7 -10
  138. sqlmesh-0.227.2.dev4.dist-info/RECORD +370 -0
  139. sqlmesh_dbt/cli.py +70 -7
  140. sqlmesh_dbt/console.py +14 -6
  141. sqlmesh_dbt/operations.py +103 -24
  142. sqlmesh_dbt/selectors.py +39 -1
  143. web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
  144. web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
  145. web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
  146. web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
  147. web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
  148. web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
  149. web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
  150. web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
  151. web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
  152. web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
  153. web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
  154. web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
  155. web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
  156. web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
  157. web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
  158. web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
  159. web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
  160. web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
  161. web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
  162. web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
  163. web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
  164. web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
  165. web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
  166. web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
  167. web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
  168. web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
  169. web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
  170. web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
  171. web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
  172. web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
  173. web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
  174. web/client/dist/assets/context-BctCsyGb.js +71 -0
  175. web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
  176. web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
  177. web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
  178. web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
  179. web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
  180. web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
  181. web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
  182. web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
  183. web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
  184. web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
  185. web/client/dist/index.html +1 -1
  186. sqlmesh/integrations/llm.py +0 -56
  187. sqlmesh/migrations/v0001_init.py +0 -60
  188. sqlmesh/migrations/v0002_remove_identify.py +0 -5
  189. sqlmesh/migrations/v0003_move_batch_size.py +0 -34
  190. sqlmesh/migrations/v0004_environmnent_add_finalized_at.py +0 -23
  191. sqlmesh/migrations/v0005_create_seed_table.py +0 -24
  192. sqlmesh/migrations/v0006_change_seed_hash.py +0 -5
  193. sqlmesh/migrations/v0007_env_table_info_to_kind.py +0 -99
  194. sqlmesh/migrations/v0008_create_intervals_table.py +0 -38
  195. sqlmesh/migrations/v0009_remove_pre_post_hooks.py +0 -62
  196. sqlmesh/migrations/v0010_seed_hash_batch_size.py +0 -5
  197. sqlmesh/migrations/v0011_add_model_kind_name.py +0 -63
  198. sqlmesh/migrations/v0012_update_jinja_expressions.py +0 -86
  199. sqlmesh/migrations/v0013_serde_using_model_dialects.py +0 -87
  200. sqlmesh/migrations/v0014_fix_dev_intervals.py +0 -14
  201. sqlmesh/migrations/v0015_environment_add_promoted_snapshot_ids.py +0 -26
  202. sqlmesh/migrations/v0016_fix_windows_path.py +0 -59
  203. sqlmesh/migrations/v0017_fix_windows_seed_path.py +0 -55
  204. sqlmesh/migrations/v0018_rename_snapshot_model_to_node.py +0 -53
  205. sqlmesh/migrations/v0019_add_env_suffix_target.py +0 -28
  206. sqlmesh/migrations/v0020_remove_redundant_attributes_from_dbt_models.py +0 -80
  207. sqlmesh/migrations/v0021_fix_table_properties.py +0 -62
  208. sqlmesh/migrations/v0022_move_project_to_model.py +0 -54
  209. sqlmesh/migrations/v0023_fix_added_models_with_forward_only_parents.py +0 -65
  210. sqlmesh/migrations/v0024_replace_model_kind_name_enum_with_value.py +0 -55
  211. sqlmesh/migrations/v0025_fix_intervals_and_missing_change_category.py +0 -117
  212. sqlmesh/migrations/v0026_remove_dialect_from_seed.py +0 -55
  213. sqlmesh/migrations/v0027_minute_interval_to_five.py +0 -57
  214. sqlmesh/migrations/v0028_add_plan_dags_table.py +0 -29
  215. sqlmesh/migrations/v0029_generate_schema_types_using_dialect.py +0 -69
  216. sqlmesh/migrations/v0030_update_unrestorable_snapshots.py +0 -65
  217. sqlmesh/migrations/v0031_remove_dbt_target_fields.py +0 -65
  218. sqlmesh/migrations/v0032_add_sqlmesh_version.py +0 -25
  219. sqlmesh/migrations/v0033_mysql_fix_blob_text_type.py +0 -45
  220. sqlmesh/migrations/v0034_add_default_catalog.py +0 -367
  221. sqlmesh/migrations/v0035_add_catalog_name_override.py +0 -22
  222. sqlmesh/migrations/v0036_delete_plan_dags_bug_fix.py +0 -14
  223. sqlmesh/migrations/v0037_remove_dbt_is_incremental_macro.py +0 -61
  224. sqlmesh/migrations/v0038_add_expiration_ts_to_snapshot.py +0 -73
  225. sqlmesh/migrations/v0039_include_environment_in_plan_dag_spec.py +0 -68
  226. sqlmesh/migrations/v0040_add_previous_finalized_snapshots.py +0 -26
  227. sqlmesh/migrations/v0041_remove_hash_raw_query_attribute.py +0 -59
  228. sqlmesh/migrations/v0042_trim_indirect_versions.py +0 -66
  229. sqlmesh/migrations/v0043_fix_remove_obsolete_attributes_in_plan_dags.py +0 -61
  230. sqlmesh/migrations/v0044_quote_identifiers_in_model_attributes.py +0 -5
  231. sqlmesh/migrations/v0045_move_gateway_variable.py +0 -70
  232. sqlmesh/migrations/v0046_add_batch_concurrency.py +0 -8
  233. sqlmesh/migrations/v0047_change_scd_string_to_column.py +0 -5
  234. sqlmesh/migrations/v0048_drop_indirect_versions.py +0 -59
  235. sqlmesh/migrations/v0049_replace_identifier_with_version_in_seeds_table.py +0 -57
  236. sqlmesh/migrations/v0050_drop_seeds_table.py +0 -11
  237. sqlmesh/migrations/v0051_rename_column_descriptions.py +0 -65
  238. sqlmesh/migrations/v0052_add_normalize_name_in_environment_naming_info.py +0 -28
  239. sqlmesh/migrations/v0053_custom_model_kind_extra_attributes.py +0 -5
  240. sqlmesh/migrations/v0054_fix_trailing_comments.py +0 -5
  241. sqlmesh/migrations/v0055_add_updated_ts_unpaused_ts_ttl_ms_unrestorable_to_snapshot.py +0 -132
  242. sqlmesh/migrations/v0056_restore_table_indexes.py +0 -118
  243. sqlmesh/migrations/v0057_add_table_format.py +0 -5
  244. sqlmesh/migrations/v0058_add_requirements.py +0 -26
  245. sqlmesh/migrations/v0059_add_physical_version.py +0 -5
  246. sqlmesh-0.213.1.dev1.dist-info/RECORD +0 -421
  247. web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
  248. web/client/dist/assets/context-DgX0fp2E.js +0 -68
  249. {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/WHEEL +0 -0
  250. {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/entry_points.txt +0 -0
  251. {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/licenses/LICENSE +0 -0
  252. {sqlmesh-0.213.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/top_level.txt +0 -0

sqlmesh/migrations/v0011_add_model_kind_name.py (deleted)
@@ -1,63 +0,0 @@
- """Add the kind_name column to the snapshots table."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     index_type = index_text_type(engine_adapter.dialect)
-
-     alter_table_exp = exp.Alter(
-         this=exp.to_table(snapshots_table),
-         kind="TABLE",
-         actions=[
-             exp.ColumnDef(
-                 this=exp.to_column("kind_name"),
-                 kind=exp.DataType.build(index_type),
-             )
-         ],
-     )
-     engine_adapter.execute(alter_table_exp)
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": snapshot,
-                 "kind_name": parsed_snapshot["model"]["kind"]["name"],
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )

sqlmesh/migrations/v0012_update_jinja_expressions.py (deleted)
@@ -1,86 +0,0 @@
- """Fix expressions that contain jinja."""
-
- import json
- import typing as t
-
- from sqlglot import exp
-
- from sqlmesh.utils.jinja import has_jinja
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         audits = parsed_snapshot.get("audits", [])
-         model = parsed_snapshot["model"]
-
-         if "query" in model and has_jinja(model["query"]):
-             model["query"] = _wrap_query(model["query"])
-
-         _wrap_statements(model, "pre_statements")
-         _wrap_statements(model, "post_statements")
-
-         for audit in audits:
-             if has_jinja(audit["query"]):
-                 audit["query"] = _wrap_query(audit["query"])
-             _wrap_statements(audit, "expressions")
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )
-
-
- def _wrap_statements(obj: t.Dict, key: str) -> None:
-     updated_statements = []
-     for statement in obj.get(key, []):
-         if has_jinja(statement):
-             statement = _wrap_statement(statement)
-         updated_statements.append(statement)
-
-     if updated_statements:
-         obj[key] = updated_statements
-
-
- def _wrap_query(sql: str) -> str:
-     return f"JINJA_QUERY_BEGIN;\n{sql}\nJINJA_END;"
-
-
- def _wrap_statement(sql: str) -> str:
-     return f"JINJA_STATEMENT_BEGIN;\n{sql}\nJINJA_END;"

sqlmesh/migrations/v0013_serde_using_model_dialects.py (deleted)
@@ -1,87 +0,0 @@
- """Serialize SQL using the dialect of each model."""
-
- import json
- import typing as t
-
- from sqlglot import exp, parse_one
-
- from sqlmesh.utils.jinja import has_jinja
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         model = parsed_snapshot["model"]
-         dialect = model["dialect"]
-
-         _update_expression(model, "query", dialect)
-         _update_expression_list(model, "pre_statements", dialect)
-         _update_expression_list(model, "post_statements", dialect)
-
-         for audit in parsed_snapshot.get("audits", []):
-             dialect = audit["dialect"]
-             _update_expression(audit, "query", dialect)
-             _update_expression_list(audit, "expressions", dialect)
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )
-
-
- # Note: previously we used to do serde using the SQLGlot dialect, so we need to parse the
- # stored queries using that dialect and then write them back using the correct dialect.
-
-
- def _update_expression(obj: t.Dict, key: str, dialect: str) -> None:
-     if key in obj and not has_jinja(obj[key]):
-         obj[key] = parse_one(obj[key]).sql(dialect=dialect)
-
-
- def _update_expression_list(obj: t.Dict, key: str, dialect: str) -> None:
-     if key in obj:
-         obj[key] = [
-             (
-                 parse_one(expression).sql(dialect=dialect)
-                 if not has_jinja(expression)
-                 else expression
-             )
-             for expression in obj[key]
-             if expression
-         ]

sqlmesh/migrations/v0014_fix_dev_intervals.py (deleted)
@@ -1,14 +0,0 @@
- """Fix snapshot intervals that have been erroneously marked as dev."""
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     schema = state_sync.schema
-     intervals_table = "_intervals"
-     if schema:
-         intervals_table = f"{schema}.{intervals_table}"
-
-     state_sync.engine_adapter.update_table(
-         intervals_table,
-         {"is_dev": False},
-         where="1=1",
-     )

sqlmesh/migrations/v0015_environment_add_promoted_snapshot_ids.py (deleted)
@@ -1,26 +0,0 @@
- """Include a set of snapshot IDs filtered for promotion."""
-
- from sqlglot import exp
- from sqlmesh.utils.migration import blob_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     engine_adapter = state_sync.engine_adapter
-     environments_table = "_environments"
-     if state_sync.schema:
-         environments_table = f"{state_sync.schema}.{environments_table}"
-
-     blob_type = blob_text_type(engine_adapter.dialect)
-
-     alter_table_exp = exp.Alter(
-         this=exp.to_table(environments_table),
-         kind="TABLE",
-         actions=[
-             exp.ColumnDef(
-                 this=exp.to_column("promoted_snapshot_ids"),
-                 kind=exp.DataType.build(blob_type),
-             )
-         ],
-     )
-
-     engine_adapter.execute(alter_table_exp)

sqlmesh/migrations/v0016_fix_windows_path.py (deleted)
@@ -1,59 +0,0 @@
- """Fix paths that have a Windows forward slash in them."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         model = parsed_snapshot["model"]
-         python_env = model.get("python_env")
-         if python_env:
-             for py_definition in python_env.values():
-                 path = py_definition.get("path")
-                 if path:
-                     py_definition["path"] = path.replace("\\", "/")
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )

sqlmesh/migrations/v0017_fix_windows_seed_path.py (deleted)
@@ -1,55 +0,0 @@
- """Fix seed paths that have a Windows forward slash in them."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         model_kind = parsed_snapshot["model"]["kind"]
-         if "path" in model_kind:
-             model_kind["path"] = model_kind["path"].replace("\\", "/")
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )

sqlmesh/migrations/v0018_rename_snapshot_model_to_node.py (deleted)
@@ -1,53 +0,0 @@
- """Replace snapshot model field with node."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         parsed_snapshot["node"] = parsed_snapshot.pop("model")
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )

sqlmesh/migrations/v0019_add_env_suffix_target.py (deleted)
@@ -1,28 +0,0 @@
- """Add support for environment suffix target."""
-
- from sqlglot import exp
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     engine_adapter = state_sync.engine_adapter
-     environments_table = "_environments"
-     if state_sync.schema:
-         environments_table = f"{state_sync.schema}.{environments_table}"
-
-     alter_table_exp = exp.Alter(
-         this=exp.to_table(environments_table),
-         kind="TABLE",
-         actions=[
-             exp.ColumnDef(
-                 this=exp.to_column("suffix_target"),
-                 kind=exp.DataType.build("text"),
-             )
-         ],
-     )
-     engine_adapter.execute(alter_table_exp)
-
-     state_sync.engine_adapter.update_table(
-         environments_table,
-         {"suffix_target": "schema"},
-         where="1=1",
-     )

sqlmesh/migrations/v0020_remove_redundant_attributes_from_dbt_models.py (deleted)
@@ -1,80 +0,0 @@
- """Remove redundant attributes from dbt models."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         jinja_macros_global_objs = parsed_snapshot["node"]["jinja_macros"]["global_objs"]
-         if "config" in jinja_macros_global_objs and isinstance(
-             jinja_macros_global_objs["config"], dict
-         ):
-             for key in CONFIG_ATTRIBUTE_KEYS_TO_REMOVE:
-                 jinja_macros_global_objs["config"].pop(key, None)
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if new_snapshots:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )
-
-
- CONFIG_ATTRIBUTE_KEYS_TO_REMOVE = [
-     "config",
-     "config_call_dict",
-     "depends_on",
-     "dependencies",
-     "metrics",
-     "original_file_path",
-     "packages",
-     "patch_path",
-     "path",
-     "post-hook",
-     "pre-hook",
-     "raw_code",
-     "refs",
-     "resource_type",
-     "sources",
-     "sql",
-     "tests",
-     "unrendered_config",
- ]

sqlmesh/migrations/v0021_fix_table_properties.py (deleted)
@@ -1,62 +0,0 @@
- """Fix table properties that have extra quoting due to a bug."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.core import dialect as d
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-     found_table_properties = False
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-         table_properties = parsed_snapshot["node"].get("table_properties")
-         if table_properties:
-             found_table_properties = True
-             dialect = parsed_snapshot["node"].get("dialect")
-             parsed_snapshot["node"]["table_properties"] = exp.Tuple(
-                 expressions=[
-                     exp.Literal.string(k).eq(d.parse_one(v)) for k, v in table_properties.items()
-                 ]
-             ).sql(dialect=dialect)
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     if found_table_properties:
-         engine_adapter.delete_from(snapshots_table, "TRUE")
-
-         index_type = index_text_type(engine_adapter.dialect)
-
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )

sqlmesh/migrations/v0022_move_project_to_model.py (deleted)
@@ -1,54 +0,0 @@
- """Move project attr from snapshot to model."""
-
- import json
-
- from sqlglot import exp
-
- from sqlmesh.utils.migration import index_text_type
-
-
- def migrate(state_sync, **kwargs):  # type: ignore
-     import pandas as pd
-
-     engine_adapter = state_sync.engine_adapter
-     schema = state_sync.schema
-     snapshots_table = "_snapshots"
-     if schema:
-         snapshots_table = f"{schema}.{snapshots_table}"
-
-     new_snapshots = []
-
-     for name, identifier, version, snapshot, kind_name in engine_adapter.fetchall(
-         exp.select("name", "identifier", "version", "snapshot", "kind_name").from_(snapshots_table),
-         quote_identifiers=True,
-     ):
-         parsed_snapshot = json.loads(snapshot)
-
-         parsed_snapshot["node"]["project"] = parsed_snapshot.pop("project", "")
-
-         new_snapshots.append(
-             {
-                 "name": name,
-                 "identifier": identifier,
-                 "version": version,
-                 "snapshot": json.dumps(parsed_snapshot),
-                 "kind_name": kind_name,
-             }
-         )
-
-     engine_adapter.delete_from(snapshots_table, "TRUE")
-
-     index_type = index_text_type(engine_adapter.dialect)
-
-     if new_snapshots:
-         engine_adapter.insert_append(
-             snapshots_table,
-             pd.DataFrame(new_snapshots),
-             target_columns_to_types={
-                 "name": exp.DataType.build(index_type),
-                 "identifier": exp.DataType.build(index_type),
-                 "version": exp.DataType.build(index_type),
-                 "snapshot": exp.DataType.build("text"),
-                 "kind_name": exp.DataType.build(index_type),
-             },
-         )