sqlmesh 0.217.1.dev1__py3-none-any.whl → 0.227.2.dev4__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (183)
  1. sqlmesh/__init__.py +12 -2
  2. sqlmesh/_version.py +2 -2
  3. sqlmesh/cli/project_init.py +10 -2
  4. sqlmesh/core/_typing.py +1 -0
  5. sqlmesh/core/audit/definition.py +8 -2
  6. sqlmesh/core/config/__init__.py +1 -1
  7. sqlmesh/core/config/connection.py +17 -5
  8. sqlmesh/core/config/dbt.py +13 -0
  9. sqlmesh/core/config/janitor.py +12 -0
  10. sqlmesh/core/config/loader.py +7 -0
  11. sqlmesh/core/config/model.py +2 -0
  12. sqlmesh/core/config/root.py +3 -0
  13. sqlmesh/core/console.py +80 -2
  14. sqlmesh/core/constants.py +1 -1
  15. sqlmesh/core/context.py +61 -25
  16. sqlmesh/core/dialect.py +3 -0
  17. sqlmesh/core/engine_adapter/_typing.py +2 -0
  18. sqlmesh/core/engine_adapter/base.py +322 -22
  19. sqlmesh/core/engine_adapter/base_postgres.py +17 -1
  20. sqlmesh/core/engine_adapter/bigquery.py +146 -7
  21. sqlmesh/core/engine_adapter/clickhouse.py +17 -13
  22. sqlmesh/core/engine_adapter/databricks.py +33 -2
  23. sqlmesh/core/engine_adapter/fabric.py +1 -29
  24. sqlmesh/core/engine_adapter/mixins.py +142 -48
  25. sqlmesh/core/engine_adapter/mssql.py +15 -4
  26. sqlmesh/core/engine_adapter/mysql.py +2 -2
  27. sqlmesh/core/engine_adapter/postgres.py +9 -3
  28. sqlmesh/core/engine_adapter/redshift.py +4 -0
  29. sqlmesh/core/engine_adapter/risingwave.py +1 -0
  30. sqlmesh/core/engine_adapter/shared.py +6 -0
  31. sqlmesh/core/engine_adapter/snowflake.py +82 -11
  32. sqlmesh/core/engine_adapter/spark.py +14 -10
  33. sqlmesh/core/engine_adapter/trino.py +4 -2
  34. sqlmesh/core/janitor.py +181 -0
  35. sqlmesh/core/lineage.py +1 -0
  36. sqlmesh/core/macros.py +35 -13
  37. sqlmesh/core/model/common.py +2 -0
  38. sqlmesh/core/model/definition.py +65 -4
  39. sqlmesh/core/model/kind.py +66 -2
  40. sqlmesh/core/model/meta.py +107 -2
  41. sqlmesh/core/node.py +101 -2
  42. sqlmesh/core/plan/builder.py +15 -10
  43. sqlmesh/core/plan/common.py +196 -2
  44. sqlmesh/core/plan/definition.py +21 -6
  45. sqlmesh/core/plan/evaluator.py +72 -113
  46. sqlmesh/core/plan/explainer.py +90 -8
  47. sqlmesh/core/plan/stages.py +42 -21
  48. sqlmesh/core/renderer.py +26 -18
  49. sqlmesh/core/scheduler.py +60 -19
  50. sqlmesh/core/selector.py +137 -9
  51. sqlmesh/core/signal.py +64 -1
  52. sqlmesh/core/snapshot/__init__.py +1 -0
  53. sqlmesh/core/snapshot/definition.py +109 -25
  54. sqlmesh/core/snapshot/evaluator.py +610 -50
  55. sqlmesh/core/state_sync/__init__.py +0 -1
  56. sqlmesh/core/state_sync/base.py +31 -27
  57. sqlmesh/core/state_sync/cache.py +12 -4
  58. sqlmesh/core/state_sync/common.py +216 -111
  59. sqlmesh/core/state_sync/db/facade.py +30 -15
  60. sqlmesh/core/state_sync/db/interval.py +27 -7
  61. sqlmesh/core/state_sync/db/migrator.py +14 -8
  62. sqlmesh/core/state_sync/db/snapshot.py +119 -87
  63. sqlmesh/core/table_diff.py +2 -2
  64. sqlmesh/core/test/definition.py +14 -9
  65. sqlmesh/dbt/adapter.py +20 -11
  66. sqlmesh/dbt/basemodel.py +52 -41
  67. sqlmesh/dbt/builtin.py +27 -11
  68. sqlmesh/dbt/column.py +17 -5
  69. sqlmesh/dbt/common.py +4 -2
  70. sqlmesh/dbt/context.py +14 -1
  71. sqlmesh/dbt/loader.py +60 -8
  72. sqlmesh/dbt/manifest.py +136 -8
  73. sqlmesh/dbt/model.py +105 -25
  74. sqlmesh/dbt/package.py +16 -1
  75. sqlmesh/dbt/profile.py +3 -3
  76. sqlmesh/dbt/project.py +12 -7
  77. sqlmesh/dbt/seed.py +1 -1
  78. sqlmesh/dbt/source.py +6 -1
  79. sqlmesh/dbt/target.py +25 -6
  80. sqlmesh/dbt/test.py +31 -1
  81. sqlmesh/migrations/v0000_baseline.py +3 -6
  82. sqlmesh/migrations/v0061_mysql_fix_blob_text_type.py +2 -5
  83. sqlmesh/migrations/v0062_add_model_gateway.py +2 -2
  84. sqlmesh/migrations/v0063_change_signals.py +2 -4
  85. sqlmesh/migrations/v0064_join_when_matched_strings.py +2 -4
  86. sqlmesh/migrations/v0065_add_model_optimize.py +2 -2
  87. sqlmesh/migrations/v0066_add_auto_restatements.py +2 -6
  88. sqlmesh/migrations/v0067_add_tsql_date_full_precision.py +2 -2
  89. sqlmesh/migrations/v0068_include_unrendered_query_in_metadata_hash.py +2 -2
  90. sqlmesh/migrations/v0069_update_dev_table_suffix.py +2 -4
  91. sqlmesh/migrations/v0070_include_grains_in_metadata_hash.py +2 -2
  92. sqlmesh/migrations/v0071_add_dev_version_to_intervals.py +2 -6
  93. sqlmesh/migrations/v0072_add_environment_statements.py +2 -4
  94. sqlmesh/migrations/v0073_remove_symbolic_disable_restatement.py +2 -4
  95. sqlmesh/migrations/v0074_add_partition_by_time_column_property.py +2 -2
  96. sqlmesh/migrations/v0075_remove_validate_query.py +2 -4
  97. sqlmesh/migrations/v0076_add_cron_tz.py +2 -2
  98. sqlmesh/migrations/v0077_fix_column_type_hash_calculation.py +2 -2
  99. sqlmesh/migrations/v0078_warn_if_non_migratable_python_env.py +2 -4
  100. sqlmesh/migrations/v0079_add_gateway_managed_property.py +7 -9
  101. sqlmesh/migrations/v0080_add_batch_size_to_scd_type_2_models.py +2 -2
  102. sqlmesh/migrations/v0081_update_partitioned_by.py +2 -4
  103. sqlmesh/migrations/v0082_warn_if_incorrectly_duplicated_statements.py +2 -4
  104. sqlmesh/migrations/v0083_use_sql_for_scd_time_data_type_data_hash.py +2 -2
  105. sqlmesh/migrations/v0084_normalize_quote_when_matched_and_merge_filter.py +2 -2
  106. sqlmesh/migrations/v0085_deterministic_repr.py +2 -4
  107. sqlmesh/migrations/v0086_check_deterministic_bug.py +2 -4
  108. sqlmesh/migrations/v0087_normalize_blueprint_variables.py +2 -4
  109. sqlmesh/migrations/v0088_warn_about_variable_python_env_diffs.py +2 -4
  110. sqlmesh/migrations/v0089_add_virtual_environment_mode.py +2 -2
  111. sqlmesh/migrations/v0090_add_forward_only_column.py +2 -6
  112. sqlmesh/migrations/v0091_on_additive_change.py +2 -2
  113. sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +2 -4
  114. sqlmesh/migrations/v0093_use_raw_sql_in_fingerprint.py +2 -2
  115. sqlmesh/migrations/v0094_add_dev_version_and_fingerprint_columns.py +2 -6
  116. sqlmesh/migrations/v0095_warn_about_dbt_raw_sql_diff.py +2 -4
  117. sqlmesh/migrations/v0096_remove_plan_dags_table.py +2 -4
  118. sqlmesh/migrations/v0097_add_dbt_name_in_node.py +2 -2
  119. sqlmesh/migrations/v0098_add_dbt_node_info_in_node.py +103 -0
  120. sqlmesh/migrations/v0099_add_last_altered_to_intervals.py +25 -0
  121. sqlmesh/migrations/v0100_add_grants_and_grants_target_layer.py +9 -0
  122. sqlmesh/utils/__init__.py +8 -1
  123. sqlmesh/utils/cache.py +5 -1
  124. sqlmesh/utils/date.py +1 -1
  125. sqlmesh/utils/errors.py +4 -0
  126. sqlmesh/utils/jinja.py +25 -2
  127. sqlmesh/utils/pydantic.py +6 -6
  128. sqlmesh/utils/windows.py +13 -3
  129. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/METADATA +5 -5
  130. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/RECORD +181 -176
  131. sqlmesh_dbt/cli.py +70 -7
  132. sqlmesh_dbt/console.py +14 -6
  133. sqlmesh_dbt/operations.py +103 -24
  134. sqlmesh_dbt/selectors.py +39 -1
  135. web/client/dist/assets/{Audits-Ucsx1GzF.js → Audits-CBiYyyx-.js} +1 -1
  136. web/client/dist/assets/{Banner-BWDzvavM.js → Banner-DSRbUlO5.js} +1 -1
  137. web/client/dist/assets/{ChevronDownIcon-D2VL13Ah.js → ChevronDownIcon-MK_nrjD_.js} +1 -1
  138. web/client/dist/assets/{ChevronRightIcon-DWGYbf1l.js → ChevronRightIcon-CLWtT22Q.js} +1 -1
  139. web/client/dist/assets/{Content-DdHDZM3I.js → Content-BNuGZN5l.js} +1 -1
  140. web/client/dist/assets/{Content-Bikfy8fh.js → Content-CSHJyW0n.js} +1 -1
  141. web/client/dist/assets/{Data-CzAJH7rW.js → Data-C1oRDbLx.js} +1 -1
  142. web/client/dist/assets/{DataCatalog-BJF11g8f.js → DataCatalog-HXyX2-_j.js} +1 -1
  143. web/client/dist/assets/{Editor-s0SBpV2y.js → Editor-BDyfpUuw.js} +1 -1
  144. web/client/dist/assets/{Editor-DgLhgKnm.js → Editor-D0jNItwC.js} +1 -1
  145. web/client/dist/assets/{Errors-D0m0O1d3.js → Errors-BfuFLcPi.js} +1 -1
  146. web/client/dist/assets/{FileExplorer-CEv0vXkt.js → FileExplorer-BR9IE3he.js} +1 -1
  147. web/client/dist/assets/{Footer-BwzXn8Ew.js → Footer-CgBEtiAh.js} +1 -1
  148. web/client/dist/assets/{Header-6heDkEqG.js → Header-DSqR6nSO.js} +1 -1
  149. web/client/dist/assets/{Input-obuJsD6k.js → Input-B-oZ6fGO.js} +1 -1
  150. web/client/dist/assets/Lineage-DYQVwDbD.js +1 -0
  151. web/client/dist/assets/{ListboxShow-HM9_qyrt.js → ListboxShow-BE5-xevs.js} +1 -1
  152. web/client/dist/assets/{ModelLineage-zWdKo0U2.js → ModelLineage-DkIFAYo4.js} +1 -1
  153. web/client/dist/assets/{Models-Bcu66SRz.js → Models-D5dWr8RB.js} +1 -1
  154. web/client/dist/assets/{Page-BWEEQfIt.js → Page-C-XfU5BR.js} +1 -1
  155. web/client/dist/assets/{Plan-C4gXCqlf.js → Plan-ZEuTINBq.js} +1 -1
  156. web/client/dist/assets/{PlusCircleIcon-CVDO651q.js → PlusCircleIcon-DVXAHG8_.js} +1 -1
  157. web/client/dist/assets/{ReportErrors-BT6xFwAr.js → ReportErrors-B7FEPzMB.js} +1 -1
  158. web/client/dist/assets/{Root-ryJoBK4h.js → Root-8aZyhPxF.js} +1 -1
  159. web/client/dist/assets/{SearchList-DB04sPb9.js → SearchList-W_iT2G82.js} +1 -1
  160. web/client/dist/assets/{SelectEnvironment-CUYcXUu6.js → SelectEnvironment-C65jALmO.js} +1 -1
  161. web/client/dist/assets/{SourceList-Doo_9ZGp.js → SourceList-DSLO6nVJ.js} +1 -1
  162. web/client/dist/assets/{SourceListItem-D5Mj7Dly.js → SourceListItem-BHt8d9-I.js} +1 -1
  163. web/client/dist/assets/{SplitPane-qHmkD1qy.js → SplitPane-CViaZmw6.js} +1 -1
  164. web/client/dist/assets/{Tests-DH1Z74ML.js → Tests-DhaVt5t1.js} +1 -1
  165. web/client/dist/assets/{Welcome-DqUJUNMF.js → Welcome-DvpjH-_4.js} +1 -1
  166. web/client/dist/assets/context-BctCsyGb.js +71 -0
  167. web/client/dist/assets/{context-Dr54UHLi.js → context-DFNeGsFF.js} +1 -1
  168. web/client/dist/assets/{editor-DYIP1yQ4.js → editor-CcO28cqd.js} +1 -1
  169. web/client/dist/assets/{file-DarlIDVi.js → file-CvJN3aZO.js} +1 -1
  170. web/client/dist/assets/{floating-ui.react-dom-BH3TFvkM.js → floating-ui.react-dom-CjE-JNW1.js} +1 -1
  171. web/client/dist/assets/{help-Bl8wqaQc.js → help-DuPhjipa.js} +1 -1
  172. web/client/dist/assets/{index-D1sR7wpN.js → index-C-dJH7yZ.js} +1 -1
  173. web/client/dist/assets/{index-O3mjYpnE.js → index-Dj0i1-CA.js} +2 -2
  174. web/client/dist/assets/{plan-CehRrJUG.js → plan-BTRSbjKn.js} +1 -1
  175. web/client/dist/assets/{popover-CqgMRE0G.js → popover-_Sf0yvOI.js} +1 -1
  176. web/client/dist/assets/{project-6gxepOhm.js → project-BvSOI8MY.js} +1 -1
  177. web/client/dist/index.html +1 -1
  178. web/client/dist/assets/Lineage-D0Hgdz2v.js +0 -1
  179. web/client/dist/assets/context-DgX0fp2E.js +0 -68
  180. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/WHEEL +0 -0
  181. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/entry_points.txt +0 -0
  182. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/licenses/LICENSE +0 -0
  183. {sqlmesh-0.217.1.dev1.dist-info → sqlmesh-0.227.2.dev4.dist-info}/top_level.txt +0 -0
@@ -1,9 +1,9 @@
 """Use the raw SQL when computing the model fingerprint."""


-def migrate_schemas(state_sync, **kwargs):  # type: ignore
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
     pass


-def migrate_rows(state_sync, **kwargs):  # type: ignore
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
     pass
@@ -7,9 +7,7 @@ from sqlglot import exp
 from sqlmesh.utils.migration import index_text_type, blob_text_type


-def migrate_schemas(state_sync, **kwargs):  # type: ignore
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
     snapshots_table = "_snapshots"
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"
@@ -42,11 +40,9 @@ def migrate_schemas(state_sync, **kwargs):  # type: ignore
     engine_adapter.execute(add_fingerprint_exp)


-def migrate_rows(state_sync, **kwargs):  # type: ignore
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
     import pandas as pd

-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
     snapshots_table = "_snapshots"
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"
@@ -17,13 +17,11 @@ from sqlmesh.core.console import get_console
 SQLMESH_DBT_PACKAGE = "sqlmesh.dbt"


-def migrate_schemas(state_sync, **kwargs):  # type: ignore
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
     pass


-def migrate_rows(state_sync, **kwargs):  # type: ignore
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
     snapshots_table = "_snapshots"
     if schema:
         snapshots_table = f"{schema}.{snapshots_table}"
@@ -1,9 +1,7 @@
 """Remove the obsolete _plan_dags table."""


-def migrate_schemas(state_sync, **kwargs):  # type: ignore
-    engine_adapter = state_sync.engine_adapter
-    schema = state_sync.schema
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
     plan_dags_table = "_plan_dags"
     if schema:
         plan_dags_table = f"{schema}.{plan_dags_table}"
@@ -11,5 +9,5 @@ def migrate_schemas(state_sync, **kwargs):  # type: ignore
     engine_adapter.drop_table(plan_dags_table)


-def migrate_rows(state_sync, **kwargs):  # type: ignore
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
     pass
@@ -1,9 +1,9 @@
 """Add 'dbt_name' property to node definition."""


-def migrate_schemas(state_sync, **kwargs):  # type: ignore
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
     pass


-def migrate_rows(state_sync, **kwargs):  # type: ignore
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
     pass
@@ -0,0 +1,103 @@
+"""Replace 'dbt_name' with 'dbt_node_info' in the snapshot definition"""
+
+import json
+from sqlglot import exp
+from sqlmesh.utils.migration import index_text_type, blob_text_type
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
+    import pandas as pd
+
+    snapshots_table = "_snapshots"
+    if schema:
+        snapshots_table = f"{schema}.{snapshots_table}"
+
+    index_type = index_text_type(engine_adapter.dialect)
+    blob_type = blob_text_type(engine_adapter.dialect)
+
+    new_snapshots = []
+    migration_needed = False
+
+    for (
+        name,
+        identifier,
+        version,
+        snapshot,
+        kind_name,
+        updated_ts,
+        unpaused_ts,
+        ttl_ms,
+        unrestorable,
+        forward_only,
+        dev_version,
+        fingerprint,
+    ) in engine_adapter.fetchall(
+        exp.select(
+            "name",
+            "identifier",
+            "version",
+            "snapshot",
+            "kind_name",
+            "updated_ts",
+            "unpaused_ts",
+            "ttl_ms",
+            "unrestorable",
+            "forward_only",
+            "dev_version",
+            "fingerprint",
+        ).from_(snapshots_table),
+        quote_identifiers=True,
+    ):
+        parsed_snapshot = json.loads(snapshot)
+        if dbt_name := parsed_snapshot["node"].get("dbt_name"):
+            parsed_snapshot["node"].pop("dbt_name")
+            parsed_snapshot["node"]["dbt_node_info"] = {
+                "unique_id": dbt_name,
+                # these will get populated as metadata-only changes on the next plan
+                "name": "",
+                "fqn": "",
+            }
+            migration_needed = True
+
+        new_snapshots.append(
+            {
+                "name": name,
+                "identifier": identifier,
+                "version": version,
+                "snapshot": json.dumps(parsed_snapshot),
+                "kind_name": kind_name,
+                "updated_ts": updated_ts,
+                "unpaused_ts": unpaused_ts,
+                "ttl_ms": ttl_ms,
+                "unrestorable": unrestorable,
+                "forward_only": forward_only,
+                "dev_version": dev_version,
+                "fingerprint": fingerprint,
+            }
+        )
+
+    if migration_needed and new_snapshots:
+        engine_adapter.delete_from(snapshots_table, "TRUE")
+
+        engine_adapter.insert_append(
+            snapshots_table,
+            pd.DataFrame(new_snapshots),
+            target_columns_to_types={
+                "name": exp.DataType.build(index_type),
+                "identifier": exp.DataType.build(index_type),
+                "version": exp.DataType.build(index_type),
+                "snapshot": exp.DataType.build(blob_type),
+                "kind_name": exp.DataType.build(index_type),
+                "updated_ts": exp.DataType.build("bigint"),
+                "unpaused_ts": exp.DataType.build("bigint"),
+                "ttl_ms": exp.DataType.build("bigint"),
+                "unrestorable": exp.DataType.build("boolean"),
+                "forward_only": exp.DataType.build("boolean"),
+                "dev_version": exp.DataType.build(index_type),
+                "fingerprint": exp.DataType.build(blob_type),
+            },
+        )
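The row migration in the new file above boils down to a small JSON rewrite of each stored snapshot payload. A rough sketch of just that rewrite, using a made-up snapshot dict for illustration:

import json

before = {"node": {"name": "orders", "dbt_name": "model.my_project.orders"}}

parsed = json.loads(json.dumps(before))
if dbt_name := parsed["node"].get("dbt_name"):
    # 'dbt_name' is replaced by a 'dbt_node_info' object; name/fqn start empty
    # and get filled in as metadata-only changes on the next plan.
    parsed["node"].pop("dbt_name")
    parsed["node"]["dbt_node_info"] = {"unique_id": dbt_name, "name": "", "fqn": ""}

assert parsed["node"]["dbt_node_info"]["unique_id"] == "model.my_project.orders"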
@@ -0,0 +1,25 @@
+"""Add dev version to the intervals table."""
+
+from sqlglot import exp
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
+    intervals_table = "_intervals"
+    if schema:
+        intervals_table = f"{schema}.{intervals_table}"
+
+    alter_table_exp = exp.Alter(
+        this=exp.to_table(intervals_table),
+        kind="TABLE",
+        actions=[
+            exp.ColumnDef(
+                this=exp.to_column("last_altered_ts"),
+                kind=exp.DataType.build("BIGINT", dialect=engine_adapter.dialect),
+            )
+        ],
+    )
+    engine_adapter.execute(alter_table_exp)
+
+
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
+    pass
@@ -0,0 +1,9 @@
+"""Add grants and grants_target_layer to incremental model metadata hash."""
+
+
+def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
+    pass
+
+
+def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
+    pass
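All of the migration hunks above reflect the same interface change: each migration module's migrate_schemas and migrate_rows now receive the engine adapter and the state schema as explicit arguments instead of pulling them off a state_sync object. A minimal sketch of a migration module under the new contract, modeled on the v0099 migration shown above (the "_example" table and "example_col" column are hypothetical, used only for illustration):

from sqlglot import exp


def migrate_schemas(engine_adapter, schema, **kwargs):  # type: ignore
    # The state schema is now passed in directly; prefix state tables with it when set.
    example_table = "_example"
    if schema:
        example_table = f"{schema}.{example_table}"

    engine_adapter.execute(
        exp.Alter(
            this=exp.to_table(example_table),
            kind="TABLE",
            actions=[
                exp.ColumnDef(
                    this=exp.to_column("example_col"),
                    kind=exp.DataType.build("BIGINT", dialect=engine_adapter.dialect),
                )
            ],
        )
    )


def migrate_rows(engine_adapter, schema, **kwargs):  # type: ignore
    # Row-level backfills would go here; this hypothetical migration needs none.
    pass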
sqlmesh/utils/__init__.py CHANGED
@@ -21,6 +21,7 @@ from enum import IntEnum, Enum
 from functools import lru_cache, reduce, wraps
 from pathlib import Path

+import unicodedata
 from sqlglot import exp
 from sqlglot.dialects.dialect import Dialects

@@ -291,8 +292,14 @@ def sqlglot_dialects() -> str:

 NON_ALNUM = re.compile(r"[^a-zA-Z0-9_]")

+NON_ALUM_INCLUDE_UNICODE = re.compile(r"\W", flags=re.UNICODE)

-def sanitize_name(name: str) -> str:
+
+def sanitize_name(name: str, *, include_unicode: bool = False) -> str:
+    if include_unicode:
+        s = unicodedata.normalize("NFC", name)
+        s = NON_ALUM_INCLUDE_UNICODE.sub("_", s)
+        return s
     return NON_ALNUM.sub("_", name)
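A quick illustration of the new flag, assuming the two regexes above (the input string is an arbitrary example):

from sqlmesh.utils import sanitize_name

# Default behaviour: anything outside [a-zA-Z0-9_] is replaced, so non-ASCII letters are lost.
assert sanitize_name("modèle name.sql") == "mod_le_name_sql"

# With include_unicode=True the name is NFC-normalized and only non-word characters
# (whitespace, punctuation, ...) are replaced, so Unicode letters survive. The file
# cache change below passes include_unicode=True for cache entry file names.
assert sanitize_name("modèle name.sql", include_unicode=True) == "modèle_name_sql"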
sqlmesh/utils/cache.py CHANGED
@@ -59,6 +59,10 @@ class FileCache(t.Generic[T]):
         threshold = to_datetime("1 week ago").timestamp()
         # delete all old cache files
         for file in self._path.glob("*"):
+            if IS_WINDOWS:
+                # the file.stat() call below will fail on windows if the file name is longer than 260 chars
+                file = fix_windows_path(file)
+
             if not file.stem.startswith(self._cache_version) or file.stat().st_atime < threshold:
                 file.unlink(missing_ok=True)

@@ -133,7 +137,7 @@ class FileCache(t.Generic[T]):

     def _cache_entry_path(self, name: str, entry_id: str = "") -> Path:
         entry_file_name = "__".join(p for p in (self._cache_version, name, entry_id) if p)
-        full_path = self._path / sanitize_name(entry_file_name)
+        full_path = self._path / sanitize_name(entry_file_name, include_unicode=True)
         if IS_WINDOWS:
             # handle paths longer than 260 chars
             full_path = fix_windows_path(full_path)
sqlmesh/utils/date.py CHANGED
@@ -444,7 +444,7 @@ def to_time_column(


 def pandas_timestamp_to_pydatetime(
-    df: pd.DataFrame, columns_to_types: t.Optional[t.Dict[str, exp.DataType]]
+    df: pd.DataFrame, columns_to_types: t.Optional[t.Dict[str, exp.DataType]] = None
 ) -> pd.DataFrame:
     import pandas as pd
     from pandas.api.types import is_datetime64_any_dtype  # type: ignore
sqlmesh/utils/errors.py CHANGED
@@ -151,6 +151,10 @@ class AdditiveChangeError(SQLMeshError):
     pass


+class MigrationNotSupportedError(SQLMeshError):
+    pass
+
+
 class NotificationTargetError(SQLMeshError):
     pass

sqlmesh/utils/jinja.py CHANGED
@@ -133,6 +133,12 @@ def find_call_names(node: nodes.Node, vars_in_scope: t.Set[str]) -> t.Iterator[C
     vars_in_scope = vars_in_scope.copy()
     for child_node in node.iter_child_nodes():
         if "target" in child_node.fields:
+            # For nodes with assignment targets (Assign, AssignBlock, For, Import),
+            # the target name could shadow a reference in the right hand side.
+            # So we need to process the RHS before adding the target to scope.
+            # For example: {% set model = model.path %} should track model.path.
+            yield from find_call_names(child_node, vars_in_scope)
+
             target = getattr(child_node, "target")
             if isinstance(target, nodes.Name):
                 vars_in_scope.add(target.name)
@@ -149,7 +155,9 @@ def find_call_names(node: nodes.Node, vars_in_scope: t.Set[str]) -> t.Iterator[C
             name = call_name(child_node)
             if name[0][0] != "'" and name[0] not in vars_in_scope:
                 yield (name, child_node)
-        yield from find_call_names(child_node, vars_in_scope)
+
+        if "target" not in child_node.fields:
+            yield from find_call_names(child_node, vars_in_scope)


 def extract_call_names(
@@ -206,6 +214,20 @@ def extract_macro_references_and_variables(
     return macro_references, variables


+def sort_dict_recursive(
+    item: t.Dict[str, t.Any],
+) -> t.Dict[str, t.Any]:
+    sorted_dict: t.Dict[str, t.Any] = {}
+    for k, v in sorted(item.items()):
+        if isinstance(v, list):
+            sorted_dict[k] = sorted(v)
+        elif isinstance(v, dict):
+            sorted_dict[k] = sort_dict_recursive(v)
+        else:
+            sorted_dict[k] = v
+    return sorted_dict
+
+
 JinjaGlobalAttribute = t.Union[str, int, float, bool, AttributeDict]


@@ -355,6 +377,7 @@ class JinjaMacroRegistry(PydanticModel):
         context.update(builtin_globals)
         context.update(root_macros)
         context.update(package_macros)
+        context["render"] = lambda input: env.from_string(input).render()

         env.globals.update(context)
         env.filters.update(self._environment.filters)
@@ -440,7 +463,7 @@ class JinjaMacroRegistry(PydanticModel):
                 d.PythonCode(
                     expressions=[
                         f"{k} = '{v}'" if isinstance(v, str) else f"{k} = {v}"
-                        for k, v in sorted(filtered_objs.items())
+                        for k, v in sort_dict_recursive(filtered_objs).items()
                     ]
                 )
             )
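A small sketch of what the new sort_dict_recursive helper does to the serialized globals (the dict below is a made-up example):

from sqlmesh.utils.jinja import sort_dict_recursive

example = {"b": {"z": 1, "a": 2}, "a": [3, 1, 2]}

# Keys are ordered at every nesting level and list values are sorted, so the
# PythonCode expressions generated above no longer depend on insertion order.
assert list(sort_dict_recursive(example).items()) == [("a", [1, 2, 3]), ("b", {"a": 2, "z": 1})]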
sqlmesh/utils/pydantic.py CHANGED
@@ -289,13 +289,13 @@ def column_validator(v: t.Any, values: t.Any) -> exp.Column:
     return expression


-def list_of_columns_or_star_validator(
+def list_of_fields_or_star_validator(
     v: t.Any, values: t.Any
-) -> t.Union[exp.Star, t.List[exp.Column]]:
+) -> t.Union[exp.Star, t.List[exp.Expression]]:
     expressions = _get_fields(v, values)
     if len(expressions) == 1 and isinstance(expressions[0], exp.Star):
         return t.cast(exp.Star, expressions[0])
-    return t.cast(t.List[exp.Column], expressions)
+    return t.cast(t.List[exp.Expression], expressions)


 def cron_validator(v: t.Any) -> str:
@@ -339,7 +339,7 @@ if t.TYPE_CHECKING:
     SQLGlotPositiveInt = int
     SQLGlotColumn = exp.Column
     SQLGlotListOfFields = t.List[exp.Expression]
-    SQLGlotListOfColumnsOrStar = t.Union[t.List[exp.Column], exp.Star]
+    SQLGlotListOfFieldsOrStar = t.Union[SQLGlotListOfFields, exp.Star]
     SQLGlotCron = str
 else:
     from pydantic.functional_validators import BeforeValidator
@@ -352,7 +352,7 @@ else:
     SQLGlotListOfFields = t.Annotated[
         t.List[exp.Expression], BeforeValidator(list_of_fields_validator)
     ]
-    SQLGlotListOfColumnsOrStar = t.Annotated[
-        t.Union[t.List[exp.Column], exp.Star], BeforeValidator(list_of_columns_or_star_validator)
+    SQLGlotListOfFieldsOrStar = t.Annotated[
+        t.Union[SQLGlotListOfFields, exp.Star], BeforeValidator(list_of_fields_or_star_validator)
     ]
     SQLGlotCron = t.Annotated[str, BeforeValidator(cron_validator)]
sqlmesh/utils/windows.py CHANGED
@@ -3,12 +3,22 @@ from pathlib import Path

 IS_WINDOWS = platform.system() == "Windows"

+WINDOWS_LONGPATH_PREFIX = "\\\\?\\"
+

 def fix_windows_path(path: Path) -> Path:
     """
     Windows paths are limited to 260 characters: https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation
     Users can change this by updating a registry entry but we cant rely on that.
-    We can quite commonly generate a cache file path that exceeds 260 characters which causes a FileNotFound error.
-    If we prefix the path with "\\?\" then we can have paths up to 32,767 characters
+
+    SQLMesh quite commonly generates cache file paths that exceed 260 characters and thus cause a FileNotFound error.
+    If we prefix paths with "\\?\" then we can have paths up to 32,767 characters.
+
+    Note that this prefix also means that relative paths no longer work. From the above docs:
+    > Because you cannot use the "\\?\" prefix with a relative path, relative paths are always limited to a total of MAX_PATH characters.
+
+    So we also call path.resolve() to resolve the relative sections so that operations like `path.read_text()` continue to work
     """
-    return Path("\\\\?\\" + str(path.absolute()))
+    if path.parts and not path.parts[0].startswith(WINDOWS_LONGPATH_PREFIX):
+        path = Path(WINDOWS_LONGPATH_PREFIX + str(path.absolute()))
+    return path.resolve()
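For context, a minimal usage sketch on Windows (the deeply nested cache path is hypothetical):

from pathlib import Path

from sqlmesh.utils.windows import IS_WINDOWS, fix_windows_path

# A relative cache path whose absolute form exceeds the 260-character MAX_PATH limit.
cache_file = Path(".cache").joinpath(*["a_fairly_long_directory_name"] * 10) / "entry.json"

if IS_WINDOWS:
    # Prefixes the absolute path with "\\?\" and resolves it, so operations like
    # cache_file.stat() or cache_file.read_text() keep working past the limit.
    cache_file = fix_windows_path(cache_file)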
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlmesh
-Version: 0.217.1.dev1
+Version: 0.227.2.dev4
 Summary: Next-generation data transformation framework
 Author-email: "TobikoData Inc." <engineering@tobikodata.com>
 License: Apache License
@@ -235,7 +235,7 @@ Requires-Dist: python-dotenv
 Requires-Dist: requests
 Requires-Dist: rich[jupyter]
 Requires-Dist: ruamel.yaml
-Requires-Dist: sqlglot[rs]~=27.13.2
+Requires-Dist: sqlglot[rs]~=27.28.0
 Requires-Dist: tenacity
 Requires-Dist: time-machine
 Requires-Dist: json-stream
@@ -258,7 +258,7 @@ Provides-Extra: dev
 Requires-Dist: agate; extra == "dev"
 Requires-Dist: beautifulsoup4; extra == "dev"
 Requires-Dist: clickhouse-connect; extra == "dev"
-Requires-Dist: cryptography; extra == "dev"
+Requires-Dist: cryptography<46.0.0; extra == "dev"
 Requires-Dist: databricks-sql-connector; extra == "dev"
 Requires-Dist: dbt-bigquery; extra == "dev"
 Requires-Dist: dbt-core; extra == "dev"
@@ -275,6 +275,7 @@ Requires-Dist: google-cloud-bigquery; extra == "dev"
 Requires-Dist: google-cloud-bigquery-storage; extra == "dev"
 Requires-Dist: httpx; extra == "dev"
 Requires-Dist: mypy~=1.13.0; extra == "dev"
+Requires-Dist: numpy; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
 Requires-Dist: pre-commit; extra == "dev"
 Requires-Dist: psycopg2-binary; extra == "dev"
@@ -330,7 +331,7 @@ Requires-Dist: redshift_connector; extra == "redshift"
 Provides-Extra: slack
 Requires-Dist: slack_sdk; extra == "slack"
 Provides-Extra: snowflake
-Requires-Dist: cryptography; extra == "snowflake"
+Requires-Dist: cryptography<46.0.0; extra == "snowflake"
 Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "snowflake"
 Requires-Dist: snowflake-snowpark-python; extra == "snowflake"
 Provides-Extra: trino
@@ -344,7 +345,6 @@ Requires-Dist: pyarrow; extra == "web"
 Provides-Extra: lsp
 Requires-Dist: fastapi==0.115.5; extra == "lsp"
 Requires-Dist: watchfiles>=0.19.0; extra == "lsp"
-Requires-Dist: uvicorn[standard]==0.22.0; extra == "lsp"
 Requires-Dist: sse-starlette>=0.2.2; extra == "lsp"
 Requires-Dist: pyarrow; extra == "lsp"
 Requires-Dist: pygls<2.0.0,>=1.2.0; extra == "lsp"