altimate-code 0.5.2 → 0.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. package/CHANGELOG.md +12 -0
  2. package/bin/altimate +6 -0
  3. package/bin/altimate-code +6 -0
  4. package/dbt-tools/bin/altimate-dbt +2 -0
  5. package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/__init__.py +0 -0
  6. package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/fetch_schema.py +35 -0
  7. package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/utils.py +353 -0
  8. package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/validate_sql.py +114 -0
  9. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/__init__.py +178 -0
  10. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/__main__.py +96 -0
  11. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/_typing.py +17 -0
  12. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/__init__.py +3 -0
  13. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/__init__.py +18 -0
  14. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/_typing.py +18 -0
  15. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/column.py +332 -0
  16. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/dataframe.py +866 -0
  17. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/functions.py +1267 -0
  18. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/group.py +59 -0
  19. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/normalize.py +78 -0
  20. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/operations.py +53 -0
  21. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/readwriter.py +108 -0
  22. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/session.py +190 -0
  23. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/transforms.py +9 -0
  24. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/types.py +212 -0
  25. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/util.py +32 -0
  26. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/window.py +134 -0
  27. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/__init__.py +118 -0
  28. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/athena.py +166 -0
  29. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/bigquery.py +1331 -0
  30. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/clickhouse.py +1393 -0
  31. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/databricks.py +131 -0
  32. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/dialect.py +1915 -0
  33. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/doris.py +561 -0
  34. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/drill.py +157 -0
  35. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/druid.py +20 -0
  36. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/duckdb.py +1159 -0
  37. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/dune.py +16 -0
  38. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/hive.py +787 -0
  39. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/materialize.py +94 -0
  40. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/mysql.py +1324 -0
  41. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/oracle.py +378 -0
  42. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/postgres.py +778 -0
  43. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/presto.py +788 -0
  44. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/prql.py +203 -0
  45. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/redshift.py +448 -0
  46. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/risingwave.py +78 -0
  47. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/snowflake.py +1464 -0
  48. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/spark.py +202 -0
  49. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/spark2.py +349 -0
  50. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/sqlite.py +320 -0
  51. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/starrocks.py +343 -0
  52. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/tableau.py +61 -0
  53. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/teradata.py +356 -0
  54. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/trino.py +115 -0
  55. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/tsql.py +1403 -0
  56. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/diff.py +456 -0
  57. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/errors.py +93 -0
  58. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/__init__.py +95 -0
  59. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/context.py +101 -0
  60. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/env.py +246 -0
  61. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/python.py +460 -0
  62. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/table.py +155 -0
  63. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/expressions.py +8870 -0
  64. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/generator.py +4993 -0
  65. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/helper.py +582 -0
  66. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/jsonpath.py +227 -0
  67. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/lineage.py +423 -0
  68. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/__init__.py +11 -0
  69. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/annotate_types.py +589 -0
  70. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/canonicalize.py +222 -0
  71. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/eliminate_ctes.py +43 -0
  72. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/eliminate_joins.py +181 -0
  73. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/eliminate_subqueries.py +189 -0
  74. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/isolate_table_selects.py +50 -0
  75. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/merge_subqueries.py +415 -0
  76. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/normalize.py +200 -0
  77. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/normalize_identifiers.py +64 -0
  78. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/optimize_joins.py +91 -0
  79. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/optimizer.py +94 -0
  80. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/pushdown_predicates.py +222 -0
  81. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/pushdown_projections.py +172 -0
  82. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/qualify.py +104 -0
  83. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/qualify_columns.py +1024 -0
  84. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/qualify_tables.py +155 -0
  85. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/scope.py +904 -0
  86. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/simplify.py +1587 -0
  87. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/unnest_subqueries.py +302 -0
  88. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/parser.py +8501 -0
  89. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/planner.py +463 -0
  90. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/schema.py +588 -0
  91. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/serde.py +68 -0
  92. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/time.py +687 -0
  93. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/tokens.py +1520 -0
  94. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/transforms.py +1020 -0
  95. package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/trie.py +81 -0
  96. package/dbt-tools/dist/altimate_python_packages/dbt_core_integration.py +825 -0
  97. package/dbt-tools/dist/altimate_python_packages/dbt_utils.py +157 -0
  98. package/dbt-tools/dist/index.js +23859 -0
  99. package/package.json +13 -13
  100. package/postinstall.mjs +42 -0
  101. package/skills/altimate-setup/SKILL.md +31 -0
@@ -0,0 +1,1403 @@
1
+ from __future__ import annotations
2
+
3
+ import datetime
4
+ import re
5
+ import typing as t
6
+ from functools import partial, reduce
7
+
8
+ from sqlglot import exp, generator, parser, tokens, transforms
9
+ from sqlglot.dialects.dialect import (
10
+ Dialect,
11
+ NormalizationStrategy,
12
+ any_value_to_max_sql,
13
+ build_date_delta,
14
+ date_delta_sql,
15
+ datestrtodate_sql,
16
+ generatedasidentitycolumnconstraint_sql,
17
+ max_or_greatest,
18
+ min_or_least,
19
+ rename_func,
20
+ strposition_sql,
21
+ timestrtotime_sql,
22
+ trim_sql,
23
+ )
24
+ from sqlglot.helper import seq_get
25
+ from sqlglot.parser import build_coalesce
26
+ from sqlglot.time import format_time
27
+ from sqlglot.tokens import TokenType
28
+
29
+ if t.TYPE_CHECKING:
30
+ from sqlglot._typing import E
31
+
32
+ FULL_FORMAT_TIME_MAPPING = {
33
+ "weekday": "%A",
34
+ "dw": "%A",
35
+ "w": "%A",
36
+ "month": "%B",
37
+ "mm": "%B",
38
+ "m": "%B",
39
+ }
40
+
41
+ DATE_DELTA_INTERVAL = {
42
+ "year": "year",
43
+ "yyyy": "year",
44
+ "yy": "year",
45
+ "quarter": "quarter",
46
+ "qq": "quarter",
47
+ "q": "quarter",
48
+ "month": "month",
49
+ "mm": "month",
50
+ "m": "month",
51
+ "week": "week",
52
+ "ww": "week",
53
+ "wk": "week",
54
+ "day": "day",
55
+ "dd": "day",
56
+ "d": "day",
57
+ }
58
+
59
+
60
+ DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")
61
+
62
+ # N = Numeric, C=Currency
63
+ TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}
64
+
65
+ DEFAULT_START_DATE = datetime.date(1900, 1, 1)
66
+
67
+ BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}
68
+
69
+ # Unsupported options:
70
+ # - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
71
+ # - TABLE HINT
72
+ OPTIONS: parser.OPTIONS_TYPE = {
73
+ **dict.fromkeys(
74
+ (
75
+ "DISABLE_OPTIMIZED_PLAN_FORCING",
76
+ "FAST",
77
+ "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
78
+ "LABEL",
79
+ "MAXDOP",
80
+ "MAXRECURSION",
81
+ "MAX_GRANT_PERCENT",
82
+ "MIN_GRANT_PERCENT",
83
+ "NO_PERFORMANCE_SPOOL",
84
+ "QUERYTRACEON",
85
+ "RECOMPILE",
86
+ ),
87
+ tuple(),
88
+ ),
89
+ "CONCAT": ("UNION",),
90
+ "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
91
+ "EXPAND": ("VIEWS",),
92
+ "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
93
+ "HASH": ("GROUP", "JOIN", "UNION"),
94
+ "KEEP": ("PLAN",),
95
+ "KEEPFIXED": ("PLAN",),
96
+ "LOOP": ("JOIN",),
97
+ "MERGE": ("JOIN", "UNION"),
98
+ "OPTIMIZE": (("FOR", "UNKNOWN"),),
99
+ "ORDER": ("GROUP",),
100
+ "PARAMETERIZATION": ("FORCED", "SIMPLE"),
101
+ "ROBUST": ("PLAN",),
102
+ "USE": ("PLAN",),
103
+ }
104
+
105
+
106
+ XML_OPTIONS: parser.OPTIONS_TYPE = {
107
+ **dict.fromkeys(
108
+ (
109
+ "AUTO",
110
+ "EXPLICIT",
111
+ "TYPE",
112
+ ),
113
+ tuple(),
114
+ ),
115
+ "ELEMENTS": (
116
+ "XSINIL",
117
+ "ABSENT",
118
+ ),
119
+ "BINARY": ("BASE64",),
120
+ }
121
+
122
+
123
+ OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")
124
+
125
+
126
+ def _build_formatted_time(
127
+ exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
128
+ ) -> t.Callable[[t.List], E]:
129
+ def _builder(args: t.List) -> E:
130
+ fmt = seq_get(args, 0)
131
+ if isinstance(fmt, exp.Expression):
132
+ fmt = exp.Literal.string(
133
+ format_time(
134
+ fmt.name.lower(),
135
+ (
136
+ {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
137
+ if full_format_mapping
138
+ else TSQL.TIME_MAPPING
139
+ ),
140
+ )
141
+ )
142
+
143
+ this = seq_get(args, 1)
144
+ if isinstance(this, exp.Expression):
145
+ this = exp.cast(this, exp.DataType.Type.DATETIME2)
146
+
147
+ return exp_class(this=this, format=fmt)
148
+
149
+ return _builder
150
+
151
+
152
+ def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
153
+ this = seq_get(args, 0)
154
+ fmt = seq_get(args, 1)
155
+ culture = seq_get(args, 2)
156
+
157
+ number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))
158
+
159
+ if number_fmt:
160
+ return exp.NumberToStr(this=this, format=fmt, culture=culture)
161
+
162
+ if fmt:
163
+ fmt = exp.Literal.string(
164
+ format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
165
+ if len(fmt.name) == 1
166
+ else format_time(fmt.name, TSQL.TIME_MAPPING)
167
+ )
168
+
169
+ return exp.TimeToStr(this=this, format=fmt, culture=culture)
170
+
171
+
172
+ def _build_eomonth(args: t.List) -> exp.LastDay:
173
+ date = exp.TsOrDsToDate(this=seq_get(args, 0))
174
+ month_lag = seq_get(args, 1)
175
+
176
+ if month_lag is None:
177
+ this: exp.Expression = date
178
+ else:
179
+ unit = DATE_DELTA_INTERVAL.get("month")
180
+ this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))
181
+
182
+ return exp.LastDay(this=this)
183
+
184
+
185
+ def _build_hashbytes(args: t.List) -> exp.Expression:
186
+ kind, data = args
187
+ kind = kind.name.upper() if kind.is_string else ""
188
+
189
+ if kind == "MD5":
190
+ args.pop(0)
191
+ return exp.MD5(this=data)
192
+ if kind in ("SHA", "SHA1"):
193
+ args.pop(0)
194
+ return exp.SHA(this=data)
195
+ if kind == "SHA2_256":
196
+ return exp.SHA2(this=data, length=exp.Literal.number(256))
197
+ if kind == "SHA2_512":
198
+ return exp.SHA2(this=data, length=exp.Literal.number(512))
199
+
200
+ return exp.func("HASHBYTES", *args)
201
+
202
+
203
+ DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}
204
+
205
+
206
+ def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
207
+ fmt = expression.args["format"]
208
+
209
+ if not isinstance(expression, exp.NumberToStr):
210
+ if fmt.is_string:
211
+ mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)
212
+
213
+ name = (mapped_fmt or "").upper()
214
+ if name in DATEPART_ONLY_FORMATS:
215
+ return self.func("DATEPART", name, expression.this)
216
+
217
+ fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
218
+ else:
219
+ fmt_sql = self.format_time(expression) or self.sql(fmt)
220
+ else:
221
+ fmt_sql = self.sql(fmt)
222
+
223
+ return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))
224
+
225
+
226
+ def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
227
+ this = expression.this
228
+ distinct = expression.find(exp.Distinct)
229
+ if distinct:
230
+ # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
231
+ self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
232
+ this = distinct.pop().expressions[0]
233
+
234
+ order = ""
235
+ if isinstance(expression.this, exp.Order):
236
+ if expression.this.this:
237
+ this = expression.this.this.pop()
238
+ # Order has a leading space
239
+ order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"
240
+
241
+ separator = expression.args.get("separator") or exp.Literal.string(",")
242
+ return f"STRING_AGG({self.format_args(this, separator)}){order}"
243
+
244
+
245
+ def _build_date_delta(
246
+ exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
247
+ ) -> t.Callable[[t.List], E]:
248
+ def _builder(args: t.List) -> E:
249
+ unit = seq_get(args, 0)
250
+ if unit and unit_mapping:
251
+ unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))
252
+
253
+ start_date = seq_get(args, 1)
254
+ if start_date and start_date.is_number:
255
+ # Numeric types are valid DATETIME values
256
+ if start_date.is_int:
257
+ adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
258
+ start_date = exp.Literal.string(adds.strftime("%F"))
259
+ else:
260
+ # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
261
+ # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
262
+ return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)
263
+
264
+ return exp_class(
265
+ this=exp.TimeStrToTime(this=seq_get(args, 2)),
266
+ expression=exp.TimeStrToTime(this=start_date),
267
+ unit=unit,
268
+ )
269
+
270
+ return _builder
271
+
272
+
273
+ def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
274
+ """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
275
+ alias = expression.args.get("alias")
276
+
277
+ if (
278
+ isinstance(expression, (exp.CTE, exp.Subquery))
279
+ and isinstance(alias, exp.TableAlias)
280
+ and not alias.columns
281
+ ):
282
+ from sqlglot.optimizer.qualify_columns import qualify_outputs
283
+
284
+ # We keep track of the unaliased column projection indexes instead of the expressions
285
+ # themselves, because the latter are going to be replaced by new nodes when the aliases
286
+ # are added and hence we won't be able to reach these newly added Alias parents
287
+ query = expression.this
288
+ unaliased_column_indexes = (
289
+ i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
290
+ )
291
+
292
+ qualify_outputs(query)
293
+
294
+ # Preserve the quoting information of columns for newly added Alias nodes
295
+ query_selects = query.selects
296
+ for select_index in unaliased_column_indexes:
297
+ alias = query_selects[select_index]
298
+ column = alias.this
299
+ if isinstance(column.this, exp.Identifier):
300
+ alias.args["alias"].set("quoted", column.this.quoted)
301
+
302
+ return expression
303
+
304
+
305
+ # https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
306
+ def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
307
+ return exp.TimestampFromParts(
308
+ year=seq_get(args, 0),
309
+ month=seq_get(args, 1),
310
+ day=seq_get(args, 2),
311
+ hour=seq_get(args, 3),
312
+ min=seq_get(args, 4),
313
+ sec=seq_get(args, 5),
314
+ milli=seq_get(args, 6),
315
+ )
316
+
317
+
318
+ # https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
319
+ def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
320
+ return exp.TimeFromParts(
321
+ hour=seq_get(args, 0),
322
+ min=seq_get(args, 1),
323
+ sec=seq_get(args, 2),
324
+ fractions=seq_get(args, 3),
325
+ precision=seq_get(args, 4),
326
+ )
327
+
328
+
329
+ def _build_with_arg_as_text(
330
+ klass: t.Type[exp.Expression],
331
+ ) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
332
+ def _parse(args: t.List[exp.Expression]) -> exp.Expression:
333
+ this = seq_get(args, 0)
334
+
335
+ if this and not this.is_string:
336
+ this = exp.cast(this, exp.DataType.Type.TEXT)
337
+
338
+ expression = seq_get(args, 1)
339
+ kwargs = {"this": this}
340
+
341
+ if expression:
342
+ kwargs["expression"] = expression
343
+
344
+ return klass(**kwargs)
345
+
346
+ return _parse
347
+
348
+
349
+ # https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
350
+ def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
351
+ # PARSENAME(...) will be stored into exp.SplitPart if:
352
+ # - All args are literals
353
+ # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
354
+ if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
355
+ this = args[0]
356
+ part_index = args[1]
357
+ split_count = len(this.name.split("."))
358
+ if split_count <= 4:
359
+ return exp.SplitPart(
360
+ this=this,
361
+ delimiter=exp.Literal.string("."),
362
+ part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
363
+ )
364
+
365
+ return exp.Anonymous(this="PARSENAME", expressions=args)
366
+
367
+
368
+ def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
369
+ if len(args) == 1:
370
+ # The default value for path is '$'. As a result, if you don't provide a
371
+ # value for path, JSON_QUERY returns the input expression.
372
+ args.append(exp.Literal.string("$"))
373
+
374
+ return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)
375
+
376
+
377
+ def _json_extract_sql(
378
+ self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar
379
+ ) -> str:
380
+ json_query = self.func("JSON_QUERY", expression.this, expression.expression)
381
+ json_value = self.func("JSON_VALUE", expression.this, expression.expression)
382
+ return self.func("ISNULL", json_query, json_value)
383
+
384
+
385
+ def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime):
386
+ sql = timestrtotime_sql(self, expression)
387
+ if expression.args.get("zone"):
388
+ # If there is a timezone, produce an expression like:
389
+ # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC'
390
+ # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information
391
+ return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC")))
392
+ return sql
393
+
394
+
395
+ def _build_datetrunc(args: t.List) -> exp.TimestampTrunc:
396
+ unit = seq_get(args, 0)
397
+ this = seq_get(args, 1)
398
+
399
+ if this and this.is_string:
400
+ this = exp.cast(this, exp.DataType.Type.DATETIME2)
401
+
402
+ return exp.TimestampTrunc(this=this, unit=unit)
403
+
404
+
405
+ class TSQL(Dialect):
406
+ SUPPORTS_SEMI_ANTI_JOIN = False
407
+ LOG_BASE_FIRST = False
408
+ TYPED_DIVISION = True
409
+ CONCAT_COALESCE = True
410
+ NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
411
+ ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
412
+
413
+ TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"
414
+
415
+ TIME_MAPPING = {
416
+ "year": "%Y",
417
+ "dayofyear": "%j",
418
+ "day": "%d",
419
+ "dy": "%d",
420
+ "y": "%Y",
421
+ "week": "%W",
422
+ "ww": "%W",
423
+ "wk": "%W",
424
+ "hour": "%h",
425
+ "hh": "%I",
426
+ "minute": "%M",
427
+ "mi": "%M",
428
+ "n": "%M",
429
+ "second": "%S",
430
+ "ss": "%S",
431
+ "s": "%-S",
432
+ "millisecond": "%f",
433
+ "ms": "%f",
434
+ "weekday": "%w",
435
+ "dw": "%w",
436
+ "month": "%m",
437
+ "mm": "%M",
438
+ "m": "%-M",
439
+ "Y": "%Y",
440
+ "YYYY": "%Y",
441
+ "YY": "%y",
442
+ "MMMM": "%B",
443
+ "MMM": "%b",
444
+ "MM": "%m",
445
+ "M": "%-m",
446
+ "dddd": "%A",
447
+ "dd": "%d",
448
+ "d": "%-d",
449
+ "HH": "%H",
450
+ "H": "%-H",
451
+ "h": "%-I",
452
+ "ffffff": "%f",
453
+ "yyyy": "%Y",
454
+ "yy": "%y",
455
+ }
456
+
457
+ CONVERT_FORMAT_MAPPING = {
458
+ "0": "%b %d %Y %-I:%M%p",
459
+ "1": "%m/%d/%y",
460
+ "2": "%y.%m.%d",
461
+ "3": "%d/%m/%y",
462
+ "4": "%d.%m.%y",
463
+ "5": "%d-%m-%y",
464
+ "6": "%d %b %y",
465
+ "7": "%b %d, %y",
466
+ "8": "%H:%M:%S",
467
+ "9": "%b %d %Y %-I:%M:%S:%f%p",
468
+ "10": "mm-dd-yy",
469
+ "11": "yy/mm/dd",
470
+ "12": "yymmdd",
471
+ "13": "%d %b %Y %H:%M:ss:%f",
472
+ "14": "%H:%M:%S:%f",
473
+ "20": "%Y-%m-%d %H:%M:%S",
474
+ "21": "%Y-%m-%d %H:%M:%S.%f",
475
+ "22": "%m/%d/%y %-I:%M:%S %p",
476
+ "23": "%Y-%m-%d",
477
+ "24": "%H:%M:%S",
478
+ "25": "%Y-%m-%d %H:%M:%S.%f",
479
+ "100": "%b %d %Y %-I:%M%p",
480
+ "101": "%m/%d/%Y",
481
+ "102": "%Y.%m.%d",
482
+ "103": "%d/%m/%Y",
483
+ "104": "%d.%m.%Y",
484
+ "105": "%d-%m-%Y",
485
+ "106": "%d %b %Y",
486
+ "107": "%b %d, %Y",
487
+ "108": "%H:%M:%S",
488
+ "109": "%b %d %Y %-I:%M:%S:%f%p",
489
+ "110": "%m-%d-%Y",
490
+ "111": "%Y/%m/%d",
491
+ "112": "%Y%m%d",
492
+ "113": "%d %b %Y %H:%M:%S:%f",
493
+ "114": "%H:%M:%S:%f",
494
+ "120": "%Y-%m-%d %H:%M:%S",
495
+ "121": "%Y-%m-%d %H:%M:%S.%f",
496
+ "126": "%Y-%m-%dT%H:%M:%S.%f",
497
+ }
498
+
499
+ FORMAT_TIME_MAPPING = {
500
+ "y": "%B %Y",
501
+ "d": "%m/%d/%Y",
502
+ "H": "%-H",
503
+ "h": "%-I",
504
+ "s": "%Y-%m-%d %H:%M:%S",
505
+ "D": "%A,%B,%Y",
506
+ "f": "%A,%B,%Y %-I:%M %p",
507
+ "F": "%A,%B,%Y %-I:%M:%S %p",
508
+ "g": "%m/%d/%Y %-I:%M %p",
509
+ "G": "%m/%d/%Y %-I:%M:%S %p",
510
+ "M": "%B %-d",
511
+ "m": "%B %-d",
512
+ "O": "%Y-%m-%dT%H:%M:%S",
513
+ "u": "%Y-%M-%D %H:%M:%S%z",
514
+ "U": "%A, %B %D, %Y %H:%M:%S%z",
515
+ "T": "%-I:%M:%S %p",
516
+ "t": "%-I:%M",
517
+ "Y": "%a %Y",
518
+ }
519
+
520
+ class Tokenizer(tokens.Tokenizer):
521
+ IDENTIFIERS = [("[", "]"), '"']
522
+ QUOTES = ["'", '"']
523
+ HEX_STRINGS = [("0x", ""), ("0X", "")]
524
+ VAR_SINGLE_TOKENS = {"@", "$", "#"}
525
+
526
+ KEYWORDS = {
527
+ **tokens.Tokenizer.KEYWORDS,
528
+ "CLUSTERED INDEX": TokenType.INDEX,
529
+ "DATETIME2": TokenType.DATETIME2,
530
+ "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
531
+ "DECLARE": TokenType.DECLARE,
532
+ "EXEC": TokenType.COMMAND,
533
+ "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
534
+ "GO": TokenType.COMMAND,
535
+ "IMAGE": TokenType.IMAGE,
536
+ "MONEY": TokenType.MONEY,
537
+ "NONCLUSTERED INDEX": TokenType.INDEX,
538
+ "NTEXT": TokenType.TEXT,
539
+ "OPTION": TokenType.OPTION,
540
+ "OUTPUT": TokenType.RETURNING,
541
+ "PRINT": TokenType.COMMAND,
542
+ "PROC": TokenType.PROCEDURE,
543
+ "REAL": TokenType.FLOAT,
544
+ "ROWVERSION": TokenType.ROWVERSION,
545
+ "SMALLDATETIME": TokenType.SMALLDATETIME,
546
+ "SMALLMONEY": TokenType.SMALLMONEY,
547
+ "SQL_VARIANT": TokenType.VARIANT,
548
+ "SYSTEM_USER": TokenType.CURRENT_USER,
549
+ "TOP": TokenType.TOP,
550
+ "TIMESTAMP": TokenType.ROWVERSION,
551
+ "TINYINT": TokenType.UTINYINT,
552
+ "UNIQUEIDENTIFIER": TokenType.UUID,
553
+ "UPDATE STATISTICS": TokenType.COMMAND,
554
+ "XML": TokenType.XML,
555
+ }
556
+ KEYWORDS.pop("/*+")
557
+
558
+ COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
559
+
560
+ class Parser(parser.Parser):
561
+ SET_REQUIRES_ASSIGNMENT_DELIMITER = False
562
+ LOG_DEFAULTS_TO_LN = True
563
+ STRING_ALIASES = True
564
+ NO_PAREN_IF_COMMANDS = False
565
+
566
+ QUERY_MODIFIER_PARSERS = {
567
+ **parser.Parser.QUERY_MODIFIER_PARSERS,
568
+ TokenType.OPTION: lambda self: ("options", self._parse_options()),
569
+ TokenType.FOR: lambda self: ("for", self._parse_for()),
570
+ }
571
+
572
+ # T-SQL does not allow BEGIN to be used as an identifier
573
+ ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
574
+ ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
575
+ TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
576
+ COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
577
+ UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}
578
+
579
+ FUNCTIONS = {
580
+ **parser.Parser.FUNCTIONS,
581
+ "CHARINDEX": lambda args: exp.StrPosition(
582
+ this=seq_get(args, 1),
583
+ substr=seq_get(args, 0),
584
+ position=seq_get(args, 2),
585
+ ),
586
+ "COUNT": lambda args: exp.Count(
587
+ this=seq_get(args, 0), expressions=args[1:], big_int=False
588
+ ),
589
+ "COUNT_BIG": lambda args: exp.Count(
590
+ this=seq_get(args, 0), expressions=args[1:], big_int=True
591
+ ),
592
+ "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
593
+ "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
594
+ "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
595
+ "DATEPART": _build_formatted_time(exp.TimeToStr),
596
+ "DATETIMEFROMPARTS": _build_datetimefromparts,
597
+ "EOMONTH": _build_eomonth,
598
+ "FORMAT": _build_format,
599
+ "GETDATE": exp.CurrentTimestamp.from_arg_list,
600
+ "HASHBYTES": _build_hashbytes,
601
+ "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
602
+ "JSON_QUERY": _build_json_query,
603
+ "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
604
+ "LEN": _build_with_arg_as_text(exp.Length),
605
+ "LEFT": _build_with_arg_as_text(exp.Left),
606
+ "NEWID": exp.Uuid.from_arg_list,
607
+ "RIGHT": _build_with_arg_as_text(exp.Right),
608
+ "PARSENAME": _build_parsename,
609
+ "REPLICATE": exp.Repeat.from_arg_list,
610
+ "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
611
+ "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
612
+ "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
613
+ "SUSER_NAME": exp.CurrentUser.from_arg_list,
614
+ "SUSER_SNAME": exp.CurrentUser.from_arg_list,
615
+ "SYSTEM_USER": exp.CurrentUser.from_arg_list,
616
+ "TIMEFROMPARTS": _build_timefromparts,
617
+ "DATETRUNC": _build_datetrunc,
618
+ }
619
+
620
# Join hints that may appear between a join type and JOIN, e.g. INNER HASH JOIN.
JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

# CREATE PROCEDURE ... WITH <option>; none of these options take arguments.
PROCEDURE_OPTIONS = dict.fromkeys(
    ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
)

# Trailing modes allowed on procedure parameter definitions.
COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"}

# Tokens that can name the table variable in RETURNS <var> TABLE (...);
# excludes TABLE itself and type keywords so the type is not misread as a name.
RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
    TokenType.TABLE,
    *parser.Parser.TYPE_TOKENS,
}

STATEMENT_PARSERS = {
    **parser.Parser.STATEMENT_PARSERS,
    TokenType.DECLARE: lambda self: self._parse_declare(),
}

RANGE_PARSERS = {
    **parser.Parser.RANGE_PARSERS,
    # '::' at range position is scope resolution (e.g. hierarchyid::GetRoot()).
    TokenType.DCOLON: lambda self, this: self.expression(
        exp.ScopeResolution,
        this=this,
        expression=self._parse_function() or self._parse_var(any_token=True),
    ),
}

NO_PAREN_FUNCTION_PARSERS = {
    **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
    "NEXT": lambda self: self._parse_next_value_for(),
}

# The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
COLUMN_OPERATORS = {
    **parser.Parser.COLUMN_OPERATORS,
    # Treat x::<type> as a cast only for built-in types; user-defined types
    # (and non-types) keep scope-resolution semantics.
    TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
    if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
    else self.expression(exp.ScopeResolution, this=this, expression=to),
}
659
+
660
def _parse_alter_table_set(self) -> exp.AlterSet:
    # T-SQL wraps the SET options in parentheses: ALTER TABLE ... SET (...).
    return self._parse_wrapped(super()._parse_alter_table_set)
662
+
663
def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
    """Handle a parenthesized MERGE statement; otherwise defer to the base parser."""
    if not self._match(TokenType.MERGE):
        return super()._parse_wrapped_select(table=table)

    leading_comments = self._prev_comments
    merge = self._parse_merge()
    merge.add_comments(leading_comments, prepend=True)
    return merge
671
+
672
def _parse_dcolon(self) -> t.Optional[exp.Expression]:
    """Parse the right-hand side of the '::' operator.

    We want to use _parse_types() if the first token after :: is a known type,
    otherwise we could parse something like x::varchar(max) into a function.
    """
    if self._match_set(self.TYPE_TOKENS, advance=False):
        return self._parse_types()

    parsed = self._parse_function()
    return parsed if parsed else self._parse_types()
679
+
680
def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
    """Parse a trailing OPTION(...) query-hint list, or return None if absent."""
    if not self._match(TokenType.OPTION):
        return None

    def _parse_option() -> t.Optional[exp.Expression]:
        # Each entry is a known option name, optionally followed by '= value'.
        option = self._parse_var_from_options(OPTIONS)
        if not option:
            return None

        # '=' is optional for some options; consume it when present.
        self._match(TokenType.EQ)
        return self.expression(
            exp.QueryOption, this=option, expression=self._parse_primary_or_var()
        )

    return self._parse_wrapped_csv(_parse_option)
695
+
696
def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
    """Parse a FOR XML option of the form NAME or NAME('value')."""
    key = self._parse_primary_or_var()
    value = (
        self._parse_wrapped(self._parse_string)
        if self._match(TokenType.L_PAREN, advance=False)
        else None
    )
    return exp.XMLKeyValueOption(this=key, expression=value)
704
+
705
def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
    """Parse a FOR XML clause into a list of QueryOption expressions."""
    if not self._match_pair(TokenType.FOR, TokenType.XML):
        return None

    def _parse_for_xml() -> t.Optional[exp.Expression]:
        # Either a known FOR XML keyword or a NAME('value') key/value option.
        return self.expression(
            exp.QueryOption,
            this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
            or self._parse_xml_key_value_option(),
        )

    return self._parse_csv(_parse_for_xml)
717
+
718
def _parse_projections(self) -> t.List[exp.Expression]:
    """
    T-SQL supports the syntax alias = expression in the SELECT's projection list,
    so we transform all parsed Selects to convert their EQ projections into Aliases.

    See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
    """
    return [
        (
            # `alias = expr` parses as an EQ whose LHS is a Column; rewrite it
            # into a standard Alias node so downstream generation is uniform.
            exp.alias_(projection.expression, projection.this.this, copy=False)
            if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
            else projection
        )
        for projection in super()._parse_projections()
    ]
733
+
734
def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
    """Applies to SQL Server and Azure SQL Database
    COMMIT [ { TRAN | TRANSACTION }
        [ transaction_name | @tran_name_variable ] ]
        [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

    ROLLBACK { TRAN | TRANSACTION }
        [ transaction_name | @tran_name_variable
        | savepoint_name | @savepoint_variable ]
    """
    # The COMMIT/ROLLBACK keyword was already consumed by the caller.
    rollback = self._prev.token_type == TokenType.ROLLBACK

    self._match_texts(("TRAN", "TRANSACTION"))
    this = self._parse_id_var()

    if rollback:
        return self.expression(exp.Rollback, this=this)

    # Only COMMIT supports the DELAYED_DURABILITY option.
    durability = None
    if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
        self._match_text_seq("DELAYED_DURABILITY")
        self._match(TokenType.EQ)

        if self._match_text_seq("OFF"):
            durability = False
        else:
            self._match(TokenType.ON)
            durability = True

        self._match_r_paren()

    return self.expression(exp.Commit, this=this, durability=durability)
766
+
767
def _parse_transaction(self) -> exp.Transaction | exp.Command:
    """Applies to SQL Server and Azure SQL Database
    BEGIN { TRAN | TRANSACTION }
    [ { transaction_name | @tran_name_variable }
    [ WITH MARK [ 'description' ] ]
    ]
    """
    if self._match_texts(("TRAN", "TRANSACTION")):
        transaction = self.expression(exp.Transaction, this=self._parse_id_var())
        if self._match_text_seq("WITH", "MARK"):
            transaction.set("mark", self._parse_string())

        return transaction

    # Not a transaction (e.g. a plain BEGIN block): keep it as a raw command.
    return self._parse_as_command(self._prev)
782
+
783
def _parse_returns(self) -> exp.ReturnsProperty:
    """Parse RETURNS, capturing the optional table variable name T-SQL allows."""
    table_name = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
    prop = super()._parse_returns()
    prop.set("table", table_name)
    return prop
788
+
789
def _parse_convert(
    self, strict: bool, safe: t.Optional[bool] = None
) -> t.Optional[exp.Expression]:
    """Parse CONVERT(type, expr [, style]) into an exp.Convert node."""
    # Unlike CAST, T-SQL's CONVERT takes the target type first.
    this = self._parse_types()
    self._match(TokenType.COMMA)
    args = [this, *self._parse_csv(self._parse_assignment)]
    convert = exp.Convert.from_arg_list(args)
    convert.set("safe", safe)  # True for TRY_CONVERT
    convert.set("strict", strict)
    return convert
799
+
800
def _parse_column_def(
    self, this: t.Optional[exp.Expression], computed_column: bool = True
) -> t.Optional[exp.Expression]:
    """Parse a column/parameter definition plus T-SQL extras.

    Captures '= default' (procedure parameter defaults) and a trailing
    OUT/OUTPUT/READ_ONLY mode into the resulting expression's args.
    """
    this = super()._parse_column_def(this=this, computed_column=computed_column)
    if not this:
        return None
    if self._match(TokenType.EQ):
        this.set("default", self._parse_disjunction())
    if self._match_texts(self.COLUMN_DEFINITION_MODES):
        this.set("output", self._prev.text)
    return this
811
+
812
def _parse_user_defined_function(
    self, kind: t.Optional[TokenType] = None
) -> t.Optional[exp.Expression]:
    """Parse a UDF/procedure name, optionally followed by bare parameters.

    T-SQL procedures may declare their parameter list without surrounding
    parentheses; such lists are wrapped into a UserDefinedFunction node.
    """
    this = super()._parse_user_defined_function(kind=kind)

    if (
        kind == TokenType.FUNCTION
        or isinstance(this, exp.UserDefinedFunction)
        or self._match(TokenType.ALIAS, advance=False)
    ):
        return this

    # If WITH (procedure options) follows, there is no bare parameter list.
    if not self._match(TokenType.WITH, advance=False):
        expressions = self._parse_csv(self._parse_function_parameter)
    else:
        expressions = None

    return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)
830
+
831
def _parse_into(self) -> t.Optional[exp.Into]:
    """Parse INTO, promoting a temp-table marker onto the Into expression."""
    into = super()._parse_into()

    table = isinstance(into, exp.Into) and into.find(exp.Table)
    if isinstance(table, exp.Table):
        table_identifier = table.this
        if table_identifier.args.get("temporary"):
            # Promote the temporary property from the Identifier to the Into expression
            t.cast(exp.Into, into).set("temporary", True)

    return into
842
+
843
def _parse_id_var(
    self,
    any_token: bool = True,
    tokens: t.Optional[t.Collection[TokenType]] = None,
) -> t.Optional[exp.Expression]:
    """Parse an identifier, handling T-SQL temp-object prefixes.

    '#name' marks a local temporary object and '##name' a global one; the
    corresponding flag is recorded on the resulting identifier.
    """
    is_temporary = self._match(TokenType.HASH)
    is_global = is_temporary and self._match(TokenType.HASH)

    this = super()._parse_id_var(any_token=any_token, tokens=tokens)
    if this:
        if is_global:
            this.set("global", True)
        elif is_temporary:
            this.set("temporary", True)

    return this
859
+
860
def _parse_create(self) -> exp.Create | exp.Command:
    """Parse CREATE, surfacing a '#temp' table name as a TemporaryProperty."""
    create = super()._parse_create()

    if isinstance(create, exp.Create):
        # The table may be wrapped in a Schema when columns are declared.
        table = create.this.this if isinstance(create.this, exp.Schema) else create.this
        if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
            if not create.args.get("properties"):
                create.set("properties", exp.Properties(expressions=[]))

            create.args["properties"].append("expressions", exp.TemporaryProperty())

    return create
872
+
873
def _parse_if(self) -> t.Optional[exp.Expression]:
    """Parse IF, special-casing the existence-check drop idiom.

    IF OBJECT_ID('name') IS NOT NULL DROP ... becomes DROP ... IF EXISTS;
    anything else retreats and falls back to the base parser.
    """
    index = self._index

    if self._match_text_seq("OBJECT_ID"):
        self._parse_wrapped_csv(self._parse_string)
        if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
            return self._parse_drop(exists=True)
        self._retreat(index)

    return super()._parse_if()
883
+
884
def _parse_unique(self) -> exp.UniqueColumnConstraint:
    """Parse a UNIQUE constraint, honoring CLUSTERED/NONCLUSTERED index hints."""
    if not self._match_texts(("CLUSTERED", "NONCLUSTERED")):
        inner = self._parse_schema(self._parse_id_var(any_token=False))
    else:
        inner = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)

    return self.expression(exp.UniqueColumnConstraint, this=inner)
891
+
892
def _parse_partition(self) -> t.Optional[exp.Partition]:
    """Parse a WITH (PARTITIONS (...)) clause, or return None if absent."""
    if not self._match_text_seq("WITH", "(", "PARTITIONS"):
        return None

    def parse_range():
        # A partition spec is either a single value or 'low TO high'.
        low = self._parse_bitwise()
        high = self._parse_bitwise() if self._match_text_seq("TO") else None

        return (
            self.expression(exp.PartitionRange, this=low, expression=high) if high else low
        )

    partition = self.expression(
        exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
    )

    # Close the outer WITH ( ... ) wrapper.
    self._match_r_paren()

    return partition
911
+
912
def _parse_declare(self) -> exp.Declare | exp.Command:
    """Parse DECLARE; fall back to a raw Command if it cannot be fully parsed."""
    index = self._index
    expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

    # Leftover tokens mean only part of the statement was understood, so
    # retreat and keep the whole statement as an opaque command instead.
    if not expressions or self._curr:
        self._retreat(index)
        return self._parse_as_command(self._prev)

    return self.expression(exp.Declare, expressions=expressions)
921
+
922
def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
    """Parse one '@var [AS] <type> [= value]' entry of a DECLARE statement."""
    var = self._parse_id_var()
    if not var:
        return None

    value = None
    self._match(TokenType.ALIAS)  # optional AS keyword
    if self._match(TokenType.TABLE):
        # Table variable: the "type" is its column schema.
        data_type = self._parse_schema()
    else:
        data_type = self._parse_types()
        # NOTE(review): the '= default' branch is nested here, i.e. defaults
        # apply to scalar declarations only — source formatting is mangled,
        # confirm against upstream.
        if self._match(TokenType.EQ):
            value = self._parse_bitwise()

    return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
937
+
938
def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
    """Parse ALTER TABLE ... ALTER COLUMN, normalizing COLLATE names to Vars."""
    expression = super()._parse_alter_table_alter()

    if expression is not None:
        collation = expression.args.get("collate")
        if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
            # Collation names are bare keywords, not columns; store them as
            # Vars so they are not quoted when regenerated.
            identifier = collation.this
            collation.set("this", exp.Var(this=identifier.name))

    return expression
948
+
949
class Generator(generator.Generator):
    """T-SQL SQL generator: dialect feature flags, type/transform tables, and
    per-expression rendering overrides."""

    # Dialect feature switches (see sqlglot.generator.Generator for semantics).
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Expressions whose nested CTEs must be bubbled up before generation.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    # Generic type -> T-SQL type-name mapping.
    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types; remove any inherited remapping.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    # Expression-class -> renderer overrides.
    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS is rendered by returnsproperty_sql below, not the generic path.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }
1072
+
1073
def scope_resolution(self, rhs: str, scope_name: str) -> str:
    """Render a scope-resolution access as '<scope>::<rhs>'."""
    return "::".join((scope_name, rhs))
1075
+
1076
def select_sql(self, expression: exp.Select) -> str:
    """Render SELECT, reconciling TOP/FETCH/OFFSET with T-SQL's rules."""
    limit = expression.args.get("limit")
    offset = expression.args.get("offset")

    if isinstance(limit, exp.Fetch) and not offset:
        # Dialects like Oracle can FETCH directly from a row set but
        # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
        offset = exp.Offset(expression=exp.Literal.number(0))
        expression.set("offset", offset)

    if offset:
        if not expression.args.get("order"):
            # ORDER BY is required in order to use OFFSET in a query, so we use
            # a noop order by, since we don't really care about the order.
            # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
            expression.order_by(exp.select(exp.null()).subquery(), copy=False)

        if isinstance(limit, exp.Limit):
            # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
            # we replace here because otherwise TOP would be generated in select_sql
            limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

    return super().select_sql(expression)
1099
+
1100
def convert_sql(self, expression: exp.Convert) -> str:
    """Render CONVERT, switching to TRY_CONVERT when the node is marked safe."""
    func_name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
    style = expression.args.get("style")
    return self.func(func_name, expression.this, expression.expression, style)
1105
+
1106
def queryoption_sql(self, expression: exp.QueryOption) -> str:
    """Render an OPTION(...) entry, inserting '=' only where T-SQL requires it."""
    name = self.sql(expression, "this")
    value = self.sql(expression, "expression")
    if not value:
        return name

    eq = "= " if name in OPTIONS_THAT_REQUIRE_EQUAL else ""
    return f"{name} {eq}{value}"
1113
+
1114
def lateral_op(self, expression: exp.Lateral) -> str:
    """Map LATERAL joins onto T-SQL's CROSS APPLY / OUTER APPLY keywords."""
    apply_flag = expression.args.get("cross_apply")
    if apply_flag is True:
        return "CROSS APPLY"
    if apply_flag is False:
        return "OUTER APPLY"

    # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
    self.unsupported("LATERAL clause is not supported.")
    return "LATERAL"
1124
+
1125
def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Render SPLIT_PART via PARSENAME when the delimiter is '.'.

    PARSENAME indexes parts right-to-left and supports at most four parts,
    so the index is flipped and the transpilation is limited accordingly.
    """
    this = expression.this
    split_count = len(this.name.split("."))
    delimiter = expression.args.get("delimiter")
    part_index = expression.args.get("part_index")

    if (
        not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
        or (delimiter and delimiter.name != ".")
        or not part_index
        or split_count > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    return self.func(
        "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
    )
1145
+
1146
def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS, dropping unsupported nanoseconds and filling
    required fraction/precision arguments with 0."""
    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    # TIMEFROMPARTS requires all five arguments; default the optional ones.
    if expression.args.get("fractions") is None:
        expression.set("fractions", exp.Literal.number(0))
    if expression.args.get("precision") is None:
        expression.set("precision", exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)
1158
+
1159
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, dropping unsupported zone/nano arguments and
    defaulting the required milliseconds argument to 0."""
    zone = expression.args.get("zone")
    if zone is not None:
        zone.pop()
        self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)
1174
+
1175
def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render one SET item; T-SQL omits '=' unless assigning to a variable."""
    this = expression.this
    if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
        # T-SQL does not use '=' in SET command, except when the LHS is a variable.
        return f"{self.sql(this.left)} {self.sql(this.right)}"

    return super().setitem_sql(expression)
1182
+
1183
def boolean_sql(self, expression: exp.Boolean) -> str:
    """Render booleans; T-SQL has no boolean literals.

    In value position (BIT contexts or VALUES rows) emit 1/0; in predicate
    position emit the tautology/contradiction (1 = 1) / (1 = 0).
    """
    if type(expression.parent) in BIT_TYPES or isinstance(
        expression.find_ancestor(exp.Values, exp.Select), exp.Values
    ):
        return "1" if expression.this else "0"

    return "(1 = 1)" if expression.this else "(1 = 0)"
1190
+
1191
def is_sql(self, expression: exp.Is) -> str:
    """T-SQL lacks IS TRUE/FALSE, so compare against booleans with '='."""
    operator = "=" if isinstance(expression.expression, exp.Boolean) else "IS"
    return self.binary(expression, operator)
1195
+
1196
def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Render the created object's name, prefixing '#' for temp tables."""
    sql = self.sql(expression, "this")
    properties = expression.args.get("properties")

    if sql[:1] != "#" and any(
        isinstance(prop, exp.TemporaryProperty)
        for prop in (properties.expressions if properties else [])
    ):
        # Keep the '#' inside the brackets for quoted names: [#name].
        sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

    return sql
1207
+
1208
def create_sql(self, expression: exp.Create) -> str:
    """Render CREATE with T-SQL-specific rewrites.

    Handles: stripping catalogs from views and relocating their CTEs,
    rewriting CTAS as SELECT ... INTO, and emulating IF NOT EXISTS /
    OR REPLACE via dynamic SQL or CREATE OR ALTER.
    """
    kind = expression.kind
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # CREATE ... LIKE copies only the schema: select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        # Emulate IF NOT EXISTS by probing the catalog views and running the
        # CREATE statement through EXEC(<literal>) only when absent.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
1268
+
1269
@generator.unsupported_args("unlogged", "expressions")
def into_sql(self, expression: exp.Into) -> str:
    """Render SELECT ... INTO, pushing the temp flag down to the identifier."""
    if expression.args.get("temporary"):
        # If the Into expression has a temporary property, push this down to the Identifier
        table = expression.find(exp.Table)
        if table and isinstance(table.this, exp.Identifier):
            table.this.set("temporary", True)

    return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
1278
+
1279
def count_sql(self, expression: exp.Count) -> str:
    """Render COUNT, switching to COUNT_BIG when a bigint count was parsed."""
    wants_big = expression.args.get("big_int")
    return rename_func("COUNT_BIG" if wants_big else "COUNT")(self, expression)
1282
+
1283
def offset_sql(self, expression: exp.Offset) -> str:
    # T-SQL requires the trailing ROWS keyword: OFFSET <n> ROWS.
    return f"{super().offset_sql(expression)} ROWS"
1285
+
1286
def version_sql(self, expression: exp.Version) -> str:
    """Render temporal-table queries: FOR SYSTEM_TIME AS OF / FROM / BETWEEN ..."""
    name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    this = f"FOR {name}"
    expr = expression.expression
    kind = expression.text("kind")
    if kind in ("FROM", "BETWEEN"):
        # These kinds carry two bounds; FROM uses TO, BETWEEN uses AND.
        args = expr.expressions
        sep = "TO" if kind == "FROM" else "AND"
        expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    else:
        expr_sql = self.sql(expr)

    expr_sql = f" {expr_sql}" if expr_sql else ""
    return f"{this} {kind}{expr_sql}"
1300
+
1301
def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    """Render RETURNS [<table var> ]<type> for function definitions."""
    table_name = expression.args.get("table")
    prefix = f"{table_name} " if table_name else ""
    return f"RETURNS {prefix}{self.sql(expression, 'this')}"
1305
+
1306
def returning_sql(self, expression: exp.Returning) -> str:
    """Render RETURNING as T-SQL's OUTPUT clause, with an optional INTO target."""
    into_target = self.sql(expression, "into")
    into_clause = self.seg(f"INTO {into_target}") if into_target else ""
    projections = self.expressions(expression, flat=True)
    return f"{self.seg('OUTPUT')} {projections}{into_clause}"
1310
+
1311
def transaction_sql(self, expression: exp.Transaction) -> str:
    """Render BEGIN TRANSACTION [name] [WITH MARK <description>]."""
    name = self.sql(expression, "this")
    mark = self.sql(expression, "mark")
    name_part = f" {name}" if name else ""
    mark_part = f" WITH MARK {mark}" if mark else ""
    return f"BEGIN TRANSACTION{name_part}{mark_part}"
1317
+
1318
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    durability = expression.args.get("durability")
    # durability is a tri-state: None (omit), True (ON), False (OFF).
    durability = (
        f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
        if durability is not None
        else ""
    )
    return f"COMMIT TRANSACTION{this}{durability}"
1328
+
1329
def rollback_sql(self, expression: exp.Rollback) -> str:
    """Render ROLLBACK TRANSACTION [name]."""
    name = self.sql(expression, "this")
    suffix = f" {name}" if name else ""
    return f"ROLLBACK TRANSACTION{suffix}"
1333
+
1334
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Prefix temp-object identifiers: '#' for local, '##' for global temps."""
    rendered = super().identifier_sql(expression)

    if expression.args.get("global"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"
    return rendered
1343
+
1344
def constraint_sql(self, expression: exp.Constraint) -> str:
    """Render a named constraint, separating its parts with spaces."""
    this = self.sql(expression, "this")
    expressions = self.expressions(expression, flat=True, sep=" ")
    return f"CONSTRAINT {this} {expressions}"
1348
+
1349
def length_sql(self, expression: exp.Length) -> str:
    # LEN with the redundant cast-to-TEXT stripped (see _uncast_text).
    return self._uncast_text(expression, "LEN")
1351
+
1352
def right_sql(self, expression: exp.Right) -> str:
    # RIGHT with the redundant cast-to-TEXT stripped (see _uncast_text).
    return self._uncast_text(expression, "RIGHT")
1354
+
1355
def left_sql(self, expression: exp.Left) -> str:
    # LEFT with the redundant cast-to-TEXT stripped (see _uncast_text).
    return self._uncast_text(expression, "LEFT")
1357
+
1358
def _uncast_text(self, expression: exp.Expression, name: str) -> str:
    """Render <name>(arg[, n]), dropping a CAST(arg AS TEXT) wrapper.

    The parser-side builders wrap the argument in a cast to TEXT; T-SQL's
    LEN/LEFT/RIGHT accept character arguments directly, so the cast is
    stripped before rendering.
    """
    this = expression.this
    if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
        this_sql = self.sql(this, "this")
    else:
        this_sql = self.sql(this)
    expression_sql = self.sql(expression, "expression")
    return self.func(name, this_sql, expression_sql if expression_sql else None)
1366
+
1367
def partition_sql(self, expression: exp.Partition) -> str:
    # Inverse of Parser._parse_partition: WITH (PARTITIONS (...)).
    return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"
1369
+
1370
def alter_sql(self, expression: exp.Alter) -> str:
    """Render ALTER; renames must go through sp_rename in T-SQL."""
    action = seq_get(expression.args.get("actions") or [], 0)
    if isinstance(action, exp.AlterRename):
        return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
    return super().alter_sql(expression)
1375
+
1376
def drop_sql(self, expression: exp.Drop) -> str:
    """Render DROP; view names may not be catalog-qualified in T-SQL."""
    if expression.args["kind"] == "VIEW":
        expression.this.set("catalog", None)
    return super().drop_sql(expression)
1380
+
1381
def options_modifier(self, expression: exp.Expression) -> str:
    """Append a trailing OPTION(...) query-hint clause when options exist."""
    rendered = self.expressions(expression, key="options")
    if not rendered:
        return ""
    return f" OPTION{self.wrap(rendered)}"
1384
+
1385
def dpipe_sql(self, expression: exp.DPipe) -> str:
    # T-SQL concatenates strings with '+': fold the '||' chain into nested Adds.
    return self.sql(
        reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
    )
1389
+
1390
def isascii_sql(self, expression: exp.IsAscii) -> str:
    # PATINDEX with the binary-collated pattern 0x255b5e002d7f5d25
    # (i.e. '%[^<0x00>-<0x7f>]%') finds the first non-ASCII character;
    # a result of 0 means every character is ASCII.
    return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
1392
+
1393
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
    """Render a column/parameter definition plus T-SQL extras.

    Appends ' = <default>' for parameter defaults and the OUT/OUTPUT/READ_ONLY
    mode captured during parsing.
    """
    rendered = super().columndef_sql(expression, sep)
    default_sql = self.sql(expression, "default")
    if default_sql:
        rendered = f"{rendered} = {default_sql}"
    output_sql = self.sql(expression, "output")
    if output_sql:
        rendered = f"{rendered} {output_sql}"
    return rendered
1400
+
1401
def coalesce_sql(self, expression: exp.Coalesce) -> str:
    """Render COALESCE, preserving ISNULL when that was the parsed function."""
    chosen = "ISNULL" if expression.args.get("is_null") else "COALESCE"
    return rename_func(chosen)(self, expression)