sqlglot 27.8.0__py3-none-any.whl → 27.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlglot/_version.py +2 -2
- sqlglot/dialects/__init__.py +1 -1
- sqlglot/dialects/bigquery.py +127 -12
- sqlglot/dialects/clickhouse.py +11 -0
- sqlglot/dialects/dialect.py +2 -0
- sqlglot/dialects/doris.py +77 -9
- sqlglot/dialects/dremio.py +75 -15
- sqlglot/dialects/duckdb.py +13 -3
- sqlglot/dialects/exasol.py +23 -0
- sqlglot/dialects/mysql.py +0 -33
- sqlglot/dialects/postgres.py +0 -1
- sqlglot/dialects/redshift.py +1 -0
- sqlglot/dialects/singlestore.py +185 -19
- sqlglot/dialects/tsql.py +2 -0
- sqlglot/expressions.py +123 -7
- sqlglot/generator.py +123 -29
- sqlglot/optimizer/qualify_columns.py +1 -1
- sqlglot/optimizer/scope.py +1 -0
- sqlglot/parser.py +83 -19
- sqlglot/tokens.py +2 -0
- {sqlglot-27.8.0.dist-info → sqlglot-27.10.0.dist-info}/METADATA +41 -1
- {sqlglot-27.8.0.dist-info → sqlglot-27.10.0.dist-info}/RECORD +25 -25
- {sqlglot-27.8.0.dist-info → sqlglot-27.10.0.dist-info}/WHEEL +0 -0
- {sqlglot-27.8.0.dist-info → sqlglot-27.10.0.dist-info}/licenses/LICENSE +0 -0
- {sqlglot-27.8.0.dist-info → sqlglot-27.10.0.dist-info}/top_level.txt +0 -0
sqlglot/dialects/duckdb.py
CHANGED

```diff
@@ -386,10 +386,12 @@ class DuckDB(Dialect):
             "JSON_EXTRACT_PATH": parser.build_extract_json_with_path(exp.JSONExtract),
             "JSON_EXTRACT_STRING": parser.build_extract_json_with_path(exp.JSONExtractScalar),
             "LIST_CONTAINS": exp.ArrayContains.from_arg_list,
+            "LIST_FILTER": exp.ArrayFilter.from_arg_list,
             "LIST_HAS": exp.ArrayContains.from_arg_list,
             "LIST_HAS_ANY": exp.ArrayOverlaps.from_arg_list,
             "LIST_REVERSE_SORT": _build_sort_array_desc,
             "LIST_SORT": exp.SortArray.from_arg_list,
+            "LIST_TRANSFORM": exp.Transform.from_arg_list,
             "LIST_VALUE": lambda args: exp.Array(expressions=args),
             "MAKE_TIME": exp.TimeFromParts.from_arg_list,
             "MAKE_TIMESTAMP": _build_make_timestamp,
@@ -643,6 +645,9 @@ class DuckDB(Dialect):
             exp.ArrayRemove: remove_from_array_using_filter,
             exp.ArraySort: _array_sort_sql,
             exp.ArraySum: rename_func("LIST_SUM"),
+            exp.ArrayUniqueAgg: lambda self, e: self.func(
+                "LIST", exp.Distinct(expressions=[e.this])
+            ),
             exp.BitwiseXor: rename_func("XOR"),
             exp.CommentColumnConstraint: no_comment_column_constraint_sql,
             exp.CurrentDate: lambda *_: "CURRENT_DATE",
@@ -1117,15 +1122,20 @@ class DuckDB(Dialect):
             return super().unnest_sql(expression)
 
         def ignorenulls_sql(self, expression: exp.IgnoreNulls) -> str:
-
+            this = expression.this
+
+            if isinstance(this, self.IGNORE_RESPECT_NULLS_WINDOW_FUNCTIONS):
                 # DuckDB should render IGNORE NULLS only for the general-purpose
                 # window functions that accept it e.g. FIRST_VALUE(... IGNORE NULLS) OVER (...)
                 return super().ignorenulls_sql(expression)
 
-            if
+            if isinstance(this, exp.First):
+                this = exp.AnyValue(this=this.this)
+
+            if not isinstance(this, exp.AnyValue):
                 self.unsupported("IGNORE NULLS is not supported for non-window functions.")
 
-            return self.sql(
+            return self.sql(this)
 
         def respectnulls_sql(self, expression: exp.RespectNulls) -> str:
             if isinstance(expression.this, self.IGNORE_RESPECT_NULLS_WINDOW_FUNCTIONS):
```
sqlglot/dialects/exasol.py
CHANGED

```diff
@@ -5,8 +5,10 @@ import typing as t
 from sqlglot import exp, generator, parser, tokens
 from sqlglot.dialects.dialect import (
     Dialect,
+    NormalizationStrategy,
     binary_from_function,
     build_formatted_time,
+    groupconcat_sql,
     rename_func,
     strposition_sql,
     timestrtotime_sql,
@@ -73,6 +75,17 @@ DATE_UNITS = {"DAY", "WEEK", "MONTH", "YEAR", "HOUR", "MINUTE", "SECOND"}
 
 
 class Exasol(Dialect):
+    # https://docs.exasol.com/db/latest/sql_references/basiclanguageelements.htm#SQLidentifier
+    NORMALIZATION_STRATEGY = NormalizationStrategy.UPPERCASE
+    # https://docs.exasol.com/db/latest/sql_references/data_types/datatypesoverview.htm
+    SUPPORTS_USER_DEFINED_TYPES = False
+    # https://docs.exasol.com/db/latest/sql/select.htm
+    SUPPORTS_SEMI_ANTI_JOIN = False
+    SUPPORTS_COLUMN_JOIN_MARKS = True
+    NULL_ORDERING = "nulls_are_last"
+    # https://docs.exasol.com/db/latest/sql_references/literals.htm#StringLiterals
+    CONCAT_COALESCE = True
+
     TIME_MAPPING = {
         "yyyy": "%Y",
         "YYYY": "%Y",
@@ -108,6 +121,7 @@ class Exasol(Dialect):
             # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/if.htm
             "ENDIF": TokenType.END,
             "LONG VARCHAR": TokenType.TEXT,
+            "SEPARATOR": TokenType.SEPARATOR,
         }
         KEYWORDS.pop("DIV")
 
@@ -176,6 +190,12 @@ class Exasol(Dialect):
                 this=self._match(TokenType.IS) and self._parse_string(),
             ),
         }
+        FUNCTION_PARSERS = {
+            **parser.Parser.FUNCTION_PARSERS,
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/listagg.htm
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/group_concat.htm
+            **dict.fromkeys(("GROUP_CONCAT", "LISTAGG"), lambda self: self._parse_group_concat()),
+        }
 
     class Generator(generator.Generator):
         # https://docs.exasol.com/db/latest/sql_references/data_types/datatypedetails.htm#StringDataType
@@ -235,6 +255,9 @@ class Exasol(Dialect):
             exp.TsOrDsDiff: _date_diff_sql,
             exp.DateTrunc: lambda self, e: self.func("TRUNC", e.this, unit_to_str(e)),
             exp.DatetimeTrunc: timestamptrunc_sql(),
+            exp.GroupConcat: lambda self, e: groupconcat_sql(
+                self, e, func_name="LISTAGG", within_group=True
+            ),
             # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/edit_distance.htm#EDIT_DISTANCE
             exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost", "max_dist")(
                 rename_func("EDIT_DISTANCE")
```
sqlglot/dialects/mysql.py
CHANGED

```diff
@@ -676,39 +676,6 @@ class MySQL(Dialect):
                 parse_interval=parse_interval, fallback_to_identifier=fallback_to_identifier
             )
 
-        def _parse_group_concat(self) -> t.Optional[exp.Expression]:
-            def concat_exprs(
-                node: t.Optional[exp.Expression], exprs: t.List[exp.Expression]
-            ) -> exp.Expression:
-                if isinstance(node, exp.Distinct) and len(node.expressions) > 1:
-                    concat_exprs = [
-                        self.expression(exp.Concat, expressions=node.expressions, safe=True)
-                    ]
-                    node.set("expressions", concat_exprs)
-                    return node
-                if len(exprs) == 1:
-                    return exprs[0]
-                return self.expression(exp.Concat, expressions=args, safe=True)
-
-            args = self._parse_csv(self._parse_lambda)
-
-            if args:
-                order = args[-1] if isinstance(args[-1], exp.Order) else None
-
-                if order:
-                    # Order By is the last (or only) expression in the list and has consumed the 'expr' before it,
-                    # remove 'expr' from exp.Order and add it back to args
-                    args[-1] = order.this
-                    order.set("this", concat_exprs(order.this, args))
-
-                this = order or concat_exprs(args[0], args)
-            else:
-                this = None
-
-            separator = self._parse_field() if self._match(TokenType.SEPARATOR) else None
-
-            return self.expression(exp.GroupConcat, this=this, separator=separator)
-
         def _parse_alter_table_alter_index(self) -> exp.AlterIndex:
             index = self._parse_field(any_token=True)
 
```
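The MySQL-specific `_parse_group_concat` override is removed; given the accompanying changes in sqlglot/parser.py and the new Exasol hook above, the logic presumably now lives in the shared parser. A quick round-trip check, assuming MySQL behavior itself is unchanged:

```python
import sqlglot

# GROUP_CONCAT with DISTINCT, ORDER BY and SEPARATOR should still round-trip.
sql = "SELECT GROUP_CONCAT(DISTINCT a ORDER BY a SEPARATOR ',') FROM t"
print(sqlglot.transpile(sql, read="mysql", write="mysql")[0])
```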
sqlglot/dialects/postgres.py
CHANGED

```diff
@@ -345,7 +345,6 @@ class Postgres(Dialect):
             "REFRESH": TokenType.COMMAND,
             "REINDEX": TokenType.COMMAND,
             "RESET": TokenType.COMMAND,
-            "REVOKE": TokenType.COMMAND,
             "SERIAL": TokenType.SERIAL,
             "SMALLSERIAL": TokenType.SMALLSERIAL,
             "TEMP": TokenType.TEMPORARY,
```
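Dropping `REVOKE` from the COMMAND keywords means the Postgres tokenizer no longer treats the statement as an opaque command; presumably it is now parsed into a structured expression (the parser and expressions modules also grew in this release). A hedged check:

```python
import sqlglot

# Assumption: REVOKE now parses into something richer than exp.Command.
tree = sqlglot.parse_one("REVOKE SELECT ON TABLE users FROM analyst", read="postgres")
print(type(tree).__name__)           # expected: no longer "Command"
print(tree.sql(dialect="postgres"))  # should round-trip the statement
```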
sqlglot/dialects/redshift.py
CHANGED

```diff
@@ -192,6 +192,7 @@ class Redshift(Postgres):
             exp.DistKeyProperty: lambda self, e: self.func("DISTKEY", e.this),
             exp.DistStyleProperty: lambda self, e: self.naked_property(e),
             exp.Explode: lambda self, e: self.explode_sql(e),
+            exp.FarmFingerprint: rename_func("FARMFINGERPRINT64"),
             exp.FromBase: rename_func("STRTOL"),
             exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
             exp.JSONExtract: json_extract_segments("JSON_EXTRACT_PATH_TEXT"),
```
sqlglot/dialects/singlestore.py
CHANGED

```diff
@@ -8,12 +8,25 @@ from sqlglot.dialects.dialect import (
     json_extract_segments,
     json_path_key_only_name,
     rename_func,
+    bool_xor_sql,
+    count_if_to_sum,
 )
 from sqlglot.dialects.mysql import MySQL
+from sqlglot.expressions import DataType
 from sqlglot.generator import unsupported_args
 from sqlglot.helper import seq_get
 
 
+def cast_to_time6(expression: t.Optional[exp.Expression]) -> exp.Cast:
+    return exp.Cast(
+        this=expression,
+        to=exp.DataType.build(
+            exp.DataType.Type.TIME,
+            expressions=[exp.DataTypeParam(this=exp.Literal.number(6))],
+        ),
+    )
+
+
 class SingleStore(MySQL):
     SUPPORTS_ORDER_BY_ALL = True
 
@@ -56,22 +69,55 @@ class SingleStore(MySQL):
             "TO_CHAR": build_formatted_time(exp.ToChar, "singlestore"),
             "STR_TO_DATE": build_formatted_time(exp.StrToDate, "mysql"),
             "DATE_FORMAT": build_formatted_time(exp.TimeToStr, "mysql"),
+            # The first argument of following functions is converted to TIME(6)
+            # This is needed because exp.TimeToStr is converted to DATE_FORMAT
+            # which interprets the first argument as DATETIME and fails to parse
+            # string literals like '12:05:47' without a date part.
             "TIME_FORMAT": lambda args: exp.TimeToStr(
-
-                # This is needed because exp.TimeToStr is converted to DATE_FORMAT
-                # which interprets the first argument as DATETIME and fails to parse
-                # string literals like '12:05:47' without a date part.
-                this=exp.Cast(
-                    this=seq_get(args, 0),
-                    to=exp.DataType.build(
-                        exp.DataType.Type.TIME,
-                        expressions=[exp.DataTypeParam(this=exp.Literal.number(6))],
-                    ),
-                ),
+                this=cast_to_time6(seq_get(args, 0)),
                 format=MySQL.format_time(seq_get(args, 1)),
             ),
+            "HOUR": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%k")),
+                ),
+                DataType.Type.INT,
+            ),
+            "MICROSECOND": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%f")),
+                ),
+                DataType.Type.INT,
+            ),
+            "SECOND": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%s")),
+                ),
+                DataType.Type.INT,
+            ),
+            "MINUTE": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%i")),
+                ),
+                DataType.Type.INT,
+            ),
+            "MONTHNAME": lambda args: exp.TimeToStr(
+                this=seq_get(args, 0),
+                format=MySQL.format_time(exp.Literal.string("%M")),
+            ),
+            "WEEKDAY": lambda args: exp.paren(exp.DayOfWeek(this=seq_get(args, 0)) + 5, copy=False)
+            % 7,
             "UNIX_TIMESTAMP": exp.StrToUnix.from_arg_list,
             "FROM_UNIXTIME": build_formatted_time(exp.UnixToTime, "mysql"),
+            "TIME_BUCKET": lambda args: exp.DateBin(
+                this=seq_get(args, 0),
+                expression=seq_get(args, 1),
+                origin=seq_get(args, 2),
+            ),
             "BSON_EXTRACT_BSON": build_json_extract_path(exp.JSONBExtract),
             "BSON_EXTRACT_STRING": build_json_extract_path(
                 exp.JSONBExtractScalar, json_type="STRING"
@@ -92,7 +138,52 @@ class SingleStore(MySQL):
             "JSON_EXTRACT_BIGINT": build_json_extract_path(
                 exp.JSONExtractScalar, json_type="BIGINT"
             ),
+            "JSON_ARRAY_CONTAINS_STRING": lambda args: exp.JSONArrayContains(
+                this=seq_get(args, 1),
+                expression=seq_get(args, 0),
+                json_type="STRING",
+            ),
+            "JSON_ARRAY_CONTAINS_DOUBLE": lambda args: exp.JSONArrayContains(
+                this=seq_get(args, 1),
+                expression=seq_get(args, 0),
+                json_type="DOUBLE",
+            ),
+            "JSON_ARRAY_CONTAINS_JSON": lambda args: exp.JSONArrayContains(
+                this=seq_get(args, 1),
+                expression=seq_get(args, 0),
+                json_type="JSON",
+            ),
+            "JSON_PRETTY": exp.JSONFormat.from_arg_list,
             "DATE": exp.Date.from_arg_list,
+            "DAYNAME": lambda args: exp.TimeToStr(
+                this=seq_get(args, 0),
+                format=MySQL.format_time(exp.Literal.string("%W")),
+            ),
+            "APPROX_COUNT_DISTINCT": exp.Hll.from_arg_list,
+            "APPROX_PERCENTILE": lambda args, dialect: exp.ApproxQuantile(
+                this=seq_get(args, 0),
+                quantile=seq_get(args, 1),
+                error_tolerance=seq_get(args, 2),
+            ),
+            "VARIANCE": exp.VariancePop.from_arg_list,
+            "INSTR": exp.Contains.from_arg_list,
+            "REGEXP_MATCH": lambda args: exp.RegexpExtractAll(
+                this=seq_get(args, 0),
+                expression=seq_get(args, 1),
+                parameters=seq_get(args, 2),
+            ),
+            "REGEXP_SUBSTR": lambda args: exp.RegexpExtract(
+                this=seq_get(args, 0),
+                expression=seq_get(args, 1),
+                position=seq_get(args, 2),
+                occurrence=seq_get(args, 3),
+                parameters=seq_get(args, 4),
+            ),
+            "REDUCE": lambda args: exp.Reduce(
+                initial=seq_get(args, 0),
+                this=seq_get(args, 1),
+                merge=seq_get(args, 2),
+            ),
         }
 
         CAST_COLUMN_OPERATORS = {TokenType.COLON_GT, TokenType.NCOLON_GT}
@@ -128,7 +219,9 @@ class SingleStore(MySQL):
 
         TRANSFORMS = {
             **MySQL.Generator.TRANSFORMS,
-            exp.TsOrDsToDate: lambda self, e: self.func("TO_DATE", e.this, self.format_time(e)),
+            exp.TsOrDsToDate: lambda self, e: self.func("TO_DATE", e.this, self.format_time(e))
+            if e.args.get("format")
+            else self.func("DATE", e.this),
             exp.StrToTime: lambda self, e: self.func("TO_TIMESTAMP", e.this, self.format_time(e)),
             exp.ToChar: lambda self, e: self.func("TO_CHAR", e.this, self.format_time(e)),
             exp.StrToDate: lambda self, e: self.func(
@@ -181,6 +274,9 @@ class SingleStore(MySQL):
                 ),
             ),
             exp.UnixToTimeStr: lambda self, e: f"FROM_UNIXTIME({self.sql(e, 'this')}) :> TEXT",
+            exp.DateBin: unsupported_args("unit", "zone")(
+                lambda self, e: self.func("TIME_BUCKET", e.this, e.expression, e.args.get("origin"))
+            ),
             exp.JSONExtract: unsupported_args(
                 "only_json_types",
                 "expressions",
@@ -195,15 +291,67 @@ class SingleStore(MySQL):
             exp.JSONPathKey: json_path_key_only_name,
             exp.JSONPathSubscript: lambda self, e: self.json_path_part(e.this),
             exp.JSONPathRoot: lambda *_: "",
+            exp.JSONFormat: unsupported_args("options", "is_json")(rename_func("JSON_PRETTY")),
+            exp.DayOfWeekIso: lambda self, e: f"(({self.func('DAYOFWEEK', e.this)} % 7) + 1)",
+            exp.DayOfMonth: rename_func("DAY"),
+            exp.Hll: rename_func("APPROX_COUNT_DISTINCT"),
+            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
+            exp.CountIf: count_if_to_sum,
+            exp.LogicalOr: lambda self, e: f"MAX(ABS({self.sql(e, 'this')}))",
+            exp.LogicalAnd: lambda self, e: f"MIN(ABS({self.sql(e, 'this')}))",
+            exp.ApproxQuantile: unsupported_args("accuracy", "weight")(
+                lambda self, e: self.func(
+                    "APPROX_PERCENTILE",
+                    e.this,
+                    e.args.get("quantile"),
+                    e.args.get("error_tolerance"),
+                )
+            ),
+            exp.Variance: rename_func("VAR_SAMP"),
+            exp.VariancePop: rename_func("VAR_POP"),
+            exp.Xor: bool_xor_sql,
+            exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
+            exp.Repeat: lambda self, e: self.func(
+                "LPAD",
+                exp.Literal.string(""),
+                exp.Mul(this=self.func("LENGTH", e.this), expression=e.args.get("times")),
+                e.this,
+            ),
+            exp.IsAscii: lambda self, e: f"({self.sql(e, 'this')} RLIKE '^[\x00-\x7f]*$')",
+            exp.MD5Digest: lambda self, e: self.func("UNHEX", self.func("MD5", e.this)),
+            exp.Chr: rename_func("CHAR"),
+            exp.Contains: rename_func("INSTR"),
+            exp.RegexpExtractAll: unsupported_args("position", "occurrence", "group")(
+                lambda self, e: self.func(
+                    "REGEXP_MATCH",
+                    e.this,
+                    e.expression,
+                    e.args.get("parameters"),
+                )
+            ),
+            exp.RegexpExtract: unsupported_args("group")(
+                lambda self, e: self.func(
+                    "REGEXP_SUBSTR",
+                    e.this,
+                    e.expression,
+                    e.args.get("position"),
+                    e.args.get("occurrence"),
+                    e.args.get("parameters"),
+                )
+            ),
+            exp.StartsWith: lambda self, e: self.func(
+                "REGEXP_INSTR", e.this, self.func("CONCAT", exp.Literal.string("^"), e.expression)
+            ),
+            exp.FromBase: lambda self, e: self.func(
+                "CONV", e.this, e.expression, exp.Literal.number(10)
+            ),
+            exp.Reduce: unsupported_args("finish")(
+                lambda self, e: self.func(
+                    "REDUCE", e.args.get("initial"), e.this, e.args.get("merge")
+                )
+            ),
         }
         TRANSFORMS.pop(exp.JSONExtractScalar)
-        TRANSFORMS.pop(exp.JSONPathFilter)
-        TRANSFORMS.pop(exp.JSONPathRecursive)
-        TRANSFORMS.pop(exp.JSONPathScript)
-        TRANSFORMS.pop(exp.JSONPathSelector)
-        TRANSFORMS.pop(exp.JSONPathSlice)
-        TRANSFORMS.pop(exp.JSONPathUnion)
-        TRANSFORMS.pop(exp.JSONPathWildcard)
 
         # https://docs.singlestore.com/cloud/reference/sql-reference/restricted-keywords/list-of-restricted-keywords/
         RESERVED_KEYWORDS = {
@@ -1286,3 +1434,21 @@ class SingleStore(MySQL):
                 res = exp.Cast(this=res, to=returning)
 
             return self.sql(res)
+
+        def all_sql(self, expression: exp.All) -> str:
+            self.unsupported("ALL subquery predicate is not supported in SingleStore")
+            return super().all_sql(expression)
+
+        def jsonarraycontains_sql(self, expression: exp.JSONArrayContains) -> str:
+            json_type = expression.text("json_type").upper()
+
+            if json_type:
+                return self.func(
+                    f"JSON_ARRAY_CONTAINS_{json_type}", expression.expression, expression.this
+                )
+
+            return self.func(
+                "JSON_ARRAY_CONTAINS_JSON",
+                expression.expression,
+                self.func("TO_JSON", expression.this),
+            )
```
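A few sketches of the SingleStore round-trips implied by the mappings above; inputs and expected outputs are illustrative assumptions, not quoted from the package:

```python
import sqlglot

# TIME_BUCKET <-> exp.DateBin
print(sqlglot.transpile(
    "SELECT TIME_BUCKET('1 DAY', ts) FROM t", read="singlestore", write="singlestore"
)[0])

# JSON_ARRAY_CONTAINS_STRING(arr, value) parses into exp.JSONArrayContains with
# json_type="STRING" and regenerates through jsonarraycontains_sql.
print(sqlglot.transpile(
    "SELECT JSON_ARRAY_CONTAINS_STRING(j, 'a') FROM t", read="singlestore", write="singlestore"
)[0])

# APPROX_PERCENTILE(x, q[, eps]) <-> exp.ApproxQuantile
print(sqlglot.transpile(
    "SELECT APPROX_PERCENTILE(x, 0.5) FROM t", read="singlestore", write="singlestore"
)[0])
```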
sqlglot/dialects/tsql.py
CHANGED

```diff
@@ -658,6 +658,8 @@ class TSQL(Dialect):
             else self.expression(exp.ScopeResolution, this=this, expression=to),
         }
 
+        SET_OP_MODIFIERS = {"offset"}
+
         def _parse_alter_table_set(self) -> exp.AlterSet:
             return self._parse_wrapped(super()._parse_alter_table_set)
 
```
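`SET_OP_MODIFIERS` controls which trailing query modifiers the parser attaches to a set-operation node; restricting it for T-SQL to `{"offset"}` presumably changes which clauses after a UNION are owned by the set operation. A hedged sketch:

```python
import sqlglot

# Assumption: an OFFSET clause following a UNION still parses and round-trips in T-SQL.
sql = "SELECT a FROM t UNION SELECT a FROM s ORDER BY a OFFSET 5 ROWS"
print(sqlglot.parse_one(sql, read="tsql").sql(dialect="tsql"))
```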