sqlglot 27.7.0__py3-none-any.whl → 27.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlglot/_version.py +16 -3
- sqlglot/dialects/__init__.py +1 -1
- sqlglot/dialects/bigquery.py +129 -9
- sqlglot/dialects/clickhouse.py +11 -0
- sqlglot/dialects/databricks.py +5 -1
- sqlglot/dialects/dialect.py +74 -23
- sqlglot/dialects/doris.py +77 -9
- sqlglot/dialects/dremio.py +102 -21
- sqlglot/dialects/duckdb.py +20 -43
- sqlglot/dialects/exasol.py +28 -0
- sqlglot/dialects/mysql.py +0 -48
- sqlglot/dialects/presto.py +0 -2
- sqlglot/dialects/redshift.py +1 -0
- sqlglot/dialects/singlestore.py +252 -13
- sqlglot/dialects/spark.py +6 -0
- sqlglot/dialects/trino.py +1 -0
- sqlglot/dialects/tsql.py +2 -0
- sqlglot/expressions.py +143 -7
- sqlglot/generator.py +98 -27
- sqlglot/jsonpath.py +10 -3
- sqlglot/optimizer/qualify_columns.py +1 -1
- sqlglot/parser.py +58 -17
- {sqlglot-27.7.0.dist-info → sqlglot-27.9.0.dist-info}/METADATA +42 -2
- {sqlglot-27.7.0.dist-info → sqlglot-27.9.0.dist-info}/RECORD +27 -27
- {sqlglot-27.7.0.dist-info → sqlglot-27.9.0.dist-info}/WHEEL +0 -0
- {sqlglot-27.7.0.dist-info → sqlglot-27.9.0.dist-info}/licenses/LICENSE +0 -0
- {sqlglot-27.7.0.dist-info → sqlglot-27.9.0.dist-info}/top_level.txt +0 -0
sqlglot/dialects/singlestore.py
CHANGED
@@ -2,12 +2,31 @@ from sqlglot import TokenType
 import typing as t

 from sqlglot import exp
-from sqlglot.dialects.dialect import
+from sqlglot.dialects.dialect import (
+    build_formatted_time,
+    build_json_extract_path,
+    json_extract_segments,
+    json_path_key_only_name,
+    rename_func,
+    bool_xor_sql,
+    count_if_to_sum,
+)
 from sqlglot.dialects.mysql import MySQL
+from sqlglot.expressions import DataType
 from sqlglot.generator import unsupported_args
 from sqlglot.helper import seq_get


+def cast_to_time6(expression: t.Optional[exp.Expression]) -> exp.Cast:
+    return exp.Cast(
+        this=expression,
+        to=exp.DataType.build(
+            exp.DataType.Type.TIME,
+            expressions=[exp.DataTypeParam(this=exp.Literal.number(6))],
+        ),
+    )
+
+
 class SingleStore(MySQL):
     SUPPORTS_ORDER_BY_ALL = True

@@ -50,22 +69,110 @@ class SingleStore(MySQL):
             "TO_CHAR": build_formatted_time(exp.ToChar, "singlestore"),
             "STR_TO_DATE": build_formatted_time(exp.StrToDate, "mysql"),
             "DATE_FORMAT": build_formatted_time(exp.TimeToStr, "mysql"),
+            # The first argument of following functions is converted to TIME(6)
+            # This is needed because exp.TimeToStr is converted to DATE_FORMAT
+            # which interprets the first argument as DATETIME and fails to parse
+            # string literals like '12:05:47' without a date part.
             "TIME_FORMAT": lambda args: exp.TimeToStr(
-
-                # This is needed because exp.TimeToStr is converted to DATE_FORMAT
-                # which interprets the first argument as DATETIME and fails to parse
-                # string literals like '12:05:47' without a date part.
-                this=exp.Cast(
-                    this=seq_get(args, 0),
-                    to=exp.DataType.build(
-                        exp.DataType.Type.TIME,
-                        expressions=[exp.DataTypeParam(this=exp.Literal.number(6))],
-                    ),
-                ),
+                this=cast_to_time6(seq_get(args, 0)),
                 format=MySQL.format_time(seq_get(args, 1)),
             ),
+            "HOUR": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%k")),
+                ),
+                DataType.Type.INT,
+            ),
+            "MICROSECOND": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%f")),
+                ),
+                DataType.Type.INT,
+            ),
+            "SECOND": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%s")),
+                ),
+                DataType.Type.INT,
+            ),
+            "MINUTE": lambda args: exp.cast(
+                exp.TimeToStr(
+                    this=cast_to_time6(seq_get(args, 0)),
+                    format=MySQL.format_time(exp.Literal.string("%i")),
+                ),
+                DataType.Type.INT,
+            ),
+            "MONTHNAME": lambda args: exp.TimeToStr(
+                this=seq_get(args, 0),
+                format=MySQL.format_time(exp.Literal.string("%M")),
+            ),
+            "WEEKDAY": lambda args: exp.paren(exp.DayOfWeek(this=seq_get(args, 0)) + 5, copy=False)
+            % 7,
             "UNIX_TIMESTAMP": exp.StrToUnix.from_arg_list,
             "FROM_UNIXTIME": build_formatted_time(exp.UnixToTime, "mysql"),
+            "BSON_EXTRACT_BSON": build_json_extract_path(exp.JSONBExtract),
+            "BSON_EXTRACT_STRING": build_json_extract_path(
+                exp.JSONBExtractScalar, json_type="STRING"
+            ),
+            "BSON_EXTRACT_DOUBLE": build_json_extract_path(
+                exp.JSONBExtractScalar, json_type="DOUBLE"
+            ),
+            "BSON_EXTRACT_BIGINT": build_json_extract_path(
+                exp.JSONBExtractScalar, json_type="BIGINT"
+            ),
+            "JSON_EXTRACT_JSON": build_json_extract_path(exp.JSONExtract),
+            "JSON_EXTRACT_STRING": build_json_extract_path(
+                exp.JSONExtractScalar, json_type="STRING"
+            ),
+            "JSON_EXTRACT_DOUBLE": build_json_extract_path(
+                exp.JSONExtractScalar, json_type="DOUBLE"
+            ),
+            "JSON_EXTRACT_BIGINT": build_json_extract_path(
+                exp.JSONExtractScalar, json_type="BIGINT"
+            ),
+            "JSON_ARRAY_CONTAINS_STRING": lambda args: exp.JSONArrayContains(
+                this=seq_get(args, 1),
+                expression=seq_get(args, 0),
+                json_type="STRING",
+            ),
+            "JSON_ARRAY_CONTAINS_DOUBLE": lambda args: exp.JSONArrayContains(
+                this=seq_get(args, 1),
+                expression=seq_get(args, 0),
+                json_type="DOUBLE",
+            ),
+            "JSON_ARRAY_CONTAINS_JSON": lambda args: exp.JSONArrayContains(
+                this=seq_get(args, 1),
+                expression=seq_get(args, 0),
+                json_type="JSON",
+            ),
+            "DATE": exp.Date.from_arg_list,
+            "DAYNAME": lambda args: exp.TimeToStr(
+                this=seq_get(args, 0),
+                format=MySQL.format_time(exp.Literal.string("%W")),
+            ),
+            "APPROX_COUNT_DISTINCT": exp.Hll.from_arg_list,
+            "APPROX_PERCENTILE": lambda args, dialect: exp.ApproxQuantile(
+                this=seq_get(args, 0),
+                quantile=seq_get(args, 1),
+                error_tolerance=seq_get(args, 2),
+            ),
+            "VARIANCE": exp.VariancePop.from_arg_list,
+            "INSTR": exp.Contains.from_arg_list,
+            "REGEXP_MATCH": lambda args: exp.RegexpExtractAll(
+                this=seq_get(args, 0),
+                expression=seq_get(args, 1),
+                parameters=seq_get(args, 2),
+            ),
+            "REGEXP_SUBSTR": lambda args: exp.RegexpExtract(
+                this=seq_get(args, 0),
+                expression=seq_get(args, 1),
+                position=seq_get(args, 2),
+                occurrence=seq_get(args, 3),
+                parameters=seq_get(args, 4),
+            ),
         }

         CAST_COLUMN_OPERATORS = {TokenType.COLON_GT, TokenType.NCOLON_GT}
@@ -81,12 +188,29 @@ class SingleStore(MySQL):
                 this=this,
                 to=to,
             ),
+            TokenType.DCOLON: lambda self, this, path: build_json_extract_path(exp.JSONExtract)(
+                [this, exp.Literal.string(path.name)]
+            ),
+            TokenType.DCOLONDOLLAR: lambda self, this, path: build_json_extract_path(
+                exp.JSONExtractScalar, json_type="STRING"
+            )([this, exp.Literal.string(path.name)]),
+            TokenType.DCOLONPERCENT: lambda self, this, path: build_json_extract_path(
+                exp.JSONExtractScalar, json_type="DOUBLE"
+            )([this, exp.Literal.string(path.name)]),
         }

     class Generator(MySQL.Generator):
+        SUPPORTED_JSON_PATH_PARTS = {
+            exp.JSONPathKey,
+            exp.JSONPathRoot,
+            exp.JSONPathSubscript,
+        }
+
         TRANSFORMS = {
             **MySQL.Generator.TRANSFORMS,
-            exp.TsOrDsToDate: lambda self, e: self.func("TO_DATE", e.this, self.format_time(e))
+            exp.TsOrDsToDate: lambda self, e: self.func("TO_DATE", e.this, self.format_time(e))
+            if e.args.get("format")
+            else self.func("DATE", e.this),
             exp.StrToTime: lambda self, e: self.func("TO_TIMESTAMP", e.this, self.format_time(e)),
             exp.ToChar: lambda self, e: self.func("TO_CHAR", e.this, self.format_time(e)),
             exp.StrToDate: lambda self, e: self.func(
@@ -107,6 +231,7 @@ class SingleStore(MySQL):
                     inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
                 ),
             ),
+            exp.Date: unsupported_args("zone", "expressions")(rename_func("DATE")),
             exp.Cast: unsupported_args("format", "action", "default")(
                 lambda self, e: f"{self.sql(e, 'this')} :> {self.sql(e, 'to')}"
             ),
@@ -138,7 +263,75 @@ class SingleStore(MySQL):
                 ),
             ),
             exp.UnixToTimeStr: lambda self, e: f"FROM_UNIXTIME({self.sql(e, 'this')}) :> TEXT",
+            exp.JSONExtract: unsupported_args(
+                "only_json_types",
+                "expressions",
+                "variant_extract",
+                "json_query",
+                "option",
+                "quote",
+                "on_condition",
+                "requires_json",
+            )(json_extract_segments("JSON_EXTRACT_JSON")),
+            exp.JSONBExtract: json_extract_segments("BSON_EXTRACT_BSON"),
+            exp.JSONPathKey: json_path_key_only_name,
+            exp.JSONPathSubscript: lambda self, e: self.json_path_part(e.this),
+            exp.JSONPathRoot: lambda *_: "",
+            exp.DayOfWeekIso: lambda self, e: f"(({self.func('DAYOFWEEK', e.this)} % 7) + 1)",
+            exp.DayOfMonth: rename_func("DAY"),
+            exp.Hll: rename_func("APPROX_COUNT_DISTINCT"),
+            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
+            exp.CountIf: count_if_to_sum,
+            exp.LogicalOr: lambda self, e: f"MAX(ABS({self.sql(e, 'this')}))",
+            exp.LogicalAnd: lambda self, e: f"MIN(ABS({self.sql(e, 'this')}))",
+            exp.ApproxQuantile: unsupported_args("accuracy", "weight")(
+                lambda self, e: self.func(
+                    "APPROX_PERCENTILE",
+                    e.this,
+                    e.args.get("quantile"),
+                    e.args.get("error_tolerance"),
+                )
+            ),
+            exp.Variance: rename_func("VAR_SAMP"),
+            exp.VariancePop: rename_func("VAR_POP"),
+            exp.Xor: bool_xor_sql,
+            exp.RegexpLike: lambda self, e: self.binary(e, "RLIKE"),
+            exp.Repeat: lambda self, e: self.func(
+                "LPAD",
+                exp.Literal.string(""),
+                exp.Mul(this=self.func("LENGTH", e.this), expression=e.args.get("times")),
+                e.this,
+            ),
+            exp.IsAscii: lambda self, e: f"({self.sql(e, 'this')} RLIKE '^[\x00-\x7f]*$')",
+            exp.MD5Digest: lambda self, e: self.func("UNHEX", self.func("MD5", e.this)),
+            exp.Chr: rename_func("CHAR"),
+            exp.Contains: rename_func("INSTR"),
+            exp.RegexpExtractAll: unsupported_args("position", "occurrence", "group")(
+                lambda self, e: self.func(
+                    "REGEXP_MATCH",
+                    e.this,
+                    e.expression,
+                    e.args.get("parameters"),
+                )
+            ),
+            exp.RegexpExtract: unsupported_args("group")(
+                lambda self, e: self.func(
+                    "REGEXP_SUBSTR",
+                    e.this,
+                    e.expression,
+                    e.args.get("position"),
+                    e.args.get("occurrence"),
+                    e.args.get("parameters"),
+                )
+            ),
+            exp.StartsWith: lambda self, e: self.func(
+                "REGEXP_INSTR", e.this, self.func("CONCAT", exp.Literal.string("^"), e.expression)
+            ),
+            exp.FromBase: lambda self, e: self.func(
+                "CONV", e.this, e.expression, exp.Literal.number(10)
+            ),
         }
+        TRANSFORMS.pop(exp.JSONExtractScalar)

         # https://docs.singlestore.com/cloud/reference/sql-reference/restricted-keywords/list-of-restricted-keywords/
         RESERVED_KEYWORDS = {
@@ -1193,3 +1386,49 @@ class SingleStore(MySQL):
             "zerofill",
             "zone",
         }
+
+        def jsonextractscalar_sql(self, expression: exp.JSONExtractScalar) -> str:
+            json_type = expression.args.get("json_type")
+            func_name = "JSON_EXTRACT_JSON" if json_type is None else f"JSON_EXTRACT_{json_type}"
+            return json_extract_segments(func_name)(self, expression)
+
+        def jsonbextractscalar_sql(self, expression: exp.JSONBExtractScalar) -> str:
+            json_type = expression.args.get("json_type")
+            func_name = "BSON_EXTRACT_BSON" if json_type is None else f"BSON_EXTRACT_{json_type}"
+            return json_extract_segments(func_name)(self, expression)
+
+        def jsonextractarray_sql(self, expression: exp.JSONExtractArray) -> str:
+            self.unsupported("Arrays are not supported in SingleStore")
+            return self.function_fallback_sql(expression)
+
+        @unsupported_args("on_condition")
+        def jsonvalue_sql(self, expression: exp.JSONValue) -> str:
+            res: exp.Expression = exp.JSONExtractScalar(
+                this=expression.this,
+                expression=expression.args.get("path"),
+                json_type="STRING",
+            )
+
+            returning = expression.args.get("returning")
+            if returning is not None:
+                res = exp.Cast(this=res, to=returning)
+
+            return self.sql(res)
+
+        def all_sql(self, expression: exp.All) -> str:
+            self.unsupported("ALL subquery predicate is not supported in SingleStore")
+            return super().all_sql(expression)
+
+        def jsonarraycontains_sql(self, expression: exp.JSONArrayContains) -> str:
+            json_type = expression.text("json_type").upper()
+
+            if json_type:
+                return self.func(
+                    f"JSON_ARRAY_CONTAINS_{json_type}", expression.expression, expression.this
+                )
+
+            return self.func(
+                "JSON_ARRAY_CONTAINS_JSON",
+                expression.expression,
+                self.func("TO_JSON", expression.this),
+            )
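For reference, a minimal usage sketch of the new SingleStore JSON mappings through sqlglot's public API (the SQL strings are illustrative and not taken from the package; output is not asserted):

import sqlglot

# Round-trip SingleStore's typed JSON extractors and array-contains functions
# through the new parser/generator mappings shown in the diff above.
for sql in (
    "SELECT JSON_EXTRACT_STRING(payload, 'user', 'name') FROM events",
    "SELECT JSON_ARRAY_CONTAINS_DOUBLE(scores, 1.5) FROM events",
):
    print(sqlglot.parse_one(sql, read="singlestore").sql(dialect="singlestore"))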
sqlglot/dialects/spark.py
CHANGED
@@ -9,6 +9,7 @@ from sqlglot.dialects.dialect import (
     unit_to_var,
     timestampdiff_sql,
     build_date_delta,
+    date_delta_to_binary_interval_op,
     groupconcat_sql,
 )
 from sqlglot.dialects.hive import _build_with_ignore_nulls
@@ -195,13 +196,18 @@ class Spark(Spark2):
                 ]
             ),
             exp.DateFromUnixDate: rename_func("DATE_FROM_UNIX_DATE"),
+            exp.DatetimeAdd: date_delta_to_binary_interval_op(cast=False),
+            exp.DatetimeSub: date_delta_to_binary_interval_op(cast=False),
             exp.GroupConcat: _groupconcat_sql,
             exp.EndsWith: rename_func("ENDSWITH"),
             exp.PartitionedByProperty: lambda self,
             e: f"PARTITIONED BY {self.wrap(self.expressions(sqls=[_normalize_partition(e) for e in e.this.expressions], skip_first=True))}",
             exp.StartsWith: rename_func("STARTSWITH"),
+            exp.TimeAdd: date_delta_to_binary_interval_op(cast=False),
+            exp.TimeSub: date_delta_to_binary_interval_op(cast=False),
             exp.TsOrDsAdd: _dateadd_sql,
             exp.TimestampAdd: _dateadd_sql,
+            exp.TimestampSub: date_delta_to_binary_interval_op(cast=False),
             exp.DatetimeDiff: timestampdiff_sql,
             exp.TimestampDiff: timestampdiff_sql,
             exp.TryCast: lambda self, e: (
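A rough usage sketch (not taken from the package's tests): with the new date_delta_to_binary_interval_op mappings, datetime/time add and subtract expressions should render as +/- INTERVAL arithmetic when generating Spark SQL; the exact output is assumed, not verified here.

import sqlglot

# DATETIME_SUB parses into exp.DatetimeSub on the BigQuery side; the Spark
# generator should now emit binary interval arithmetic for it.
print(
    sqlglot.transpile(
        "SELECT DATETIME_SUB(created_at, INTERVAL 3 HOUR)",
        read="bigquery",
        write="spark",
    )[0]
)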
sqlglot/dialects/trino.py
CHANGED
@@ -21,6 +21,7 @@ class Trino(Presto):
             **Presto.Parser.FUNCTION_PARSERS,
             "TRIM": lambda self: self._parse_trim(),
             "JSON_QUERY": lambda self: self._parse_json_query(),
+            "JSON_VALUE": lambda self: self._parse_json_value(),
             "LISTAGG": lambda self: self._parse_string_agg(),
         }

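A small illustration (assumed behavior, not from the package): with "JSON_VALUE" registered in FUNCTION_PARSERS, Trino's JSON_VALUE is handed to the dedicated _parse_json_value path instead of falling back to a generic function parse.

import sqlglot

# repr() shows which expression node JSON_VALUE was parsed into; an
# exp.JSONValue-style node is expected rather than an exp.Anonymous wrapper.
parsed = sqlglot.parse_one("SELECT JSON_VALUE(payload, 'strict $.user.name')", read="trino")
print(repr(parsed))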
sqlglot/dialects/tsql.py
CHANGED
@@ -658,6 +658,8 @@ class TSQL(Dialect):
             else self.expression(exp.ScopeResolution, this=this, expression=to),
         }

+        SET_OP_MODIFIERS = {"offset"}
+
         def _parse_alter_table_set(self) -> exp.AlterSet:
             return self._parse_wrapped(super()._parse_alter_table_set)

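If SET_OP_MODIFIERS = {"offset"} does what its name suggests, an OFFSET clause can now attach to a set operation when parsing T-SQL. A hedged sketch (the SQL and the expected behavior are assumptions, not taken from the package):

import sqlglot

# Parse a UNION whose trailing ORDER BY ... OFFSET applies to the whole set
# operation, then round-trip it; the result is printed, not asserted.
sql = "SELECT a FROM x UNION SELECT a FROM y ORDER BY a OFFSET 5 ROWS"
print(sqlglot.parse_one(sql, read="tsql").sql(dialect="tsql"))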
sqlglot/expressions.py
CHANGED
@@ -2790,6 +2790,11 @@ class BackupProperty(Property):
     arg_types = {"this": True}


+# https://doris.apache.org/docs/sql-manual/sql-statements/table-and-view/async-materialized-view/CREATE-ASYNC-MATERIALIZED-VIEW/
+class BuildProperty(Property):
+    arg_types = {"this": True}
+
+
 class BlockCompressionProperty(Property):
     arg_types = {
         "autotemp": False,
@@ -3031,6 +3036,27 @@ class PartitionByRangePropertyDynamic(Expression):
     arg_types = {"this": False, "start": True, "end": True, "every": True}


+# https://doris.apache.org/docs/table-design/data-partitioning/manual-partitioning
+class PartitionByListProperty(Property):
+    arg_types = {"partition_expressions": True, "create_expressions": True}
+
+
+# https://doris.apache.org/docs/table-design/data-partitioning/manual-partitioning
+class PartitionList(Expression):
+    arg_types = {"this": True, "expressions": True}
+
+
+# https://doris.apache.org/docs/sql-manual/sql-statements/table-and-view/async-materialized-view/CREATE-ASYNC-MATERIALIZED-VIEW
+class RefreshTriggerProperty(Property):
+    arg_types = {
+        "method": True,
+        "kind": False,
+        "every": False,
+        "unit": False,
+        "starts": False,
+    }
+
+
 # https://docs.starrocks.io/docs/sql-reference/sql-statements/table_bucket_part_index/CREATE_TABLE/
 class UniqueKeyProperty(Property):
     arg_types = {"expressions": True}
@@ -4304,7 +4330,14 @@ class Select(Query):

     @property
     def named_selects(self) -> t.List[str]:
-
+        selects = []
+
+        for e in self.expressions:
+            if e.alias_or_name:
+                selects.append(e.output_name)
+            elif isinstance(e, Aliases):
+                selects.extend([a.name for a in e.aliases])
+        return selects

     @property
     def is_star(self) -> bool:
@@ -4876,6 +4909,7 @@ class Alter(Expression):
         "options": False,
         "cluster": False,
         "not_valid": False,
+        "check": False,
     }

     @property
@@ -5301,7 +5335,7 @@ class TimeUnit(Expression):

     def __init__(self, **args):
         unit = args.get("unit")
-        if
+        if type(unit) in self.VAR_LIKE:
             args["unit"] = Var(
                 this=(self.UNABBREVIATED_UNIT_NAME.get(unit.name) or unit.name).upper()
             )
@@ -5429,6 +5463,11 @@ class ByteLength(Func):
     pass


+# https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#bool_for_json
+class JSONBool(Func):
+    pass
+
+
 class ArrayRemove(Func):
     arg_types = {"this": True, "expression": True}

@@ -5455,15 +5494,42 @@ class ApproxTopK(AggFunc):
     arg_types = {"this": True, "expression": False, "counters": False}


+class ApproxTopSum(AggFunc):
+    arg_types = {"this": True, "expression": True, "count": True}
+
+
+class ApproxQuantiles(AggFunc):
+    arg_types = {"this": True, "expression": False}
+
+
+class FarmFingerprint(Func):
+    arg_types = {"expressions": True}
+    is_var_len_args = True
+    _sql_names = ["FARM_FINGERPRINT", "FARMFINGERPRINT64"]
+
+
 class Flatten(Func):
     pass


+class Float64(Func):
+    arg_types = {"this": True, "expression": False}
+
+
 # https://spark.apache.org/docs/latest/api/sql/index.html#transform
 class Transform(Func):
     arg_types = {"this": True, "expression": True}


+class Translate(Func):
+    arg_types = {"this": True, "from": True, "to": True}
+
+
+class Grouping(AggFunc):
+    arg_types = {"expressions": True}
+    is_var_len_args = True
+
+
 class Anonymous(Func):
     arg_types = {"this": True, "expressions": False}
     is_var_len_args = True
@@ -5531,7 +5597,16 @@ class Pad(Func):
 # https://docs.snowflake.com/en/sql-reference/functions/to_char
 # https://docs.oracle.com/en/database/oracle/oracle-database/23/sqlrf/TO_CHAR-number.html
 class ToChar(Func):
-    arg_types = {
+    arg_types = {
+        "this": True,
+        "format": False,
+        "nlsparam": False,
+        "is_numeric": False,
+    }
+
+
+class ToCodePoints(Func):
+    pass


 # https://docs.snowflake.com/en/sql-reference/functions/to_decimal
@@ -5554,6 +5629,10 @@ class ToDouble(Func):
     }


+class CodePointsToBytes(Func):
+    pass
+
+
 class Columns(Func):
     arg_types = {"this": True, "unpack": False}

@@ -6179,12 +6258,16 @@ class Floor(Func):
     arg_types = {"this": True, "decimals": False, "to": False}


+class FromBase32(Func):
+    pass
+
+
 class FromBase64(Func):
     pass


-class
-
+class ToBase32(Func):
+    pass


 class ToBase64(Func):
@@ -6508,7 +6591,13 @@ class JSONExtractArray(Func):


 class JSONExtractScalar(Binary, Func):
-    arg_types = {
+    arg_types = {
+        "this": True,
+        "expression": True,
+        "only_json_types": False,
+        "expressions": False,
+        "json_type": False,
+    }
     _sql_names = ["JSON_EXTRACT_SCALAR"]
     is_var_len_args = True

@@ -6522,6 +6611,7 @@ class JSONBExtract(Binary, Func):


 class JSONBExtractScalar(Binary, Func):
+    arg_types = {"this": True, "expression": True, "json_type": False}
     _sql_names = ["JSONB_EXTRACT_SCALAR"]


@@ -6532,9 +6622,18 @@ class JSONFormat(Func):

 # https://dev.mysql.com/doc/refman/8.0/en/json-search-functions.html#operator_member-of
 class JSONArrayContains(Binary, Predicate, Func):
+    arg_types = {"this": True, "expression": True, "json_type": False}
     _sql_names = ["JSON_ARRAY_CONTAINS"]


+class ParseBignumeric(Func):
+    pass
+
+
+class ParseNumeric(Func):
+    pass
+
+
 class ParseJSON(Func):
     # BigQuery, Snowflake have PARSE_JSON, Presto has JSON_PARSE
     # Snowflake also has TRY_PARSE_JSON, which is represented using `safe`
@@ -6705,6 +6804,29 @@ class Predict(Func):
     arg_types = {"this": True, "expression": True, "params_struct": False}


+# https://cloud.google.com/bigquery/docs/reference/standard-sql/bigqueryml-syntax-feature-time
+class FeaturesAtTime(Func):
+    arg_types = {"this": True, "time": False, "num_rows": False, "ignore_feature_nulls": False}
+
+
+# https://cloud.google.com/bigquery/docs/reference/standard-sql/bigqueryml-syntax-generate-embedding
+class GenerateEmbedding(Func):
+    arg_types = {"this": True, "expression": True, "params_struct": False}
+
+
+# https://cloud.google.com/bigquery/docs/reference/standard-sql/search_functions#vector_search
+class VectorSearch(Func):
+    arg_types = {
+        "this": True,
+        "column_to_search": True,
+        "query_table": True,
+        "query_column_to_search": False,
+        "top_k": False,
+        "distance_type": False,
+        "options": False,
+    }
+
+
 class Pow(Binary, Func):
     _sql_names = ["POWER", "POW"]

@@ -6722,7 +6844,13 @@ class Quantile(AggFunc):


 class ApproxQuantile(Quantile):
-    arg_types = {
+    arg_types = {
+        "this": True,
+        "quantile": True,
+        "accuracy": False,
+        "weight": False,
+        "error_tolerance": False,
+    }


 class Quarter(Func):
@@ -6824,6 +6952,10 @@ class SafeDivide(Func):
     arg_types = {"this": True, "expression": True}


+class SafeConvertBytesToString(Func):
+    pass
+
+
 class SHA(Func):
     _sql_names = ["SHA", "SHA1"]

@@ -6841,6 +6973,10 @@ class SortArray(Func):
     arg_types = {"this": True, "asc": False}


+class Soundex(Func):
+    pass
+
+
 class Split(Func):
     arg_types = {"this": True, "expression": True, "limit": False}

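One behavioral change worth calling out in expressions.py is the reworked Select.named_selects, which now also collects names from Aliases nodes while keeping the old behavior for ordinary projection lists. A quick sketch (the expected value reflects a reading of the new code, not captured package output):

import sqlglot

# Plain projections keep their output names; the new elif branch only applies to
# expressions wrapped in an exp.Aliases node (e.g. Hive-style multi-column aliases).
select = sqlglot.parse_one("SELECT a, b AS c FROM t")
print(select.named_selects)  # expected: ['a', 'c']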