sqlglot 27.13.2__py3-none-any.whl → 27.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlglot/_version.py +2 -2
- sqlglot/dialects/bigquery.py +23 -18
- sqlglot/dialects/clickhouse.py +1 -0
- sqlglot/dialects/dialect.py +1 -1
- sqlglot/dialects/duckdb.py +15 -4
- sqlglot/dialects/fabric.py +1 -1
- sqlglot/dialects/hive.py +1 -0
- sqlglot/dialects/mysql.py +41 -9
- sqlglot/dialects/postgres.py +19 -0
- sqlglot/dialects/redshift.py +1 -0
- sqlglot/dialects/singlestore.py +129 -1
- sqlglot/dialects/snowflake.py +88 -3
- sqlglot/dialects/sqlite.py +1 -0
- sqlglot/dialects/tsql.py +10 -0
- sqlglot/expressions.py +68 -4
- sqlglot/generator.py +27 -13
- sqlglot/optimizer/annotate_types.py +35 -5
- sqlglot/optimizer/qualify_columns.py +6 -2
- sqlglot/parser.py +51 -15
- sqlglot/serde.py +108 -50
- sqlglot/tokens.py +4 -0
- {sqlglot-27.13.2.dist-info → sqlglot-27.15.0.dist-info}/METADATA +1 -1
- {sqlglot-27.13.2.dist-info → sqlglot-27.15.0.dist-info}/RECORD +26 -26
- {sqlglot-27.13.2.dist-info → sqlglot-27.15.0.dist-info}/WHEEL +0 -0
- {sqlglot-27.13.2.dist-info → sqlglot-27.15.0.dist-info}/licenses/LICENSE +0 -0
- {sqlglot-27.13.2.dist-info → sqlglot-27.15.0.dist-info}/top_level.txt +0 -0
sqlglot/_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
     commit_id: COMMIT_ID
     __commit_id__: COMMIT_ID

-__version__ = version = '27.13.2'
-__version_tuple__ = version_tuple = (27, 13, 2)
+__version__ = version = '27.15.0'
+__version_tuple__ = version_tuple = (27, 15, 0)

 __commit_id__ = commit_id = None
sqlglot/dialects/bigquery.py
CHANGED
@@ -859,10 +859,15 @@ class BigQuery(Dialect):
                 exp.JSONArray, expressions=self._parse_csv(self._parse_bitwise)
             ),
             "MAKE_INTERVAL": lambda self: self._parse_make_interval(),
-            "PREDICT": lambda self: self.
+            "PREDICT": lambda self: self._parse_ml(exp.Predict),
+            "TRANSLATE": lambda self: self._parse_translate(),
             "FEATURES_AT_TIME": lambda self: self._parse_features_at_time(),
-            "GENERATE_EMBEDDING": lambda self: self.
+            "GENERATE_EMBEDDING": lambda self: self._parse_ml(exp.GenerateEmbedding),
+            "GENERATE_TEXT_EMBEDDING": lambda self: self._parse_ml(
+                exp.GenerateEmbedding, is_text=True
+            ),
             "VECTOR_SEARCH": lambda self: self._parse_vector_search(),
+            "FORECAST": lambda self: self._parse_ml(exp.MLForecast),
         }
         FUNCTION_PARSERS.pop("TRIM")

@@ -892,8 +897,6 @@ class BigQuery(Dialect):
         RANGE_PARSERS = parser.Parser.RANGE_PARSERS.copy()
         RANGE_PARSERS.pop(TokenType.OVERLAPS)

-        NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}
-
         DASHED_TABLE_PART_FOLLOW_TOKENS = {TokenType.DOT, TokenType.L_PAREN, TokenType.R_PAREN}

         STATEMENT_PARSERS = {

@@ -1146,34 +1149,36 @@ class BigQuery(Dialect):

             return expr

-        def
+        def _parse_ml(self, expr_type: t.Type[E], **kwargs) -> E:
             self._match_text_seq("MODEL")
             this = self._parse_table()

             self._match(TokenType.COMMA)
             self._match_text_seq("TABLE")

-
-
-
-                expression=self._parse_table(),
-                params_struct=self._match(TokenType.COMMA) and self._parse_bitwise(),
+            # Certain functions like ML.FORECAST require a STRUCT argument but not a TABLE/SELECT one
+            expression = (
+                self._parse_table() if not self._match(TokenType.STRUCT, advance=False) else None
             )

-        def _parse_generate_embedding(self) -> exp.GenerateEmbedding:
-            self._match_text_seq("MODEL")
-            this = self._parse_table()
-
             self._match(TokenType.COMMA)
-            self._match_text_seq("TABLE")

             return self.expression(
-
+                expr_type,
                 this=this,
-                expression=
-                params_struct=self.
+                expression=expression,
+                params_struct=self._parse_bitwise(),
+                **kwargs,
             )

+        def _parse_translate(self) -> exp.Translate | exp.MLTranslate:
+            # Check if this is ML.TRANSLATE by looking at previous tokens
+            token = seq_get(self._tokens, self._index - 4)
+            if token and token.text.upper() == "ML":
+                return self._parse_ml(exp.MLTranslate)
+
+            return exp.Translate.from_arg_list(self._parse_function_args())
+
         def _parse_features_at_time(self) -> exp.FeaturesAtTime:
             self._match(TokenType.TABLE)
             this = self._parse_table()
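The BigQuery changes consolidate the ML function parsers (ML.PREDICT, ML.FORECAST, ML.GENERATE_EMBEDDING, ML.GENERATE_TEXT_EMBEDDING, ML.TRANSLATE) into a single _parse_ml helper that builds dedicated expression nodes. A minimal sketch of observing this through the public API (the model and table names are placeholders, and the exact node repr may differ):

    import sqlglot
    from sqlglot import exp

    sql = "SELECT * FROM ML.PREDICT(MODEL my_dataset.my_model, TABLE my_dataset.my_table)"
    ast = sqlglot.parse_one(sql, read="bigquery")

    # PREDICT is now routed through _parse_ml, which builds an exp.Predict node
    print(ast.find(exp.Predict) is not None)  # expected: True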
sqlglot/dialects/clickhouse.py
CHANGED
@@ -189,6 +189,7 @@ def _map_sql(self: ClickHouse.Generator, expression: exp.Map | exp.VarMap) -> st


 class ClickHouse(Dialect):
+    INDEX_OFFSET = 1
     NORMALIZE_FUNCTIONS: bool | str = False
     NULL_ORDERING = "nulls_are_last"
     SUPPORTS_USER_DEFINED_TYPES = False
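ClickHouse arrays are 1-based, and the dialect now records that via INDEX_OFFSET = 1 so subscripts can be adjusted when converting to or from 0-based dialects. A quick sketch of inspecting the new setting:

    from sqlglot.dialects.clickhouse import ClickHouse

    # New dialect-level flag: ClickHouse array subscripts start at 1.
    print(ClickHouse.INDEX_OFFSET)  # 1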
sqlglot/dialects/dialect.py
CHANGED
@@ -674,6 +674,7 @@ class Dialect(metaclass=_Dialect):
         exp.DataType.Type.BOOLEAN: {
             exp.Between,
             exp.Boolean,
+            exp.Contains,
             exp.EndsWith,
             exp.In,
             exp.LogicalAnd,

@@ -818,7 +819,6 @@ class Dialect(metaclass=_Dialect):
         exp.Cast: lambda self, e: self._annotate_with_type(e, e.args["to"]),
         exp.Case: lambda self, e: self._annotate_by_args(e, "default", "ifs"),
         exp.Coalesce: lambda self, e: self._annotate_by_args(e, "this", "expressions"),
-        exp.Contains: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BOOLEAN),
         exp.Count: lambda self, e: self._annotate_with_type(
             e, exp.DataType.Type.BIGINT if e.args.get("big_int") else exp.DataType.Type.INT
         ),
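For users this is behavior-preserving: exp.Contains is still annotated as BOOLEAN, it just moves from a dedicated annotator lambda into the shared BOOLEAN expression set. A sketch of checking the annotation, assuming exp.Contains accepts this/expression args like the neighbouring nodes:

    from sqlglot import exp
    from sqlglot.optimizer.annotate_types import annotate_types

    # Build a Contains node directly and run the type annotator over a small SELECT
    contains = exp.Contains(this=exp.column("haystack"), expression=exp.Literal.string("needle"))
    annotated = annotate_types(exp.select(contains).from_("t"))
    print(annotated.find(exp.Contains).type)  # expected: BOOLEAN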
sqlglot/dialects/duckdb.py
CHANGED
@@ -365,6 +365,9 @@ class DuckDB(Dialect):
             "ANY_VALUE": lambda args: exp.IgnoreNulls(this=exp.AnyValue.from_arg_list(args)),
             "ARRAY_REVERSE_SORT": _build_sort_array_desc,
             "ARRAY_SORT": exp.SortArray.from_arg_list,
+            "BIT_AND": exp.BitwiseAndAgg.from_arg_list,
+            "BIT_OR": exp.BitwiseOrAgg.from_arg_list,
+            "BIT_XOR": exp.BitwiseXorAgg.from_arg_list,
             "DATEDIFF": _build_date_diff,
             "DATE_DIFF": _build_date_diff,
             "DATE_TRUNC": date_trunc_to_time,

@@ -649,7 +652,10 @@ class DuckDB(Dialect):
             exp.ArrayUniqueAgg: lambda self, e: self.func(
                 "LIST", exp.Distinct(expressions=[e.this])
             ),
+            exp.BitwiseAndAgg: rename_func("BIT_AND"),
+            exp.BitwiseOrAgg: rename_func("BIT_OR"),
             exp.BitwiseXor: rename_func("XOR"),
+            exp.BitwiseXorAgg: rename_func("BIT_XOR"),
             exp.CommentColumnConstraint: no_comment_column_constraint_sql,
             exp.CosineDistance: rename_func("LIST_COSINE_DISTANCE"),
             exp.CurrentDate: lambda *_: "CURRENT_DATE",

@@ -995,13 +1001,18 @@ class DuckDB(Dialect):

         def join_sql(self, expression: exp.Join) -> str:
             if (
-                expression.
+                not expression.args.get("using")
                 and not expression.args.get("on")
-                and
+                and not expression.method
+                and (expression.kind in ("", "INNER", "OUTER"))
             ):
-                # Some dialects support `LEFT JOIN UNNEST(...)` without an explicit ON clause
+                # Some dialects support `LEFT/INNER JOIN UNNEST(...)` without an explicit ON clause
                 # DuckDB doesn't, but we can just add a dummy ON clause that is always true
-
+                if isinstance(expression.this, exp.Unnest):
+                    return super().join_sql(expression.on(exp.true()))
+
+                expression.args.pop("side", None)
+                expression.args.pop("kind", None)

             return super().join_sql(expression)

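The DuckDB parser now maps the BIT_AND/BIT_OR/BIT_XOR aggregates to dedicated nodes (matching the Postgres changes below), and join_sql adds an ON TRUE to bare LEFT/INNER joins against UNNEST. A sketch of the former (node names taken from the diff):

    import sqlglot
    from sqlglot import exp

    ast = sqlglot.parse_one("SELECT BIT_AND(x), BIT_OR(x), BIT_XOR(x) FROM t", read="duckdb")
    found = [type(node).__name__ for node in ast.find_all(exp.BitwiseAndAgg, exp.BitwiseOrAgg, exp.BitwiseXorAgg)]
    print(sorted(found))  # expected: ['BitwiseAndAgg', 'BitwiseOrAgg', 'BitwiseXorAgg']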
sqlglot/dialects/fabric.py
CHANGED
@@ -125,7 +125,7 @@ class Fabric(TSQL):
         exp.DataType.Type.TIMESTAMPTZ: "DATETIME2",
         exp.DataType.Type.TINYINT: "SMALLINT",
         exp.DataType.Type.UTINYINT: "SMALLINT",
-        exp.DataType.Type.UUID: "
+        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
         exp.DataType.Type.XML: "VARCHAR",
     }

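With this mapping, UUID columns converted to the Fabric dialect should now come out as UNIQUEIDENTIFIER. A hedged sketch (exact DDL formatting may differ):

    import sqlglot

    ddl = "CREATE TABLE t (id UUID)"
    print(sqlglot.transpile(ddl, read="postgres", write="fabric")[0])
    # expected to contain: id UNIQUEIDENTIFIER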
sqlglot/dialects/hive.py
CHANGED
sqlglot/dialects/mysql.py
CHANGED
@@ -191,10 +191,8 @@ class MySQL(Dialect):

         KEYWORDS = {
             **tokens.Tokenizer.KEYWORDS,
-            "CHARSET": TokenType.CHARACTER_SET,
-            # The DESCRIBE and EXPLAIN statements are synonyms.
-            # https://dev.mysql.com/doc/refman/8.4/en/explain.html
             "BLOB": TokenType.BLOB,
+            "CHARSET": TokenType.CHARACTER_SET,
             "DISTINCTROW": TokenType.DISTINCT,
             "EXPLAIN": TokenType.DESCRIBE,
             "FORCE": TokenType.FORCE,

@@ -204,17 +202,19 @@ class MySQL(Dialect):
             "LONGBLOB": TokenType.LONGBLOB,
             "LONGTEXT": TokenType.LONGTEXT,
             "MEDIUMBLOB": TokenType.MEDIUMBLOB,
-            "TINYBLOB": TokenType.TINYBLOB,
-            "TINYTEXT": TokenType.TINYTEXT,
-            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
             "MEDIUMINT": TokenType.MEDIUMINT,
+            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
             "MEMBER OF": TokenType.MEMBER_OF,
+            "MOD": TokenType.MOD,
             "SEPARATOR": TokenType.SEPARATOR,
             "SERIAL": TokenType.SERIAL,
-            "START": TokenType.BEGIN,
             "SIGNED": TokenType.BIGINT,
             "SIGNED INTEGER": TokenType.BIGINT,
+            "SOUNDS LIKE": TokenType.SOUNDS_LIKE,
+            "START": TokenType.BEGIN,
             "TIMESTAMP": TokenType.TIMESTAMPTZ,
+            "TINYBLOB": TokenType.TINYBLOB,
+            "TINYTEXT": TokenType.TINYTEXT,
             "UNLOCK TABLES": TokenType.COMMAND,
             "UNSIGNED": TokenType.UBIGINT,
             "UNSIGNED INTEGER": TokenType.UBIGINT,

@@ -271,6 +271,7 @@ class MySQL(Dialect):
         FUNC_TOKENS = {
             *parser.Parser.FUNC_TOKENS,
             TokenType.DATABASE,
+            TokenType.MOD,
             TokenType.SCHEMA,
             TokenType.VALUES,
         }

@@ -292,6 +293,11 @@ class MySQL(Dialect):

         RANGE_PARSERS = {
             **parser.Parser.RANGE_PARSERS,
+            TokenType.SOUNDS_LIKE: lambda self, this: self.expression(
+                exp.EQ,
+                this=self.expression(exp.Soundex, this=this),
+                expression=self.expression(exp.Soundex, this=self._parse_term()),
+            ),
             TokenType.MEMBER_OF: lambda self, this: self.expression(
                 exp.JSONArrayContains,
                 this=this,

@@ -359,6 +365,7 @@ class MySQL(Dialect):
                 exp.Anonymous, this="VALUES", expressions=[self._parse_id_var()]
             ),
             "JSON_VALUE": lambda self: self._parse_json_value(),
+            "SUBSTR": lambda self: self._parse_substring(),
         }

         STATEMENT_PARSERS = {

@@ -579,9 +586,11 @@ class MySQL(Dialect):
             full: t.Optional[bool] = None,
             global_: t.Optional[bool] = None,
         ) -> exp.Show:
+            json = self._match_text_seq("JSON")
+
             if target:
                 if isinstance(target, str):
-                    self._match_text_seq(target)
+                    self._match_text_seq(*target.split(" "))
                 target_id = self._parse_id_var()
             else:
                 target_id = None

@@ -618,6 +627,12 @@ class MySQL(Dialect):
             mutex = True if self._match_text_seq("MUTEX") else None
             mutex = False if self._match_text_seq("STATUS") else mutex

+            for_table = self._parse_id_var() if self._match_text_seq("FOR", "TABLE") else None
+            for_group = self._parse_string() if self._match_text_seq("FOR", "GROUP") else None
+            for_user = self._parse_string() if self._match_text_seq("FOR", "USER") else None
+            for_role = self._parse_string() if self._match_text_seq("FOR", "ROLE") else None
+            into_outfile = self._parse_string() if self._match_text_seq("INTO", "OUTFILE") else None
+
             return self.expression(
                 exp.Show,
                 this=this,

@@ -634,6 +649,12 @@ class MySQL(Dialect):
                 offset=offset,
                 limit=limit,
                 mutex=mutex,
+                for_table=for_table,
+                for_group=for_group,
+                for_user=for_user,
+                for_role=for_role,
+                into_outfile=into_outfile,
+                json=json,
                 **{"global": global_},  # type: ignore
             )

@@ -1200,6 +1221,10 @@ class MySQL(Dialect):
                 target = f" FROM{target}"
             elif expression.name == "GRANTS":
                 target = f" FOR{target}"
+            elif expression.name in ("LINKS", "PARTITIONS"):
+                target = f" ON{target}" if target else ""
+            elif expression.name == "PROJECTIONS":
+                target = f" ON TABLE{target}" if target else ""

             db = self._prefixed_sql("FROM", expression, "db")

@@ -1227,7 +1252,14 @@ class MySQL(Dialect):
             else:
                 mutex_or_status = ""

-
+            for_table = self._prefixed_sql("FOR TABLE", expression, "for_table")
+            for_group = self._prefixed_sql("FOR GROUP", expression, "for_group")
+            for_user = self._prefixed_sql("FOR USER", expression, "for_user")
+            for_role = self._prefixed_sql("FOR ROLE", expression, "for_role")
+            into_outfile = self._prefixed_sql("INTO OUTFILE", expression, "into_outfile")
+            json = " JSON" if expression.args.get("json") else ""
+
+            return f"SHOW{full}{global_}{this}{json}{target}{for_table}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}{for_group}{for_user}{for_role}{into_outfile}"

         def altercolumn_sql(self, expression: exp.AlterColumn) -> str:
             dtype = self.sql(expression, "dtype")
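The most visible MySQL parser addition is SOUNDS LIKE, which is rewritten into an equality of SOUNDEX calls so it survives transpilation to dialects without the operator. A sketch (output formatting may differ slightly):

    import sqlglot

    sql = "SELECT * FROM people WHERE first_name SOUNDS LIKE 'Jon'"
    print(sqlglot.transpile(sql, read="mysql", write="postgres")[0])
    # expected predicate: SOUNDEX(first_name) = SOUNDEX('Jon')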
sqlglot/dialects/postgres.py
CHANGED
@@ -325,6 +325,9 @@ class Postgres(Dialect):
             "@@": TokenType.DAT,
             "@>": TokenType.AT_GT,
             "<@": TokenType.LT_AT,
+            "?&": TokenType.QMARK_AMP,
+            "?|": TokenType.QMARK_PIPE,
+            "#-": TokenType.HASH_DASH,
             "|/": TokenType.PIPE_SLASH,
             "||/": TokenType.DPIPE_SLASH,
             "BEGIN": TokenType.BEGIN,

@@ -386,6 +389,9 @@ class Postgres(Dialect):

         FUNCTIONS = {
             **parser.Parser.FUNCTIONS,
+            "BIT_AND": exp.BitwiseAndAgg.from_arg_list,
+            "BIT_OR": exp.BitwiseOrAgg.from_arg_list,
+            "BIT_XOR": exp.BitwiseXorAgg.from_arg_list,
             "DATE_TRUNC": build_timestamp_trunc,
             "DIV": lambda args: exp.cast(
                 binary_from_function(exp.IntDiv)(args), exp.DataType.Type.DECIMAL

@@ -418,6 +424,11 @@ class Postgres(Dialect):
         FUNCTION_PARSERS = {
             **parser.Parser.FUNCTION_PARSERS,
             "DATE_PART": lambda self: self._parse_date_part(),
+            "JSON_AGG": lambda self: self.expression(
+                exp.JSONArrayAgg,
+                this=self._parse_bitwise(),
+                order=self._parse_order(),
+            ),
             "JSONB_EXISTS": lambda self: self._parse_jsonb_exists(),
         }

@@ -584,7 +595,10 @@ class Postgres(Dialect):
             exp.AnyValue: _versioned_anyvalue_sql,
             exp.ArrayConcat: lambda self, e: self.arrayconcat_sql(e, name="ARRAY_CAT"),
             exp.ArrayFilter: filter_array_using_unnest,
+            exp.BitwiseAndAgg: rename_func("BIT_AND"),
+            exp.BitwiseOrAgg: rename_func("BIT_OR"),
             exp.BitwiseXor: lambda self, e: self.binary(e, "#"),
+            exp.BitwiseXorAgg: rename_func("BIT_XOR"),
             exp.ColumnDef: transforms.preprocess([_auto_increment_to_serial, _serial_to_generated]),
             exp.CurrentDate: no_paren_current_date_sql,
             exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",

@@ -599,6 +613,11 @@ class Postgres(Dialect):
                 self, e, func_name="STRING_AGG", within_group=False
             ),
             exp.IntDiv: rename_func("DIV"),
+            exp.JSONArrayAgg: lambda self, e: self.func(
+                "JSON_AGG",
+                self.sql(e, "this"),
+                suffix=f"{self.sql(e, 'order')})",
+            ),
             exp.JSONExtract: _json_extract_sql("JSON_EXTRACT_PATH", "->"),
             exp.JSONExtractScalar: _json_extract_sql("JSON_EXTRACT_PATH_TEXT", "->>"),
             exp.JSONBExtract: lambda self, e: self.binary(e, "#>"),
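JSON_AGG with an inline ORDER BY is now parsed into exp.JSONArrayAgg and re-emitted with the ordering preserved. A sketch of the round trip (formatting may vary):

    import sqlglot

    sql = "SELECT JSON_AGG(name ORDER BY created_at DESC) FROM users"
    print(sqlglot.transpile(sql, read="postgres", write="postgres")[0])
    # expected: SELECT JSON_AGG(name ORDER BY created_at DESC) FROM users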
sqlglot/dialects/redshift.py
CHANGED
@@ -162,6 +162,7 @@ class Redshift(Postgres):
         ALTER_SET_TYPE = "TYPE"
         SUPPORTS_DECODE_CASE = True
         SUPPORTS_BETWEEN_FLAGS = False
+        LIMIT_FETCH = "LIMIT"

         # Redshift doesn't have `WITH` as part of their with_properties so we remove it
         WITH_PROPERTIES_PREFIX = " "
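LIMIT_FETCH = "LIMIT" tells the generator to prefer LIMIT over FETCH when emitting row-limiting clauses for Redshift. A hedged sketch:

    import sqlglot

    sql = "SELECT * FROM sales ORDER BY amount DESC FETCH FIRST 5 ROWS ONLY"
    print(sqlglot.transpile(sql, read="postgres", write="redshift")[0])
    # expected to end with: LIMIT 5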
sqlglot/dialects/singlestore.py
CHANGED
@@ -16,7 +16,7 @@ from sqlglot.dialects.dialect import (
     date_add_interval_sql,
     timestampdiff_sql,
 )
-from sqlglot.dialects.mysql import MySQL, _remove_ts_or_ds_to_date, date_add_sql
+from sqlglot.dialects.mysql import MySQL, _remove_ts_or_ds_to_date, date_add_sql, _show_parser
 from sqlglot.expressions import DataType
 from sqlglot.generator import unsupported_args
 from sqlglot.helper import seq_get

@@ -55,6 +55,17 @@ class SingleStore(MySQL):
         "FF6": "%f",  # only 6 digits are supported in python formats
     }

+    VECTOR_TYPE_ALIASES = {
+        "I8": "TINYINT",
+        "I16": "SMALLINT",
+        "I32": "INT",
+        "I64": "BIGINT",
+        "F32": "FLOAT",
+        "F64": "DOUBLE",
+    }
+
+    INVERSE_VECTOR_TYPE_ALIASES = {v: k for k, v in VECTOR_TYPE_ALIASES.items()}
+
     class Tokenizer(MySQL.Tokenizer):
         BYTE_STRINGS = [("e'", "'"), ("E'", "'")]

@@ -249,6 +260,63 @@ class SingleStore(MySQL):
         COLUMN_OPERATORS.pop(TokenType.DHASH_ARROW)
         COLUMN_OPERATORS.pop(TokenType.PLACEHOLDER)

+        SHOW_PARSERS = {
+            **MySQL.Parser.SHOW_PARSERS,
+            "AGGREGATES": _show_parser("AGGREGATES"),
+            "CDC EXTRACTOR POOL": _show_parser("CDC EXTRACTOR POOL"),
+            "CREATE AGGREGATE": _show_parser("CREATE AGGREGATE", target=True),
+            "CREATE PIPELINE": _show_parser("CREATE PIPELINE", target=True),
+            "CREATE PROJECTION": _show_parser("CREATE PROJECTION", target=True),
+            "DATABASE STATUS": _show_parser("DATABASE STATUS"),
+            "DISTRIBUTED_PLANCACHE STATUS": _show_parser("DISTRIBUTED_PLANCACHE STATUS"),
+            "FULLTEXT SERVICE METRICS LOCAL": _show_parser("FULLTEXT SERVICE METRICS LOCAL"),
+            "FULLTEXT SERVICE METRICS FOR NODE": _show_parser(
+                "FULLTEXT SERVICE METRICS FOR NODE", target=True
+            ),
+            "FULLTEXT SERVICE STATUS": _show_parser("FULLTEXT SERVICE STATUS"),
+            "FUNCTIONS": _show_parser("FUNCTIONS"),
+            "GROUPS": _show_parser("GROUPS"),
+            "GROUPS FOR ROLE": _show_parser("GROUPS FOR ROLE", target=True),
+            "GROUPS FOR USER": _show_parser("GROUPS FOR USER", target=True),
+            "INDEXES": _show_parser("INDEX", target="FROM"),
+            "KEYS": _show_parser("INDEX", target="FROM"),
+            "LINKS": _show_parser("LINKS", target="ON"),
+            "LOAD ERRORS": _show_parser("LOAD ERRORS"),
+            "LOAD WARNINGS": _show_parser("LOAD WARNINGS"),
+            "PARTITIONS": _show_parser("PARTITIONS", target="ON"),
+            "PIPELINES": _show_parser("PIPELINES"),
+            "PLAN": _show_parser("PLAN", target=True),
+            "PLANCACHE": _show_parser("PLANCACHE"),
+            "PROCEDURES": _show_parser("PROCEDURES"),
+            "PROJECTIONS": _show_parser("PROJECTIONS", target="ON TABLE"),
+            "REPLICATION STATUS": _show_parser("REPLICATION STATUS"),
+            "REPRODUCTION": _show_parser("REPRODUCTION"),
+            "RESOURCE POOLS": _show_parser("RESOURCE POOLS"),
+            "ROLES": _show_parser("ROLES"),
+            "ROLES FOR USER": _show_parser("ROLES FOR USER", target=True),
+            "ROLES FOR GROUP": _show_parser("ROLES FOR GROUP", target=True),
+            "STATUS EXTENDED": _show_parser("STATUS EXTENDED"),
+            "USERS": _show_parser("USERS"),
+            "USERS FOR ROLE": _show_parser("USERS FOR ROLE", target=True),
+            "USERS FOR GROUP": _show_parser("USERS FOR GROUP", target=True),
+        }
+
+        ALTER_PARSERS = {
+            **MySQL.Parser.ALTER_PARSERS,
+            "CHANGE": lambda self: self.expression(
+                exp.RenameColumn, this=self._parse_column(), to=self._parse_column()
+            ),
+        }
+
+        def _parse_vector_expressions(
+            self, expressions: t.List[exp.Expression]
+        ) -> t.List[exp.Expression]:
+            type_name = expressions[1].name.upper()
+            if type_name in self.dialect.VECTOR_TYPE_ALIASES:
+                type_name = self.dialect.VECTOR_TYPE_ALIASES[type_name]
+
+            return [exp.DataType.build(type_name, dialect=self.dialect), expressions[0]]
+
     class Generator(MySQL.Generator):
         SUPPORTS_UESCAPE = False
         NULL_ORDERING_SUPPORTED = True

@@ -459,6 +527,7 @@ class SingleStore(MySQL):
                 e.expression,
                 self.func("SUBSTRING", e.this, e.args.get("start") + e.args.get("length")),
             ),
+            exp.National: lambda self, e: self.national_sql(e, prefix=""),
             exp.Reduce: unsupported_args("finish")(
                 lambda self, e: self.func(
                     "REDUCE", e.args.get("initial"), e.this, e.args.get("merge")

@@ -467,6 +536,29 @@ class SingleStore(MySQL):
             exp.MatchAgainst: unsupported_args("modifier")(
                 lambda self, e: super().matchagainst_sql(e)
             ),
+            exp.Show: unsupported_args(
+                "history",
+                "terse",
+                "offset",
+                "starts_with",
+                "limit",
+                "from",
+                "scope",
+                "scope_kind",
+                "mutex",
+                "query",
+                "channel",
+                "log",
+                "types",
+                "privileges",
+            )(lambda self, e: super().show_sql(e)),
+            exp.Describe: unsupported_args(
+                "style",
+                "kind",
+                "expressions",
+                "partition",
+                "format",
+            )(lambda self, e: super().describe_sql(e)),
         }
         TRANSFORMS.pop(exp.JSONExtractScalar)
         TRANSFORMS.pop(exp.CurrentDate)

@@ -1685,6 +1777,14 @@ class SingleStore(MySQL):
                     return f"DECIMAL({precision}, {scale[0]})"
                 else:
                     return f"DECIMAL({precision})"
+            if expression.is_type(exp.DataType.Type.VECTOR):
+                expressions = expression.expressions
+                if len(expressions) == 2:
+                    type_name = self.sql(expressions[0])
+                    if type_name in self.dialect.INVERSE_VECTOR_TYPE_ALIASES:
+                        type_name = self.dialect.INVERSE_VECTOR_TYPE_ALIASES[type_name]
+
+                    return f"VECTOR({self.sql(expressions[1])}, {type_name})"

             return super().datatype_sql(expression)

@@ -1741,3 +1841,31 @@ class SingleStore(MySQL):

             self.unsupported("STANDARD_HASH function is not supported in SingleStore")
             return self.func("SHA", expression.this)
+
+        @unsupported_args("is_database", "exists", "cluster", "identity", "option", "partition")
+        def truncatetable_sql(self, expression: exp.TruncateTable) -> str:
+            statements = []
+            for expression in expression.expressions:
+                statements.append(f"TRUNCATE {self.sql(expression)}")
+
+            return "; ".join(statements)
+
+        @unsupported_args("exists")
+        def renamecolumn_sql(self, expression: exp.RenameColumn) -> str:
+            old_column = self.sql(expression, "this")
+            new_column = self.sql(expression, "to")
+            return f"CHANGE {old_column} {new_column}"
+
+        @unsupported_args("drop", "comment", "allow_null", "visible", "using")
+        def altercolumn_sql(self, expression: exp.AlterColumn) -> str:
+            alter = super().altercolumn_sql(expression)
+
+            collate = self.sql(expression, "collate")
+            collate = f" COLLATE {collate}" if collate else ""
+            return f"{alter}{collate}"
+
+        def computedcolumnconstraint_sql(self, expression: exp.ComputedColumnConstraint) -> str:
+            this = self.sql(expression, "this")
+            not_null = " NOT NULL" if expression.args.get("not_null") else ""
+            type = self.sql(expression, "data_type") or "AUTO"
+            return f"AS {this} PERSISTED {type}{not_null}"