sqlglot 27.29.0__py3-none-any.whl → 28.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlglot/__main__.py +6 -4
- sqlglot/_version.py +2 -2
- sqlglot/dialects/bigquery.py +116 -295
- sqlglot/dialects/clickhouse.py +67 -2
- sqlglot/dialects/databricks.py +38 -1
- sqlglot/dialects/dialect.py +327 -286
- sqlglot/dialects/dremio.py +4 -1
- sqlglot/dialects/duckdb.py +718 -22
- sqlglot/dialects/exasol.py +243 -10
- sqlglot/dialects/hive.py +8 -8
- sqlglot/dialects/mysql.py +11 -2
- sqlglot/dialects/oracle.py +29 -0
- sqlglot/dialects/postgres.py +46 -24
- sqlglot/dialects/presto.py +47 -16
- sqlglot/dialects/redshift.py +16 -0
- sqlglot/dialects/risingwave.py +3 -0
- sqlglot/dialects/singlestore.py +12 -3
- sqlglot/dialects/snowflake.py +199 -271
- sqlglot/dialects/spark.py +2 -2
- sqlglot/dialects/spark2.py +11 -48
- sqlglot/dialects/sqlite.py +9 -0
- sqlglot/dialects/teradata.py +5 -8
- sqlglot/dialects/trino.py +6 -0
- sqlglot/dialects/tsql.py +61 -25
- sqlglot/diff.py +4 -2
- sqlglot/errors.py +69 -0
- sqlglot/expressions.py +484 -84
- sqlglot/generator.py +143 -41
- sqlglot/helper.py +2 -2
- sqlglot/optimizer/annotate_types.py +247 -140
- sqlglot/optimizer/canonicalize.py +6 -1
- sqlglot/optimizer/eliminate_joins.py +1 -1
- sqlglot/optimizer/eliminate_subqueries.py +2 -2
- sqlglot/optimizer/merge_subqueries.py +5 -5
- sqlglot/optimizer/normalize.py +20 -13
- sqlglot/optimizer/normalize_identifiers.py +17 -3
- sqlglot/optimizer/optimizer.py +4 -0
- sqlglot/optimizer/pushdown_predicates.py +1 -1
- sqlglot/optimizer/qualify.py +14 -6
- sqlglot/optimizer/qualify_columns.py +113 -352
- sqlglot/optimizer/qualify_tables.py +112 -70
- sqlglot/optimizer/resolver.py +374 -0
- sqlglot/optimizer/scope.py +27 -16
- sqlglot/optimizer/simplify.py +1074 -964
- sqlglot/optimizer/unnest_subqueries.py +12 -2
- sqlglot/parser.py +276 -160
- sqlglot/planner.py +2 -2
- sqlglot/schema.py +15 -4
- sqlglot/tokens.py +42 -7
- sqlglot/transforms.py +77 -22
- sqlglot/typing/__init__.py +316 -0
- sqlglot/typing/bigquery.py +376 -0
- sqlglot/typing/hive.py +12 -0
- sqlglot/typing/presto.py +24 -0
- sqlglot/typing/snowflake.py +505 -0
- sqlglot/typing/spark2.py +58 -0
- sqlglot/typing/tsql.py +9 -0
- {sqlglot-27.29.0.dist-info → sqlglot-28.4.1.dist-info}/METADATA +2 -2
- sqlglot-28.4.1.dist-info/RECORD +92 -0
- sqlglot-27.29.0.dist-info/RECORD +0 -84
- {sqlglot-27.29.0.dist-info → sqlglot-28.4.1.dist-info}/WHEEL +0 -0
- {sqlglot-27.29.0.dist-info → sqlglot-28.4.1.dist-info}/licenses/LICENSE +0 -0
- {sqlglot-27.29.0.dist-info → sqlglot-28.4.1.dist-info}/top_level.txt +0 -0
sqlglot/parser.py
CHANGED
@@ -7,7 +7,14 @@ import typing as t
 from collections import defaultdict
 
 from sqlglot import exp
-from sqlglot.errors import
+from sqlglot.errors import (
+    ErrorLevel,
+    ParseError,
+    TokenError,
+    concat_messages,
+    highlight_sql,
+    merge_errors,
+)
 from sqlglot.helper import apply_index_offset, ensure_list, seq_get
 from sqlglot.time import format_time
 from sqlglot.tokens import Token, Tokenizer, TokenType
@@ -97,6 +104,8 @@ def build_extract_json_with_path(expr_type: t.Type[E]) -> t.Callable[[t.List, Di
         )
         if len(args) > 2 and expr_type is exp.JSONExtract:
             expression.set("expressions", args[2:])
+        if expr_type is exp.JSONExtractScalar:
+            expression.set("scalar_only", dialect.JSON_EXTRACT_SCALAR_SCALAR_ONLY)
 
         return expression
 
@@ -226,7 +235,12 @@ class Parser(metaclass=_Parser):
            end=seq_get(args, 1),
            step=seq_get(args, 2) or exp.Interval(this=exp.Literal.string(1), unit=exp.var("DAY")),
        ),
+        "GENERATE_UUID": lambda args, dialect: exp.Uuid(
+            is_string=dialect.UUID_IS_STRING_TYPE or None
+        ),
        "GLOB": lambda args: exp.Glob(this=seq_get(args, 1), expression=seq_get(args, 0)),
+        "GREATEST": lambda args: exp.Greatest(this=seq_get(args, 0), expressions=args[1:]),
+        "LEAST": lambda args: exp.Least(this=seq_get(args, 0), expressions=args[1:]),
        "HEX": build_hex,
        "JSON_EXTRACT": build_extract_json_with_path(exp.JSONExtract),
        "JSON_EXTRACT_SCALAR": build_extract_json_with_path(exp.JSONExtractScalar),
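
The new GREATEST/LEAST builders split the first argument from the rest, matching how exp.Greatest and exp.Least store their operands. A minimal, illustrative check with the public API (not part of the diff):

```python
# Illustrative check of the new GREATEST/LEAST builders: the first argument lands
# in `this`, the remaining ones in `expressions`.
import sqlglot
from sqlglot import exp

greatest = sqlglot.parse_one("SELECT GREATEST(a, b, c)").find(exp.Greatest)
print(greatest.this)         # a
print(greatest.expressions)  # [b, c]
```
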
@@ -265,6 +279,7 @@ class Parser(metaclass=_Parser):
        ),
        "UNNEST": lambda args: exp.Unnest(expressions=ensure_list(seq_get(args, 0))),
        "UPPER": build_upper,
+        "UUID": lambda args, dialect: exp.Uuid(is_string=dialect.UUID_IS_STRING_TYPE or None),
        "VAR_MAP": build_var_map,
    }
 
@@ -274,9 +289,13 @@ class Parser(metaclass=_Parser):
        TokenType.CURRENT_TIME: exp.CurrentTime,
        TokenType.CURRENT_TIMESTAMP: exp.CurrentTimestamp,
        TokenType.CURRENT_USER: exp.CurrentUser,
+        TokenType.LOCALTIME: exp.Localtime,
+        TokenType.LOCALTIMESTAMP: exp.Localtimestamp,
+        TokenType.CURRENT_ROLE: exp.CurrentRole,
    }
 
    STRUCT_TYPE_TOKENS = {
+        TokenType.FILE,
        TokenType.NESTED,
        TokenType.OBJECT,
        TokenType.STRUCT,
@@ -316,6 +335,7 @@ class Parser(metaclass=_Parser):
        TokenType.UINT,
        TokenType.BIGINT,
        TokenType.UBIGINT,
+        TokenType.BIGNUM,
        TokenType.INT128,
        TokenType.UINT128,
        TokenType.INT256,
@@ -346,6 +366,7 @@ class Parser(metaclass=_Parser):
        TokenType.TINYTEXT,
        TokenType.TIME,
        TokenType.TIMETZ,
+        TokenType.TIME_NS,
        TokenType.TIMESTAMP,
        TokenType.TIMESTAMP_S,
        TokenType.TIMESTAMP_MS,
@@ -376,6 +397,7 @@ class Parser(metaclass=_Parser):
        TokenType.DECIMAL64,
        TokenType.DECIMAL128,
        TokenType.DECIMAL256,
+        TokenType.DECFLOAT,
        TokenType.UDECIMAL,
        TokenType.BIGDECIMAL,
        TokenType.UUID,
@@ -533,12 +555,14 @@ class Parser(metaclass=_Parser):
        TokenType.LIMIT,
        TokenType.LOAD,
        TokenType.LOCK,
+        TokenType.MATCH,
        TokenType.MERGE,
        TokenType.NATURAL,
        TokenType.NEXT,
        TokenType.OFFSET,
        TokenType.OPERATOR,
        TokenType.ORDINALITY,
+        TokenType.OVER,
        TokenType.OVERLAPS,
        TokenType.OVERWRITE,
        TokenType.PARTITION,
@@ -615,6 +639,7 @@ class Parser(metaclass=_Parser):
        TokenType.CURRENT_TIMESTAMP,
        TokenType.CURRENT_TIME,
        TokenType.CURRENT_USER,
+        TokenType.CURRENT_CATALOG,
        TokenType.FILTER,
        TokenType.FIRST,
        TokenType.FORMAT,
@@ -626,6 +651,8 @@ class Parser(metaclass=_Parser):
        TokenType.ILIKE,
        TokenType.INSERT,
        TokenType.LIKE,
+        TokenType.LOCALTIME,
+        TokenType.LOCALTIMESTAMP,
        TokenType.MERGE,
        TokenType.NEXT,
        TokenType.OFFSET,
@@ -634,6 +661,7 @@ class Parser(metaclass=_Parser):
        TokenType.REPLACE,
        TokenType.RLIKE,
        TokenType.ROW,
+        TokenType.SESSION_USER,
        TokenType.UNNEST,
        TokenType.VAR,
        TokenType.LEFT,
@@ -747,7 +775,7 @@ class Parser(metaclass=_Parser):
        TokenType.ARROW: lambda self, expressions: self.expression(
            exp.Lambda,
            this=self._replace_lambda(
-                self.
+                self._parse_disjunction(),
                expressions,
            ),
            expressions=expressions,
@@ -755,7 +783,7 @@ class Parser(metaclass=_Parser):
        TokenType.FARROW: lambda self, expressions: self.expression(
            exp.Kwarg,
            this=exp.var(expressions[0].name),
-            expression=self.
+            expression=self._parse_disjunction(),
        ),
    }
 
@@ -780,6 +808,7 @@ class Parser(metaclass=_Parser):
            this=this,
            expression=self.dialect.to_json_path(path),
            only_json_types=self.JSON_ARROWS_REQUIRE_JSON_TYPE,
+            scalar_only=self.dialect.JSON_EXTRACT_SCALAR_SCALAR_ONLY,
        ),
        TokenType.HASH_ARROW: lambda self, this, path: self.expression(
            exp.JSONBExtract,
@@ -807,7 +836,7 @@ class Parser(metaclass=_Parser):
        exp.Cluster: lambda self: self._parse_sort(exp.Cluster, TokenType.CLUSTER_BY),
        exp.Column: lambda self: self._parse_column(),
        exp.ColumnDef: lambda self: self._parse_column_def(self._parse_column()),
-        exp.Condition: lambda self: self.
+        exp.Condition: lambda self: self._parse_disjunction(),
        exp.DataType: lambda self: self._parse_types(allow_identifiers=False, schema=True),
        exp.Expression: lambda self: self._parse_expression(),
        exp.From: lambda self: self._parse_from(joins=True),
@@ -882,33 +911,33 @@ class Parser(metaclass=_Parser):
    }
 
    STRING_PARSERS = {
-        TokenType.HEREDOC_STRING: lambda self, token: self.expression(
-
-        ),
-        TokenType.NATIONAL_STRING: lambda self, token: self.expression(
-            exp.National, this=token.text
-        ),
-        TokenType.RAW_STRING: lambda self, token: self.expression(exp.RawString, this=token.text),
+        TokenType.HEREDOC_STRING: lambda self, token: self.expression(exp.RawString, token=token),
+        TokenType.NATIONAL_STRING: lambda self, token: self.expression(exp.National, token=token),
+        TokenType.RAW_STRING: lambda self, token: self.expression(exp.RawString, token=token),
        TokenType.STRING: lambda self, token: self.expression(
-            exp.Literal,
+            exp.Literal, token=token, is_string=True
        ),
        TokenType.UNICODE_STRING: lambda self, token: self.expression(
            exp.UnicodeString,
-
+            token=token,
            escape=self._match_text_seq("UESCAPE") and self._parse_string(),
        ),
    }
 
    NUMERIC_PARSERS = {
-        TokenType.BIT_STRING: lambda self, token: self.expression(exp.BitString,
-        TokenType.BYTE_STRING: lambda self, token: self.expression(
+        TokenType.BIT_STRING: lambda self, token: self.expression(exp.BitString, token=token),
+        TokenType.BYTE_STRING: lambda self, token: self.expression(
+            exp.ByteString,
+            token=token,
+            is_bytes=self.dialect.BYTE_STRING_IS_BYTES_TYPE or None,
+        ),
        TokenType.HEX_STRING: lambda self, token: self.expression(
            exp.HexString,
-
+            token=token,
            is_integer=self.dialect.HEX_STRING_IS_INTEGER_TYPE or None,
        ),
        TokenType.NUMBER: lambda self, token: self.expression(
-            exp.Literal,
+            exp.Literal, token=token, is_string=False
        ),
    }
 
@@ -1244,6 +1273,7 @@ class Parser(metaclass=_Parser):
        "EXTRACT": lambda self: self._parse_extract(),
        "FLOOR": lambda self: self._parse_ceil_floor(exp.Floor),
        "GAP_FILL": lambda self: self._parse_gap_fill(),
+        "INITCAP": lambda self: self._parse_initcap(),
        "JSON_OBJECT": lambda self: self._parse_json_object(),
        "JSON_OBJECTAGG": lambda self: self._parse_json_object(agg=True),
        "JSON_TABLE": lambda self: self._parse_json_table(),
@@ -1443,6 +1473,8 @@ class Parser(metaclass=_Parser):
    # The style options for the DESCRIBE statement
    DESCRIBE_STYLES = {"ANALYZE", "EXTENDED", "FORMATTED", "HISTORY"}
 
+    SET_ASSIGNMENT_DELIMITERS = {"=", ":=", "TO"}
+
    # The style options for the ANALYZE statement
    ANALYZE_STYLES = {
        "BUFFER_USAGE_LIMIT",
@@ -1717,15 +1749,15 @@ class Parser(metaclass=_Parser):
            error level setting.
        """
        token = token or self._curr or self._prev or Token.string("")
-
-
-
-
-
+        formatted_sql, start_context, highlight, end_context = highlight_sql(
+            sql=self.sql,
+            positions=[(token.start, token.end)],
+            context_length=self.error_message_context,
+        )
+        formatted_message = f"{message}. Line {token.line}, Col: {token.col}.\n {formatted_sql}"
 
        error = ParseError.new(
-
-            f" {start_context}\033[4m{highlight}\033[0m{end_context}",
+            formatted_message,
            description=message,
            line=token.line,
            col=token.col,
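
raise_error now delegates context rendering to the highlight_sql helper from the reworked sqlglot/errors.py (see the +69 entry in the file list) instead of hand-building the underlined snippet. The structured error payload looks unchanged; a hedged sketch of reading it:

```python
# Hedged sketch: the per-error fields (description, line, col) still come through
# ParseError.errors; only the rendered message now goes through highlight_sql.
import sqlglot
from sqlglot.errors import ParseError

try:
    sqlglot.parse_one("SELECT (1")
except ParseError as e:
    for err in e.errors:
        print(err["description"], err["line"], err["col"])
```
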
@@ -1740,7 +1772,11 @@ class Parser(metaclass=_Parser):
        self.errors.append(error)
 
    def expression(
-        self,
+        self,
+        exp_class: t.Type[E],
+        token: t.Optional[Token] = None,
+        comments: t.Optional[t.List[str]] = None,
+        **kwargs,
    ) -> E:
        """
        Creates a new, validated Expression.
@@ -1753,7 +1789,11 @@ class Parser(metaclass=_Parser):
        Returns:
            The target expression.
        """
-
+        if token:
+            instance = exp_class(this=token.text, **kwargs)
+            instance.update_positions(token)
+        else:
+            instance = exp_class(**kwargs)
        instance.add_comments(comments) if comments else self._add_comments(instance)
        return self.validate_expression(instance)
 
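
Parser.expression now accepts an optional token; when one is passed, the node is built from token.text and update_positions(token) is applied, which is why the STRING_PARSERS and NUMERIC_PARSERS tables above switch from this=token.text to token=token. A hedged sketch, assuming update_positions records positions in Expression.meta as it does for identifiers:

```python
# Hedged sketch: literals built via token=... should now carry source positions.
# The exact meta keys (line/col/start/end) are an assumption based on update_positions.
import sqlglot
from sqlglot import exp

literal = sqlglot.parse_one("SELECT 'hello'").find(exp.Literal)
print(literal.meta)
```
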
@@ -2247,7 +2287,10 @@ class Parser(metaclass=_Parser):
            return self._parse_sortkey(compound=True)
 
        if self._match_text_seq("SQL", "SECURITY"):
-            return self.expression(
+            return self.expression(
+                exp.SqlSecurityProperty,
+                this=self._match_texts(("DEFINER", "INVOKER")) and self._prev.text.upper(),
+            )
 
        index = self._index
 
@@ -2366,10 +2409,8 @@ class Parser(metaclass=_Parser):
            self._match(TokenType.EQ)
            prop = self.expression(
                exp.WithSystemVersioningProperty,
-
-
-                "with": with_,
-            },
+                on=True,
+                with_=with_,
            )
 
        if self._match_text_seq("OFF"):
@@ -2684,7 +2725,7 @@ class Parser(metaclass=_Parser):
 
    def _parse_partition_by(self) -> t.List[exp.Expression]:
        if self._match(TokenType.PARTITION_BY):
-            return self._parse_csv(self.
+            return self._parse_csv(self._parse_disjunction)
        return []
 
    def _parse_partition_bound_spec(self) -> exp.PartitionBoundSpec:
@@ -2942,15 +2983,9 @@ class Parser(metaclass=_Parser):
        self._match(TokenType.TABLE)
        is_function = self._match(TokenType.FUNCTION)
 
-        this = (
-            self._parse_table(schema=True, parse_partition=True)
-            if not is_function
-            else self._parse_function()
-        )
-        if isinstance(this, exp.Table) and self._match(TokenType.ALIAS, advance=False):
-            this.set("alias", self._parse_table_alias())
+        this = self._parse_function() if is_function else self._parse_insert_table()
 
-        returning = self._parse_returning()
+        returning = self._parse_returning()  # TSQL allows RETURNING before source
 
        return self.expression(
            exp.Insert,
@@ -2961,7 +2996,8 @@ class Parser(metaclass=_Parser):
            stored=self._match_text_seq("STORED") and self._parse_stored(),
            by_name=self._match_text_seq("BY", "NAME"),
            exists=self._parse_exists(),
-            where=self._match_pair(TokenType.REPLACE, TokenType.WHERE)
+            where=self._match_pair(TokenType.REPLACE, TokenType.WHERE)
+            and self._parse_disjunction(),
            partition=self._match(TokenType.PARTITION_BY) and self._parse_partitioned_by(),
            settings=self._match_text_seq("SETTINGS") and self._parse_settings_property(),
            default=self._match_text_seq("DEFAULT", "VALUES"),
@@ -2974,6 +3010,12 @@ class Parser(metaclass=_Parser):
            source=self._match(TokenType.TABLE) and self._parse_table(),
        )
 
+    def _parse_insert_table(self) -> t.Optional[exp.Expression]:
+        this = self._parse_table(schema=True, parse_partition=True)
+        if isinstance(this, exp.Table) and self._match(TokenType.ALIAS, advance=False):
+            this.set("alias", self._parse_table_alias())
+        return this
+
    def _parse_kill(self) -> exp.Kill:
        kind = exp.var(self._prev.text) if self._match_texts(("CONNECTION", "QUERY")) else None
 
@@ -3040,10 +3082,8 @@ class Parser(metaclass=_Parser):
            return None
        return self.expression(
            exp.SerdeProperties,
-
-
-            "with": with_,
-            },
+            expressions=self._parse_wrapped_properties(),
+            with_=with_,
        )
 
    def _parse_row_format(
@@ -3118,6 +3158,7 @@ class Parser(metaclass=_Parser):
            cluster=self._match(TokenType.ON) and self._parse_on_property(),
            where=self._parse_where(),
            returning=returning or self._parse_returning(),
+            order=self._parse_order(),
            limit=self._parse_limit(),
        )
 
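
DELETE statements now also capture an ORDER BY clause between RETURNING and LIMIT, which dialects such as MySQL accept on single-table deletes. An illustrative round trip (not taken from the diff):

```python
# Illustrative: MySQL-style DELETE ... ORDER BY ... LIMIT is now kept in the AST.
import sqlglot

sql = "DELETE FROM logs WHERE level = 'DEBUG' ORDER BY created_at LIMIT 100"
delete = sqlglot.parse_one(sql, dialect="mysql")
print(delete.args.get("order"))
print(delete.sql(dialect="mysql"))
```
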
@@ -3131,7 +3172,7 @@ class Parser(metaclass=_Parser):
        elif self._match(TokenType.RETURNING, advance=False):
            kwargs["returning"] = self._parse_returning()
        elif self._match(TokenType.FROM, advance=False):
-            kwargs["
+            kwargs["from_"] = self._parse_from(joins=True)
        elif self._match(TokenType.WHERE, advance=False):
            kwargs["where"] = self._parse_where()
        elif self._match(TokenType.ORDER_BY, advance=False):
@@ -3188,7 +3229,7 @@ class Parser(metaclass=_Parser):
        return self.expression(
            exp.Partition,
            subpartition=self._prev.text.upper() == "SUBPARTITION",
-            expressions=self._parse_wrapped_csv(self.
+            expressions=self._parse_wrapped_csv(self._parse_disjunction),
        )
 
    def _parse_value(self, values: bool = True) -> t.Optional[exp.Tuple]:
@@ -3221,8 +3262,8 @@ class Parser(metaclass=_Parser):
            # Support parentheses for duckdb FROM-first syntax
            select = self._parse_select(from_=from_)
            if select:
-                if not select.args.get("
-                    select.set("
+                if not select.args.get("from_"):
+                    select.set("from_", from_)
                this = select
            else:
                this = exp.select("*").from_(t.cast(exp.From, from_))
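
This hunk is one of many in the release where the internal arg keys "from" and "with" become "from_" and "with_". Code that reads expression.args directly needs the new keys; generated SQL is unaffected. A small check against the public API:

```python
# The arg-key rename in practice: args["from"] / args["with"] in 27.x become
# args["from_"] / args["with_"] in 28.x.
import sqlglot

query = sqlglot.parse_one("WITH t AS (SELECT 1 AS x) SELECT x FROM t")
print(query.args.get("with_"))  # exp.With node (was "with" in 27.x)
print(query.args.get("from_"))  # exp.From node (was "from" in 27.x)
```
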
@@ -3285,8 +3326,11 @@ class Parser(metaclass=_Parser):
                self.raise_error("Failed to parse any statement following CTE")
            return cte
 
-
-        this.
+        while isinstance(this, exp.Subquery) and this.is_wrapper:
+            this = this.this
+
+        if "with_" in this.arg_types:
+            this.set("with_", cte)
        else:
            self.raise_error(f"{this.key} does not support CTE")
            this = cte
@@ -3295,7 +3339,7 @@ class Parser(metaclass=_Parser):
 
        # duckdb supports leading with FROM x
        from_ = (
-            self._parse_from(consume_pipe=True)
+            self._parse_from(joins=True, consume_pipe=True)
            if self._match(TokenType.FROM, advance=False)
            else None
        )
@@ -3352,7 +3396,7 @@ class Parser(metaclass=_Parser):
            from_ = self._parse_from()
 
            if from_:
-                this.set("
+                this.set("from_", from_)
 
            this = self._parse_query_modifiers(this)
        elif (table or nested) and self._match(TokenType.L_PAREN):
@@ -3372,12 +3416,6 @@ class Parser(metaclass=_Parser):
            return self.expression(exp.Summarize, this=this, table=table)
        elif self._match(TokenType.DESCRIBE):
            this = self._parse_describe()
-        elif self._match_text_seq("STREAM"):
-            this = self._parse_function()
-            if this:
-                this = self.expression(exp.Stream, this=this)
-            else:
-                self._retreat(self._index - 1)
        else:
            this = None
 
@@ -3524,7 +3562,7 @@ class Parser(metaclass=_Parser):
    def _implicit_unnests_to_explicit(self, this: E) -> E:
        from sqlglot.optimizer.normalize_identifiers import normalize_identifiers as _norm
 
-        refs = {_norm(this.args["
+        refs = {_norm(this.args["from_"].this.copy(), dialect=self.dialect).alias_or_name}
        for i, join in enumerate(this.args.get("joins") or []):
            table = join.this
            normalized_table = table.copy()
@@ -3589,7 +3627,7 @@ class Parser(metaclass=_Parser):
                continue
            break
 
-        if self.SUPPORTS_IMPLICIT_UNNEST and this and this.args.get("
+        if self.SUPPORTS_IMPLICIT_UNNEST and this and this.args.get("from_"):
            this = self._implicit_unnests_to_explicit(this)
 
        return this
@@ -3809,6 +3847,16 @@ class Parser(metaclass=_Parser):
            ordinality=ordinality,
        )
 
+    def _parse_stream(self) -> t.Optional[exp.Stream]:
+        index = self._index
+        if self._match_text_seq("STREAM"):
+            this = self._try_parse(self._parse_table)
+            if this:
+                return self.expression(exp.Stream, this=this)
+
+        self._retreat(index)
+        return None
+
    def _parse_join_parts(
        self,
    ) -> t.Tuple[t.Optional[Token], t.Optional[Token], t.Optional[Token]]:
@@ -3864,11 +3912,11 @@ class Parser(metaclass=_Parser):
        )
 
        if method:
-            kwargs["method"] = method.text
+            kwargs["method"] = method.text.upper()
        if side:
-            kwargs["side"] = side.text
+            kwargs["side"] = side.text.upper()
        if kind:
-            kwargs["kind"] = kind.text
+            kwargs["kind"] = kind.text.upper()
        if hint:
            kwargs["hint"] = hint
 
@@ -3876,7 +3924,7 @@ class Parser(metaclass=_Parser):
            kwargs["match_condition"] = self._parse_wrapped(self._parse_comparison)
 
        if self._match(TokenType.ON):
-            kwargs["on"] = self.
+            kwargs["on"] = self._parse_disjunction()
        elif self._match(TokenType.USING):
            kwargs["using"] = self._parse_using_identifiers()
        elif (
@@ -3889,7 +3937,7 @@ class Parser(metaclass=_Parser):
            joins: t.Optional[list] = list(self._parse_joins())
 
            if joins and self._match(TokenType.ON):
-                kwargs["on"] = self.
+                kwargs["on"] = self._parse_disjunction()
            elif joins and self._match(TokenType.USING):
                kwargs["using"] = self._parse_using_identifiers()
            else:
@@ -3915,7 +3963,7 @@ class Parser(metaclass=_Parser):
        return self.expression(exp.Join, comments=comments, **kwargs)
 
    def _parse_opclass(self) -> t.Optional[exp.Expression]:
-        this = self.
+        this = self._parse_disjunction()
 
        if self._match_texts(self.OPCLASS_FOLLOW_KEYWORDS, advance=False):
            return this
@@ -4101,6 +4149,10 @@ class Parser(metaclass=_Parser):
        parse_partition: bool = False,
        consume_pipe: bool = False,
    ) -> t.Optional[exp.Expression]:
+        stream = self._parse_stream()
+        if stream:
+            return stream
+
        lateral = self._parse_lateral()
        if lateral:
            return lateral
@@ -4163,6 +4215,11 @@ class Parser(metaclass=_Parser):
        if alias:
            this.set("alias", alias)
 
+        if self._match(TokenType.INDEXED_BY):
+            this.set("indexed", self._parse_table_parts())
+        elif self._match_text_seq("NOT", "INDEXED"):
+            this.set("indexed", False)
+
        if isinstance(this, exp.Table) and self._match_text_seq("AT"):
            return self.expression(
                exp.AtIndex, this=this.to_column(copy=False), expression=self._parse_id_var()
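
Table references can now carry SQLite's INDEXED BY / NOT INDEXED hints in a new indexed arg. A hedged example, assuming the SQLite dialect (also touched in this release) round-trips the hint:

```python
# Hedged example: SQLite index hints on a table reference are now parsed into the
# table's `indexed` arg; whether they round-trip depends on the SQLite generator.
import sqlglot

sql = "SELECT * FROM orders INDEXED BY idx_orders_date WHERE order_date > '2024-01-01'"
print(sqlglot.transpile(sql, read="sqlite", write="sqlite")[0])
```
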
@@ -4402,7 +4459,7 @@ class Parser(metaclass=_Parser):
        expressions = self._match(TokenType.ON) and self._parse_csv(_parse_on)
        into = self._parse_unpivot_columns()
        using = self._match(TokenType.USING) and self._parse_csv(
-            lambda: self._parse_alias(self.
+            lambda: self._parse_alias(self._parse_column())
        )
        group = self._parse_group()
 
@@ -4431,16 +4488,18 @@ class Parser(metaclass=_Parser):
 
        value = self._parse_column()
 
-        if not self.
-            self.raise_error("Expecting IN
+        if not self._match(TokenType.IN):
+            self.raise_error("Expecting IN")
 
-        if self._match(TokenType.
-
-
-
+        if self._match(TokenType.L_PAREN):
+            if self._match(TokenType.ANY):
+                exprs: t.List[exp.Expression] = ensure_list(exp.PivotAny(this=self._parse_order()))
+            else:
+                exprs = self._parse_csv(_parse_aliased_expression)
+            self._match_r_paren()
+            return self.expression(exp.In, this=value, expressions=exprs)
 
-        self.
-        return self.expression(exp.In, this=value, expressions=exprs)
+        return self.expression(exp.In, this=value, field=self._parse_id_var())
 
    def _parse_pivot_aggregation(self) -> t.Optional[exp.Expression]:
        func = self._parse_function()
@@ -4560,7 +4619,7 @@ class Parser(metaclass=_Parser):
            return None
 
        return self.expression(
-            exp.PreWhere, comments=self._prev_comments, this=self.
+            exp.PreWhere, comments=self._prev_comments, this=self._parse_disjunction()
        )
 
    def _parse_where(self, skip_where_token: bool = False) -> t.Optional[exp.Where]:
@@ -4568,7 +4627,7 @@ class Parser(metaclass=_Parser):
            return None
 
        return self.expression(
-            exp.Where, comments=self._prev_comments, this=self.
+            exp.Where, comments=self._prev_comments, this=self._parse_disjunction()
        )
 
    def _parse_group(self, skip_group_by_token: bool = False) -> t.Optional[exp.Group]:
@@ -4593,7 +4652,7 @@ class Parser(metaclass=_Parser):
                self._parse_csv(
                    lambda: None
                    if self._match_set((TokenType.CUBE, TokenType.ROLLUP), advance=False)
-                    else self.
+                    else self._parse_disjunction()
                )
            )
 
@@ -4643,19 +4702,19 @@ class Parser(metaclass=_Parser):
        if not skip_having_token and not self._match(TokenType.HAVING):
            return None
        return self.expression(
-            exp.Having, comments=self._prev_comments, this=self.
+            exp.Having, comments=self._prev_comments, this=self._parse_disjunction()
        )
 
    def _parse_qualify(self) -> t.Optional[exp.Qualify]:
        if not self._match(TokenType.QUALIFY):
            return None
-        return self.expression(exp.Qualify, this=self.
+        return self.expression(exp.Qualify, this=self._parse_disjunction())
 
    def _parse_connect_with_prior(self) -> t.Optional[exp.Expression]:
        self.NO_PAREN_FUNCTION_PARSERS["PRIOR"] = lambda self: self.expression(
            exp.Prior, this=self._parse_bitwise()
        )
-        connect = self.
+        connect = self._parse_disjunction()
        self.NO_PAREN_FUNCTION_PARSERS.pop("PRIOR")
        return connect
 
@@ -4663,7 +4722,7 @@ class Parser(metaclass=_Parser):
        if skip_start_token:
            start = None
        elif self._match(TokenType.START_WITH):
-            start = self.
+            start = self._parse_disjunction()
        else:
            return None
 
@@ -4672,14 +4731,14 @@ class Parser(metaclass=_Parser):
        connect = self._parse_connect_with_prior()
 
        if not start and self._match(TokenType.START_WITH):
-            start = self.
+            start = self._parse_disjunction()
 
        return self.expression(exp.Connect, start=start, connect=connect, nocycle=nocycle)
 
    def _parse_name_as_expression(self) -> t.Optional[exp.Expression]:
        this = self._parse_id_var(any_token=True)
        if self._match(TokenType.ALIAS):
-            this = self.expression(exp.Alias, alias=this, this=self.
+            this = self.expression(exp.Alias, alias=this, this=self._parse_disjunction())
        return this
 
    def _parse_interpolate(self) -> t.Optional[t.List[exp.Expression]]:
@@ -4713,7 +4772,7 @@ class Parser(metaclass=_Parser):
    def _parse_ordered(
        self, parse_method: t.Optional[t.Callable] = None
    ) -> t.Optional[exp.Ordered]:
-        this = parse_method() if parse_method else self.
+        this = parse_method() if parse_method else self._parse_disjunction()
        if not this:
            return None
 
@@ -4742,12 +4801,10 @@ class Parser(metaclass=_Parser):
        if self._match_text_seq("WITH", "FILL"):
            with_fill = self.expression(
                exp.WithFill,
-
-
-
-
-                "interpolate": self._parse_interpolate(),
-            },
+                from_=self._match(TokenType.FROM) and self._parse_bitwise(),
+                to=self._match_text_seq("TO") and self._parse_bitwise(),
+                step=self._match_text_seq("STEP") and self._parse_bitwise(),
+                interpolate=self._parse_interpolate(),
            )
        else:
            with_fill = None
@@ -5016,7 +5073,7 @@ class Parser(metaclass=_Parser):
                return this
 
            this = expression
-        elif self._match(TokenType.ISNULL):
+        elif self._match(TokenType.ISNULL) or (negate and self._match(TokenType.NULL)):
            this = self.expression(exp.Is, this=this, expression=exp.Null())
 
        # Postgres supports ISNULL and NOTNULL for conditions.
@@ -5060,10 +5117,13 @@ class Parser(metaclass=_Parser):
            unique = self._match(TokenType.UNIQUE)
            self._match_text_seq("KEYS")
            expression: t.Optional[exp.Expression] = self.expression(
-                exp.JSON,
+                exp.JSON,
+                this=kind,
+                with_=_with,
+                unique=unique,
            )
        else:
-            expression = self.
+            expression = self._parse_null() or self._parse_bitwise()
            if not expression:
                self._retreat(index)
                return None
@@ -5270,7 +5330,7 @@ class Parser(metaclass=_Parser):
 
    def _parse_factor(self) -> t.Optional[exp.Expression]:
        parse_method = self._parse_exponent if self.EXPONENT else self._parse_unary
-        this = parse_method()
+        this = self._parse_at_time_zone(parse_method())
 
        while self._match_set(self.FACTOR):
            klass = self.FACTOR[self._prev.token_type]
@@ -5295,14 +5355,14 @@ class Parser(metaclass=_Parser):
    def _parse_unary(self) -> t.Optional[exp.Expression]:
        if self._match_set(self.UNARY_PARSERS):
            return self.UNARY_PARSERS[self._prev.token_type](self)
-        return self.
+        return self._parse_type()
 
    def _parse_type(
        self, parse_interval: bool = True, fallback_to_identifier: bool = False
    ) -> t.Optional[exp.Expression]:
        interval = parse_interval and self._parse_interval()
        if interval:
-            return interval
+            return self._parse_column_ops(interval)
 
        index = self._index
        data_type = self._parse_types(check_func=True, allow_identifiers=False)
@@ -5508,7 +5568,7 @@ class Parser(metaclass=_Parser):
            self.raise_error("Expecting >")
 
        if self._match_set((TokenType.L_BRACKET, TokenType.L_PAREN)):
-            values = self._parse_csv(self.
+            values = self._parse_csv(self._parse_disjunction)
            if not values and is_struct:
                values = None
                self._retreat(self._index - 1)
@@ -5600,12 +5660,14 @@ class Parser(metaclass=_Parser):
                break
 
            matched_array = False
-            values = self._parse_csv(self.
+            values = self._parse_csv(self._parse_disjunction) or None
            if (
                values
                and not schema
                and (
-                    not self.dialect.SUPPORTS_FIXED_SIZE_ARRAYS
+                    not self.dialect.SUPPORTS_FIXED_SIZE_ARRAYS
+                    or datatype_token == TokenType.ARRAY
+                    or not self._match(TokenType.R_BRACKET, advance=False)
                )
            ):
                # Retreating here means that we should not parse the following values as part of the data type, e.g. in DuckDB
@@ -5663,7 +5725,9 @@ class Parser(metaclass=_Parser):
    def _parse_at_time_zone(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
        if not self._match_text_seq("AT", "TIME", "ZONE"):
            return this
-        return self.
+        return self._parse_at_time_zone(
+            self.expression(exp.AtTimeZone, this=this, zone=self._parse_unary())
+        )
 
    def _parse_column(self) -> t.Optional[exp.Expression]:
        this = self._parse_column_reference()
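
_parse_at_time_zone is now recursive, so stacked AT TIME ZONE clauses nest instead of stopping after the first one, and _parse_factor applies it before * and / bind. An illustrative check:

```python
# Illustrative: consecutive AT TIME ZONE clauses now produce nested exp.AtTimeZone nodes.
import sqlglot
from sqlglot import exp

node = sqlglot.parse_one(
    "SELECT ts AT TIME ZONE 'UTC' AT TIME ZONE 'America/New_York'"
).find(exp.AtTimeZone)
print(node.sql())
```
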
@@ -5854,7 +5918,9 @@ class Parser(metaclass=_Parser):
            expressions.append(exp.Literal.string(self._prev.text))
 
        if len(expressions) > 1:
-            return self.expression(
+            return self.expression(
+                exp.Concat, expressions=expressions, coalesce=self.dialect.CONCAT_COALESCE
+            )
 
        return primary
 
@@ -6070,7 +6136,7 @@ class Parser(metaclass=_Parser):
    def _parse_introducer(self, token: Token) -> exp.Introducer | exp.Identifier:
        literal = self._parse_primary()
        if literal:
-            return self.expression(exp.Introducer,
+            return self.expression(exp.Introducer, token=token, expression=literal)
 
        return self._identifier_expression(token)
 
@@ -6109,7 +6175,7 @@ class Parser(metaclass=_Parser):
 
        if self._match(TokenType.DISTINCT):
            this = self.expression(
-                exp.Distinct, expressions=self._parse_csv(self.
+                exp.Distinct, expressions=self._parse_csv(self._parse_disjunction)
            )
        else:
            this = self._parse_select_or_expression(alias=alias)
@@ -6157,7 +6223,7 @@ class Parser(metaclass=_Parser):
        ):
            persisted = self._prev.text.upper() == "MATERIALIZED"
            constraint_kind = exp.ComputedColumnConstraint(
-                this=self.
+                this=self._parse_disjunction(),
                persisted=persisted or self._match_text_seq("PERSISTED"),
                data_type=exp.Var(this="AUTO")
                if self._match_text_seq("AUTO")
@@ -6472,6 +6538,14 @@ class Parser(metaclass=_Parser):
            and self._prev.token_type == TokenType.DESC
        )
 
+        this = None
+        if (
+            self._curr.text.upper() not in self.CONSTRAINT_PARSERS
+            and self._next
+            and self._next.token_type == TokenType.L_PAREN
+        ):
+            this = self._parse_id_var()
+
        if not in_props and not self._match(TokenType.L_PAREN, advance=False):
            return self.expression(
                exp.PrimaryKeyColumnConstraint,
@@ -6485,13 +6559,14 @@ class Parser(metaclass=_Parser):
 
        return self.expression(
            exp.PrimaryKey,
+            this=this,
            expressions=expressions,
            include=self._parse_index_params(),
            options=self._parse_key_constraint_options(),
        )
 
    def _parse_bracket_key_value(self, is_map: bool = False) -> t.Optional[exp.Expression]:
-        return self._parse_slice(self._parse_alias(self.
+        return self._parse_slice(self._parse_alias(self._parse_disjunction(), explicit=True))
 
    def _parse_odbc_datetime_literal(self) -> exp.Expression:
        """
@@ -6571,9 +6646,16 @@ class Parser(metaclass=_Parser):
        return self._parse_bracket(this)
 
    def _parse_slice(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
-        if self._match(TokenType.COLON):
-            return
-
+        if not self._match(TokenType.COLON):
+            return this
+
+        if self._match_pair(TokenType.DASH, TokenType.COLON, advance=False):
+            self._advance()
+            end: t.Optional[exp.Expression] = -exp.Literal.number("1")
+        else:
+            end = self._parse_unary()
+        step = self._parse_unary() if self._match(TokenType.COLON) else None
+        return self.expression(exp.Slice, this=this, expression=end, step=step)
 
    def _parse_case(self) -> t.Optional[exp.Expression]:
        if self._match(TokenType.DOT, advance=False):
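
_parse_slice now keeps an optional step after the second colon and normalizes a bare dash bound (the DuckDB "-" end-of-list shorthand) to -1, producing exp.Slice(this, expression, step). A hedged sketch, assuming DuckDB bracket slicing is routed through this path:

```python
# Hedged sketch: a [begin:end:step] slice should now retain its step in the AST.
import sqlglot
from sqlglot import exp

sliced = sqlglot.parse_one("SELECT my_list[2:8:2]", dialect="duckdb").find(exp.Slice)
print(sliced.args.get("step"))
```
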
@@ -6585,16 +6667,16 @@ class Parser(metaclass=_Parser):
        default = None
 
        comments = self._prev_comments
-        expression = self.
+        expression = self._parse_disjunction()
 
        while self._match(TokenType.WHEN):
-            this = self.
+            this = self._parse_disjunction()
            self._match(TokenType.THEN)
-            then = self.
+            then = self._parse_disjunction()
            ifs.append(self.expression(exp.If, this=this, true=then))
 
        if self._match(TokenType.ELSE):
-            default = self.
+            default = self._parse_disjunction()
 
        if not self._match(TokenType.END):
            if isinstance(default, exp.Interval) and default.this.sql().upper() == "END":
@@ -6619,15 +6701,15 @@ class Parser(metaclass=_Parser):
        if self.NO_PAREN_IF_COMMANDS and index == 0:
            return self._parse_as_command(self._prev)
 
-        condition = self.
+        condition = self._parse_disjunction()
 
        if not condition:
            self._retreat(index)
            return None
 
        self._match(TokenType.THEN)
-        true = self.
-        false = self.
+        true = self._parse_disjunction()
+        false = self._parse_disjunction() if self._match(TokenType.ELSE) else None
        self._match(TokenType.END)
        this = self.expression(exp.If, this=condition, true=true, false=false)
 
@@ -6666,7 +6748,7 @@ class Parser(metaclass=_Parser):
        return self.validate_expression(gap_fill, args)
 
    def _parse_cast(self, strict: bool, safe: t.Optional[bool] = None) -> exp.Expression:
-        this = self.
+        this = self._parse_disjunction()
 
        if not self._match(TokenType.ALIAS):
            if self._match(TokenType.COMMA):
@@ -6726,12 +6808,12 @@ class Parser(metaclass=_Parser):
    def _parse_string_agg(self) -> exp.GroupConcat:
        if self._match(TokenType.DISTINCT):
            args: t.List[t.Optional[exp.Expression]] = [
-                self.expression(exp.Distinct, expressions=[self.
+                self.expression(exp.Distinct, expressions=[self._parse_disjunction()])
            ]
            if self._match(TokenType.COMMA):
-                args.extend(self._parse_csv(self.
+                args.extend(self._parse_csv(self._parse_disjunction))
        else:
-            args = self._parse_csv(self.
+            args = self._parse_csv(self._parse_disjunction)  # type: ignore
 
        if self._match_text_seq("ON", "OVERFLOW"):
            # trino: LISTAGG(expression [, separator] [ON OVERFLOW overflow_behavior])
@@ -6836,7 +6918,7 @@ class Parser(metaclass=_Parser):
        return namespaces
 
    def _parse_decode(self) -> t.Optional[exp.Decode | exp.DecodeCase]:
-        args = self._parse_csv(self.
+        args = self._parse_csv(self._parse_disjunction)
 
        if len(args) < 3:
            return self.expression(exp.Decode, this=seq_get(args, 0), charset=seq_get(args, 1))
@@ -7262,7 +7344,7 @@ class Parser(metaclass=_Parser):
            "value": (
                (self._match_text_seq("UNBOUNDED") and "UNBOUNDED")
                or (self._match_text_seq("CURRENT", "ROW") and "CURRENT ROW")
-                or self.
+                or self._parse_bitwise()
            ),
            "side": self._match_texts(self.WINDOW_SIDES) and self._prev.text,
        }
@@ -7405,7 +7487,7 @@ class Parser(metaclass=_Parser):
        if self._match(TokenType.L_PAREN, advance=False):
            return self._parse_wrapped_csv(self._parse_expression)
 
-        expression = self._parse_alias(self.
+        expression = self._parse_alias(self._parse_disjunction(), explicit=True)
        return [expression] if expression else None
 
    def _parse_csv(
@@ -7514,9 +7596,19 @@ class Parser(metaclass=_Parser):
 
        return self.expression(exp.Commit, chain=chain)
 
-    def _parse_refresh(self) -> exp.Refresh:
-        self._match(TokenType.TABLE)
-
+    def _parse_refresh(self) -> exp.Refresh | exp.Command:
+        if self._match(TokenType.TABLE):
+            kind = "TABLE"
+        elif self._match_text_seq("MATERIALIZED", "VIEW"):
+            kind = "MATERIALIZED VIEW"
+        else:
+            kind = ""
+
+        this = self._parse_string() or self._parse_table()
+        if not kind and not isinstance(this, exp.Literal):
+            return self._parse_as_command(self._prev)
+
+        return self.expression(exp.Refresh, this=this, kind=kind)
 
    def _parse_column_def_with_exists(self):
        start = self._index
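
_parse_refresh now distinguishes REFRESH TABLE, REFRESH MATERIALIZED VIEW, and string targets, and falls back to a raw Command otherwise. A hedged example, assuming the dialect dispatches REFRESH through this method:

```python
# Hedged example: REFRESH MATERIALIZED VIEW should now yield exp.Refresh with
# kind="MATERIALIZED VIEW" instead of an opaque Command.
import sqlglot

stmt = sqlglot.parse_one("REFRESH MATERIALIZED VIEW sales_summary", dialect="postgres")
print(type(stmt).__name__, stmt.args.get("kind"))
```
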
@@ -7613,7 +7705,7 @@ class Parser(metaclass=_Parser):
        if self._match_pair(TokenType.DROP, TokenType.DEFAULT):
            return self.expression(exp.AlterColumn, this=column, drop=True)
        if self._match_pair(TokenType.SET, TokenType.DEFAULT):
-            return self.expression(exp.AlterColumn, this=column, default=self.
+            return self.expression(exp.AlterColumn, this=column, default=self._parse_disjunction())
        if self._match(TokenType.COMMENT):
            return self.expression(exp.AlterColumn, this=column, comment=self._parse_string())
        if self._match_text_seq("DROP", "NOT", "NULL"):
@@ -7642,7 +7734,7 @@ class Parser(metaclass=_Parser):
            this=column,
            dtype=self._parse_types(),
            collate=self._match(TokenType.COLLATE) and self._parse_term(),
-            using=self._match(TokenType.USING) and self.
+            using=self._match(TokenType.USING) and self._parse_disjunction(),
        )
 
    def _parse_alter_diststyle(self) -> exp.AlterDistStyle:
@@ -7973,7 +8065,7 @@ class Parser(metaclass=_Parser):
            exp.Merge,
            this=target,
            using=using,
-            on=self._match(TokenType.ON) and self.
+            on=self._match(TokenType.ON) and self._parse_disjunction(),
            using_cond=self._match(TokenType.USING) and self._parse_using_identifiers(),
            whens=self._parse_when_matched(),
            returning=self._parse_returning(),
@@ -7990,7 +8082,7 @@ class Parser(metaclass=_Parser):
                if self._match_text_seq("BY", "TARGET")
                else self._match_text_seq("BY", "SOURCE")
            )
-            condition = self.
+            condition = self._parse_disjunction() if self._match(TokenType.AND) else None
 
            self._match(TokenType.THEN)
 
@@ -8047,7 +8139,7 @@ class Parser(metaclass=_Parser):
            return self._parse_set_transaction(global_=kind == "GLOBAL")
 
        left = self._parse_primary() or self._parse_column()
-        assignment_delimiter = self._match_texts(
+        assignment_delimiter = self._match_texts(self.SET_ASSIGNMENT_DELIMITERS)
 
        if not left or (self.SET_REQUIRES_ASSIGNMENT_DELIMITER and not assignment_delimiter):
            self._retreat(index)
@@ -8069,7 +8161,7 @@ class Parser(metaclass=_Parser):
            exp.SetItem,
            expressions=characteristics,
            kind="TRANSACTION",
-
+            global_=global_,
        )
 
    def _parse_set_item(self) -> t.Optional[exp.Expression]:
@@ -8173,7 +8265,7 @@ class Parser(metaclass=_Parser):
            self._retreat(index - 1)
            return None
        iterator = self._parse_column()
-        condition = self.
+        condition = self._parse_disjunction() if self._match_text_seq("IF") else None
        return self.expression(
            exp.Comprehension,
            this=this,
@@ -8453,6 +8545,13 @@ class Parser(metaclass=_Parser):
            elif prev == "FILE_FORMAT":
                # T-SQL's external file format case
                param.set("expression", self._parse_field())
+            elif (
+                prev == "FORMAT"
+                and self._prev.token_type == TokenType.ALIAS
+                and self._match_texts(("AVRO", "JSON"))
+            ):
+                param.set("this", exp.var(f"FORMAT AS {self._prev.text.upper()}"))
+                param.set("expression", self._parse_field())
            else:
                param.set("expression", self._parse_unquoted_field() or self._parse_bracket())
 
@@ -8475,7 +8574,10 @@ class Parser(metaclass=_Parser):
        if self._match_text_seq("ENCRYPTION"):
            expr.set("encryption", self._parse_wrapped_options())
        if self._match_text_seq("IAM_ROLE"):
-            expr.set(
+            expr.set(
+                "iam_role",
+                exp.var(self._prev.text) if self._match(TokenType.DEFAULT) else self._parse_field(),
+            )
        if self._match_text_seq("REGION"):
            expr.set("region", self._parse_field())
 
@@ -8552,11 +8654,9 @@ class Parser(metaclass=_Parser):
 
        return self.expression(
            exp.Star,
-
-
-
-            "rename": self._parse_star_op("RENAME"),
-            },
+            except_=self._parse_star_op("EXCEPT", "EXCLUDE"),
+            replace=self._parse_star_op("REPLACE"),
+            rename=self._parse_star_op("RENAME"),
        ).update_positions(star_token)
 
    def _parse_grant_privilege(self) -> t.Optional[exp.GrantPrivilege]:
@@ -8654,14 +8754,17 @@ class Parser(metaclass=_Parser):
        )
 
    def _parse_overlay(self) -> exp.Overlay:
+        def _parse_overlay_arg(text: str) -> t.Optional[exp.Expression]:
+            return (
+                self._match(TokenType.COMMA) or self._match_text_seq(text)
+            ) and self._parse_bitwise()
+
        return self.expression(
            exp.Overlay,
-
-
-
-
-            "for": self._match_text_seq("FOR") and self._parse_bitwise(),
-            },
+            this=self._parse_bitwise(),
+            expression=_parse_overlay_arg("PLACING"),
+            from_=_parse_overlay_arg("FROM"),
+            for_=_parse_overlay_arg("FOR"),
        )
 
    def _parse_format_name(self) -> exp.Property:
@@ -8689,10 +8792,7 @@ class Parser(metaclass=_Parser):
    def _identifier_expression(
        self, token: t.Optional[Token] = None, **kwargs: t.Any
    ) -> exp.Identifier:
-
-        expression = self.expression(exp.Identifier, this=token.text, **kwargs)
-        expression.update_positions(token)
-        return expression
+        return self.expression(exp.Identifier, token=token or self._prev, **kwargs)
 
    def _build_pipe_cte(
        self,
@@ -8707,12 +8807,12 @@ class Parser(metaclass=_Parser):
        self._pipe_cte_counter += 1
        new_cte = f"__tmp{self._pipe_cte_counter}"
 
-        with_ = query.args.get("
+        with_ = query.args.get("with_")
        ctes = with_.pop() if with_ else None
 
        new_select = exp.select(*expressions, copy=False).from_(new_cte, copy=False)
        if ctes:
-            new_select.set("
+            new_select.set("with_", ctes)
 
        return new_select.with_(new_cte, as_=query, copy=False)
 
@@ -8740,7 +8840,7 @@ class Parser(metaclass=_Parser):
        return query
 
    def _parse_pipe_syntax_aggregate_fields(self) -> t.Optional[exp.Expression]:
-        this = self.
+        this = self._parse_disjunction()
        if self._match_text_seq("GROUP", "AND", advance=False):
            return this
 
@@ -8807,7 +8907,7 @@ class Parser(metaclass=_Parser):
        ]
 
        query = self._build_pipe_cte(query=query, expressions=[exp.Star()])
-        with_ = query.args.get("
+        with_ = query.args.get("with_")
        ctes = with_.pop() if with_ else None
 
        if isinstance(first_setop, exp.Union):
@@ -8817,7 +8917,7 @@ class Parser(metaclass=_Parser):
        else:
            query = query.intersect(*setops, copy=False, **first_setop.args)
 
-        query.set("
+        query.set("with_", ctes)
 
        return self._build_pipe_cte(query=query, expressions=[exp.Star()])
 
@@ -8836,7 +8936,7 @@ class Parser(metaclass=_Parser):
        if not pivots:
            return query
 
-        from_ = query.args.get("
+        from_ = query.args.get("from_")
        if from_:
            from_.this.set("pivots", pivots)
 
@@ -8850,7 +8950,7 @@ class Parser(metaclass=_Parser):
    def _parse_pipe_syntax_tablesample(self, query: exp.Select) -> exp.Select:
        sample = self._parse_table_sample()
 
-        with_ = query.args.get("
+        with_ = query.args.get("with_")
        if with_:
            with_.expressions[-1].this.set("sample", sample)
        else:
@@ -8862,7 +8962,7 @@ class Parser(metaclass=_Parser):
        if isinstance(query, exp.Subquery):
            query = exp.select("*").from_(query, copy=False)
 
-        if not query.args.get("
+        if not query.args.get("from_"):
            query = exp.select("*").from_(query.subquery(copy=False), copy=False)
 
        while self._match(TokenType.PIPE_GT):
@@ -8935,13 +9035,20 @@ class Parser(metaclass=_Parser):
    ) -> exp.Expression:
        if isinstance(node, exp.Distinct) and len(node.expressions) > 1:
            concat_exprs = [
-                self.expression(
+                self.expression(
+                    exp.Concat,
+                    expressions=node.expressions,
+                    safe=True,
+                    coalesce=self.dialect.CONCAT_COALESCE,
+                )
            ]
            node.set("expressions", concat_exprs)
            return node
        if len(exprs) == 1:
            return exprs[0]
-        return self.expression(
+        return self.expression(
+            exp.Concat, expressions=args, safe=True, coalesce=self.dialect.CONCAT_COALESCE
+        )
 
        args = self._parse_csv(self._parse_lambda)
 
@@ -8961,3 +9068,12 @@ class Parser(metaclass=_Parser):
        separator = self._parse_field() if self._match(TokenType.SEPARATOR) else None
 
        return self.expression(exp.GroupConcat, this=this, separator=separator)
+
+    def _parse_initcap(self) -> exp.Initcap:
+        expr = exp.Initcap.from_arg_list(self._parse_function_args())
+
+        # attach dialect's default delimiters
+        if expr.args.get("expression") is None:
+            expr.set("expression", exp.Literal.string(self.dialect.INITCAP_DEFAULT_DELIMITER_CHARS))
+
+        return expr