sqlglot 27.5.1__py3-none-any.whl → 27.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlglot/__init__.py +1 -0
- sqlglot/_version.py +16 -3
- sqlglot/dialects/bigquery.py +3 -1
- sqlglot/dialects/exasol.py +60 -2
- sqlglot/dialects/singlestore.py +22 -0
- sqlglot/dialects/snowflake.py +2 -0
- sqlglot/expressions.py +25 -0
- sqlglot/generator.py +14 -0
- sqlglot/optimizer/qualify_columns.py +40 -25
- sqlglot/parser.py +6 -1
- {sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/METADATA +1 -1
- {sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/RECORD +15 -15
- {sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/WHEEL +0 -0
- {sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/licenses/LICENSE +0 -0
- {sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/top_level.txt +0 -0
sqlglot/__init__.py
CHANGED
sqlglot/_version.py
CHANGED
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control

-__all__ = [
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]

 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union

     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object

 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID

-__version__ = version = '27.
-__version_tuple__ = version_tuple = (27,
+__version__ = version = '27.6.1'
+__version_tuple__ = version_tuple = (27, 6, 1)
+
+__commit_id__ = commit_id = None
sqlglot/dialects/bigquery.py
CHANGED
@@ -434,7 +434,9 @@ class BigQuery(Dialect):

     # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
     # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
-
+    # https://cloud.google.com/bigquery/docs/querying-wildcard-tables#scanning_a_range_of_tables_using_table_suffix
+    # https://cloud.google.com/bigquery/docs/query-cloud-storage-data#query_the_file_name_pseudo-column
+    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE", "_TABLE_SUFFIX", "_FILE_NAME"}

     # All set operations require either a DISTINCT or ALL specifier
     SET_OP_DISTINCT_BY_DEFAULT = dict.fromkeys((exp.Except, exp.Intersect, exp.Union), None)
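Usage sketch (not part of the diff): the extended pseudo-column set is a plain class attribute on the dialect, so star expansion and qualification can consult it directly; printed order may vary because it is a Python set.

from sqlglot.dialects.bigquery import BigQuery

# New members in 27.6.1: _TABLE_SUFFIX and _FILE_NAME join the partition pseudo-columns.
print(BigQuery.PSEUDOCOLUMNS)
# e.g. {'_PARTITIONTIME', '_PARTITIONDATE', '_TABLE_SUFFIX', '_FILE_NAME'}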
sqlglot/dialects/exasol.py
CHANGED
@@ -3,7 +3,6 @@ from __future__ import annotations
 import typing as t

 from sqlglot import exp, generator, parser, tokens
-from sqlglot.dialects.clickhouse import timestamptrunc_sql
 from sqlglot.dialects.dialect import (
     Dialect,
     binary_from_function,
@@ -12,6 +11,8 @@ from sqlglot.dialects.dialect import (
     strposition_sql,
     timestrtotime_sql,
     unit_to_str,
+    timestamptrunc_sql,
+    build_date_delta,
 )
 from sqlglot.generator import unsupported_args
 from sqlglot.helper import seq_get
@@ -46,6 +47,18 @@ def _build_trunc(args: t.List[exp.Expression], dialect: DialectType) -> exp.Expr
     return exp.Anonymous(this="TRUNC", expressions=args)


+# https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/zeroifnull.htm
+def _build_zeroifnull(args: t.List) -> exp.If:
+    cond = exp.Is(this=seq_get(args, 0), expression=exp.Null())
+    return exp.If(this=cond, true=exp.Literal.number(0), false=seq_get(args, 0))
+
+
+# https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/nullifzero.htm
+def _build_nullifzero(args: t.List) -> exp.If:
+    cond = exp.EQ(this=seq_get(args, 0), expression=exp.Literal.number(0))
+    return exp.If(this=cond, true=exp.Null(), false=seq_get(args, 0))
+
+
 class Exasol(Dialect):
     TIME_MAPPING = {
         "yyyy": "%Y",
@@ -79,11 +92,28 @@ class Exasol(Dialect):
         KEYWORDS = {
             **tokens.Tokenizer.KEYWORDS,
             "USER": TokenType.CURRENT_USER,
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/if.htm
+            "ENDIF": TokenType.END,
+            "LONG VARCHAR": TokenType.TEXT,
         }

     class Parser(parser.Parser):
         FUNCTIONS = {
             **parser.Parser.FUNCTIONS,
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_days.htm
+            "ADD_DAYS": build_date_delta(exp.DateAdd, default_unit="DAY"),
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_years.htm
+            "ADD_YEARS": build_date_delta(exp.DateAdd, default_unit="YEAR"),
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_months.htm
+            "ADD_MONTHS": build_date_delta(exp.DateAdd, default_unit="MONTH"),
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_weeks.htm
+            "ADD_WEEKS": build_date_delta(exp.DateAdd, default_unit="WEEK"),
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_hour.htm
+            "ADD_HOURS": build_date_delta(exp.DateAdd, default_unit="HOUR"),
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_minutes.htm
+            "ADD_MINUTES": build_date_delta(exp.DateAdd, default_unit="MINUTE"),
+            # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_seconds.htm
+            "ADD_SECONDS": build_date_delta(exp.DateAdd, default_unit="SECOND"),
             "BIT_AND": binary_from_function(exp.BitwiseAnd),
             "BIT_OR": binary_from_function(exp.BitwiseOr),
             "BIT_XOR": binary_from_function(exp.BitwiseXor),
@@ -127,6 +157,8 @@ class Exasol(Dialect):
                 timestamp=seq_get(args, 0),
                 options=seq_get(args, 3),
             ),
+            "NULLIFZERO": _build_nullifzero,
+            "ZEROIFNULL": _build_zeroifnull,
         }
         CONSTRAINT_PARSERS = {
             **parser.Parser.CONSTRAINT_PARSERS,
@@ -146,7 +178,8 @@ class Exasol(Dialect):
             exp.DataType.Type.MEDIUMTEXT: "VARCHAR",
             exp.DataType.Type.TINYBLOB: "VARCHAR",
             exp.DataType.Type.TINYTEXT: "VARCHAR",
-
+            # https://docs.exasol.com/db/latest/sql_references/data_types/datatypealiases.htm
+            exp.DataType.Type.TEXT: "LONG VARCHAR",
             exp.DataType.Type.VARBINARY: "VARCHAR",
         }

@@ -163,6 +196,16 @@ class Exasol(Dialect):
             exp.DataType.Type.DATETIME: "TIMESTAMP",
         }

+        DATE_ADD_FUNCTION_BY_UNIT = {
+            "DAY": "ADD_DAYS",
+            "WEEK": "ADD_WEEKS",
+            "MONTH": "ADD_MONTHS",
+            "YEAR": "ADD_YEARS",
+            "HOUR": "ADD_HOURS",
+            "MINUTE": "ADD_MINUTES",
+            "SECOND": "ADD_SECONDS",
+        }
+
         def datatype_sql(self, expression: exp.DataType) -> str:
             # Exasol supports a fixed default precision of 3 for TIMESTAMP WITH LOCAL TIME ZONE
             # and does not allow specifying a different custom precision
@@ -249,3 +292,18 @@ class Exasol(Dialect):
             options = expression.args.get("options")

             return self.func("CONVERT_TZ", datetime, from_tz, to_tz, options)
+
+        def if_sql(self, expression: exp.If) -> str:
+            this = self.sql(expression, "this")
+            true = self.sql(expression, "true")
+            false = self.sql(expression, "false")
+            return f"IF {this} THEN {true} ELSE {false} ENDIF"
+
+        def dateadd_sql(self, expression: exp.DateAdd) -> str:
+            unit = expression.text("unit").upper() or "DAY"
+            func_name = self.DATE_ADD_FUNCTION_BY_UNIT.get(unit)
+            if not func_name:
+                self.unsupported(f"'{unit}' is not supported in Exasol.")
+                return self.function_fallback_sql(expression)
+
+            return self.func(func_name, expression.this, expression.expression)
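Usage sketch (not part of the diff; output shapes are inferred from the mappings above, not copied from the test suite): ZEROIFNULL now parses into exp.If and is rendered back with Exasol's IF ... THEN ... ELSE ... ENDIF syntax, while exp.DateAdd is emitted through the unit-specific ADD_* functions.

import sqlglot

# ZEROIFNULL(x) -> exp.If(x IS NULL, 0, x) -> IF ... THEN ... ELSE ... ENDIF
print(sqlglot.transpile("SELECT ZEROIFNULL(x) FROM t", read="exasol", write="exasol")[0])
# expected shape: SELECT IF x IS NULL THEN 0 ELSE x ENDIF FROM t

# exp.DateAdd picks the unit-specific function via DATE_ADD_FUNCTION_BY_UNIT
print(sqlglot.transpile("SELECT DATE_ADD(d, INTERVAL 2 WEEK) FROM t", read="bigquery", write="exasol")[0])
# expected shape: SELECT ADD_WEEKS(d, 2) FROM t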
sqlglot/dialects/singlestore.py
CHANGED
@@ -4,6 +4,7 @@ import typing as t
 from sqlglot import exp
 from sqlglot.dialects.dialect import build_formatted_time
 from sqlglot.dialects.mysql import MySQL
+from sqlglot.generator import unsupported_args
 from sqlglot.helper import seq_get


@@ -65,6 +66,21 @@ class SingleStore(MySQL):
             ),
         }

+        CAST_COLUMN_OPERATORS = {TokenType.COLON_GT, TokenType.NCOLON_GT}
+
+        COLUMN_OPERATORS = {
+            TokenType.COLON_GT: lambda self, this, to: self.expression(
+                exp.Cast,
+                this=this,
+                to=to,
+            ),
+            TokenType.NCOLON_GT: lambda self, this, to: self.expression(
+                exp.TryCast,
+                this=this,
+                to=to,
+            ),
+        }
+
     class Generator(MySQL.Generator):
         TRANSFORMS = {
             **MySQL.Generator.TRANSFORMS,
@@ -89,6 +105,12 @@ class SingleStore(MySQL):
                     inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
                 ),
             ),
+            exp.Cast: unsupported_args("format", "action", "default")(
+                lambda self, e: f"{self.sql(e, 'this')} :> {self.sql(e, 'to')}"
+            ),
+            exp.TryCast: unsupported_args("format", "action", "default")(
+                lambda self, e: f"{self.sql(e, 'this')} !:> {self.sql(e, 'to')}"
+            ),
         }

         # https://docs.singlestore.com/cloud/reference/sql-reference/restricted-keywords/list-of-restricted-keywords/
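Usage sketch (not part of the diff; assumes the :> and !:> tokens are already produced by the SingleStore tokenizer, which this diff does not touch): the new operator table parses them into exp.Cast / exp.TryCast, and the new TRANSFORMS entries generate them back.

import sqlglot
from sqlglot import exp

ast = sqlglot.parse_one("SELECT a :> INT, b !:> JSON FROM t", read="singlestore")
assert isinstance(ast.selects[0], exp.Cast)     # :> builds a Cast
assert isinstance(ast.selects[1], exp.TryCast)  # !:> builds a TryCast
print(ast.sql(dialect="singlestore"))
# expected shape: SELECT a :> INT, b !:> JSON FROM t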
sqlglot/dialects/snowflake.py
CHANGED
@@ -591,6 +591,7 @@ class Snowflake(Dialect):
                 this=seq_get(args, 0), expression=seq_get(args, 1), max_dist=seq_get(args, 2)
             ),
             "FLATTEN": exp.Explode.from_arg_list,
+            "GET": exp.GetExtract.from_arg_list,
             "GET_PATH": lambda args, dialect: exp.JSONExtract(
                 this=seq_get(args, 0),
                 expression=dialect.to_json_path(seq_get(args, 1)),
@@ -1220,6 +1221,7 @@
             exp.GenerateSeries: lambda self, e: self.func(
                 "ARRAY_GENERATE_RANGE", e.args["start"], e.args["end"] + 1, e.args.get("step")
             ),
+            exp.GetExtract: rename_func("GET"),
             exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, sep=""),
             exp.If: if_sql(name="IFF", false_value="NULL"),
             exp.JSONExtractArray: _json_extract_value_array_sql,
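Usage sketch (not part of the diff): GET(...) now round-trips through the new exp.GetExtract node; Snowflake renders it back as GET via rename_func, while other dialects go through the generic getextract_sql added in generator.py below.

import sqlglot

print(sqlglot.transpile("SELECT GET(v, 'field') FROM t", read="snowflake", write="snowflake")[0])
# expected shape: SELECT GET(v, 'field') FROM t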
sqlglot/expressions.py
CHANGED
@@ -6185,6 +6185,11 @@ class GenerateTimestampArray(Func):
     arg_types = {"start": True, "end": True, "step": True}


+# https://docs.snowflake.com/en/sql-reference/functions/get
+class GetExtract(Func):
+    arg_types = {"this": True, "expression": True}
+
+
 class Greatest(Func):
     arg_types = {"this": True, "expressions": False}
     is_var_len_args = True
@@ -8619,6 +8624,26 @@ def replace_tree(
     return new_node


+def find_tables(expression: Expression) -> t.Set[Table]:
+    """
+    Find all tables referenced in a query.
+
+    Args:
+        expressions: The query to find the tables in.
+
+    Returns:
+        A set of all the tables.
+    """
+    from sqlglot.optimizer.scope import traverse_scope
+
+    return {
+        table
+        for scope in traverse_scope(expression)
+        for table in scope.tables
+        if table.name and table.name not in scope.cte_sources
+    }
+
+
 def column_table_names(expression: Expression, exclude: str = "") -> t.Set[str]:
     """
     Return all table names referenced through columns in an expression.
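Usage sketch (not part of the diff; the query is made up for illustration): find_tables walks the query's scopes and keeps only real tables, filtering out CTE references via scope.cte_sources.

import sqlglot
from sqlglot import exp

ast = sqlglot.parse_one("WITH c AS (SELECT * FROM x) SELECT * FROM c JOIN y ON c.id = y.id")
print({t.name for t in exp.find_tables(ast)})
# expected: {'x', 'y'}  (the CTE name c is excluded)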
sqlglot/generator.py
CHANGED
@@ -5113,3 +5113,17 @@ class Generator(metaclass=_Generator):
         where = self.sql(expression, "where")
         where = self.seg(f"WHERE {where}") if where else ""
         return f"SEMANTIC_VIEW({self.indent(this + metrics + dimensions + where)}{self.seg(')', sep='')}"
+
+    def getextract_sql(self, expression: exp.GetExtract) -> str:
+        this = expression.this
+        expr = expression.expression
+
+        if not this.type or not expression.type:
+            from sqlglot.optimizer.annotate_types import annotate_types
+
+            this = annotate_types(this, dialect=self.dialect)
+
+        if this.is_type(*(exp.DataType.Type.ARRAY, exp.DataType.Type.MAP)):
+            return self.sql(exp.Bracket(this=this, expressions=[expr]))
+
+        return self.sql(exp.JSONExtract(this=this, expression=self.dialect.to_json_path(expr)))
sqlglot/optimizer/qualify_columns.py
CHANGED
@@ -972,6 +972,44 @@ class Resolver:
         }
         return self._all_columns

+    def get_source_columns_from_set_op(self, expression: exp.Expression) -> t.List[str]:
+        if isinstance(expression, exp.Select):
+            return expression.named_selects
+        if isinstance(expression, exp.Subquery) and isinstance(expression.this, exp.SetOperation):
+            # Different types of SET modifiers can be chained together if they're explicitly grouped by nesting
+            return self.get_source_columns_from_set_op(expression.this)
+        if not isinstance(expression, exp.SetOperation):
+            raise OptimizeError(f"Unknown set operation: {expression}")
+
+        set_op = expression
+
+        # BigQuery specific set operations modifiers, e.g INNER UNION ALL BY NAME
+        on_column_list = set_op.args.get("on")
+
+        if on_column_list:
+            # The resulting columns are the columns in the ON clause:
+            # {INNER | LEFT | FULL} UNION ALL BY NAME ON (col1, col2, ...)
+            columns = [col.name for col in on_column_list]
+        elif set_op.side or set_op.kind:
+            side = set_op.side
+            kind = set_op.kind
+
+            # Visit the children UNIONs (if any) in a post-order traversal
+            left = self.get_source_columns_from_set_op(set_op.left)
+            right = self.get_source_columns_from_set_op(set_op.right)
+
+            # We use dict.fromkeys to deduplicate keys and maintain insertion order
+            if side == "LEFT":
+                columns = left
+            elif side == "FULL":
+                columns = list(dict.fromkeys(left + right))
+            elif kind == "INNER":
+                columns = list(dict.fromkeys(left).keys() & dict.fromkeys(right).keys())
+        else:
+            columns = set_op.named_selects
+
+        return columns
+
     def get_source_columns(self, name: str, only_visible: bool = False) -> t.Sequence[str]:
         """Resolve the source columns for a given source `name`."""
         cache_key = (name, only_visible)
@@ -996,31 +1034,8 @@
             for k in source.expression.type.expressions:  # type: ignore
                 columns.append(k.name)
         elif isinstance(source, Scope) and isinstance(source.expression, exp.SetOperation):
-
-
-            # BigQuery specific set operations modifiers, e.g INNER UNION ALL BY NAME
-            on_column_list = set_op.args.get("on")
-
-            if on_column_list:
-                # The resulting columns are the columns in the ON clause:
-                # {INNER | LEFT | FULL} UNION ALL BY NAME ON (col1, col2, ...)
-                columns = [col.name for col in on_column_list]
-            elif set_op.side or set_op.kind:
-                side = set_op.side
-                kind = set_op.kind
-
-                left = set_op.left.named_selects
-                right = set_op.right.named_selects
-
-                # We use dict.fromkeys to deduplicate keys and maintain insertion order
-                if side == "LEFT":
-                    columns = left
-                elif side == "FULL":
-                    columns = list(dict.fromkeys(left + right))
-                elif kind == "INNER":
-                    columns = list(dict.fromkeys(left).keys() & dict.fromkeys(right).keys())
-                else:
-                    columns = set_op.named_selects
+            columns = self.get_source_columns_from_set_op(source.expression)
+
         else:
             select = seq_get(source.expression.selects, 0)

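Illustration (plain Python, not sqlglot API): the column-merging rules used by get_source_columns_from_set_op, shown with made-up column lists.

left = ["a", "b", "c"]
right = ["b", "c", "d"]

# FULL ... BY NAME: union of both sides, deduplicated, left-side order first
full = list(dict.fromkeys(left + right))  # ['a', 'b', 'c', 'd']

# INNER ... BY NAME: intersection of both sides (dict key views &); the result is a set, so order is not guaranteed
inner = list(dict.fromkeys(left).keys() & dict.fromkeys(right).keys())

print(full, inner)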
sqlglot/parser.py
CHANGED
@@ -792,6 +792,11 @@ class Parser(metaclass=_Parser):
        ),
    }

+    CAST_COLUMN_OPERATORS = {
+        TokenType.DOTCOLON,
+        TokenType.DCOLON,
+    }
+
    EXPRESSION_PARSERS = {
        exp.Cluster: lambda self: self._parse_sort(exp.Cluster, TokenType.CLUSTER_BY),
        exp.Column: lambda self: self._parse_column(),
@@ -5621,7 +5626,7 @@
            op_token = self._prev.token_type
            op = self.COLUMN_OPERATORS.get(op_token)

-            if op_token in
+            if op_token in self.CAST_COLUMN_OPERATORS:
                field = self._parse_dcolon()
                if not field:
                    self.raise_error("Expected type")
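Usage sketch (not part of the diff): the refactor only extracts the membership check into CAST_COLUMN_OPERATORS, so the base parser still treats :: (DCOLON) and the DOTCOLON operator as casts; SingleStore redefines the set for :> and !:> as shown above.

import sqlglot
from sqlglot import exp

ast = sqlglot.parse_one("SELECT x::INT FROM t")
assert isinstance(ast.selects[0], exp.Cast)  # :: still parses to exp.Cast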
{sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/RECORD
CHANGED
@@ -1,15 +1,15 @@
-sqlglot/__init__.py,sha256=
+sqlglot/__init__.py,sha256=XtHbbz93ughtCoKNYTbB8UftQvN5Bzt8fJZcD09ZcM8,5423
 sqlglot/__main__.py,sha256=022c173KqxsiABWTEpUIq_tJUxuNiW7a7ABsxBXqvu8,2069
 sqlglot/_typing.py,sha256=-1HPyr3w5COlSJWqlgt8jhFk2dyMvBuvVBqIX1wyVCM,642
-sqlglot/_version.py,sha256=
+sqlglot/_version.py,sha256=HWhnLcS8AiG1fzYyZ19-OOM59GAdOd4gi1uzXLPWzoc,706
 sqlglot/diff.py,sha256=PtOllQMQa1Sw1-V2Y8eypmDqGujXYPaTOp_WLsWkAWk,17314
 sqlglot/errors.py,sha256=QNKMr-pzLUDR-tuMmn_GK6iMHUIVdb_YSJ_BhGEvuso,2126
-sqlglot/expressions.py,sha256=
-sqlglot/generator.py,sha256=
+sqlglot/expressions.py,sha256=eGh8CEbjw9Oyo2IsdGgsvcZ1SwtkfxoIe2Z44y7o0Xk,246738
+sqlglot/generator.py,sha256=Xfbpm-z9NdxZPWdaTeMRJ2ton0H_y5wfC6j3dwgouQQ,218351
 sqlglot/helper.py,sha256=9nZjFVRBtMKFC3EdzpDQ6jkazFO19po6BF8xHiNGZIo,15111
 sqlglot/jsonpath.py,sha256=jneO-A57n4ojVT2drCn2HBlx_Ka8wLcGpemW1JgvbjA,7666
 sqlglot/lineage.py,sha256=Qj5ykuDNcATppb9vOjoIKBqRVLbu3OMPiZk9f3iyv40,15312
-sqlglot/parser.py,sha256=
+sqlglot/parser.py,sha256=FWLmqe0VfmZY623NGrEcNjM1sfNcWbFJt8beZOzUna8,327856
 sqlglot/planner.py,sha256=ql7Li-bWJRcyXzNaZy_n6bQ6B2ZfunEIB8Ztv2xaxq4,14634
 sqlglot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlglot/schema.py,sha256=13H2qKQs27EKdTpDLOvcNnSTDAUbYNKjWtJs4aQCSOA,20509
@@ -20,7 +20,7 @@ sqlglot/transforms.py,sha256=utNDsCBsA7hPUK3-aby3DDgiY_XVMAKQqeoLm1EyihI,41218
 sqlglot/trie.py,sha256=v27uXMrHfqrXlJ6GmeTSMovsB_3o0ctnlKhdNt7W6fI,2245
 sqlglot/dialects/__init__.py,sha256=BQUv9EuMmvhP_wVitGLo0PlCi15atvfXgvREpsTsxeQ,3799
 sqlglot/dialects/athena.py,sha256=ofArmayYLev4qZQ15GM8mevG04qqR5WGFb2ZcuYm6x4,10966
-sqlglot/dialects/bigquery.py,sha256=
+sqlglot/dialects/bigquery.py,sha256=VeWS0UYOHKT9IaWXxKOU9A5cBIEBGrpNiUJ2VutU4Mo,56697
 sqlglot/dialects/clickhouse.py,sha256=UY1hFC83RMO2bum1UFfGBey_wmKPBKlsWD5nxbrqeyg,57000
 sqlglot/dialects/databricks.py,sha256=mJN2lFpqgH95x3mtry3qWbuRf4q7NV5jbRAOspqclzY,4548
 sqlglot/dialects/dialect.py,sha256=O8fYmv1iFEfmXa1mUwEZ7GgPsfG51_VuHSv8E_zOw0k,71039
@@ -30,7 +30,7 @@ sqlglot/dialects/drill.py,sha256=FOh7_KjPx_77pv0DiHKZog0CcmzqeF9_PEmGnJ1ESSM,582
 sqlglot/dialects/druid.py,sha256=kh3snZtneehNOWqs3XcPjsrhNaRbkCQ8E4hHbWJ1fHM,690
 sqlglot/dialects/duckdb.py,sha256=X4nY2ZjUSGZZ3pCqUraGxIYXVA-gzB134qV0TeyMmqQ,51418
 sqlglot/dialects/dune.py,sha256=gALut-fFfN2qMsr8LvZ1NQK3F3W9z2f4PwMvTMXVVVg,375
-sqlglot/dialects/exasol.py,sha256=
+sqlglot/dialects/exasol.py,sha256=94t8ijRnfghe4EEDehaA7Nk73aMUK6Q38NcKmITLKd4,15250
 sqlglot/dialects/fabric.py,sha256=4Sng2ZhQSaf6eK3ituR9DqDZERaVwYS_UfdpusjsISg,10220
 sqlglot/dialects/hive.py,sha256=bAZz0qnaOH9f5FyIMkqBu3XB2Cj7y-xnCPbxPsk8U9I,31959
 sqlglot/dialects/materialize.py,sha256=LD2q1kTRrCwkIu1BfoBvnjTGbupDtoQ8JQMDCIYAXHg,3533
@@ -41,8 +41,8 @@ sqlglot/dialects/presto.py,sha256=Tm3Bx9AJilT1xlgunTpF0wUhIZBOPS-rB5Iwitnygxc,33
 sqlglot/dialects/prql.py,sha256=fwN-SPEGx-drwf1K0U2MByN-PkW3C_rOgQ3xeJeychg,7908
 sqlglot/dialects/redshift.py,sha256=MXI9W7CgKCtMNjNRPcZPxO8NBA9_PxZx14HB52o-aUc,15822
 sqlglot/dialects/risingwave.py,sha256=BqWwW1iT_OIVMwfRamaww79snnBwIgCfr22Go-ggO68,3289
-sqlglot/dialects/singlestore.py,sha256=
-sqlglot/dialects/snowflake.py,sha256=
+sqlglot/dialects/singlestore.py,sha256=w-fIrSyt_2r5SezGl7gUMyvvNmG4v2mBLTHLMhmlgD0,30065
+sqlglot/dialects/snowflake.py,sha256=cwSeyUdqwP4FmkNzqqc1DGmYaIqTAxecKJudJYII654,70584
 sqlglot/dialects/spark.py,sha256=hTumyd46Cc3HEl9KvlTla2eq_NKBI3w5Jis3FeMt_R8,8886
 sqlglot/dialects/spark2.py,sha256=aCwPqLduLRSUSPtbI1VtBjydK6haKgEy3iahmueGRo4,14742
 sqlglot/dialects/sqlite.py,sha256=XIDmiNTswWcrDwlFm8gOODCrJ_rPmXQKkm9U_-YAlVs,13183
@@ -71,13 +71,13 @@ sqlglot/optimizer/optimizer.py,sha256=vXEXDWHvbO-vJmSI7UqJuydM2WrD1xko7rETq2EtVJ
 sqlglot/optimizer/pushdown_predicates.py,sha256=HGjs3Z4V3-X2d1VTfWhyByY3aL5SmKnVvt3aDXiiBM0,8414
 sqlglot/optimizer/pushdown_projections.py,sha256=7NoK5NAUVYVhs0YnYyo6WuXfaO-BShSwS6lA8Y-ATQ4,6668
 sqlglot/optimizer/qualify.py,sha256=oAPfwub7dEkrlCrsptcJWpLya4BgKhN6M5SwIs_86LY,4002
-sqlglot/optimizer/qualify_columns.py,sha256=
+sqlglot/optimizer/qualify_columns.py,sha256=hOfhyczK9zBbUuysKdOK36PJ44nCzzw9BivJh8U5RBI,43921
 sqlglot/optimizer/qualify_tables.py,sha256=rRo0rXMMDAloG_ut7nGPtIO3e__ooM2PqShxWECKQbo,6965
 sqlglot/optimizer/scope.py,sha256=T6iVYnYwubt-WB1BOFsFYdJ-D7WtWZGL37SuCRQK23s,31154
 sqlglot/optimizer/simplify.py,sha256=-_yus42OYwqjQ9a2TSGhtG2G0pSkInUry1z7hEMz2pY,51062
 sqlglot/optimizer/unnest_subqueries.py,sha256=kzWUVDlxs8z9nmRx-8U-pHXPtVZhEIwkKqmKhr2QLvc,10908
-sqlglot-27.
-sqlglot-27.
-sqlglot-27.
-sqlglot-27.
-sqlglot-27.
+sqlglot-27.6.1.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
+sqlglot-27.6.1.dist-info/METADATA,sha256=284xsaLjHfNx6p_D-H-ocV0oUrsyKun0CS8vEMVktk0,19437
+sqlglot-27.6.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+sqlglot-27.6.1.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
+sqlglot-27.6.1.dist-info/RECORD,,

{sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/WHEEL
File without changes

{sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/licenses/LICENSE
File without changes

{sqlglot-27.5.1.dist-info → sqlglot-27.6.1.dist-info}/top_level.txt
File without changes