sqlglot 27.5.1__py3-none-any.whl → 27.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,8 +2,9 @@ from sqlglot import TokenType
2
2
  import typing as t
3
3
 
4
4
  from sqlglot import exp
5
- from sqlglot.dialects.dialect import build_formatted_time
5
+ from sqlglot.dialects.dialect import build_formatted_time, rename_func
6
6
  from sqlglot.dialects.mysql import MySQL
7
+ from sqlglot.generator import unsupported_args
7
8
  from sqlglot.helper import seq_get
8
9
 
9
10
 
@@ -63,6 +64,23 @@ class SingleStore(MySQL):
63
64
  ),
64
65
  format=MySQL.format_time(seq_get(args, 1)),
65
66
  ),
67
+ "UNIX_TIMESTAMP": exp.StrToUnix.from_arg_list,
68
+ "FROM_UNIXTIME": build_formatted_time(exp.UnixToTime, "mysql"),
69
+ }
70
+
71
+ CAST_COLUMN_OPERATORS = {TokenType.COLON_GT, TokenType.NCOLON_GT}
72
+
73
+ COLUMN_OPERATORS = {
74
+ TokenType.COLON_GT: lambda self, this, to: self.expression(
75
+ exp.Cast,
76
+ this=this,
77
+ to=to,
78
+ ),
79
+ TokenType.NCOLON_GT: lambda self, this, to: self.expression(
80
+ exp.TryCast,
81
+ this=this,
82
+ to=to,
83
+ ),
66
84
  }
67
85
 
68
86
  class Generator(MySQL.Generator):
@@ -89,6 +107,37 @@ class SingleStore(MySQL):
89
107
  inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
90
108
  ),
91
109
  ),
110
+ exp.Cast: unsupported_args("format", "action", "default")(
111
+ lambda self, e: f"{self.sql(e, 'this')} :> {self.sql(e, 'to')}"
112
+ ),
113
+ exp.TryCast: unsupported_args("format", "action", "default")(
114
+ lambda self, e: f"{self.sql(e, 'this')} !:> {self.sql(e, 'to')}"
115
+ ),
116
+ exp.StrToUnix: unsupported_args("format")(rename_func("UNIX_TIMESTAMP")),
117
+ exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
118
+ exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
119
+ exp.UnixSeconds: rename_func("UNIX_TIMESTAMP"),
120
+ exp.UnixToStr: lambda self, e: self.func(
121
+ "FROM_UNIXTIME",
122
+ e.this,
123
+ self.format_time(
124
+ e,
125
+ inverse_time_mapping=MySQL.INVERSE_TIME_MAPPING,
126
+ inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
127
+ ),
128
+ ),
129
+ exp.UnixToTime: unsupported_args("scale", "zone", "hours", "minutes")(
130
+ lambda self, e: self.func(
131
+ "FROM_UNIXTIME",
132
+ e.this,
133
+ self.format_time(
134
+ e,
135
+ inverse_time_mapping=MySQL.INVERSE_TIME_MAPPING,
136
+ inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
137
+ ),
138
+ ),
139
+ ),
140
+ exp.UnixToTimeStr: lambda self, e: f"FROM_UNIXTIME({self.sql(e, 'this')}) :> TEXT",
92
141
  }
93
142
 
94
143
  # https://docs.singlestore.com/cloud/reference/sql-reference/restricted-keywords/list-of-restricted-keywords/
@@ -461,7 +461,7 @@ def _eliminate_dot_variant_lookup(expression: exp.Expression) -> exp.Expression:
461
461
  unnest_alias = unnest.args.get("alias")
462
462
  if (
463
463
  isinstance(unnest_alias, exp.TableAlias)
464
- and (unnest_alias.args.get("column_only") or not unnest_alias.this)
464
+ and not unnest_alias.this
465
465
  and len(unnest_alias.columns) == 1
466
466
  ):
467
467
  unnest_aliases.add(unnest_alias.columns[0].name)
@@ -591,6 +591,7 @@ class Snowflake(Dialect):
591
591
  this=seq_get(args, 0), expression=seq_get(args, 1), max_dist=seq_get(args, 2)
592
592
  ),
593
593
  "FLATTEN": exp.Explode.from_arg_list,
594
+ "GET": exp.GetExtract.from_arg_list,
594
595
  "GET_PATH": lambda args, dialect: exp.JSONExtract(
595
596
  this=seq_get(args, 0),
596
597
  expression=dialect.to_json_path(seq_get(args, 1)),
@@ -1220,6 +1221,7 @@ class Snowflake(Dialect):
1220
1221
  exp.GenerateSeries: lambda self, e: self.func(
1221
1222
  "ARRAY_GENERATE_RANGE", e.args["start"], e.args["end"] + 1, e.args.get("step")
1222
1223
  ),
1224
+ exp.GetExtract: rename_func("GET"),
1223
1225
  exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, sep=""),
1224
1226
  exp.If: if_sql(name="IFF", false_value="NULL"),
1225
1227
  exp.JSONExtractArray: _json_extract_value_array_sql,
sqlglot/dialects/spark.py CHANGED
@@ -194,6 +194,7 @@ class Spark(Spark2):
194
194
  move_partitioned_by_to_schema_columns,
195
195
  ]
196
196
  ),
197
+ exp.DateFromUnixDate: rename_func("DATE_FROM_UNIX_DATE"),
197
198
  exp.GroupConcat: _groupconcat_sql,
198
199
  exp.EndsWith: rename_func("ENDSWITH"),
199
200
  exp.PartitionedByProperty: lambda self,
@@ -90,6 +90,7 @@ class Teradata(Dialect):
90
90
  "HELP": TokenType.COMMAND,
91
91
  "INS": TokenType.INSERT,
92
92
  "LE": TokenType.LTE,
93
+ "LOCKING": TokenType.LOCK,
93
94
  "LT": TokenType.LT,
94
95
  "MINUS": TokenType.EXCEPT,
95
96
  "MOD": TokenType.MOD,
@@ -155,6 +156,26 @@ class Teradata(Dialect):
155
156
  exp.Use, this=self._parse_table(schema=False)
156
157
  ),
157
158
  TokenType.REPLACE: lambda self: self._parse_create(),
159
+ TokenType.LOCK: lambda self: self._parse_locking_statement(),
160
+ }
161
+
162
def _parse_locking_statement(self) -> exp.LockingStatement:
    """Parse a Teradata LOCKING request modifier wrapping a SELECT.

    The lock kind/type/target is parsed by reusing the existing
    exp.LockingProperty machinery (self._parse_locking); the modifier
    must be immediately followed by a SELECT statement.

    Raises:
        ParseError: if no SELECT statement follows the LOCKING clause.
    """
    lock = self._parse_locking()

    query = self._parse_select()
    if not query:
        self.raise_error("Expected SELECT statement after LOCKING clause")

    return self.expression(exp.LockingStatement, this=lock, expression=query)
175
+
176
+ SET_PARSERS = {
177
+ **parser.Parser.SET_PARSERS,
178
+ "QUERY_BAND": lambda self: self._parse_query_band(),
158
179
  }
159
180
 
160
181
  FUNCTION_PARSERS = {
@@ -210,6 +231,36 @@ class Teradata(Dialect):
210
231
 
211
232
  return self.expression(exp.RangeN, this=this, expressions=expressions, each=each)
212
233
 
234
def _parse_query_band(self) -> exp.QueryBand:
    """Parse Teradata's SET QUERY_BAND statement.

    Supported shapes:
        SET QUERY_BAND = 'key=value;key2=value2;' FOR SESSION|TRANSACTION
        SET QUERY_BAND = 'key=value;' UPDATE FOR SESSION|TRANSACTION
        SET QUERY_BAND = NONE FOR SESSION|TRANSACTION
        SET QUERY_BAND = '...' FOR SESSION VOLATILE
    """
    self._match(TokenType.EQ)

    # The band value is either a string literal or the NONE keyword
    if self._match_text_seq("NONE"):
        band: t.Optional[exp.Expression] = exp.Var(this="NONE")
    else:
        band = self._parse_string()

    update = self._match_text_seq("UPDATE")
    self._match_text_seq("FOR")

    # Scope resolution: the two-token SESSION VOLATILE form must be
    # attempted before the single-token SESSION/TRANSACTION match
    if self._match_text_seq("SESSION", "VOLATILE"):
        scope: t.Optional[str] = "SESSION VOLATILE"
    elif self._match_texts(("SESSION", "TRANSACTION")):
        scope = self._prev.text.upper()
    else:
        scope = None

    return self.expression(exp.QueryBand, this=band, scope=scope, update=update)
263
+
213
264
  def _parse_index_params(self) -> exp.IndexParameters:
214
265
  this = super()._parse_index_params()
215
266
 
@@ -358,6 +409,13 @@ class Teradata(Dialect):
358
409
 
359
410
  return f"RANGE_N({this} BETWEEN {expressions_sql}{each_sql})"
360
411
 
412
def lockingstatement_sql(self, expression: exp.LockingStatement) -> str:
    """Render a LOCKING request modifier followed by its locked query."""
    return f"{self.sql(expression, 'this')} {self.sql(expression, 'expression')}"
418
+
361
419
  def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
362
420
  kind = self.sql(expression, "kind").upper()
363
421
  if kind == "TABLE" and locations.get(exp.Properties.Location.POST_NAME):
sqlglot/expressions.py CHANGED
@@ -1458,6 +1458,11 @@ class DDL(Expression):
1458
1458
  return self.expression.named_selects if isinstance(self.expression, Query) else []
1459
1459
 
1460
1460
 
1461
class LockingStatement(Expression):
    """Teradata LOCKING request modifier wrapping a query.

    `this` holds the lock specification (an exp.LockingProperty),
    `expression` holds the locked statement.

    https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/SQL-Data-Manipulation-Language/Statement-Syntax/LOCKING-Request-Modifier/LOCKING-Request-Modifier-Syntax
    """

    arg_types = {"this": True, "expression": True}
1464
+
1465
+
1461
1466
  class DML(Expression):
1462
1467
  def returning(
1463
1468
  self,
@@ -1613,6 +1618,10 @@ class SetItem(Expression):
1613
1618
  }
1614
1619
 
1615
1620
 
1621
class QueryBand(Expression):
    """Payload of SET QUERY_BAND: the band string (or NONE var) in `this`,
    plus an optional scope (e.g. SESSION, TRANSACTION) and UPDATE flag."""

    arg_types = {"this": True, "scope": False, "update": False}
1623
+
1624
+
1616
1625
  class Show(Expression):
1617
1626
  arg_types = {
1618
1627
  "this": True,
@@ -1680,7 +1689,7 @@ class ProjectionDef(Expression):
1680
1689
 
1681
1690
 
1682
1691
  class TableAlias(Expression):
1683
- arg_types = {"this": False, "columns": False, "column_only": False}
1692
+ arg_types = {"this": False, "columns": False}
1684
1693
 
1685
1694
  @property
1686
1695
  def columns(self):
@@ -5416,6 +5425,10 @@ class BitwiseCountAgg(AggFunc):
5416
5425
  _sql_names = ["BIT_COUNT"]
5417
5426
 
5418
5427
 
5428
+ class ByteLength(Func):
5429
+ pass
5430
+
5431
+
5419
5432
  class ArrayRemove(Func):
5420
5433
  arg_types = {"this": True, "expression": True}
5421
5434
 
@@ -5564,6 +5577,10 @@ class ConvertTimezone(Func):
5564
5577
  }
5565
5578
 
5566
5579
 
5580
+ class CodePointsToString(Func):
5581
+ pass
5582
+
5583
+
5567
5584
  class GenerateSeries(Func):
5568
5585
  arg_types = {"start": True, "end": True, "step": False, "is_end_exclusive": False}
5569
5586
 
@@ -5791,6 +5808,18 @@ class JSONCast(Cast):
5791
5808
  pass
5792
5809
 
5793
5810
 
5811
+ class JustifyDays(Func):
5812
+ pass
5813
+
5814
+
5815
+ class JustifyHours(Func):
5816
+ pass
5817
+
5818
+
5819
+ class JustifyInterval(Func):
5820
+ pass
5821
+
5822
+
5794
5823
  class Try(Func):
5795
5824
  pass
5796
5825
 
@@ -5947,6 +5976,10 @@ class DatetimeTrunc(Func, TimeUnit):
5947
5976
  arg_types = {"this": True, "unit": True, "zone": False}
5948
5977
 
5949
5978
 
5979
+ class DateFromUnixDate(Func):
5980
+ pass
5981
+
5982
+
5950
5983
  class DayOfWeek(Func):
5951
5984
  _sql_names = ["DAY_OF_WEEK", "DAYOFWEEK"]
5952
5985
 
@@ -6185,6 +6218,11 @@ class GenerateTimestampArray(Func):
6185
6218
  arg_types = {"start": True, "end": True, "step": True}
6186
6219
 
6187
6220
 
6221
class GetExtract(Func):
    """Snowflake GET(): extract an element from an ARRAY, OBJECT or VARIANT.

    https://docs.snowflake.com/en/sql-reference/functions/get
    """

    arg_types = {"this": True, "expression": True}
6224
+
6225
+
6188
6226
  class Greatest(Func):
6189
6227
  arg_types = {"this": True, "expressions": False}
6190
6228
  is_var_len_args = True
@@ -6504,6 +6542,14 @@ class ParseJSON(Func):
6504
6542
  arg_types = {"this": True, "expression": False, "safe": False}
6505
6543
 
6506
6544
 
6545
+ class ParseTime(Func):
6546
+ arg_types = {"this": True, "format": True}
6547
+
6548
+
6549
+ class ParseDatetime(Func):
6550
+ arg_types = {"this": True, "format": False, "zone": False}
6551
+
6552
+
6507
6553
  class Least(Func):
6508
6554
  arg_types = {"this": True, "expressions": False}
6509
6555
  is_var_len_args = True
@@ -6517,6 +6563,10 @@ class Right(Func):
6517
6563
  arg_types = {"this": True, "expression": True}
6518
6564
 
6519
6565
 
6566
+ class Reverse(Func):
6567
+ pass
6568
+
6569
+
6520
6570
  class Length(Func):
6521
6571
  arg_types = {"this": True, "binary": False, "encoding": False}
6522
6572
  _sql_names = ["LENGTH", "LEN", "CHAR_LENGTH", "CHARACTER_LENGTH"]
@@ -7042,6 +7092,14 @@ class UnixSeconds(Func):
7042
7092
  pass
7043
7093
 
7044
7094
 
7095
+ class UnixMicros(Func):
7096
+ pass
7097
+
7098
+
7099
+ class UnixMillis(Func):
7100
+ pass
7101
+
7102
+
7045
7103
  class Uuid(Func):
7046
7104
  _sql_names = ["UUID", "GEN_RANDOM_UUID", "GENERATE_UUID", "UUID_STRING"]
7047
7105
 
@@ -7091,6 +7149,10 @@ class Week(Func):
7091
7149
  arg_types = {"this": True, "mode": False}
7092
7150
 
7093
7151
 
7152
+ class WeekStart(Expression):
7153
+ pass
7154
+
7155
+
7094
7156
  class XMLElement(Func):
7095
7157
  _sql_names = ["XMLELEMENT"]
7096
7158
  arg_types = {"this": True, "expressions": False}
sqlglot/generator.py CHANGED
@@ -219,6 +219,7 @@ class Generator(metaclass=_Generator):
219
219
  exp.VarMap: lambda self, e: self.func("MAP", e.args["keys"], e.args["values"]),
220
220
  exp.ViewAttributeProperty: lambda self, e: f"WITH {self.sql(e, 'this')}",
221
221
  exp.VolatileProperty: lambda *_: "VOLATILE",
222
+ exp.WeekStart: lambda self, e: f"WEEK({self.sql(e, 'this')})",
222
223
  exp.WithJournalTableProperty: lambda self, e: f"WITH JOURNAL TABLE={self.sql(e, 'this')}",
223
224
  exp.WithProcedureOptions: lambda self, e: f"WITH {self.expressions(e, flat=True)}",
224
225
  exp.WithSchemaBindingProperty: lambda self, e: f"WITH SCHEMA {self.sql(e, 'this')}",
@@ -2405,6 +2406,14 @@ class Generator(metaclass=_Generator):
2405
2406
  tag = " TAG" if expression.args.get("tag") else ""
2406
2407
  return f"{'UNSET' if expression.args.get('unset') else 'SET'}{tag}{expressions}"
2407
2408
 
2409
def queryband_sql(self, expression: exp.QueryBand) -> str:
    """Render the `QUERY_BAND = <band>[ UPDATE][ FOR <scope>]` fragment."""
    band = self.sql(expression, "this")
    update_kw = " UPDATE" if expression.args.get("update") else ""

    scope_sql = self.sql(expression, "scope")
    if scope_sql:
        scope_sql = f" FOR {scope_sql}"

    return f"QUERY_BAND = {band}{update_kw}{scope_sql}"
2416
+
2408
2417
  def pragma_sql(self, expression: exp.Pragma) -> str:
2409
2418
  return f"PRAGMA {self.sql(expression, 'this')}"
2410
2419
 
@@ -3479,14 +3488,15 @@ class Generator(metaclass=_Generator):
3479
3488
  expressions = f"({expressions})" if expressions else ""
3480
3489
  return f"ALTER{compound} SORTKEY {this or expressions}"
3481
3490
 
3482
def alterrename_sql(self, expression: exp.AlterRename, include_to: bool = True) -> str:
    """Render `RENAME [TO] <name>` for ALTER statements.

    Args:
        include_to: emit the TO keyword; some dialects use a bare RENAME.
    """
    if not self.RENAME_TABLE_WITH_DB:
        # Strip db qualifiers from table names when the dialect
        # does not support a database prefix in RENAME targets
        expression = expression.transform(
            lambda node: exp.table_(node.this) if isinstance(node, exp.Table) else node
        ).assert_is(exp.AlterRename)

    target = self.sql(expression, "this")
    return f"RENAME TO {target}" if include_to else f"RENAME {target}"
3490
3500
 
3491
3501
  def renamecolumn_sql(self, expression: exp.RenameColumn) -> str:
3492
3502
  exists = " IF EXISTS" if expression.args.get("exists") else ""
@@ -5113,3 +5123,26 @@ class Generator(metaclass=_Generator):
5113
5123
  where = self.sql(expression, "where")
5114
5124
  where = self.seg(f"WHERE {where}") if where else ""
5115
5125
  return f"SEMANTIC_VIEW({self.indent(this + metrics + dimensions + where)}{self.seg(')', sep='')}"
5126
+
5127
def getextract_sql(self, expression: exp.GetExtract) -> str:
    """Transpile Snowflake GET(<this>, <key>): bracket access for
    ARRAY/MAP-typed operands, JSON path extraction otherwise."""
    this = expression.this
    key = expression.expression

    # Choosing between bracket and JSON access needs type information;
    # annotate lazily only when it is missing
    if not this.type or not expression.type:
        from sqlglot.optimizer.annotate_types import annotate_types

        this = annotate_types(this, dialect=self.dialect)

    if this.is_type(exp.DataType.Type.ARRAY, exp.DataType.Type.MAP):
        return self.sql(exp.Bracket(this=this, expressions=[key]))

    return self.sql(exp.JSONExtract(this=this, expression=self.dialect.to_json_path(key)))
5140
+
5141
def datefromunixdate_sql(self, expression: exp.DateFromUnixDate) -> str:
    """Lower DATE_FROM_UNIX_DATE(n) to `CAST('1970-01-01' AS DATE) + n days`."""
    epoch = exp.cast(exp.Literal.string("1970-01-01"), exp.DataType.Type.DATE)
    shifted = exp.DateAdd(this=epoch, expression=expression.this, unit=exp.var("DAY"))
    return self.sql(shifted)
@@ -326,7 +326,10 @@ class TypeAnnotator(metaclass=_TypeAnnotator):
326
326
  struct_type = exp.DataType(
327
327
  this=exp.DataType.Type.STRUCT,
328
328
  expressions=[
329
- exp.ColumnDef(this=exp.to_identifier(select.output_name), kind=select.type)
329
+ exp.ColumnDef(
330
+ this=exp.to_identifier(select.output_name),
331
+ kind=select.type.copy() if select.type else None,
332
+ )
330
333
  for select in scope.expression.selects
331
334
  ],
332
335
  nested=True,
@@ -330,6 +330,10 @@ def _merge_expressions(outer_scope: Scope, inner_scope: Scope, alias: str) -> No
330
330
  if isinstance(column.parent, (exp.Unary, exp.Binary)) and must_wrap_expression:
331
331
  expression = exp.paren(expression, copy=False)
332
332
 
333
+ # make sure we do not accidentally change the name of the column
334
+ if isinstance(column.parent, exp.Select) and column.name != expression.name:
335
+ expression = exp.alias_(expression, column.name)
336
+
333
337
  column.replace(expression.copy())
334
338
 
335
339
 
@@ -972,6 +972,44 @@ class Resolver:
972
972
  }
973
973
  return self._all_columns
974
974
 
975
def get_source_columns_from_set_op(self, expression: exp.Expression) -> t.List[str]:
    """Compute the output column names of a set operation (UNION etc).

    Handles BigQuery's set-operation modifiers (ON (...) column lists and
    LEFT/FULL/INNER ... BY NAME) by recursing into nested operands; plain
    set operations fall back to the operation's own named selects.

    Raises:
        OptimizeError: if `expression` is neither a SELECT, a subquery
            wrapping a set operation, nor a set operation itself.
    """
    if isinstance(expression, exp.Select):
        return expression.named_selects
    if isinstance(expression, exp.Subquery) and isinstance(expression.this, exp.SetOperation):
        # Different types of SET modifiers can be chained together if
        # they're explicitly grouped by nesting
        return self.get_source_columns_from_set_op(expression.this)
    if not isinstance(expression, exp.SetOperation):
        raise OptimizeError(f"Unknown set operation: {expression}")

    set_op = expression

    # BigQuery specific set operations modifiers, e.g INNER UNION ALL BY NAME
    on_column_list = set_op.args.get("on")

    if on_column_list:
        # The resulting columns are the columns in the ON clause:
        # {INNER | LEFT | FULL} UNION ALL BY NAME ON (col1, col2, ...)
        columns = [col.name for col in on_column_list]
    elif set_op.side or set_op.kind:
        side = set_op.side
        kind = set_op.kind

        # Visit the children UNIONs (if any) in a post-order traversal
        left = self.get_source_columns_from_set_op(set_op.left)
        right = self.get_source_columns_from_set_op(set_op.right)

        # dict.fromkeys deduplicates while keeping insertion order
        if side == "LEFT":
            columns = left
        elif side == "FULL":
            columns = list(dict.fromkeys(left + right))
        elif kind == "INNER":
            # Intersect while preserving the left operand's column order.
            # A plain keys-view `&` would produce an unordered set, making
            # the resulting column order nondeterministic.
            right_names = set(right)
            columns = [name for name in dict.fromkeys(left) if name in right_names]
        else:
            # Defensive fallback so `columns` is always bound even for an
            # unexpected side/kind combination
            columns = set_op.named_selects
    else:
        columns = set_op.named_selects

    return columns
1012
+
975
1013
  def get_source_columns(self, name: str, only_visible: bool = False) -> t.Sequence[str]:
976
1014
  """Resolve the source columns for a given source `name`."""
977
1015
  cache_key = (name, only_visible)
@@ -996,31 +1034,8 @@ class Resolver:
996
1034
  for k in source.expression.type.expressions: # type: ignore
997
1035
  columns.append(k.name)
998
1036
  elif isinstance(source, Scope) and isinstance(source.expression, exp.SetOperation):
999
- set_op = source.expression
1000
-
1001
- # BigQuery specific set operations modifiers, e.g INNER UNION ALL BY NAME
1002
- on_column_list = set_op.args.get("on")
1003
-
1004
- if on_column_list:
1005
- # The resulting columns are the columns in the ON clause:
1006
- # {INNER | LEFT | FULL} UNION ALL BY NAME ON (col1, col2, ...)
1007
- columns = [col.name for col in on_column_list]
1008
- elif set_op.side or set_op.kind:
1009
- side = set_op.side
1010
- kind = set_op.kind
1011
-
1012
- left = set_op.left.named_selects
1013
- right = set_op.right.named_selects
1014
-
1015
- # We use dict.fromkeys to deduplicate keys and maintain insertion order
1016
- if side == "LEFT":
1017
- columns = left
1018
- elif side == "FULL":
1019
- columns = list(dict.fromkeys(left + right))
1020
- elif kind == "INNER":
1021
- columns = list(dict.fromkeys(left).keys() & dict.fromkeys(right).keys())
1022
- else:
1023
- columns = set_op.named_selects
1037
+ columns = self.get_source_columns_from_set_op(source.expression)
1038
+
1024
1039
  else:
1025
1040
  select = seq_get(source.expression.selects, 0)
1026
1041
 
@@ -128,14 +128,6 @@ def qualify_tables(
128
128
  table_alias = udtf.args.get("alias") or exp.TableAlias(
129
129
  this=exp.to_identifier(next_alias_name())
130
130
  )
131
- if (
132
- isinstance(udtf, exp.Unnest)
133
- and dialect.UNNEST_COLUMN_ONLY
134
- and not table_alias.columns
135
- ):
136
- table_alias.set("columns", [table_alias.this.copy()])
137
- table_alias.set("column_only", True)
138
-
139
131
  udtf.set("alias", table_alias)
140
132
 
141
133
  if not table_alias.name:
sqlglot/parser.py CHANGED
@@ -792,6 +792,11 @@ class Parser(metaclass=_Parser):
792
792
  ),
793
793
  }
794
794
 
795
+ CAST_COLUMN_OPERATORS = {
796
+ TokenType.DOTCOLON,
797
+ TokenType.DCOLON,
798
+ }
799
+
795
800
  EXPRESSION_PARSERS = {
796
801
  exp.Cluster: lambda self: self._parse_sort(exp.Cluster, TokenType.CLUSTER_BY),
797
802
  exp.Column: lambda self: self._parse_column(),
@@ -2079,7 +2084,24 @@ class Parser(metaclass=_Parser):
2079
2084
 
2080
2085
  if create_token.token_type == TokenType.SEQUENCE:
2081
2086
  expression = self._parse_types()
2082
- extend_props(self._parse_properties())
2087
+ props = self._parse_properties()
2088
+ if props:
2089
+ sequence_props = exp.SequenceProperties()
2090
+ options = []
2091
+ for prop in props:
2092
+ if isinstance(prop, exp.SequenceProperties):
2093
+ for arg, value in prop.args.items():
2094
+ if arg == "options":
2095
+ options.extend(value)
2096
+ else:
2097
+ sequence_props.set(arg, value)
2098
+ prop.pop()
2099
+
2100
+ if options:
2101
+ sequence_props.set("options", options)
2102
+
2103
+ props.append("expressions", sequence_props)
2104
+ extend_props(props)
2083
2105
  else:
2084
2106
  expression = self._parse_ddl_select()
2085
2107
 
@@ -2217,11 +2239,17 @@ class Parser(metaclass=_Parser):
2217
2239
  return self.expression(exp.SqlSecurityProperty, definer=self._match_text_seq("DEFINER"))
2218
2240
 
2219
2241
  index = self._index
2242
+
2243
+ seq_props = self._parse_sequence_properties()
2244
+ if seq_props:
2245
+ return seq_props
2246
+
2247
+ self._retreat(index)
2220
2248
  key = self._parse_column()
2221
2249
 
2222
2250
  if not self._match(TokenType.EQ):
2223
2251
  self._retreat(index)
2224
- return self._parse_sequence_properties()
2252
+ return None
2225
2253
 
2226
2254
  # Transform the key to exp.Dot if it's dotted identifiers wrapped in exp.Column or to exp.Var otherwise
2227
2255
  if isinstance(key, exp.Column):
@@ -3815,7 +3843,8 @@ class Parser(metaclass=_Parser):
3815
3843
  elif self._match(TokenType.USING):
3816
3844
  kwargs["using"] = self._parse_using_identifiers()
3817
3845
  elif (
3818
- not (outer_apply or cross_apply)
3846
+ not method
3847
+ and not (outer_apply or cross_apply)
3819
3848
  and not isinstance(kwargs["this"], exp.Unnest)
3820
3849
  and not (kind and kind.token_type in (TokenType.CROSS, TokenType.ARRAY))
3821
3850
  ):
@@ -5250,7 +5279,7 @@ class Parser(metaclass=_Parser):
5250
5279
  while self._match(TokenType.DOT):
5251
5280
  type_name = f"{type_name}.{self._advance_any() and self._prev.text}"
5252
5281
 
5253
- return exp.DataType.build(type_name, udt=True)
5282
+ return exp.DataType.build(type_name, dialect=self.dialect, udt=True)
5254
5283
 
5255
5284
  def _parse_types(
5256
5285
  self, check_func: bool = False, schema: bool = False, allow_identifiers: bool = True
@@ -5621,7 +5650,7 @@ class Parser(metaclass=_Parser):
5621
5650
  op_token = self._prev.token_type
5622
5651
  op = self.COLUMN_OPERATORS.get(op_token)
5623
5652
 
5624
- if op_token in (TokenType.DCOLON, TokenType.DOTCOLON):
5653
+ if op_token in self.CAST_COLUMN_OPERATORS:
5625
5654
  field = self._parse_dcolon()
5626
5655
  if not field:
5627
5656
  self.raise_error("Expected type")
@@ -6551,7 +6580,7 @@ class Parser(metaclass=_Parser):
6551
6580
  elif not to:
6552
6581
  self.raise_error("Expected TYPE after CAST")
6553
6582
  elif isinstance(to, exp.Identifier):
6554
- to = exp.DataType.build(to.name, udt=True)
6583
+ to = exp.DataType.build(to.name, dialect=self.dialect, udt=True)
6555
6584
  elif to.this == exp.DataType.Type.CHAR:
6556
6585
  if self._match(TokenType.CHARACTER_SET):
6557
6586
  to = self.expression(exp.CharacterSet, this=self._parse_var_or_string())
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sqlglot
3
- Version: 27.5.1
3
+ Version: 27.7.0
4
4
  Summary: An easily customizable SQL parser and transpiler
5
5
  Author-email: Toby Mao <toby.mao@gmail.com>
6
6
  Project-URL: Homepage, https://sqlglot.com/