sqlglot 27.21.0__py3-none-any.whl → 27.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sqlglot might be problematic. Click here for more details.

sqlglot/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '27.21.0'
32
- __version_tuple__ = version_tuple = (27, 21, 0)
31
+ __version__ = version = '27.22.0'
32
+ __version_tuple__ = version_tuple = (27, 22, 0)
33
33
 
34
34
  __commit_id__ = commit_id = None
@@ -525,6 +525,13 @@ class Dialect(metaclass=_Dialect):
525
525
  equivalent of CREATE SCHEMA is CREATE DATABASE.
526
526
  """
527
527
 
528
+ ALTER_TABLE_SUPPORTS_CASCADE = False
529
+ """
530
+ Hive by default does not update the schema of existing partitions when a column is changed.
531
+ The CASCADE clause is used to indicate that the change should be propagated to all existing partitions.
532
+ The Spark dialect, while derived from Hive, does not support the CASCADE clause.
533
+ """
534
+
528
535
  # Whether ADD is present for each column added by ALTER TABLE
529
536
  ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = True
530
537
 
sqlglot/dialects/hive.py CHANGED
@@ -211,6 +211,7 @@ class Hive(Dialect):
211
211
  SAFE_DIVISION = True
212
212
  ARRAY_AGG_INCLUDES_NULLS = None
213
213
  REGEXP_EXTRACT_DEFAULT_GROUP = 1
214
+ ALTER_TABLE_SUPPORTS_CASCADE = True
214
215
 
215
216
  # https://spark.apache.org/docs/latest/sql-ref-identifier.html#description
216
217
  NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
@@ -310,6 +311,10 @@ class Hive(Dialect):
310
311
  VALUES_FOLLOWED_BY_PAREN = False
311
312
  JOINS_HAVE_EQUAL_PRECEDENCE = True
312
313
  ADD_JOIN_ON_TRUE = True
314
+ ALTER_TABLE_PARTITIONS = True
315
+
316
+ CHANGE_COLUMN_ALTER_SYNTAX = False
317
+ # Whether the dialect supports using ALTER COLUMN syntax with CHANGE COLUMN.
313
318
 
314
319
  FUNCTIONS = {
315
320
  **parser.Parser.FUNCTIONS,
@@ -378,6 +383,11 @@ class Hive(Dialect):
378
383
  ),
379
384
  }
380
385
 
386
+ ALTER_PARSERS = {
387
+ **parser.Parser.ALTER_PARSERS,
388
+ "CHANGE": lambda self: self._parse_alter_table_change(),
389
+ }
390
+
381
391
  def _parse_transform(self) -> t.Optional[exp.Transform | exp.QueryTransform]:
382
392
  if not self._match(TokenType.L_PAREN, advance=False):
383
393
  self._retreat(self._index - 1)
@@ -451,6 +461,35 @@ class Hive(Dialect):
451
461
 
452
462
  return this
453
463
 
464
+ def _parse_alter_table_change(self) -> t.Optional[exp.Expression]:
465
+ self._match(TokenType.COLUMN)
466
+ this = self._parse_field(any_token=True)
467
+
468
+ if self.CHANGE_COLUMN_ALTER_SYNTAX and self._match_text_seq("TYPE"):
469
+ return self.expression(
470
+ exp.AlterColumn,
471
+ this=this,
472
+ dtype=self._parse_types(schema=True),
473
+ )
474
+
475
+ column_new = self._parse_field(any_token=True)
476
+ dtype = self._parse_types(schema=True)
477
+
478
+ comment = self._match(TokenType.COMMENT) and self._parse_string()
479
+
480
+ if not this or not column_new or not dtype:
481
+ self.raise_error(
482
+ "Expected 'CHANGE COLUMN' to be followed by 'column_name' 'column_name' 'data_type'"
483
+ )
484
+
485
+ return self.expression(
486
+ exp.AlterColumn,
487
+ this=this,
488
+ rename_to=column_new,
489
+ dtype=dtype,
490
+ comment=comment,
491
+ )
492
+
454
493
  def _parse_partition_and_order(
455
494
  self,
456
495
  ) -> t.Tuple[t.List[exp.Expression], t.Optional[exp.Expression]]:
@@ -500,6 +539,7 @@ class Hive(Dialect):
500
539
  PAD_FILL_PATTERN_IS_REQUIRED = True
501
540
  SUPPORTS_MEDIAN = False
502
541
  ARRAY_SIZE_NAME = "SIZE"
542
+ ALTER_SET_TYPE = ""
503
543
 
504
544
  EXPRESSIONS_WITHOUT_NESTED_CTES = {
505
545
  exp.Insert,
@@ -757,6 +797,32 @@ class Hive(Dialect):
757
797
  ),
758
798
  )
759
799
 
800
+ def altercolumn_sql(self, expression: exp.AlterColumn) -> str:
801
+ this = self.sql(expression, "this")
802
+ new_name = self.sql(expression, "rename_to") or this
803
+ dtype = self.sql(expression, "dtype")
804
+ comment = (
805
+ f" COMMENT {self.sql(expression, 'comment')}"
806
+ if self.sql(expression, "comment")
807
+ else ""
808
+ )
809
+ default = self.sql(expression, "default")
810
+ visible = expression.args.get("visible")
811
+ allow_null = expression.args.get("allow_null")
812
+ drop = expression.args.get("drop")
813
+
814
+ if any([default, drop, visible, allow_null, drop]):
815
+ self.unsupported("Unsupported CHANGE COLUMN syntax")
816
+
817
+ if not dtype:
818
+ self.unsupported("CHANGE COLUMN without a type is not supported")
819
+
820
+ return f"CHANGE COLUMN {this} {new_name} {dtype}{comment}"
821
+
822
+ def renamecolumn_sql(self, expression: exp.RenameColumn) -> str:
823
+ self.unsupported("Cannot rename columns without data type defined in Hive")
824
+ return ""
825
+
760
826
  def alterset_sql(self, expression: exp.AlterSet) -> str:
761
827
  exprs = self.expressions(expression, flat=True)
762
828
  exprs = f" {exprs}" if exprs else ""
@@ -665,6 +665,7 @@ class Snowflake(Dialect):
665
665
  exp.Right,
666
666
  exp.Stuff,
667
667
  exp.Substring,
668
+ exp.Round,
668
669
  )
669
670
  },
670
671
  **{
@@ -151,6 +151,8 @@ def _annotate_by_similar_args(
151
151
 
152
152
 
153
153
  class Spark2(Hive):
154
+ ALTER_TABLE_SUPPORTS_CASCADE = False
155
+
154
156
  ANNOTATORS = {
155
157
  **Hive.ANNOTATORS,
156
158
  exp.Substring: lambda self, e: self._annotate_by_args(e, "this"),
@@ -172,6 +174,7 @@ class Spark2(Hive):
172
174
 
173
175
  class Parser(Hive.Parser):
174
176
  TRIM_PATTERN_FIRST = True
177
+ CHANGE_COLUMN_ALTER_SYNTAX = True
175
178
 
176
179
  FUNCTIONS = {
177
180
  **Hive.Parser.FUNCTIONS,
@@ -248,6 +251,7 @@ class Spark2(Hive):
248
251
  QUERY_HINTS = True
249
252
  NVL2_SUPPORTED = True
250
253
  CAN_IMPLEMENT_ARRAY_ANY = True
254
+ ALTER_SET_TYPE = "TYPE"
251
255
 
252
256
  PROPERTIES_LOCATION = {
253
257
  **Hive.Generator.PROPERTIES_LOCATION,
@@ -364,3 +368,16 @@ class Spark2(Hive):
364
368
  return super().fileformatproperty_sql(expression)
365
369
 
366
370
  return f"USING {expression.name.upper()}"
371
+
372
+ def altercolumn_sql(self, expression: exp.AlterColumn) -> str:
373
+ this = self.sql(expression, "this")
374
+ new_name = self.sql(expression, "rename_to") or this
375
+ comment = self.sql(expression, "comment")
376
+ if new_name == this:
377
+ if comment:
378
+ return f"ALTER COLUMN {this} COMMENT {comment}"
379
+ return super(Hive.Generator, self).altercolumn_sql(expression)
380
+ return f"RENAME COLUMN {this} TO {new_name}"
381
+
382
+ def renamecolumn_sql(self, expression: exp.RenameColumn) -> str:
383
+ return super(Hive.Generator, self).renamecolumn_sql(expression)
sqlglot/dialects/tsql.py CHANGED
@@ -670,6 +670,12 @@ class TSQL(Dialect):
670
670
 
671
671
  SET_OP_MODIFIERS = {"offset"}
672
672
 
673
+ ODBC_DATETIME_LITERALS = {
674
+ "d": exp.Date,
675
+ "t": exp.Time,
676
+ "ts": exp.Timestamp,
677
+ }
678
+
673
679
  def _parse_alter_table_set(self) -> exp.AlterSet:
674
680
  return self._parse_wrapped(super()._parse_alter_table_set)
675
681
 
@@ -902,6 +908,11 @@ class TSQL(Dialect):
902
908
 
903
909
  return self.expression(exp.UniqueColumnConstraint, this=this)
904
910
 
911
+ def _parse_update(self) -> exp.Update:
912
+ expression = super()._parse_update()
913
+ expression.set("options", self._parse_options())
914
+ return expression
915
+
905
916
  def _parse_partition(self) -> t.Optional[exp.Partition]:
906
917
  if not self._match_text_seq("WITH", "(", "PARTITIONS"):
907
918
  return None
sqlglot/expressions.py CHANGED
@@ -1838,6 +1838,7 @@ class AlterColumn(Expression):
1838
1838
  "comment": False,
1839
1839
  "allow_null": False,
1840
1840
  "visible": False,
1841
+ "rename_to": False,
1841
1842
  }
1842
1843
 
1843
1844
 
@@ -3633,6 +3634,7 @@ class Update(DML):
3633
3634
  "returning": False,
3634
3635
  "order": False,
3635
3636
  "limit": False,
3637
+ "options": False,
3636
3638
  }
3637
3639
 
3638
3640
  def table(
@@ -4956,6 +4958,7 @@ class Alter(Expression):
4956
4958
  "cluster": False,
4957
4959
  "not_valid": False,
4958
4960
  "check": False,
4961
+ "cascade": False,
4959
4962
  }
4960
4963
 
4961
4964
  @property
@@ -6754,7 +6757,13 @@ class JSONExists(Func):
6754
6757
  # https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/JSON_TABLE.html
6755
6758
  # Note: parsing of JSON column definitions is currently incomplete.
6756
6759
  class JSONColumnDef(Expression):
6757
- arg_types = {"this": False, "kind": False, "path": False, "nested_schema": False}
6760
+ arg_types = {
6761
+ "this": False,
6762
+ "kind": False,
6763
+ "path": False,
6764
+ "nested_schema": False,
6765
+ "ordinality": False,
6766
+ }
6758
6767
 
6759
6768
 
6760
6769
  class JSONSchema(Expression):
sqlglot/generator.py CHANGED
@@ -2200,7 +2200,9 @@ class Generator(metaclass=_Generator):
2200
2200
  expression_sql = f"{from_sql}{where_sql}{returning}"
2201
2201
  else:
2202
2202
  expression_sql = f"{returning}{from_sql}{where_sql}"
2203
- sql = f"UPDATE {this} SET {set_sql}{expression_sql}{order}{limit}"
2203
+ options = self.expressions(expression, key="options")
2204
+ options = f" OPTION({options})" if options else ""
2205
+ sql = f"UPDATE {this} SET {set_sql}{expression_sql}{order}{limit}{options}"
2204
2206
  return self.prepend_ctes(expression, sql)
2205
2207
 
2206
2208
  def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
@@ -3216,7 +3218,9 @@ class Generator(metaclass=_Generator):
3216
3218
  this = self.sql(expression, "this")
3217
3219
  kind = self.sql(expression, "kind")
3218
3220
  kind = f" {kind}" if kind else ""
3219
- return f"{this}{kind}{path}"
3221
+
3222
+ ordinality = " FOR ORDINALITY" if expression.args.get("ordinality") else ""
3223
+ return f"{this}{kind}{path}{ordinality}"
3220
3224
 
3221
3225
  def jsonschema_sql(self, expression: exp.JSONSchema) -> str:
3222
3226
  return self.func("COLUMNS", *expression.expressions)
@@ -3621,10 +3625,15 @@ class Generator(metaclass=_Generator):
3621
3625
  kind = self.sql(expression, "kind")
3622
3626
  not_valid = " NOT VALID" if expression.args.get("not_valid") else ""
3623
3627
  check = " WITH CHECK" if expression.args.get("check") else ""
3628
+ cascade = (
3629
+ " CASCADE"
3630
+ if expression.args.get("cascade") and self.dialect.ALTER_TABLE_SUPPORTS_CASCADE
3631
+ else ""
3632
+ )
3624
3633
  this = self.sql(expression, "this")
3625
3634
  this = f" {this}" if this else ""
3626
3635
 
3627
- return f"ALTER {kind}{exists}{only}{this}{on_cluster}{check}{self.sep()}{actions_sql}{not_valid}{options}"
3636
+ return f"ALTER {kind}{exists}{only}{this}{on_cluster}{check}{self.sep()}{actions_sql}{not_valid}{options}{cascade}"
3628
3637
 
3629
3638
  def altersession_sql(self, expression: exp.AlterSession) -> str:
3630
3639
  items_sql = self.expressions(expression, flat=True)
@@ -19,10 +19,15 @@ def optimize_joins(expression):
19
19
  """
20
20
 
21
21
  for select in expression.find_all(exp.Select):
22
+ joins = select.args.get("joins", [])
23
+
24
+ if not _is_reorderable(joins):
25
+ continue
26
+
22
27
  references = {}
23
28
  cross_joins = []
24
29
 
25
- for join in select.args.get("joins", []):
30
+ for join in joins:
26
31
  tables = other_table_names(join)
27
32
 
28
33
  if tables:
@@ -59,11 +64,20 @@ def reorder_joins(expression):
59
64
  """
60
65
  for from_ in expression.find_all(exp.From):
61
66
  parent = from_.parent
62
- joins = {join.alias_or_name: join for join in parent.args.get("joins", [])}
63
- dag = {name: other_table_names(join) for name, join in joins.items()}
67
+ joins = parent.args.get("joins", [])
68
+
69
+ if not _is_reorderable(joins):
70
+ continue
71
+
72
+ joins_by_name = {join.alias_or_name: join for join in joins}
73
+ dag = {name: other_table_names(join) for name, join in joins_by_name.items()}
64
74
  parent.set(
65
75
  "joins",
66
- [joins[name] for name in tsort(dag) if name != from_.alias_or_name and name in joins],
76
+ [
77
+ joins_by_name[name]
78
+ for name in tsort(dag)
79
+ if name != from_.alias_or_name and name in joins_by_name
80
+ ],
67
81
  )
68
82
  return expression
69
83
 
@@ -90,3 +104,23 @@ def normalize(expression):
90
104
  def other_table_names(join: exp.Join) -> t.Set[str]:
91
105
  on = join.args.get("on")
92
106
  return exp.column_table_names(on, join.alias_or_name) if on else set()
107
+
108
+
109
+ def _is_reorderable(joins: t.List[exp.Join]) -> bool:
110
+ """
111
+ Checks if joins can be reordered without changing query semantics.
112
+
113
+ Joins with a side (LEFT, RIGHT, FULL) cannot be reordered easily,
114
+ the order affects which rows are included in the result.
115
+
116
+ Example:
117
+ >>> from sqlglot import parse_one, exp
118
+ >>> from sqlglot.optimizer.optimize_joins import _is_reorderable
119
+ >>> ast = parse_one("SELECT * FROM x JOIN y ON x.id = y.id JOIN z ON y.id = z.id")
120
+ >>> _is_reorderable(ast.find(exp.Select).args.get("joins", []))
121
+ True
122
+ >>> ast = parse_one("SELECT * FROM x LEFT JOIN y ON x.id = y.id JOIN z ON y.id = z.id")
123
+ >>> _is_reorderable(ast.find(exp.Select).args.get("joins", []))
124
+ False
125
+ """
126
+ return not any(join.side for join in joins)
sqlglot/parser.py CHANGED
@@ -1432,11 +1432,7 @@ class Parser(metaclass=_Parser):
1432
1432
 
1433
1433
  IS_JSON_PREDICATE_KIND = {"VALUE", "SCALAR", "ARRAY", "OBJECT"}
1434
1434
 
1435
- ODBC_DATETIME_LITERALS = {
1436
- "d": exp.Date,
1437
- "t": exp.Time,
1438
- "ts": exp.Timestamp,
1439
- }
1435
+ ODBC_DATETIME_LITERALS: t.Dict[str, t.Type[exp.Expression]] = {}
1440
1436
 
1441
1437
  ON_CONDITION_TOKENS = {"ERROR", "NULL", "TRUE", "FALSE", "EMPTY"}
1442
1438
 
@@ -1535,6 +1531,9 @@ class Parser(metaclass=_Parser):
1535
1531
  # Whether renaming a column with an ALTER statement requires the presence of the COLUMN keyword
1536
1532
  ALTER_RENAME_REQUIRES_COLUMN = True
1537
1533
 
1534
+ # Whether Alter statements are allowed to contain Partition specifications
1535
+ ALTER_TABLE_PARTITIONS = False
1536
+
1538
1537
  # Whether all join types have the same precedence, i.e., they "naturally" produce a left-deep tree.
1539
1538
  # In standard SQL, joins that use the JOIN keyword take higher precedence than comma-joins. That is
1540
1539
  # to say, JOIN operators happen before comma operators. This is not the case in some dialects, such
@@ -3539,9 +3538,13 @@ class Parser(metaclass=_Parser):
3539
3538
 
3540
3539
  return this
3541
3540
 
3542
- def _parse_query_modifiers(
3543
- self, this: t.Optional[exp.Expression]
3544
- ) -> t.Optional[exp.Expression]:
3541
+ @t.overload
3542
+ def _parse_query_modifiers(self, this: E) -> E: ...
3543
+
3544
+ @t.overload
3545
+ def _parse_query_modifiers(self, this: None) -> None: ...
3546
+
3547
+ def _parse_query_modifiers(self, this):
3545
3548
  if isinstance(this, self.MODIFIABLES):
3546
3549
  for join in self._parse_joins():
3547
3550
  this.append("joins", join)
@@ -4586,17 +4589,11 @@ class Parser(metaclass=_Parser):
4586
4589
  before_with_index = self._index
4587
4590
  with_prefix = self._match(TokenType.WITH)
4588
4591
 
4589
- cube_or_rollup = self._parse_cube_or_rollup(with_prefix=with_prefix)
4590
- if cube_or_rollup:
4592
+ if cube_or_rollup := self._parse_cube_or_rollup(with_prefix=with_prefix):
4591
4593
  key = "rollup" if isinstance(cube_or_rollup, exp.Rollup) else "cube"
4592
4594
  elements[key].append(cube_or_rollup)
4593
- elif self._match(TokenType.GROUPING_SETS):
4594
- elements["grouping_sets"].append(
4595
- self.expression(
4596
- exp.GroupingSets,
4597
- expressions=self._parse_wrapped_csv(self._parse_grouping_set),
4598
- )
4599
- )
4595
+ elif grouping_sets := self._parse_grouping_sets():
4596
+ elements["grouping_sets"].append(grouping_sets)
4600
4597
  elif self._match_text_seq("TOTALS"):
4601
4598
  elements["totals"] = True # type: ignore
4602
4599
 
@@ -4621,8 +4618,15 @@ class Parser(metaclass=_Parser):
4621
4618
  kind, expressions=[] if with_prefix else self._parse_wrapped_csv(self._parse_column)
4622
4619
  )
4623
4620
 
4621
+ def _parse_grouping_sets(self) -> t.Optional[exp.GroupingSets]:
4622
+ if self._match(TokenType.GROUPING_SETS):
4623
+ return self.expression(
4624
+ exp.GroupingSets, expressions=self._parse_wrapped_csv(self._parse_grouping_set)
4625
+ )
4626
+ return None
4627
+
4624
4628
  def _parse_grouping_set(self) -> t.Optional[exp.Expression]:
4625
- return self._parse_cube_or_rollup() or self._parse_bitwise()
4629
+ return self._parse_grouping_sets() or self._parse_cube_or_rollup() or self._parse_bitwise()
4626
4630
 
4627
4631
  def _parse_having(self, skip_having_token: bool = False) -> t.Optional[exp.Having]:
4628
4632
  if not skip_having_token and not self._match(TokenType.HAVING):
@@ -5059,8 +5063,12 @@ class Parser(metaclass=_Parser):
5059
5063
  matched_l_paren = self._prev.token_type == TokenType.L_PAREN
5060
5064
  expressions = self._parse_csv(lambda: self._parse_select_or_expression(alias=alias))
5061
5065
 
5062
- if len(expressions) == 1 and isinstance(expressions[0], exp.Query):
5063
- this = self.expression(exp.In, this=this, query=expressions[0].subquery(copy=False))
5066
+ if len(expressions) == 1 and isinstance(query := expressions[0], exp.Query):
5067
+ this = self.expression(
5068
+ exp.In,
5069
+ this=this,
5070
+ query=self._parse_query_modifiers(query).subquery(copy=False),
5071
+ )
5064
5072
  else:
5065
5073
  this = self.expression(exp.In, this=this, expressions=expressions)
5066
5074
 
@@ -5790,14 +5798,17 @@ class Parser(metaclass=_Parser):
5790
5798
  else:
5791
5799
  expressions = self._parse_expressions()
5792
5800
 
5793
- this = self._parse_query_modifiers(seq_get(expressions, 0))
5801
+ this = seq_get(expressions, 0)
5794
5802
 
5795
5803
  if not this and self._match(TokenType.R_PAREN, advance=False):
5796
5804
  this = self.expression(exp.Tuple)
5797
5805
  elif isinstance(this, exp.UNWRAPPED_QUERIES):
5798
5806
  this = self._parse_subquery(this=this, parse_alias=False)
5799
5807
  elif isinstance(this, exp.Subquery):
5800
- this = self._parse_subquery(this=self._parse_set_operations(this), parse_alias=False)
5808
+ this = self._parse_subquery(
5809
+ this=self._parse_query_modifiers(self._parse_set_operations(this)),
5810
+ parse_alias=False,
5811
+ )
5801
5812
  elif len(expressions) > 1 or self._prev.token_type == TokenType.COMMA:
5802
5813
  this = self.expression(exp.Tuple, expressions=expressions)
5803
5814
  else:
@@ -6910,10 +6921,12 @@ class Parser(metaclass=_Parser):
6910
6921
  def _parse_json_column_def(self) -> exp.JSONColumnDef:
6911
6922
  if not self._match_text_seq("NESTED"):
6912
6923
  this = self._parse_id_var()
6924
+ ordinality = self._match_pair(TokenType.FOR, TokenType.ORDINALITY)
6913
6925
  kind = self._parse_types(allow_identifiers=False)
6914
6926
  nested = None
6915
6927
  else:
6916
6928
  this = None
6929
+ ordinality = None
6917
6930
  kind = None
6918
6931
  nested = True
6919
6932
 
@@ -6926,6 +6939,7 @@ class Parser(metaclass=_Parser):
6926
6939
  kind=kind,
6927
6940
  path=path,
6928
6941
  nested_schema=nested_schema,
6942
+ ordinality=ordinality,
6929
6943
  )
6930
6944
 
6931
6945
  def _parse_json_schema(self) -> exp.JSONSchema:
@@ -7721,7 +7735,7 @@ class Parser(metaclass=_Parser):
7721
7735
  check = None
7722
7736
  cluster = None
7723
7737
  else:
7724
- this = self._parse_table(schema=True)
7738
+ this = self._parse_table(schema=True, parse_partition=self.ALTER_TABLE_PARTITIONS)
7725
7739
  check = self._match_text_seq("WITH", "CHECK")
7726
7740
  cluster = self._parse_on_property() if self._match(TokenType.ON) else None
7727
7741
 
@@ -7733,6 +7747,7 @@ class Parser(metaclass=_Parser):
7733
7747
  actions = ensure_list(parser(self))
7734
7748
  not_valid = self._match_text_seq("NOT", "VALID")
7735
7749
  options = self._parse_csv(self._parse_property)
7750
+ cascade = self.dialect.ALTER_TABLE_SUPPORTS_CASCADE and self._match_text_seq("CASCADE")
7736
7751
 
7737
7752
  if not self._curr and actions:
7738
7753
  return self.expression(
@@ -7746,6 +7761,7 @@ class Parser(metaclass=_Parser):
7746
7761
  cluster=cluster,
7747
7762
  not_valid=not_valid,
7748
7763
  check=check,
7764
+ cascade=cascade,
7749
7765
  )
7750
7766
 
7751
7767
  return self._parse_as_command(start)
@@ -8641,10 +8657,10 @@ class Parser(metaclass=_Parser):
8641
8657
  args: t.List[exp.Expression] = []
8642
8658
 
8643
8659
  if self._match(TokenType.DISTINCT):
8644
- args.append(self.expression(exp.Distinct, expressions=[self._parse_assignment()]))
8660
+ args.append(self.expression(exp.Distinct, expressions=[self._parse_lambda()]))
8645
8661
  self._match(TokenType.COMMA)
8646
8662
 
8647
- args.extend(self._parse_csv(self._parse_assignment))
8663
+ args.extend(self._parse_function_args())
8648
8664
 
8649
8665
  return self.expression(
8650
8666
  expr_type, this=seq_get(args, 0), expression=seq_get(args, 1), count=seq_get(args, 2)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sqlglot
3
- Version: 27.21.0
3
+ Version: 27.22.0
4
4
  Summary: An easily customizable SQL parser and transpiler
5
5
  Author-email: Toby Mao <toby.mao@gmail.com>
6
6
  License-Expression: MIT
@@ -1,15 +1,15 @@
1
1
  sqlglot/__init__.py,sha256=za08rtdPh2v7dOpGdNomttlIVGgTrKja7rPd6sQwaTg,5391
2
2
  sqlglot/__main__.py,sha256=022c173KqxsiABWTEpUIq_tJUxuNiW7a7ABsxBXqvu8,2069
3
3
  sqlglot/_typing.py,sha256=-1HPyr3w5COlSJWqlgt8jhFk2dyMvBuvVBqIX1wyVCM,642
4
- sqlglot/_version.py,sha256=IWlM4WDhGNJxd0JF9C00oyahnnFTXwj2lYF8uzhEZhI,708
4
+ sqlglot/_version.py,sha256=E5VE3-EaKBoByAYk3KI0bC-SdIUPpA7ebugcTIcI_gA,708
5
5
  sqlglot/diff.py,sha256=PtOllQMQa1Sw1-V2Y8eypmDqGujXYPaTOp_WLsWkAWk,17314
6
6
  sqlglot/errors.py,sha256=QNKMr-pzLUDR-tuMmn_GK6iMHUIVdb_YSJ_BhGEvuso,2126
7
- sqlglot/expressions.py,sha256=d3x-gzKI3KRVFFMpGxYAozj3uX3gKwUtprHlyKGG9pw,261463
8
- sqlglot/generator.py,sha256=KyFuqWQpawTj3rWV7ONKO4euqVTzV8aFU3desDu8fso,226565
7
+ sqlglot/expressions.py,sha256=PTwHBbIp3jRz7sjVKZShHJuzlkSg_8rNTxsONy0gfAc,261611
8
+ sqlglot/generator.py,sha256=CmYKDYSuwgPjgRsDlf3e__PFeTdojkUCCplMu4xT4qc,226966
9
9
  sqlglot/helper.py,sha256=OOt5_Mbmnl4Uy6WO6v7DR1iLPcb3v6ITybpq6usf3jw,14471
10
10
  sqlglot/jsonpath.py,sha256=SQgaxzaEYBN7At9dkTK4N1Spk6xHxvHL6QtCIP6iM30,7905
11
11
  sqlglot/lineage.py,sha256=Qj5ykuDNcATppb9vOjoIKBqRVLbu3OMPiZk9f3iyv40,15312
12
- sqlglot/parser.py,sha256=9U1w36eR3tgcILRPF-9_Lk2BEFkAXzOvN-bqhe6_Ouk,337450
12
+ sqlglot/parser.py,sha256=N7pgpA20wuXDU2Wo1zjjEGuBcl3fZyBMp_Ss3nLppoY,338172
13
13
  sqlglot/planner.py,sha256=ql7Li-bWJRcyXzNaZy_n6bQ6B2ZfunEIB8Ztv2xaxq4,14634
14
14
  sqlglot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
15
  sqlglot/schema.py,sha256=13H2qKQs27EKdTpDLOvcNnSTDAUbYNKjWtJs4aQCSOA,20509
@@ -23,7 +23,7 @@ sqlglot/dialects/athena.py,sha256=ofArmayYLev4qZQ15GM8mevG04qqR5WGFb2ZcuYm6x4,10
23
23
  sqlglot/dialects/bigquery.py,sha256=l_t70png3A83zEDWUBBsCbiM40HfKIP03Wuq3Zt9CUI,72889
24
24
  sqlglot/dialects/clickhouse.py,sha256=6kx1cm0YhtHbg5kvcY64Hau2KdeC7Y26SVlVHGLyPEA,58579
25
25
  sqlglot/dialects/databricks.py,sha256=H4QTq7gg6tJylKc_YWsGp6049KydoI_wlQUHM7iCJtI,4753
26
- sqlglot/dialects/dialect.py,sha256=3RibH98zu1zoRNVRKFjPQD5yez7A0V8FbSIRrmvX38o,73632
26
+ sqlglot/dialects/dialect.py,sha256=wUobB-jvOgd_JucRCyUYK6qRx2Hts8mQKmk8kgwWkdc,73979
27
27
  sqlglot/dialects/doris.py,sha256=CFnF955Oav3IjZWA80ickOI8tPpCjxk7BN5R4Z6pA1U,25263
28
28
  sqlglot/dialects/dremio.py,sha256=nOMxu_4xVKSOmMGNSwdxXSPc243cNbbpb-xXzYdgdeg,8460
29
29
  sqlglot/dialects/drill.py,sha256=FOh7_KjPx_77pv0DiHKZog0CcmzqeF9_PEmGnJ1ESSM,5825
@@ -32,7 +32,7 @@ sqlglot/dialects/duckdb.py,sha256=xennAC2Gh3eImkpHo0-cf4BBzcAKx-HkexyZfQMiUCo,54
32
32
  sqlglot/dialects/dune.py,sha256=gALut-fFfN2qMsr8LvZ1NQK3F3W9z2f4PwMvTMXVVVg,375
33
33
  sqlglot/dialects/exasol.py,sha256=ay3g_VyT5WvHTgNyJuCQu0nBt4bpllLZ9IdMBizEgYM,15761
34
34
  sqlglot/dialects/fabric.py,sha256=BdkvzM8s-m5DIdBwdjEYskp32ub7aHCAex_xlhQn92I,10222
35
- sqlglot/dialects/hive.py,sha256=GtksrbpGOsaAWjR3OEZXUCUR7k_S5YnIWNF9w1XYiXk,31972
35
+ sqlglot/dialects/hive.py,sha256=Uw-7Y1LnYOdcv71jCIZXhMvJAWwU5AVcFlIuM-YArnY,34530
36
36
  sqlglot/dialects/materialize.py,sha256=LD2q1kTRrCwkIu1BfoBvnjTGbupDtoQ8JQMDCIYAXHg,3533
37
37
  sqlglot/dialects/mysql.py,sha256=xxVAR-pXMljYCUioavP3nROtOqKmK4kfdp4WWXX7X9g,50049
38
38
  sqlglot/dialects/oracle.py,sha256=qB6Ga0Si2-TpVNqU_2COvWESIUYNL32rYk_BC9aiujE,15898
@@ -42,16 +42,16 @@ sqlglot/dialects/prql.py,sha256=fwN-SPEGx-drwf1K0U2MByN-PkW3C_rOgQ3xeJeychg,7908
42
42
  sqlglot/dialects/redshift.py,sha256=FIwtP3yEg-way9pa32kxCJc6IaFkHVIvgYKZA-Ilmi0,15919
43
43
  sqlglot/dialects/risingwave.py,sha256=Wd-I_Hbwl-6Rgf_NM0I_axliInY418k2kaAWRCmaqyE,3791
44
44
  sqlglot/dialects/singlestore.py,sha256=0QqNYOucNklPQuyeGcsisLI97qPGx_RfWKOFarJz2qw,61711
45
- sqlglot/dialects/snowflake.py,sha256=7NiU0MAc3blzQJxbEWr1cGK39se6yVHfrBN4wuHGV-k,81991
45
+ sqlglot/dialects/snowflake.py,sha256=JZfQ9QO_67TIpI36HIkk77gsjJWsZ1eOZWmWZpoUkjc,82018
46
46
  sqlglot/dialects/solr.py,sha256=pydnl4ml-3M1Fc4ALm6cMVO9h-5EtqZxPZH_91Nz1Ss,617
47
47
  sqlglot/dialects/spark.py,sha256=mt3Twh0_EJelYy_7HLinDEQ1Chj2EYMjeLCPLRzAJXY,10113
48
- sqlglot/dialects/spark2.py,sha256=qz36FT9k4iuiqboRpyG4VpKGkPR0P2fifmqgZ9gNUEU,14851
48
+ sqlglot/dialects/spark2.py,sha256=s4RTOGunYT1_HJt4KbhBWK_eOgmtzlpBCQCl60KEPAQ,15621
49
49
  sqlglot/dialects/sqlite.py,sha256=FuEDDyKZeeWVblknhFSMX7dNoS-ci5ktXpSXZeBK5xA,13592
50
50
  sqlglot/dialects/starrocks.py,sha256=-NWQa2gJbiMMfLauX-Jy9ciJ5DUzUOk2QkPbhglz5W4,11446
51
51
  sqlglot/dialects/tableau.py,sha256=oIawDzUITxGCWaEMB8OaNMPWhbC3U-2y09pYPm4eazc,2190
52
52
  sqlglot/dialects/teradata.py,sha256=7LxCcRwP0Idd_OnCzA57NCdheVjHcKC2aFAKG5N49IU,18202
53
53
  sqlglot/dialects/trino.py,sha256=Z7prRhCxIBh0KCxIQpWmVOIGHCJM9Xl5oRlqySxln4Y,4350
54
- sqlglot/dialects/tsql.py,sha256=7pVL3H-qNLCnoHqBEVSIVKhlTqoPmiYBRtg_HVv8zH4,54462
54
+ sqlglot/dialects/tsql.py,sha256=w4wdIwYiA9JY4JvESYLEqp-KDvRR89MjE88rdTPK410,54783
55
55
  sqlglot/executor/__init__.py,sha256=FslewzYQtQdDNg_0Ju2UaiP4vo4IMUgkfkmFsYUhcN0,2958
56
56
  sqlglot/executor/context.py,sha256=WJHJdYQCOeVXwLw0uSSrWSc25eBMn5Ix108RCvdsKRQ,3386
57
57
  sqlglot/executor/env.py,sha256=tQhU5PpTBMcxgZIFddFqxWMNPtHN0vOOz72voncY3KY,8276
@@ -67,7 +67,7 @@ sqlglot/optimizer/isolate_table_selects.py,sha256=_8rIKVMoL7eY3rrJsmgIdTRvfmBSLU
67
67
  sqlglot/optimizer/merge_subqueries.py,sha256=tis4la3HeAsglhYcLu9EMaVGsNiyecq5iwHkfmW0WQU,15532
68
68
  sqlglot/optimizer/normalize.py,sha256=wu3GeKY36PLyAb9f534jDDfzDwvZJpZ8g_H5QH6acZQ,6667
69
69
  sqlglot/optimizer/normalize_identifiers.py,sha256=uD4xICJAgj0X7EFc2LYcDWxAW2aTHANO2wy7kfn9gfY,2098
70
- sqlglot/optimizer/optimize_joins.py,sha256=tfEnTqBofveBXNKJ30GIvm2lyagAuD24bMNfu3iQi_k,3043
70
+ sqlglot/optimizer/optimize_joins.py,sha256=nnfRpL03lpDQF1oBO1EgaABqnr6t2GP6uMWoSLPW3IQ,4120
71
71
  sqlglot/optimizer/optimizer.py,sha256=vXEXDWHvbO-vJmSI7UqJuydM2WrD1xko7rETq2EtVJo,3533
72
72
  sqlglot/optimizer/pushdown_predicates.py,sha256=HGjs3Z4V3-X2d1VTfWhyByY3aL5SmKnVvt3aDXiiBM0,8414
73
73
  sqlglot/optimizer/pushdown_projections.py,sha256=7NoK5NAUVYVhs0YnYyo6WuXfaO-BShSwS6lA8Y-ATQ4,6668
@@ -77,8 +77,8 @@ sqlglot/optimizer/qualify_tables.py,sha256=dA4ZazL7ShQh2JgBwpHuG-4c5lBw1TNzCnuN7
77
77
  sqlglot/optimizer/scope.py,sha256=UOTrbwqcTc5iRQf0WStgYWXpE24w6riZy-tJYA18yTw,31229
78
78
  sqlglot/optimizer/simplify.py,sha256=27IYsqbz1kyMlURSfRkm_ADSQJg-4805AOMFOjKKytU,51049
79
79
  sqlglot/optimizer/unnest_subqueries.py,sha256=kzWUVDlxs8z9nmRx-8U-pHXPtVZhEIwkKqmKhr2QLvc,10908
80
- sqlglot-27.21.0.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
81
- sqlglot-27.21.0.dist-info/METADATA,sha256=UzVIVfhK-VUD4PdRs4Rc9_TMh09qIQl7M1MIwuGgIDk,20825
82
- sqlglot-27.21.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
83
- sqlglot-27.21.0.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
84
- sqlglot-27.21.0.dist-info/RECORD,,
80
+ sqlglot-27.22.0.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
81
+ sqlglot-27.22.0.dist-info/METADATA,sha256=aoHNF0rhdj1BNAUJDXumHET2la45gugovXSPzFCXL4k,20825
82
+ sqlglot-27.22.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
83
+ sqlglot-27.22.0.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
84
+ sqlglot-27.22.0.dist-info/RECORD,,