sqlglot 27.19.0__py3-none-any.whl → 27.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sqlglot/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID
 
- __version__ = version = '27.19.0'
- __version_tuple__ = version_tuple = (27, 19, 0)
+ __version__ = version = '27.21.0'
+ __version_tuple__ = version_tuple = (27, 21, 0)
 
  __commit_id__ = commit_id = None
sqlglot/dialects/bigquery.py CHANGED
@@ -867,6 +867,8 @@ class BigQuery(Dialect):
  "FROM_HEX": exp.Unhex.from_arg_list,
  "WEEK": lambda args: exp.WeekStart(this=exp.var(seq_get(args, 0))),
  }
+ # Remove SEARCH to avoid parameter routing issues - let it fall back to Anonymous function
+ FUNCTIONS.pop("SEARCH")
 
  FUNCTION_PARSERS = {
  **parser.Parser.FUNCTION_PARSERS,
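Note: with SEARCH removed from BigQuery's FUNCTIONS map, its arguments are no longer routed into a dedicated expression and the call is left as a generic function, per the comment in the hunk above. A minimal, hedged sketch of what that should look like from the public API (expression type inferred, not verified; column/string values are illustrative):

    import sqlglot
    from sqlglot import exp

    # SEARCH(...) is expected to come back as a plain function call
    # (exp.Anonymous) that round-trips unchanged.
    ast = sqlglot.parse_one("SELECT SEARCH(haystack, 'needle') FROM t", read="bigquery")
    print(ast.find(exp.Anonymous))
    print(ast.sql(dialect="bigquery"))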
sqlglot/dialects/dialect.py CHANGED
@@ -1715,7 +1715,7 @@ def unit_to_str(expression: exp.Expression, default: str = "DAY") -> t.Optional[
  def unit_to_var(expression: exp.Expression, default: str = "DAY") -> t.Optional[exp.Expression]:
  unit = expression.args.get("unit")
 
- if isinstance(unit, (exp.Var, exp.Placeholder, exp.WeekStart)):
+ if isinstance(unit, (exp.Var, exp.Placeholder, exp.WeekStart, exp.Column)):
  return unit
 
  value = unit.name if unit else default
@@ -1736,7 +1736,9 @@ def map_date_part(
 
  def map_date_part(part, dialect: DialectType = Dialect):
  mapped = (
- Dialect.get_or_raise(dialect).DATE_PART_MAPPING.get(part.name.upper()) if part else None
+ Dialect.get_or_raise(dialect).DATE_PART_MAPPING.get(part.name.upper())
+ if part and not (isinstance(part, exp.Column) and len(part.parts) != 1)
+ else None
  )
  if mapped:
  return exp.Literal.string(mapped) if part.is_string else exp.var(mapped)
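Note: unit_to_var and map_date_part now skip qualified (multi-part) columns, so something like t.dd is preserved as a column instead of being coerced into a date-part variable. A small sketch, assuming map_date_part is importable from sqlglot.dialects.dialect as shown in the hunk (mapping keys depend on DATE_PART_MAPPING):

    from sqlglot import exp
    from sqlglot.dialects.dialect import map_date_part

    # A one-part column whose name is a known abbreviation still maps
    # (e.g. "dd" is normalized to DAY), but a qualified column is now
    # returned untouched rather than looked up in DATE_PART_MAPPING.
    print(map_date_part(exp.column("dd")))
    print(map_date_part(exp.column("dd", table="t")))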
sqlglot/dialects/duckdb.py CHANGED
@@ -311,6 +311,7 @@ class DuckDB(Dialect):
  "PIVOT_WIDER": TokenType.PIVOT,
  "POSITIONAL": TokenType.POSITIONAL,
  "RESET": TokenType.COMMAND,
+ "ROW": TokenType.STRUCT,
  "SIGNED": TokenType.INT,
  "STRING": TokenType.TEXT,
  "SUMMARIZE": TokenType.SUMMARIZE,
@@ -337,16 +338,14 @@ class DuckDB(Dialect):
  class Parser(parser.Parser):
  MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS = True
 
- BITWISE = {
- **parser.Parser.BITWISE,
- TokenType.TILDA: exp.RegexpLike,
- }
+ BITWISE = parser.Parser.BITWISE.copy()
  BITWISE.pop(TokenType.CARET)
 
  RANGE_PARSERS = {
  **parser.Parser.RANGE_PARSERS,
  TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps),
  TokenType.CARET_AT: binary_range_parser(exp.StartsWith),
+ TokenType.TILDA: binary_range_parser(exp.RegexpFullMatch),
  }
 
  EXPONENT = {
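Note: DuckDB's ~ operator moves out of the BITWISE table (where it used to build exp.RegexpLike) and into RANGE_PARSERS as the new exp.RegexpFullMatch, and ROW is now tokenized as a STRUCT keyword. A hedged example of the operator change (resulting node type taken from the hunk above; generated text not verified):

    import sqlglot
    from sqlglot import exp

    ast = sqlglot.parse_one("SELECT 'abc' ~ 'a.*'", read="duckdb")
    # Expected to be parsed as a full-match regex comparison now.
    print(ast.find(exp.RegexpFullMatch))
    print(ast.sql(dialect="duckdb"))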
sqlglot/dialects/hive.py CHANGED
@@ -531,7 +531,6 @@ class Hive(Dialect):
 
  TRANSFORMS = {
  **generator.Generator.TRANSFORMS,
- exp.Group: transforms.preprocess([transforms.unalias_group]),
  exp.Property: property_sql,
  exp.AnyValue: rename_func("FIRST"),
  exp.ApproxDistinct: approx_count_distinct_sql,
sqlglot/dialects/oracle.py CHANGED
@@ -307,7 +307,6 @@ class Oracle(Dialect):
  ),
  exp.DateTrunc: lambda self, e: self.func("TRUNC", e.this, e.unit),
  exp.EuclideanDistance: rename_func("L2_DISTANCE"),
- exp.Group: transforms.preprocess([transforms.unalias_group]),
  exp.ILike: no_ilike_sql,
  exp.LogicalOr: rename_func("MAX"),
  exp.LogicalAnd: rename_func("MIN"),
sqlglot/dialects/presto.py CHANGED
@@ -475,7 +475,6 @@ class Presto(Dialect):
  e: f"WITH_TIMEZONE({self.sql(e, 'this')}, {self.sql(e, 'zone')}) AT TIME ZONE 'UTC'",
  exp.GenerateSeries: sequence_sql,
  exp.GenerateDateArray: sequence_sql,
- exp.Group: transforms.preprocess([transforms.unalias_group]),
  exp.If: if_sql(),
  exp.ILike: no_ilike_sql,
  exp.Initcap: _initcap_sql,
sqlglot/dialects/risingwave.py CHANGED
@@ -25,6 +25,20 @@ class RisingWave(Postgres):
  "KEY": lambda self: self._parse_encode_property(key=True),
  }
 
+ CONSTRAINT_PARSERS = {
+ **Postgres.Parser.CONSTRAINT_PARSERS,
+ "WATERMARK": lambda self: self.expression(
+ exp.WatermarkColumnConstraint,
+ this=self._match(TokenType.FOR) and self._parse_column(),
+ expression=self._match(TokenType.ALIAS) and self._parse_disjunction(),
+ ),
+ }
+
+ SCHEMA_UNNAMED_CONSTRAINTS = {
+ *Postgres.Parser.SCHEMA_UNNAMED_CONSTRAINTS,
+ "WATERMARK",
+ }
+
  def _parse_table_hints(self) -> t.Optional[t.List[exp.Expression]]:
  # There is no hint in risingwave.
  # Do nothing here to avoid WITH keywords conflict in CREATE SINK statement.
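Note: the WATERMARK column constraint is now parsed by the RisingWave dialect itself (the generic entries are removed from sqlglot/parser.py further down in this diff). A hedged sketch, using illustrative DDL:

    import sqlglot
    from sqlglot import exp

    sql = "CREATE TABLE t (ts TIMESTAMP, WATERMARK FOR ts AS ts - INTERVAL '5 SECOND')"
    ast = sqlglot.parse_one(sql, read="risingwave")
    # The watermark clause should now be captured as a typed constraint node.
    print(ast.find(exp.WatermarkColumnConstraint))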
sqlglot/dialects/snowflake.py CHANGED
@@ -41,7 +41,18 @@ if t.TYPE_CHECKING:
  from sqlglot._typing import E, B
 
 
- # from https://docs.snowflake.com/en/sql-reference/functions/to_timestamp.html
+ def _build_strtok(args: t.List) -> exp.SplitPart:
+ # Add default delimiter (space) if missing - per Snowflake docs
+ if len(args) == 1:
+ args.append(exp.Literal.string(" "))
+
+ # Add default part_index (1) if missing
+ if len(args) == 2:
+ args.append(exp.Literal.number(1))
+
+ return exp.SplitPart.from_arg_list(args)
+
+
  def _build_datetime(
  name: str, kind: exp.DataType.Type, safe: bool = False
  ) -> t.Callable[[t.List], exp.Func]:
@@ -137,12 +148,35 @@ def _build_if_from_div0(args: t.List) -> exp.If:
  return exp.If(this=cond, true=true, false=false)
 
 
+ # https://docs.snowflake.com/en/sql-reference/functions/div0null
+ def _build_if_from_div0null(args: t.List) -> exp.If:
+ lhs = exp._wrap(seq_get(args, 0), exp.Binary)
+ rhs = exp._wrap(seq_get(args, 1), exp.Binary)
+
+ # Returns 0 when divisor is 0 OR NULL
+ cond = exp.EQ(this=rhs, expression=exp.Literal.number(0)).or_(
+ exp.Is(this=rhs, expression=exp.null())
+ )
+ true = exp.Literal.number(0)
+ false = exp.Div(this=lhs, expression=rhs)
+ return exp.If(this=cond, true=true, false=false)
+
+
  # https://docs.snowflake.com/en/sql-reference/functions/zeroifnull
  def _build_if_from_zeroifnull(args: t.List) -> exp.If:
  cond = exp.Is(this=seq_get(args, 0), expression=exp.Null())
  return exp.If(this=cond, true=exp.Literal.number(0), false=seq_get(args, 0))
 
 
+ def _build_search(args: t.List) -> exp.Search:
+ kwargs = {
+ "this": seq_get(args, 0),
+ "expression": seq_get(args, 1),
+ **{arg.name.lower(): arg for arg in args[2:] if isinstance(arg, exp.Kwarg)},
+ }
+ return exp.Search(**kwargs)
+
+
  # https://docs.snowflake.com/en/sql-reference/functions/zeroifnull
  def _build_if_from_nullifzero(args: t.List) -> exp.If:
  cond = exp.EQ(this=seq_get(args, 0), expression=exp.Literal.number(0))
@@ -529,6 +563,16 @@ class Snowflake(Dialect):
 
  TYPE_TO_EXPRESSIONS = {
  **Dialect.TYPE_TO_EXPRESSIONS,
+ exp.DataType.Type.DOUBLE: {
+ *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.DOUBLE],
+ exp.Cos,
+ exp.Cosh,
+ exp.Cot,
+ exp.Degrees,
+ exp.Exp,
+ exp.Sin,
+ exp.Tan,
+ },
  exp.DataType.Type.INT: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.INT],
  exp.Ascii,
@@ -539,10 +583,12 @@ class Snowflake(Dialect):
  exp.Levenshtein,
  exp.JarowinklerSimilarity,
  exp.StrPosition,
+ exp.Unicode,
  },
  exp.DataType.Type.VARCHAR: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.VARCHAR],
  exp.Base64DecodeString,
+ exp.TryBase64DecodeString,
  exp.Base64Encode,
  exp.DecompressString,
  exp.MD5,
@@ -553,6 +599,7 @@ class Snowflake(Dialect):
  exp.Collate,
  exp.Collation,
  exp.HexDecodeString,
+ exp.TryHexDecodeString,
  exp.HexEncode,
  exp.Initcap,
  exp.RegexpExtract,
@@ -561,12 +608,18 @@ class Snowflake(Dialect):
  exp.Replace,
  exp.SHA,
  exp.SHA2,
+ exp.Soundex,
+ exp.SoundexP123,
  exp.Space,
+ exp.SplitPart,
+ exp.Translate,
  exp.Uuid,
  },
  exp.DataType.Type.BINARY: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.BINARY],
  exp.Base64DecodeBinary,
+ exp.TryBase64DecodeBinary,
+ exp.TryHexDecodeBinary,
  exp.Compress,
  exp.DecompressBinary,
  exp.MD5Digest,
@@ -581,11 +634,20 @@ class Snowflake(Dialect):
  },
  exp.DataType.Type.ARRAY: {
  exp.Split,
+ exp.RegexpExtractAll,
+ exp.StringToArray,
  },
  exp.DataType.Type.OBJECT: {
  exp.ParseUrl,
  exp.ParseIp,
  },
+ exp.DataType.Type.DECIMAL: {
+ exp.RegexpCount,
+ },
+ exp.DataType.Type.BOOLEAN: {
+ *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.BOOLEAN],
+ exp.Search,
+ },
  }
 
  ANNOTATORS = {
@@ -605,6 +667,15 @@ class Snowflake(Dialect):
  exp.Substring,
  )
  },
+ **{
+ expr_type: lambda self, e: self._annotate_with_type(
+ e, exp.DataType.build("NUMBER", dialect="snowflake")
+ )
+ for expr_type in (
+ exp.RegexpCount,
+ exp.RegexpInstr,
+ )
+ },
  exp.ConcatWs: lambda self, e: self._annotate_by_args(e, "expressions"),
  exp.Reverse: _annotate_reverse,
  }
@@ -679,7 +750,7 @@ class Snowflake(Dialect):
  "APPROX_PERCENTILE": exp.ApproxQuantile.from_arg_list,
  "ARRAY_CONSTRUCT": lambda args: exp.Array(expressions=args),
  "ARRAY_CONTAINS": lambda args: exp.ArrayContains(
- this=seq_get(args, 1), expression=seq_get(args, 0)
+ this=seq_get(args, 1), expression=seq_get(args, 0), ensure_variant=False
  ),
  "ARRAY_GENERATE_RANGE": lambda args: exp.GenerateSeries(
  # ARRAY_GENERATE_RANGE has an exlusive end; we normalize it to be inclusive
@@ -715,6 +786,7 @@ class Snowflake(Dialect):
  "DATEDIFF": _build_datediff,
  "DAYOFWEEKISO": exp.DayOfWeekIso.from_arg_list,
  "DIV0": _build_if_from_div0,
+ "DIV0NULL": _build_if_from_div0null,
  "EDITDISTANCE": lambda args: exp.Levenshtein(
  this=seq_get(args, 0), expression=seq_get(args, 1), max_dist=seq_get(args, 2)
  ),
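Note: DIV0NULL is parsed through the _build_if_from_div0null builder shown earlier, i.e. it expands into a conditional that yields 0 when the divisor is 0 or NULL. A hedged example of the expected rewrite when transpiling away from Snowflake (exact output formatting not verified):

    import sqlglot

    print(sqlglot.transpile("SELECT DIV0NULL(a, b) FROM t", read="snowflake", write="duckdb")[0])
    # Roughly: SELECT CASE WHEN b = 0 OR b IS NULL THEN 0 ELSE a / b END FROM t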
@@ -753,6 +825,7 @@ class Snowflake(Dialect):
  "SHA2_BINARY": exp.SHA2Digest.from_arg_list,
  "SHA2_HEX": exp.SHA2.from_arg_list,
  "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
+ "STRTOK": _build_strtok,
  "TABLE": lambda args: exp.TableFromRows(this=seq_get(args, 0)),
  "TIMEADD": _build_date_time_add(exp.TimeAdd),
  "TIMEDIFF": _build_datediff,
@@ -787,12 +860,14 @@ class Snowflake(Dialect):
  "ZEROIFNULL": _build_if_from_zeroifnull,
  "LIKE": _build_like(exp.Like),
  "ILIKE": _build_like(exp.ILike),
+ "SEARCH": _build_search,
  }
  FUNCTIONS.pop("PREDICT")
 
  FUNCTION_PARSERS = {
  **parser.Parser.FUNCTION_PARSERS,
  "DATE_PART": lambda self: self._parse_date_part(),
+ "DIRECTORY": lambda self: self._parse_directory(),
  "OBJECT_CONSTRUCT_KEEP_NULL": lambda self: self._parse_json_object(),
  "LISTAGG": lambda self: self._parse_string_agg(),
  "SEMANTIC_VIEW": lambda self: self._parse_semantic_view(),
@@ -902,6 +977,14 @@ class Snowflake(Dialect):
  ),
  }
 
+ def _parse_directory(self) -> exp.DirectoryStage:
+ table = self._parse_table_parts()
+
+ if isinstance(table, exp.Table):
+ table = table.this
+
+ return self.expression(exp.DirectoryStage, this=table)
+
  def _parse_use(self) -> exp.Use:
  if self._match_text_seq("SECONDARY", "ROLES"):
  this = self._match_texts(("ALL", "NONE")) and exp.var(self._prev.text.upper())
@@ -1343,7 +1426,13 @@ class Snowflake(Dialect):
  exp.ArgMax: rename_func("MAX_BY"),
  exp.ArgMin: rename_func("MIN_BY"),
  exp.ArrayConcat: lambda self, e: self.arrayconcat_sql(e, name="ARRAY_CAT"),
- exp.ArrayContains: lambda self, e: self.func("ARRAY_CONTAINS", e.expression, e.this),
+ exp.ArrayContains: lambda self, e: self.func(
+ "ARRAY_CONTAINS",
+ e.expression
+ if e.args.get("ensure_variant") is False
+ else exp.cast(e.expression, exp.DataType.Type.VARIANT, copy=False),
+ e.this,
+ ),
  exp.ArrayIntersect: rename_func("ARRAY_INTERSECTION"),
  exp.AtTimeZone: lambda self, e: self.func(
  "CONVERT_TIMEZONE", e.args.get("zone"), e.this
@@ -1873,3 +1962,13 @@ class Snowflake(Dialect):
  return self.func("TO_CHAR", expression.expressions[0])
 
  return self.function_fallback_sql(expression)
+
+ def splitpart_sql(self, expression: exp.SplitPart) -> str:
+ # Set part_index to 1 if missing
+ if not expression.args.get("delimiter"):
+ expression.set("delimiter", exp.Literal.string(" "))
+
+ if not expression.args.get("part_index"):
+ expression.set("part_index", exp.Literal.number(1))
+
+ return rename_func("SPLIT_PART")(self, expression)
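Note: STRTOK is now built as exp.SplitPart with Snowflake's documented defaults (space delimiter, part 1), and splitpart_sql fills those defaults back in when generating SQL, so a bare STRTOK call should come out as an explicit SPLIT_PART. A hedged round-trip sketch (exact output not verified):

    import sqlglot

    print(sqlglot.transpile("SELECT STRTOK('a b')", read="snowflake", write="snowflake")[0])
    # Expected along the lines of: SELECT SPLIT_PART('a b', ' ', 1)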
sqlglot/dialects/spark.py CHANGED
@@ -230,7 +230,6 @@ class Spark(Spark2):
  }
  TRANSFORMS.pop(exp.AnyValue)
  TRANSFORMS.pop(exp.DateDiff)
- TRANSFORMS.pop(exp.Group)
 
  def bracket_sql(self, expression: exp.Bracket) -> str:
  if expression.args.get("safe"):
sqlglot/dialects/sqlite.py CHANGED
@@ -342,3 +342,12 @@ class SQLite(Dialect):
 
  def respectnulls_sql(self, expression: exp.RespectNulls) -> str:
  return self.sql(expression.this)
+
+ def windowspec_sql(self, expression: exp.WindowSpec) -> str:
+ if (
+ expression.text("kind").upper() == "RANGE"
+ and expression.text("start").upper() == "CURRENT ROW"
+ ):
+ return "RANGE CURRENT ROW"
+
+ return super().windowspec_sql(expression)
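Note: SQLite's generator now special-cases a RANGE frame that starts at CURRENT ROW, emitting the short RANGE CURRENT ROW form. A hedged example (frame parsing behavior assumed from the base parser; not verified):

    import sqlglot

    sql = "SELECT SUM(x) OVER (ORDER BY y RANGE CURRENT ROW) FROM t"
    # Expected to keep the abbreviated frame rather than expanding it.
    print(sqlglot.transpile(sql, read="sqlite", write="sqlite")[0])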
sqlglot/dialects/starrocks.py CHANGED
@@ -32,6 +32,7 @@ def st_distance_sphere(self, expression: exp.StDistance) -> str:
 
  class StarRocks(MySQL):
  STRICT_JSON_PATH_SYNTAX = False
+ INDEX_OFFSET = 1
 
  class Tokenizer(MySQL.Tokenizer):
  KEYWORDS = {
@@ -49,6 +50,7 @@ class StarRocks(MySQL):
  "DATE_DIFF": lambda args: exp.DateDiff(
  this=seq_get(args, 1), expression=seq_get(args, 2), unit=seq_get(args, 0)
  ),
+ "ARRAY_FLATTEN": exp.Flatten.from_arg_list,
  "REGEXP": exp.RegexpLike.from_arg_list,
  }
 
@@ -152,6 +154,7 @@ class StarRocks(MySQL):
  exp.DateDiff: lambda self, e: self.func(
  "DATE_DIFF", unit_to_str(e), e.this, e.expression
  ),
+ exp.Flatten: rename_func("ARRAY_FLATTEN"),
  exp.JSONExtractScalar: arrow_json_extract_sql,
  exp.JSONExtract: arrow_json_extract_sql,
  exp.Property: property_sql,
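Note: StarRocks now maps ARRAY_FLATTEN to exp.Flatten in both the parser and the generator (and sets INDEX_OFFSET = 1 for 1-based array indexing). A hedged round-trip sketch:

    import sqlglot

    # ARRAY_FLATTEN should survive a StarRocks -> StarRocks round trip unchanged.
    print(sqlglot.transpile("SELECT ARRAY_FLATTEN(col) FROM t", read="starrocks", write="starrocks")[0])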
sqlglot/expressions.py CHANGED
@@ -120,19 +120,43 @@ class Expression(metaclass=_Expression):
  def __eq__(self, other) -> bool:
  return type(self) is type(other) and hash(self) == hash(other)
 
- @property
- def hashable_args(self) -> t.Any:
- return frozenset(
- (k, tuple(_norm_arg(a) for a in v) if type(v) is list else _norm_arg(v))
- for k, v in self.args.items()
- if not (v is None or v is False or (type(v) is list and not v))
- )
-
  def __hash__(self) -> int:
- if self._hash is not None:
- return self._hash
-
- return hash((self.__class__, self.hashable_args))
+ if self._hash is None:
+ nodes = []
+ queue = deque([self])
+
+ while queue:
+ node = queue.popleft()
+ nodes.append(node)
+
+ for v in node.iter_expressions():
+ if v._hash is None:
+ queue.append(v)
+
+ for node in reversed(nodes):
+ hash_ = hash(node.key)
+ t = type(node)
+
+ if t is Literal or t is Identifier:
+ for k, v in sorted(node.args.items()):
+ if v:
+ hash_ = hash((hash_, k, v))
+ else:
+ for k, v in sorted(node.args.items()):
+ t = type(v)
+
+ if t is list:
+ for x in v:
+ if x is not None and x is not False:
+ hash_ = hash((hash_, k, x.lower() if type(x) is str else x))
+ else:
+ hash_ = hash((hash_, k))
+ elif v is not None and v is not False:
+ hash_ = hash((hash_, k, v.lower() if t is str else v))
+
+ node._hash = hash_
+ assert self._hash
+ return self._hash
 
  def __reduce__(self) -> t.Tuple[t.Callable, t.Tuple[t.List[t.Dict[str, t.Any]]]]:
  from sqlglot.serde import dump, load
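Note: Expression.__hash__ now hashes the whole subtree iteratively (bottom-up, via a deque) and memoizes the result in _hash, while Expression.set() in the next hunk clears cached hashes up the parent chain so mutations cannot leave stale values behind. A small sketch of the observable behavior (illustrative only):

    import sqlglot
    from sqlglot import exp

    e = sqlglot.parse_one("SELECT a + 1 FROM t")
    h1 = hash(e)   # computed once, cached on every node of the subtree
    h2 = hash(e)   # served from the cache
    e.find(exp.Column).set("this", exp.to_identifier("b"))
    h3 = hash(e)   # set() invalidated the cached hashes, so this is recomputed
    print(h1 == h2, h1 == h3)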
@@ -369,6 +393,12 @@ class Expression(metaclass=_Expression):
  overwrite: assuming an index is given, this determines whether to overwrite the
  list entry instead of only inserting a new value (i.e., like list.insert).
  """
+ expression: t.Optional[Expression] = self
+
+ while expression and expression._hash is not None:
+ expression._hash = None
+ expression = expression.parent
+
  if index is not None:
  expressions = self.args.get(arg_key) or []
 
@@ -2235,10 +2265,14 @@ class Prior(Expression):
 
 
  class Directory(Expression):
- # https://spark.apache.org/docs/3.0.0-preview/sql-ref-syntax-dml-insert-overwrite-directory-hive.html
  arg_types = {"this": True, "local": False, "row_format": False}
 
 
+ # https://docs.snowflake.com/en/user-guide/data-load-dirtables-query
+ class DirectoryStage(Expression):
+ pass
+
+
  class ForeignKey(Expression):
  arg_types = {
  "expressions": False,
@@ -2298,10 +2332,6 @@ class Identifier(Expression):
  def quoted(self) -> bool:
  return bool(self.args.get("quoted"))
 
- @property
- def hashable_args(self) -> t.Any:
- return (self.this, self.quoted)
-
  @property
  def output_name(self) -> str:
  return self.name
@@ -2536,10 +2566,6 @@ class LimitOptions(Expression):
  class Literal(Condition):
  arg_types = {"this": True, "is_string": True}
 
- @property
- def hashable_args(self) -> t.Any:
- return (self.this, self.args.get("is_string"))
-
  @classmethod
  def number(cls, number) -> Literal:
  return cls(this=str(number), is_string=False)
@@ -5359,7 +5385,7 @@ class TimeUnit(Expression):
 
  def __init__(self, **args):
  unit = args.get("unit")
- if type(unit) in self.VAR_LIKE:
+ if type(unit) in self.VAR_LIKE and not (isinstance(unit, Column) and len(unit.parts) != 1):
  args["unit"] = Var(
  this=(self.UNABBREVIATED_UNIT_NAME.get(unit.name) or unit.name).upper()
  )
@@ -5499,6 +5525,10 @@ class Coth(Func):
  pass
 
 
+ class Cos(Func):
+ pass
+
+
  class Csc(Func):
  pass
 
@@ -5523,6 +5553,18 @@ class Sinh(Func):
  pass
 
 
+ class Tan(Func):
+ pass
+
+
+ class Degrees(Func):
+ pass
+
+
+ class Cosh(Func):
+ pass
+
+
  class CosineDistance(Func):
  arg_types = {"this": True, "expression": True}
 
@@ -5814,6 +5856,7 @@ class ArrayConstructCompact(Func):
 
 
  class ArrayContains(Binary, Func):
+ arg_types = {"this": True, "expression": True, "ensure_variant": False}
  _sql_names = ["ARRAY_CONTAINS", "ARRAY_HAS"]
 
 
@@ -6146,7 +6189,9 @@ class DateTrunc(Func):
  unabbreviate = args.pop("unabbreviate", True)
 
  unit = args.get("unit")
- if isinstance(unit, TimeUnit.VAR_LIKE):
+ if isinstance(unit, TimeUnit.VAR_LIKE) and not (
+ isinstance(unit, Column) and len(unit.parts) != 1
+ ):
  unit_name = unit.name.upper()
  if unabbreviate and unit_name in TimeUnit.UNABBREVIATED_UNIT_NAME:
  unit_name = TimeUnit.UNABBREVIATED_UNIT_NAME[unit_name]
@@ -6427,14 +6472,36 @@ class Base64DecodeBinary(Func):
  arg_types = {"this": True, "alphabet": False}
 
 
+ # https://docs.snowflake.com/en/sql-reference/functions/base64_decode_string
  class Base64DecodeString(Func):
  arg_types = {"this": True, "alphabet": False}
 
 
+ # https://docs.snowflake.com/en/sql-reference/functions/base64_encode
  class Base64Encode(Func):
  arg_types = {"this": True, "max_line_length": False, "alphabet": False}
 
 
+ # https://docs.snowflake.com/en/sql-reference/functions/try_base64_decode_binary
+ class TryBase64DecodeBinary(Func):
+ arg_types = {"this": True, "alphabet": False}
+
+
+ # https://docs.snowflake.com/en/sql-reference/functions/try_base64_decode_string
+ class TryBase64DecodeString(Func):
+ arg_types = {"this": True, "alphabet": False}
+
+
+ # https://docs.snowflake.com/en/sql-reference/functions/try_hex_decode_binary
+ class TryHexDecodeBinary(Func):
+ pass
+
+
+ # https://docs.snowflake.com/en/sql-reference/functions/try_hex_decode_string
+ class TryHexDecodeString(Func):
+ pass
+
+
  # https://trino.io/docs/current/functions/datetime.html#from_iso8601_timestamp
  class FromISO8601Timestamp(Func):
  _sql_names = ["FROM_ISO8601_TIMESTAMP"]
@@ -7231,6 +7298,10 @@ class RegexpILike(Binary, Func):
  arg_types = {"this": True, "expression": True, "flag": False}
 
 
+ class RegexpFullMatch(Binary, Func):
+ arg_types = {"this": True, "expression": True, "options": False}
+
+
  class RegexpInstr(Func):
  arg_types = {
  "this": True,
@@ -7249,6 +7320,15 @@ class RegexpSplit(Func):
  arg_types = {"this": True, "expression": True, "limit": False}
 
 
+ class RegexpCount(Func):
+ arg_types = {
+ "this": True,
+ "expression": True,
+ "position": False,
+ "parameters": False,
+ }
+
+
  class Repeat(Func):
  arg_types = {"this": True, "times": True}
 
@@ -7323,13 +7403,20 @@ class Soundex(Func):
  pass
 
 
+ # https://docs.snowflake.com/en/sql-reference/functions/soundex_p123
+ class SoundexP123(Func):
+ pass
+
+
  class Split(Func):
  arg_types = {"this": True, "expression": True, "limit": False}
 
 
  # https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.split_part.html
+ # https://docs.snowflake.com/en/sql-reference/functions/split_part
+ # https://docs.snowflake.com/en/sql-reference/functions/strtok
  class SplitPart(Func):
- arg_types = {"this": True, "delimiter": True, "part_index": True}
+ arg_types = {"this": True, "delimiter": False, "part_index": False}
 
 
  # Start may be omitted in the case of postgres
@@ -7373,6 +7460,19 @@ class StrPosition(Func):
  }
 
 
+ # Snowflake: https://docs.snowflake.com/en/sql-reference/functions/search
+ # BigQuery: https://cloud.google.com/bigquery/docs/reference/standard-sql/search_functions#search
+ class Search(Func):
+ arg_types = {
+ "this": True, # data_to_search / search_data
+ "expression": True, # search_query / search_string
+ "json_scope": False, # BigQuery: JSON_VALUES | JSON_KEYS | JSON_KEYS_AND_VALUES
+ "analyzer": False, # Both: analyzer / ANALYZER
+ "analyzer_options": False, # BigQuery: analyzer_options_values
+ "search_mode": False, # Snowflake: OR | AND
+ }
+
+
  class StrToDate(Func):
  arg_types = {"this": True, "format": False, "safe": False}
 
@@ -7712,10 +7812,6 @@ class TableColumn(Expression):
  pass
 
 
- def _norm_arg(arg):
- return arg.lower() if type(arg) is str else arg
-
-
  ALL_FUNCTIONS = subclasses(__name__, Func, (AggFunc, Anonymous, Func))
  FUNCTION_BY_NAME = {name: func for func in ALL_FUNCTIONS for name in func.sql_names()}
 
sqlglot/generator.py CHANGED
@@ -5334,3 +5334,6 @@ class Generator(metaclass=_Generator):
  def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
  self.unsupported("The model!attribute syntax is not supported")
  return ""
+
+ def directorystage_sql(self, expression: exp.DirectoryStage) -> str:
+ return self.func("DIRECTORY", expression.this)
sqlglot/helper.py CHANGED
@@ -226,31 +226,13 @@ def while_changing(expression: Expression, func: t.Callable[[Expression], E]) ->
  Returns:
  The transformed expression.
  """
- end_hash: t.Optional[int] = None
 
  while True:
- # No need to walk the AST– we've already cached the hashes in the previous iteration
- if end_hash is None:
- for n in reversed(tuple(expression.walk())):
- n._hash = hash(n)
-
  start_hash = hash(expression)
  expression = func(expression)
-
- expression_nodes = tuple(expression.walk())
-
- # Uncache previous caches so we can recompute them
- for n in reversed(expression_nodes):
- n._hash = None
- n._hash = hash(n)
-
  end_hash = hash(expression)
 
  if start_hash == end_hash:
- # ... and reset the hash so we don't risk it becoming out of date if a mutation happens
- for n in expression_nodes:
- n._hash = None
-
  break
 
  return expression
sqlglot/optimizer/canonicalize.py CHANGED
@@ -77,7 +77,7 @@ def coerce_type(node: exp.Expression, promote_to_inferred_datetime_type: bool) -
  _coerce_date(node.left, node.right, promote_to_inferred_datetime_type)
  elif isinstance(node, exp.Between):
  _coerce_date(node.this, node.args["low"], promote_to_inferred_datetime_type)
- elif isinstance(node, exp.Extract) and not node.expression.type.is_type(
+ elif isinstance(node, exp.Extract) and not node.expression.is_type(
  *exp.DataType.TEMPORAL_TYPES
  ):
  _replace_cast(node.expression, exp.DataType.Type.DATETIME)
sqlglot/optimizer/merge_subqueries.py CHANGED
@@ -201,6 +201,7 @@ def _mergeable(
  and not outer_scope.pivots
  and not any(e.find(exp.AggFunc, exp.Select, exp.Explode) for e in inner_select.expressions)
  and not (leave_tables_isolated and len(outer_scope.selected_sources) > 1)
+ and not (isinstance(from_or_join, exp.Join) and inner_select.args.get("joins"))
  and not (
  isinstance(from_or_join, exp.Join)
  and inner_select.args.get("where")
@@ -282,6 +283,7 @@ def _merge_joins(outer_scope: Scope, inner_scope: Scope, from_or_join: FromOrJoi
  new_joins = []
 
  joins = inner_scope.expression.args.get("joins") or []
+
  for join in joins:
  new_joins.append(join)
  outer_scope.add_source(join.alias_or_name, inner_scope.sources[join.alias_or_name])
sqlglot/parser.py CHANGED
@@ -1141,11 +1141,6 @@ class Parser(metaclass=_Parser):
  "TTL": lambda self: self.expression(exp.MergeTreeTTL, expressions=[self._parse_bitwise()]),
  "UNIQUE": lambda self: self._parse_unique(),
  "UPPERCASE": lambda self: self.expression(exp.UppercaseColumnConstraint),
- "WATERMARK": lambda self: self.expression(
- exp.WatermarkColumnConstraint,
- this=self._match(TokenType.FOR) and self._parse_column(),
- expression=self._match(TokenType.ALIAS) and self._parse_disjunction(),
- ),
  "WITH": lambda self: self.expression(
  exp.Properties, expressions=self._parse_wrapped_properties()
  ),
@@ -1211,7 +1206,6 @@
  "PERIOD",
  "PRIMARY KEY",
  "UNIQUE",
- "WATERMARK",
  "BUCKET",
  "TRUNCATE",
  }
@@ -1411,7 +1405,7 @@ class Parser(metaclass=_Parser):
 
  VIEW_ATTRIBUTES = {"ENCRYPTION", "SCHEMABINDING", "VIEW_METADATA"}
 
- WINDOW_ALIAS_TOKENS = ID_VAR_TOKENS - {TokenType.ROWS}
+ WINDOW_ALIAS_TOKENS = ID_VAR_TOKENS - {TokenType.RANGE, TokenType.ROWS}
  WINDOW_BEFORE_PAREN_TOKENS = {TokenType.OVER}
  WINDOW_SIDES = {"FOLLOWING", "PRECEDING"}
 
@@ -4592,14 +4586,10 @@ class Parser(metaclass=_Parser):
  before_with_index = self._index
  with_prefix = self._match(TokenType.WITH)
 
- if self._match(TokenType.ROLLUP):
- elements["rollup"].append(
- self._parse_cube_or_rollup(exp.Rollup, with_prefix=with_prefix)
- )
- elif self._match(TokenType.CUBE):
- elements["cube"].append(
- self._parse_cube_or_rollup(exp.Cube, with_prefix=with_prefix)
- )
+ cube_or_rollup = self._parse_cube_or_rollup(with_prefix=with_prefix)
+ if cube_or_rollup:
+ key = "rollup" if isinstance(cube_or_rollup, exp.Rollup) else "cube"
+ elements[key].append(cube_or_rollup)
  elif self._match(TokenType.GROUPING_SETS):
  elements["grouping_sets"].append(
  self.expression(
@@ -4619,18 +4609,20 @@
 
  return self.expression(exp.Group, comments=comments, **elements) # type: ignore
 
- def _parse_cube_or_rollup(self, kind: t.Type[E], with_prefix: bool = False) -> E:
+ def _parse_cube_or_rollup(self, with_prefix: bool = False) -> t.Optional[exp.Cube | exp.Rollup]:
+ if self._match(TokenType.CUBE):
+ kind: t.Type[exp.Cube | exp.Rollup] = exp.Cube
+ elif self._match(TokenType.ROLLUP):
+ kind = exp.Rollup
+ else:
+ return None
+
  return self.expression(
  kind, expressions=[] if with_prefix else self._parse_wrapped_csv(self._parse_column)
  )
 
  def _parse_grouping_set(self) -> t.Optional[exp.Expression]:
- if self._match(TokenType.L_PAREN):
- grouping_set = self._parse_csv(self._parse_bitwise)
- self._match_r_paren()
- return self.expression(exp.Tuple, expressions=grouping_set)
-
- return self._parse_column()
+ return self._parse_cube_or_rollup() or self._parse_bitwise()
 
  def _parse_having(self, skip_having_token: bool = False) -> t.Optional[exp.Having]:
  if not skip_having_token and not self._match(TokenType.HAVING):
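Note: _parse_grouping_set previously only special-cased parenthesized tuples and plain columns; it now delegates to _parse_cube_or_rollup and _parse_bitwise, so nested CUBE/ROLLUP and arbitrary expressions inside GROUPING SETS are accepted. A hedged example (output text not verified):

    import sqlglot

    sql = "SELECT a, b, SUM(c) FROM t GROUP BY GROUPING SETS (ROLLUP (a), (a, b), b)"
    # The nested ROLLUP is expected to parse and round-trip now.
    print(sqlglot.parse_one(sql).sql())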
@@ -4749,11 +4741,15 @@
  exp.Ordered, this=this, desc=desc, nulls_first=nulls_first, with_fill=with_fill
  )
 
- def _parse_limit_options(self) -> exp.LimitOptions:
- percent = self._match(TokenType.PERCENT)
+ def _parse_limit_options(self) -> t.Optional[exp.LimitOptions]:
+ percent = self._match_set((TokenType.PERCENT, TokenType.MOD))
  rows = self._match_set((TokenType.ROW, TokenType.ROWS))
  self._match_text_seq("ONLY")
  with_ties = self._match_text_seq("WITH", "TIES")
+
+ if not (percent or rows or with_ties):
+ return None
+
  return self.expression(exp.LimitOptions, percent=percent, rows=rows, with_ties=with_ties)
 
  def _parse_limit(
@@ -4771,10 +4767,13 @@
  if limit_paren:
  self._match_r_paren()
 
- limit_options = self._parse_limit_options()
  else:
- limit_options = None
- expression = self._parse_term()
+ # Parsing LIMIT x% (i.e x PERCENT) as a term leads to an error, since
+ # we try to build an exp.Mod expr. For that matter, we backtrack and instead
+ # consume the factor plus parse the percentage separately
+ expression = self._try_parse(self._parse_term) or self._parse_factor()
+
+ limit_options = self._parse_limit_options()
 
  if self._match(TokenType.COMMA):
  offset = expression
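Note: together with _parse_limit_options now accepting the MOD token, a trailing % after a LIMIT value is treated as PERCENT instead of starting a modulo expression. A hedged example (dialect support for LIMIT n% varies; DuckDB is assumed here):

    import sqlglot

    # The '%' should land on exp.LimitOptions rather than producing an exp.Mod parse error.
    print(sqlglot.parse_one("SELECT * FROM t LIMIT 10%", read="duckdb").sql(dialect="duckdb"))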
sqlglot/tokens.py CHANGED
@@ -1421,7 +1421,11 @@ class Tokenizer(metaclass=_Tokenizer):
  raise_unmatched=not self.HEREDOC_TAG_IS_IDENTIFIER,
  )
 
- if tag and self.HEREDOC_TAG_IS_IDENTIFIER and (self._end or not tag.isidentifier()):
+ if (
+ tag
+ and self.HEREDOC_TAG_IS_IDENTIFIER
+ and (self._end or tag.isdigit() or any(c.isspace() for c in tag))
+ ):
  if not self._end:
  self._advance(-1)
 
sqlglot/transforms.py CHANGED
@@ -131,39 +131,6 @@ def unnest_generate_series(expression: exp.Expression) -> exp.Expression:
  return expression
 
 
- def unalias_group(expression: exp.Expression) -> exp.Expression:
- """
- Replace references to select aliases in GROUP BY clauses.
-
- Example:
- >>> import sqlglot
- >>> sqlglot.parse_one("SELECT a AS b FROM x GROUP BY b").transform(unalias_group).sql()
- 'SELECT a AS b FROM x GROUP BY 1'
-
- Args:
- expression: the expression that will be transformed.
-
- Returns:
- The transformed expression.
- """
- if isinstance(expression, exp.Group) and isinstance(expression.parent, exp.Select):
- aliased_selects = {
- e.alias: i
- for i, e in enumerate(expression.parent.expressions, start=1)
- if isinstance(e, exp.Alias)
- }
-
- for group_by in expression.expressions:
- if (
- isinstance(group_by, exp.Column)
- and not group_by.table
- and group_by.name in aliased_selects
- ):
- group_by.replace(exp.Literal.number(aliased_selects.get(group_by.name)))
-
- return expression
-
-
  def eliminate_distinct_on(expression: exp.Expression) -> exp.Expression:
  """
  Convert SELECT DISTINCT ON statements to a subquery with a window function.
sqlglot-27.19.0.dist-info/METADATA → sqlglot-27.21.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlglot
- Version: 27.19.0
+ Version: 27.21.0
  Summary: An easily customizable SQL parser and transpiler
  Author-email: Toby Mao <toby.mao@gmail.com>
  License-Expression: MIT
@@ -33,7 +33,7 @@ Requires-Dist: typing_extensions; extra == "dev"
  Requires-Dist: maturin<2.0,>=1.4; extra == "dev"
  Requires-Dist: pyperf; extra == "dev"
  Provides-Extra: rs
- Requires-Dist: sqlglotrs==0.6.2; extra == "rs"
+ Requires-Dist: sqlglotrs==0.7.0; extra == "rs"
  Dynamic: license-file
  Dynamic: provides-extra
 
@@ -89,12 +89,14 @@ pip3 install "sqlglot[rs]"
  Or with a local checkout:
 
  ```
+ # Optionally prefix with UV=1 to use uv for the installation
  make install
  ```
 
  Requirements for development (optional):
 
  ```
+ # Optionally prefix with UV=1 to use uv for the installation
  make install-dev
  ```
 
sqlglot-27.19.0.dist-info/RECORD → sqlglot-27.21.0.dist-info/RECORD CHANGED
@@ -1,53 +1,53 @@
  sqlglot/__init__.py,sha256=za08rtdPh2v7dOpGdNomttlIVGgTrKja7rPd6sQwaTg,5391
  sqlglot/__main__.py,sha256=022c173KqxsiABWTEpUIq_tJUxuNiW7a7ABsxBXqvu8,2069
  sqlglot/_typing.py,sha256=-1HPyr3w5COlSJWqlgt8jhFk2dyMvBuvVBqIX1wyVCM,642
- sqlglot/_version.py,sha256=wVbD9GMknBXDREyMG-Yx-Nw3OZVxyzYGofDpSiCozV4,708
+ sqlglot/_version.py,sha256=IWlM4WDhGNJxd0JF9C00oyahnnFTXwj2lYF8uzhEZhI,708
  sqlglot/diff.py,sha256=PtOllQMQa1Sw1-V2Y8eypmDqGujXYPaTOp_WLsWkAWk,17314
  sqlglot/errors.py,sha256=QNKMr-pzLUDR-tuMmn_GK6iMHUIVdb_YSJ_BhGEvuso,2126
- sqlglot/expressions.py,sha256=gR0XWI066giI1oStYhmswO7FY_pA0twxmc1P2XTo1tU,258468
- sqlglot/generator.py,sha256=ZHFzi6_EOJl5V-dnz71QJOIwtxj66oJdF6tUQoxBpck,226436
- sqlglot/helper.py,sha256=9nZjFVRBtMKFC3EdzpDQ6jkazFO19po6BF8xHiNGZIo,15111
+ sqlglot/expressions.py,sha256=d3x-gzKI3KRVFFMpGxYAozj3uX3gKwUtprHlyKGG9pw,261463
+ sqlglot/generator.py,sha256=KyFuqWQpawTj3rWV7ONKO4euqVTzV8aFU3desDu8fso,226565
+ sqlglot/helper.py,sha256=OOt5_Mbmnl4Uy6WO6v7DR1iLPcb3v6ITybpq6usf3jw,14471
  sqlglot/jsonpath.py,sha256=SQgaxzaEYBN7At9dkTK4N1Spk6xHxvHL6QtCIP6iM30,7905
  sqlglot/lineage.py,sha256=Qj5ykuDNcATppb9vOjoIKBqRVLbu3OMPiZk9f3iyv40,15312
- sqlglot/parser.py,sha256=4UXHIDwR9mdbN5C7PVMffheR-cj3ipVJqIIK8W-o5ac,337436
+ sqlglot/parser.py,sha256=9U1w36eR3tgcILRPF-9_Lk2BEFkAXzOvN-bqhe6_Ouk,337450
  sqlglot/planner.py,sha256=ql7Li-bWJRcyXzNaZy_n6bQ6B2ZfunEIB8Ztv2xaxq4,14634
  sqlglot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sqlglot/schema.py,sha256=13H2qKQs27EKdTpDLOvcNnSTDAUbYNKjWtJs4aQCSOA,20509
  sqlglot/serde.py,sha256=nWpBFUjwZh06Li4qBuNb0YRU_QyflzSVyWkFxujM0WM,3175
  sqlglot/time.py,sha256=Q62gv6kL40OiRBF6BMESxKJcMVn7ZLNw7sv8H34z5FI,18400
- sqlglot/tokens.py,sha256=M6E-2vbIs41CYwyLIFtRqre9Mh9kO7Qt9rQvpfVeB7w,49217
- sqlglot/transforms.py,sha256=utNDsCBsA7hPUK3-aby3DDgiY_XVMAKQqeoLm1EyihI,41218
+ sqlglot/tokens.py,sha256=46CMEmRhJOa3eNagIIK9ndm-_ZSwnaV-AqgZs-SHNro,49321
+ sqlglot/transforms.py,sha256=Nx0AM6b2ApfPOcbTlz9_m6DiauWMKC4s4Xe0yuuBmYk,40175
  sqlglot/trie.py,sha256=v27uXMrHfqrXlJ6GmeTSMovsB_3o0ctnlKhdNt7W6fI,2245
  sqlglot/dialects/__init__.py,sha256=g3HRtyb32r3LooiHKTzuUNB0_rBO_RauuOegp42gB48,3811
  sqlglot/dialects/athena.py,sha256=ofArmayYLev4qZQ15GM8mevG04qqR5WGFb2ZcuYm6x4,10966
- sqlglot/dialects/bigquery.py,sha256=m3Tk_rieNds4i-t0v-dELB5MvLfIvJY3ozuU6FItXJ8,72758
+ sqlglot/dialects/bigquery.py,sha256=l_t70png3A83zEDWUBBsCbiM40HfKIP03Wuq3Zt9CUI,72889
  sqlglot/dialects/clickhouse.py,sha256=6kx1cm0YhtHbg5kvcY64Hau2KdeC7Y26SVlVHGLyPEA,58579
  sqlglot/dialects/databricks.py,sha256=H4QTq7gg6tJylKc_YWsGp6049KydoI_wlQUHM7iCJtI,4753
- sqlglot/dialects/dialect.py,sha256=KfBctpr7VdrCdHrP1Tk7CqAml53tRq9x-aDAkaN-9l0,73540
+ sqlglot/dialects/dialect.py,sha256=3RibH98zu1zoRNVRKFjPQD5yez7A0V8FbSIRrmvX38o,73632
  sqlglot/dialects/doris.py,sha256=CFnF955Oav3IjZWA80ickOI8tPpCjxk7BN5R4Z6pA1U,25263
  sqlglot/dialects/dremio.py,sha256=nOMxu_4xVKSOmMGNSwdxXSPc243cNbbpb-xXzYdgdeg,8460
  sqlglot/dialects/drill.py,sha256=FOh7_KjPx_77pv0DiHKZog0CcmzqeF9_PEmGnJ1ESSM,5825
  sqlglot/dialects/druid.py,sha256=kh3snZtneehNOWqs3XcPjsrhNaRbkCQ8E4hHbWJ1fHM,690
- sqlglot/dialects/duckdb.py,sha256=hLbLqkh5X5Nx3y5yfbBc5h9ye6UWTiZr_VsS4BMY5Rw,54612
+ sqlglot/dialects/duckdb.py,sha256=xennAC2Gh3eImkpHo0-cf4BBzcAKx-HkexyZfQMiUCo,54655
  sqlglot/dialects/dune.py,sha256=gALut-fFfN2qMsr8LvZ1NQK3F3W9z2f4PwMvTMXVVVg,375
  sqlglot/dialects/exasol.py,sha256=ay3g_VyT5WvHTgNyJuCQu0nBt4bpllLZ9IdMBizEgYM,15761
  sqlglot/dialects/fabric.py,sha256=BdkvzM8s-m5DIdBwdjEYskp32ub7aHCAex_xlhQn92I,10222
- sqlglot/dialects/hive.py,sha256=UGIkXjMCk5a9ndUXQtvfG560oi3emdpqOYLQCmGabBk,32046
+ sqlglot/dialects/hive.py,sha256=GtksrbpGOsaAWjR3OEZXUCUR7k_S5YnIWNF9w1XYiXk,31972
  sqlglot/dialects/materialize.py,sha256=LD2q1kTRrCwkIu1BfoBvnjTGbupDtoQ8JQMDCIYAXHg,3533
  sqlglot/dialects/mysql.py,sha256=xxVAR-pXMljYCUioavP3nROtOqKmK4kfdp4WWXX7X9g,50049
- sqlglot/dialects/oracle.py,sha256=zWPCpzGiTlgCJ5E6FjfX3Rszjcw4SnHg6xeVboMYIyo,15972
+ sqlglot/dialects/oracle.py,sha256=qB6Ga0Si2-TpVNqU_2COvWESIUYNL32rYk_BC9aiujE,15898
  sqlglot/dialects/postgres.py,sha256=_pXSu29684utgeuzPziSJ0Sw54WEIIunwLugJw7KFD8,34853
- sqlglot/dialects/presto.py,sha256=XVeYr2NP86x5enlRqI7MYR6le85_ucYg_BBRocGN3jM,33413
+ sqlglot/dialects/presto.py,sha256=5C6I_aDC-9CDrLfY97EFsUWymaS3B7aW_-h-mHleWIQ,33339
  sqlglot/dialects/prql.py,sha256=fwN-SPEGx-drwf1K0U2MByN-PkW3C_rOgQ3xeJeychg,7908
  sqlglot/dialects/redshift.py,sha256=FIwtP3yEg-way9pa32kxCJc6IaFkHVIvgYKZA-Ilmi0,15919
- sqlglot/dialects/risingwave.py,sha256=BqWwW1iT_OIVMwfRamaww79snnBwIgCfr22Go-ggO68,3289
+ sqlglot/dialects/risingwave.py,sha256=Wd-I_Hbwl-6Rgf_NM0I_axliInY418k2kaAWRCmaqyE,3791
  sqlglot/dialects/singlestore.py,sha256=0QqNYOucNklPQuyeGcsisLI97qPGx_RfWKOFarJz2qw,61711
- sqlglot/dialects/snowflake.py,sha256=D9GlZtIDLU_aqHWvGQUC8AS-U94WPBS_qEzErIJIoWY,78746
+ sqlglot/dialects/snowflake.py,sha256=7NiU0MAc3blzQJxbEWr1cGK39se6yVHfrBN4wuHGV-k,81991
  sqlglot/dialects/solr.py,sha256=pydnl4ml-3M1Fc4ALm6cMVO9h-5EtqZxPZH_91Nz1Ss,617
- sqlglot/dialects/spark.py,sha256=PzyhkelDzbCMgJ3RVHD6yyzLIFp9NdZfwVas5IymowM,10147
+ sqlglot/dialects/spark.py,sha256=mt3Twh0_EJelYy_7HLinDEQ1Chj2EYMjeLCPLRzAJXY,10113
  sqlglot/dialects/spark2.py,sha256=qz36FT9k4iuiqboRpyG4VpKGkPR0P2fifmqgZ9gNUEU,14851
- sqlglot/dialects/sqlite.py,sha256=zzXEbnaLjJeg6hPLHricjpfSkuf8tpXECnjcHtoqIbw,13263
- sqlglot/dialects/starrocks.py,sha256=2gav0PSNgRdAGXzawdznZliBpglJoQ0wBxPI7ZIMsRw,11314
+ sqlglot/dialects/sqlite.py,sha256=FuEDDyKZeeWVblknhFSMX7dNoS-ci5ktXpSXZeBK5xA,13592
+ sqlglot/dialects/starrocks.py,sha256=-NWQa2gJbiMMfLauX-Jy9ciJ5DUzUOk2QkPbhglz5W4,11446
  sqlglot/dialects/tableau.py,sha256=oIawDzUITxGCWaEMB8OaNMPWhbC3U-2y09pYPm4eazc,2190
  sqlglot/dialects/teradata.py,sha256=7LxCcRwP0Idd_OnCzA57NCdheVjHcKC2aFAKG5N49IU,18202
  sqlglot/dialects/trino.py,sha256=Z7prRhCxIBh0KCxIQpWmVOIGHCJM9Xl5oRlqySxln4Y,4350
@@ -59,12 +59,12 @@ sqlglot/executor/python.py,sha256=09GYRzrPn3lZGfDJY9pbONOvmYxsRyeSWjUiqkSRHGo,16
  sqlglot/executor/table.py,sha256=xkuJlgLVNYUXsSUaX0zTcnFekldXLLU8LqDyjR5K9wY,4419
  sqlglot/optimizer/__init__.py,sha256=FdAvVz6rQLLkiiH21-SD4RxB5zS3WDeU-s03PZkJ-F4,343
  sqlglot/optimizer/annotate_types.py,sha256=kCPwrbBXSiO0oJUlkw8NkdsoLgjJ4l5LOouCHPqOFlA,26728
- sqlglot/optimizer/canonicalize.py,sha256=RJpUbWDudjknRMtO_Kf8MGZ5Hv1twpPWac2u5kpV4Vw,7719
+ sqlglot/optimizer/canonicalize.py,sha256=5Yc6cFAd1gENqJ3OqejsUE40MV4vqQ-PqBnJXV3SMj8,7714
  sqlglot/optimizer/eliminate_ctes.py,sha256=fUBM0RUnPrm2sYptEWBux98B7fcx7W-BM1zVqfgDz9c,1448
  sqlglot/optimizer/eliminate_joins.py,sha256=2iYtG93aJGxvURqm1BVPosrnnnQ_IXI14RcD4pM8eHc,5942
  sqlglot/optimizer/eliminate_subqueries.py,sha256=sAB_Pk94_n2n1PIaZ2Mc3M-n2TV-JmjjaomaY14u0Og,6292
  sqlglot/optimizer/isolate_table_selects.py,sha256=_8rIKVMoL7eY3rrJsmgIdTRvfmBSLUxeHg42q1JW990,1464
- sqlglot/optimizer/merge_subqueries.py,sha256=-4C80Hob7gqJDHkt3IeH0oExmqPuU9RGB7JC_hlqr7s,15443
+ sqlglot/optimizer/merge_subqueries.py,sha256=tis4la3HeAsglhYcLu9EMaVGsNiyecq5iwHkfmW0WQU,15532
  sqlglot/optimizer/normalize.py,sha256=wu3GeKY36PLyAb9f534jDDfzDwvZJpZ8g_H5QH6acZQ,6667
  sqlglot/optimizer/normalize_identifiers.py,sha256=uD4xICJAgj0X7EFc2LYcDWxAW2aTHANO2wy7kfn9gfY,2098
  sqlglot/optimizer/optimize_joins.py,sha256=tfEnTqBofveBXNKJ30GIvm2lyagAuD24bMNfu3iQi_k,3043
@@ -77,8 +77,8 @@ sqlglot/optimizer/qualify_tables.py,sha256=dA4ZazL7ShQh2JgBwpHuG-4c5lBw1TNzCnuN7
  sqlglot/optimizer/scope.py,sha256=UOTrbwqcTc5iRQf0WStgYWXpE24w6riZy-tJYA18yTw,31229
  sqlglot/optimizer/simplify.py,sha256=27IYsqbz1kyMlURSfRkm_ADSQJg-4805AOMFOjKKytU,51049
  sqlglot/optimizer/unnest_subqueries.py,sha256=kzWUVDlxs8z9nmRx-8U-pHXPtVZhEIwkKqmKhr2QLvc,10908
- sqlglot-27.19.0.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
- sqlglot-27.19.0.dist-info/METADATA,sha256=yaLkwFBKI3tGRmU2ayVsF3uEDoRgBLryVjdzeePUW4M,20703
- sqlglot-27.19.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- sqlglot-27.19.0.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
- sqlglot-27.19.0.dist-info/RECORD,,
+ sqlglot-27.21.0.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
+ sqlglot-27.21.0.dist-info/METADATA,sha256=UzVIVfhK-VUD4PdRs4Rc9_TMh09qIQl7M1MIwuGgIDk,20825
+ sqlglot-27.21.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ sqlglot-27.21.0.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
+ sqlglot-27.21.0.dist-info/RECORD,,