sqlglot 27.10.0__py3-none-any.whl → 27.11.0__py3-none-any.whl

This diff compares the contents of two publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
sqlglot/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '27.10.0'
- __version_tuple__ = version_tuple = (27, 10, 0)
+ __version__ = version = '27.11.0'
+ __version_tuple__ = version_tuple = (27, 11, 0)

  __commit_id__ = commit_id = None
sqlglot/dialects/bigquery.py CHANGED
@@ -4,6 +4,9 @@ import logging
  import re
  import typing as t

+
+ from sqlglot.optimizer.annotate_types import TypeAnnotator
+
  from sqlglot import exp, generator, jsonpath, parser, tokens, transforms
  from sqlglot._typing import E
  from sqlglot.dialects.dialect import (
@@ -172,6 +175,18 @@ def _build_to_hex(args: t.List) -> exp.Hex | exp.MD5:
      return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.LowerHex(this=arg)


+ def _build_json_strip_nulls(args: t.List) -> exp.JSONStripNulls:
+     expression = exp.JSONStripNulls(this=seq_get(args, 0))
+
+     for arg in args[1:]:
+         if isinstance(arg, exp.Kwarg):
+             expression.set(arg.this.name.lower(), arg)
+         else:
+             expression.set("expression", arg)
+
+     return expression
+
+
  def _array_contains_sql(self: BigQuery.Generator, expression: exp.ArrayContains) -> str:
      return self.sql(
          exp.Exists(
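A hedged sketch of what this builder enables (not part of the diff; the table and column names are invented): BigQuery's JSON_STRIP_NULLS should now parse into the new exp.JSONStripNulls node and round-trip through the generator.

    import sqlglot
    from sqlglot import exp

    expression = sqlglot.parse_one("SELECT JSON_STRIP_NULLS(json_col) FROM t", read="bigquery")
    print(expression.find(exp.JSONStripNulls))  # the new AST node, if parsing works as expected
    print(expression.sql(dialect="bigquery"))   # expected to round-trip the JSON_STRIP_NULLS call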
@@ -295,6 +310,24 @@ def _annotate_math_functions(self: TypeAnnotator, expression: E) -> E:
      return expression


+ def _annotate_perncentile_cont(
+     self: TypeAnnotator, expression: exp.PercentileCont
+ ) -> exp.PercentileCont:
+     """
+     +------------+-----------+------------+---------+
+     | INPUT      | NUMERIC   | BIGNUMERIC | FLOAT64 |
+     +------------+-----------+------------+---------+
+     | NUMERIC    | NUMERIC   | BIGNUMERIC | FLOAT64 |
+     | BIGNUMERIC | BIGNUMERIC| BIGNUMERIC | FLOAT64 |
+     | FLOAT64    | FLOAT64   | FLOAT64    | FLOAT64 |
+     +------------+-----------+------------+---------+
+     """
+     self._annotate_args(expression)
+
+     self._set_type(expression, self._maybe_coerce(expression.this.type, expression.expression.type))
+     return expression
+
+
  def _annotate_by_args_approx_top(self: TypeAnnotator, expression: exp.ApproxTopK) -> exp.ApproxTopK:
      self._annotate_args(expression)

@@ -453,6 +486,13 @@ class BigQuery(Dialect):
      # All set operations require either a DISTINCT or ALL specifier
      SET_OP_DISTINCT_BY_DEFAULT = dict.fromkeys((exp.Except, exp.Intersect, exp.Union), None)

+     # https://cloud.google.com/bigquery/docs/reference/standard-sql/navigation_functions#percentile_cont
+     COERCES_TO = {
+         **TypeAnnotator.COERCES_TO,
+         exp.DataType.Type.BIGDECIMAL: {exp.DataType.Type.DOUBLE},
+     }
+     COERCES_TO[exp.DataType.Type.DECIMAL] |= {exp.DataType.Type.BIGDECIMAL}
+
      # BigQuery maps Type.TIMESTAMP to DATETIME, so we need to amend the inferred types
      TYPE_TO_EXPRESSIONS = {
          **Dialect.TYPE_TO_EXPRESSIONS,
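A hedged sketch of how the PERCENTILE_CONT annotator and the COERCES_TO extension above could be exercised (the schema is invented, and passing `dialect` to annotate_types is assumed to be supported here):

    from sqlglot import parse_one
    from sqlglot.optimizer.annotate_types import annotate_types

    # Hypothetical schema: x is FLOAT64, so PERCENTILE_CONT(x, 0.5) should coerce to DOUBLE.
    expression = parse_one("SELECT PERCENTILE_CONT(x, 0.5) OVER () AS p FROM t", read="bigquery")
    annotated = annotate_types(expression, schema={"t": {"x": "FLOAT64"}}, dialect="bigquery")
    print(annotated.selects[0].type)  # expected: DOUBLE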
@@ -511,37 +551,68 @@ class BigQuery(Dialect):
          exp.Corr: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
          exp.CovarPop: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
          exp.CovarSamp: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
+         exp.CumeDist: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
          exp.DateFromUnixDate: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DATE),
          exp.DateTrunc: lambda self, e: self._annotate_by_args(e, "this"),
+         exp.DenseRank: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
          exp.FarmFingerprint: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
+         exp.FirstValue: lambda self, e: self._annotate_by_args(e, "this"),
          exp.Unhex: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BINARY),
          exp.Float64: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
+         exp.Format: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
          exp.GenerateTimestampArray: lambda self, e: self._annotate_with_type(
              e, exp.DataType.build("ARRAY<TIMESTAMP>", dialect="bigquery")
          ),
          exp.Grouping: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
+         exp.IgnoreNulls: lambda self, e: self._annotate_by_args(e, "this"),
          exp.JSONArray: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
+         exp.JSONArrayAppend: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
+         exp.JSONArrayInsert: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
          exp.JSONBool: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BOOLEAN),
          exp.JSONExtractScalar: lambda self, e: self._annotate_with_type(
              e, exp.DataType.Type.VARCHAR
          ),
-         exp.JSONValueArray: lambda self, e: self._annotate_with_type(
-             e, exp.DataType.build("ARRAY<VARCHAR>")
+         exp.JSONExtract: lambda self, e: self._annotate_by_args(e, "this"),
+         exp.JSONExtractArray: lambda self, e: self._annotate_by_args(e, "this", array=True),
+         exp.JSONFormat: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
+         exp.JSONKeysAtDepth: lambda self, e: self._annotate_with_type(
+             e, exp.DataType.build("ARRAY<VARCHAR>", dialect="bigquery")
          ),
+         exp.JSONObject: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
+         exp.JSONRemove: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
+         exp.JSONSet: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
+         exp.JSONStripNulls: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
          exp.JSONType: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
+         exp.JSONValueArray: lambda self, e: self._annotate_with_type(
+             e, exp.DataType.build("ARRAY<VARCHAR>", dialect="bigquery")
+         ),
          exp.Lag: lambda self, e: self._annotate_by_args(e, "this", "default"),
+         exp.Lead: lambda self, e: self._annotate_by_args(e, "this"),
          exp.LowerHex: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
+         exp.LaxBool: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BOOLEAN),
+         exp.LaxFloat64: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
+         exp.LaxInt64: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
+         exp.LaxString: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
          exp.MD5Digest: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BINARY),
          exp.Normalize: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
+         exp.NthValue: lambda self, e: self._annotate_by_args(e, "this"),
+         exp.Ntile: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
          exp.ParseTime: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.TIME),
          exp.ParseDatetime: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DATETIME),
          exp.ParseBignumeric: lambda self, e: self._annotate_with_type(
              e, exp.DataType.Type.BIGDECIMAL
          ),
          exp.ParseNumeric: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DECIMAL),
+         exp.PercentileCont: lambda self, e: _annotate_perncentile_cont(self, e),
+         exp.PercentileDisc: lambda self, e: self._annotate_by_args(e, "this"),
+         exp.PercentRank: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
+         exp.Rank: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
          exp.RegexpExtractAll: lambda self, e: self._annotate_by_args(e, "this", array=True),
+         exp.RegexpInstr: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
          exp.Replace: lambda self, e: self._annotate_by_args(e, "this"),
+         exp.RespectNulls: lambda self, e: self._annotate_by_args(e, "this"),
          exp.Reverse: lambda self, e: self._annotate_by_args(e, "this"),
+         exp.RowNumber: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
          exp.SafeConvertBytesToString: lambda self, e: self._annotate_with_type(
              e, exp.DataType.Type.VARCHAR
          ),
@@ -682,8 +753,11 @@ class BigQuery(Dialect):
            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
            "JSON_EXTRACT_SCALAR": _build_extract_json_with_default_path(exp.JSONExtractScalar),
            "JSON_EXTRACT_ARRAY": _build_extract_json_with_default_path(exp.JSONExtractArray),
+           "JSON_EXTRACT_STRING_ARRAY": _build_extract_json_with_default_path(exp.JSONValueArray),
+           "JSON_KEYS": exp.JSONKeysAtDepth.from_arg_list,
            "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract),
            "JSON_QUERY_ARRAY": _build_extract_json_with_default_path(exp.JSONExtractArray),
+           "JSON_STRIP_NULLS": _build_json_strip_nulls,
            "JSON_VALUE": _build_extract_json_with_default_path(exp.JSONExtractScalar),
            "JSON_VALUE_ARRAY": _build_extract_json_with_default_path(exp.JSONValueArray),
            "LENGTH": lambda args: exp.Length(this=seq_get(args, 0), binary=True),
@@ -798,9 +872,13 @@ class BigQuery(Dialect):
            "SAFE_ORDINAL": (1, True),
        }

-       def _parse_for_in(self) -> exp.ForIn:
+       def _parse_for_in(self) -> t.Union[exp.ForIn, exp.Command]:
+           index = self._index
            this = self._parse_range()
            self._match_text_seq("DO")
+           if self._match(TokenType.COMMAND):
+               self._retreat(index)
+               return self._parse_as_command(self._prev)
            return self.expression(exp.ForIn, this=this, expression=self._parse_statement())

        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
@@ -1197,6 +1275,8 @@ class BigQuery(Dialect):
            exp.JSONExtractArray: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.JSONFormat: rename_func("TO_JSON_STRING"),
+           exp.JSONKeysAtDepth: rename_func("JSON_KEYS"),
+           exp.JSONValueArray: rename_func("JSON_VALUE_ARRAY"),
            exp.Levenshtein: _levenshtein_sql,
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
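Together with the parser entries above, these generator mappings should let the new BigQuery JSON functions round-trip. A hedged sketch (query invented):

    import sqlglot

    # JSON_KEYS parses into exp.JSONKeysAtDepth and is generated back via rename_func("JSON_KEYS").
    print(sqlglot.transpile("SELECT JSON_KEYS(json_col, 2) FROM t", read="bigquery", write="bigquery")[0])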
sqlglot/dialects/dremio.py CHANGED
@@ -74,6 +74,27 @@ def build_date_delta_with_cast_interval(
      return _builder


+ def datetype_handler(args: t.List[exp.Expression], dialect: DialectType) -> exp.Expression:
+     year, month, day = args
+
+     if all(isinstance(arg, exp.Literal) and arg.is_int for arg in (year, month, day)):
+         date_str = f"{int(year.this):04d}-{int(month.this):02d}-{int(day.this):02d}"
+         return exp.Date(this=exp.Literal.string(date_str))
+
+     return exp.Cast(
+         this=exp.Concat(
+             expressions=[
+                 year,
+                 exp.Literal.string("-"),
+                 month,
+                 exp.Literal.string("-"),
+                 day,
+             ]
+         ),
+         to=exp.DataType.build("DATE"),
+     )
+
+
  class Dremio(Dialect):
      SUPPORTS_USER_DEFINED_TYPES = False
      CONCAT_COALESCE = True
@@ -145,12 +166,16 @@ class Dremio(Dialect):

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
-           "TO_CHAR": to_char_is_numeric_handler,
-           "DATE_FORMAT": build_formatted_time(exp.TimeToStr, "dremio"),
-           "TO_DATE": build_formatted_time(exp.TsOrDsToDate, "dremio"),
+           "ARRAY_GENERATE_RANGE": exp.GenerateSeries.from_arg_list,
            "DATE_ADD": build_date_delta_with_cast_interval(exp.DateAdd),
+           "DATE_FORMAT": build_formatted_time(exp.TimeToStr, "dremio"),
            "DATE_SUB": build_date_delta_with_cast_interval(exp.DateSub),
-           "ARRAY_GENERATE_RANGE": exp.GenerateSeries.from_arg_list,
+           "REGEXP_MATCHES": exp.RegexpLike.from_arg_list,
+           "REPEATSTR": exp.Repeat.from_arg_list,
+           "TO_CHAR": to_char_is_numeric_handler,
+           "TO_DATE": build_formatted_time(exp.TsOrDsToDate, "dremio"),
+           "DATE_PART": exp.Extract.from_arg_list,
+           "DATETYPE": datetype_handler,
        }

        def _parse_current_date_utc(self) -> exp.Cast:
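A hedged sketch of the constant-folding branch of datetype_handler (the query is invented; non-literal arguments would instead take the CAST-of-concatenation branch):

    from sqlglot import exp, parse_one

    # All-literal arguments should fold into a single exp.Date node over the string '2024-01-05'.
    print(parse_one("SELECT DATETYPE(2024, 1, 5)", read="dremio").find(exp.Date))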
sqlglot/dialects/singlestore.py CHANGED
@@ -10,8 +10,11 @@ from sqlglot.dialects.dialect import (
      rename_func,
      bool_xor_sql,
      count_if_to_sum,
+     timestamptrunc_sql,
+     date_add_interval_sql,
+     timestampdiff_sql,
  )
- from sqlglot.dialects.mysql import MySQL
+ from sqlglot.dialects.mysql import MySQL, _remove_ts_or_ds_to_date, date_add_sql
  from sqlglot.expressions import DataType
  from sqlglot.generator import unsupported_args
  from sqlglot.helper import seq_get
@@ -55,6 +58,7 @@ class SingleStore(MySQL):
            **MySQL.Tokenizer.KEYWORDS,
            "BSON": TokenType.JSONB,
            "GEOGRAPHYPOINT": TokenType.GEOGRAPHYPOINT,
+           "TIMESTAMP": TokenType.TIMESTAMP,
            ":>": TokenType.COLON_GT,
            "!:>": TokenType.NCOLON_GT,
            "::$": TokenType.DCOLONDOLLAR,
@@ -159,6 +163,11 @@ class SingleStore(MySQL):
                this=seq_get(args, 0),
                format=MySQL.format_time(exp.Literal.string("%W")),
            ),
+           "TIMESTAMPDIFF": lambda args: exp.TimestampDiff(
+               this=seq_get(args, 2),
+               expression=seq_get(args, 1),
+               unit=seq_get(args, 0),
+           ),
            "APPROX_COUNT_DISTINCT": exp.Hll.from_arg_list,
            "APPROX_PERCENTILE": lambda args, dialect: exp.ApproxQuantile(
                this=seq_get(args, 0),
@@ -189,6 +198,7 @@ class SingleStore(MySQL):
        CAST_COLUMN_OPERATORS = {TokenType.COLON_GT, TokenType.NCOLON_GT}

        COLUMN_OPERATORS = {
+           **MySQL.Parser.COLUMN_OPERATORS,
            TokenType.COLON_GT: lambda self, this, to: self.expression(
                exp.Cast,
                this=this,
@@ -209,6 +219,11 @@ class SingleStore(MySQL):
                exp.JSONExtractScalar, json_type="DOUBLE"
            )([this, exp.Literal.string(path.name)]),
        }
+       COLUMN_OPERATORS.pop(TokenType.ARROW)
+       COLUMN_OPERATORS.pop(TokenType.DARROW)
+       COLUMN_OPERATORS.pop(TokenType.HASH_ARROW)
+       COLUMN_OPERATORS.pop(TokenType.DHASH_ARROW)
+       COLUMN_OPERATORS.pop(TokenType.PLACEHOLDER)

    class Generator(MySQL.Generator):
        SUPPORTED_JSON_PATH_PARTS = {
@@ -277,6 +292,28 @@ class SingleStore(MySQL):
            exp.DateBin: unsupported_args("unit", "zone")(
                lambda self, e: self.func("TIME_BUCKET", e.this, e.expression, e.args.get("origin"))
            ),
+           exp.TimeStrToDate: lambda self, e: self.sql(exp.cast(e.this, exp.DataType.Type.DATE)),
+           exp.FromTimeZone: lambda self, e: self.func(
+               "CONVERT_TZ", e.this, e.args.get("zone"), "'UTC'"
+           ),
+           exp.DiToDate: lambda self,
+           e: f"STR_TO_DATE({self.sql(e, 'this')}, {SingleStore.DATEINT_FORMAT})",
+           exp.DateToDi: lambda self,
+           e: f"(DATE_FORMAT({self.sql(e, 'this')}, {SingleStore.DATEINT_FORMAT}) :> INT)",
+           exp.TsOrDiToDi: lambda self,
+           e: f"(DATE_FORMAT({self.sql(e, 'this')}, {SingleStore.DATEINT_FORMAT}) :> INT)",
+           exp.Time: unsupported_args("zone")(lambda self, e: f"{self.sql(e, 'this')} :> TIME"),
+           exp.DatetimeAdd: _remove_ts_or_ds_to_date(date_add_sql("ADD")),
+           exp.DatetimeTrunc: unsupported_args("zone")(timestamptrunc_sql()),
+           exp.DatetimeSub: date_add_interval_sql("DATE", "SUB"),
+           exp.DatetimeDiff: timestampdiff_sql,
+           exp.DateTrunc: unsupported_args("zone")(timestamptrunc_sql()),
+           exp.DateDiff: unsupported_args("zone")(
+               lambda self, e: timestampdiff_sql(self, e)
+               if e.unit is not None
+               else self.func("DATEDIFF", e.this, e.expression)
+           ),
+           exp.TimestampTrunc: unsupported_args("zone")(timestamptrunc_sql()),
            exp.JSONExtract: unsupported_args(
                "only_json_types",
                "expressions",
@@ -353,6 +390,94 @@ class SingleStore(MySQL):
        }
        TRANSFORMS.pop(exp.JSONExtractScalar)

+       UNSUPPORTED_TYPES = {
+           exp.DataType.Type.ARRAY,
+           exp.DataType.Type.AGGREGATEFUNCTION,
+           exp.DataType.Type.SIMPLEAGGREGATEFUNCTION,
+           exp.DataType.Type.BIGSERIAL,
+           exp.DataType.Type.BPCHAR,
+           exp.DataType.Type.DATEMULTIRANGE,
+           exp.DataType.Type.DATERANGE,
+           exp.DataType.Type.DYNAMIC,
+           exp.DataType.Type.HLLSKETCH,
+           exp.DataType.Type.HSTORE,
+           exp.DataType.Type.IMAGE,
+           exp.DataType.Type.INET,
+           exp.DataType.Type.INT128,
+           exp.DataType.Type.INT256,
+           exp.DataType.Type.INT4MULTIRANGE,
+           exp.DataType.Type.INT4RANGE,
+           exp.DataType.Type.INT8MULTIRANGE,
+           exp.DataType.Type.INT8RANGE,
+           exp.DataType.Type.INTERVAL,
+           exp.DataType.Type.IPADDRESS,
+           exp.DataType.Type.IPPREFIX,
+           exp.DataType.Type.IPV4,
+           exp.DataType.Type.IPV6,
+           exp.DataType.Type.LIST,
+           exp.DataType.Type.MAP,
+           exp.DataType.Type.LOWCARDINALITY,
+           exp.DataType.Type.MONEY,
+           exp.DataType.Type.MULTILINESTRING,
+           exp.DataType.Type.NAME,
+           exp.DataType.Type.NESTED,
+           exp.DataType.Type.NOTHING,
+           exp.DataType.Type.NULL,
+           exp.DataType.Type.NUMMULTIRANGE,
+           exp.DataType.Type.NUMRANGE,
+           exp.DataType.Type.OBJECT,
+           exp.DataType.Type.RANGE,
+           exp.DataType.Type.ROWVERSION,
+           exp.DataType.Type.SERIAL,
+           exp.DataType.Type.SMALLSERIAL,
+           exp.DataType.Type.SMALLMONEY,
+           exp.DataType.Type.STRUCT,
+           exp.DataType.Type.SUPER,
+           exp.DataType.Type.TIMETZ,
+           exp.DataType.Type.TIMESTAMPNTZ,
+           exp.DataType.Type.TIMESTAMPLTZ,
+           exp.DataType.Type.TIMESTAMPTZ,
+           exp.DataType.Type.TIMESTAMP_NS,
+           exp.DataType.Type.TSMULTIRANGE,
+           exp.DataType.Type.TSRANGE,
+           exp.DataType.Type.TSTZMULTIRANGE,
+           exp.DataType.Type.TSTZRANGE,
+           exp.DataType.Type.UINT128,
+           exp.DataType.Type.UINT256,
+           exp.DataType.Type.UNION,
+           exp.DataType.Type.UNKNOWN,
+           exp.DataType.Type.USERDEFINED,
+           exp.DataType.Type.UUID,
+           exp.DataType.Type.VARIANT,
+           exp.DataType.Type.XML,
+           exp.DataType.Type.TDIGEST,
+       }
+
+       TYPE_MAPPING = {
+           **MySQL.Generator.TYPE_MAPPING,
+           exp.DataType.Type.BIGDECIMAL: "DECIMAL",
+           exp.DataType.Type.BIT: "BOOLEAN",
+           exp.DataType.Type.DATE32: "DATE",
+           exp.DataType.Type.DATETIME64: "DATETIME",
+           exp.DataType.Type.DECIMAL32: "DECIMAL",
+           exp.DataType.Type.DECIMAL64: "DECIMAL",
+           exp.DataType.Type.DECIMAL128: "DECIMAL",
+           exp.DataType.Type.DECIMAL256: "DECIMAL",
+           exp.DataType.Type.ENUM8: "ENUM",
+           exp.DataType.Type.ENUM16: "ENUM",
+           exp.DataType.Type.FIXEDSTRING: "TEXT",
+           exp.DataType.Type.GEOMETRY: "GEOGRAPHY",
+           exp.DataType.Type.POINT: "GEOGRAPHYPOINT",
+           exp.DataType.Type.RING: "GEOGRAPHY",
+           exp.DataType.Type.LINESTRING: "GEOGRAPHY",
+           exp.DataType.Type.POLYGON: "GEOGRAPHY",
+           exp.DataType.Type.MULTIPOLYGON: "GEOGRAPHY",
+           exp.DataType.Type.JSONB: "BSON",
+           exp.DataType.Type.TIMESTAMP: "TIMESTAMP",
+           exp.DataType.Type.TIMESTAMP_S: "TIMESTAMP",
+           exp.DataType.Type.TIMESTAMP_MS: "TIMESTAMP(6)",
+       }
+
        # https://docs.singlestore.com/cloud/reference/sql-reference/restricted-keywords/list-of-restricted-keywords/
        RESERVED_KEYWORDS = {
            "abs",
@@ -1452,3 +1577,32 @@ class SingleStore(MySQL):
                expression.expression,
                self.func("TO_JSON", expression.this),
            )
+
+       @unsupported_args("kind", "nested", "values")
+       def datatype_sql(self, expression: exp.DataType) -> str:
+           if expression.is_type(exp.DataType.Type.VARBINARY) and not expression.expressions:
+               # `VARBINARY` must always have a size - if it doesn't, we always generate `BLOB`
+               return "BLOB"
+           if expression.is_type(
+               exp.DataType.Type.DECIMAL32,
+               exp.DataType.Type.DECIMAL64,
+               exp.DataType.Type.DECIMAL128,
+               exp.DataType.Type.DECIMAL256,
+           ):
+               scale = self.expressions(expression, flat=True)
+
+               if expression.is_type(exp.DataType.Type.DECIMAL32):
+                   precision = "9"
+               elif expression.is_type(exp.DataType.Type.DECIMAL64):
+                   precision = "18"
+               elif expression.is_type(exp.DataType.Type.DECIMAL128):
+                   precision = "38"
+               else:
+                   # 65 is a maximum precision supported in SingleStore
+                   precision = "65"
+               if scale is not None:
+                   return f"DECIMAL({precision}, {scale[0]})"
+               else:
+                   return f"DECIMAL({precision})"
+
+           return super().datatype_sql(expression)
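A hedged sketch of the new DECIMAL handling (input SQL invented): ClickHouse's Decimal32(4) carries only a scale, so the SingleStore generator should pin the precision to 9 (and 18/38/65 for DECIMAL64/128/256).

    import sqlglot

    # Expected to produce something like CAST(x AS DECIMAL(9, 4)).
    print(sqlglot.transpile("CAST(x AS Decimal32(4))", read="clickhouse", write="singlestore")[0])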
sqlglot/dialects/snowflake.py CHANGED
@@ -552,6 +552,7 @@ class Snowflake(Dialect):

        ID_VAR_TOKENS = {
            *parser.Parser.ID_VAR_TOKENS,
+           TokenType.EXCEPT,
            TokenType.MATCH_CONDITION,
        }

sqlglot/dialects/spark2.py CHANGED
@@ -187,6 +187,7 @@ class Spark2(Hive):
            "DAYOFYEAR": lambda args: exp.DayOfYear(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
            "DOUBLE": _build_as_cast("double"),
            "FLOAT": _build_as_cast("float"),
+           "FORMAT_STRING": exp.Format.from_arg_list,
            "FROM_UTC_TIMESTAMP": lambda args, dialect: exp.AtTimeZone(
                this=exp.cast(
                    seq_get(args, 0) or exp.Var(this=""),
@@ -292,6 +293,7 @@ class Spark2(Hive):
            # (DAY_OF_WEEK(datetime) % 7) + 1 is equivalent to DAYOFWEEK_ISO(datetime)
            exp.DayOfWeekIso: lambda self, e: f"(({self.func('DAYOFWEEK', e.this)} % 7) + 1)",
            exp.DayOfYear: rename_func("DAYOFYEAR"),
+           exp.Format: rename_func("FORMAT_STRING"),
            exp.From: transforms.preprocess([_unalias_pivot]),
            exp.FromTimeZone: lambda self, e: self.func(
                "TO_UTC_TIMESTAMP", e.this, e.args.get("zone")
sqlglot/dialects/sqlite.py CHANGED
@@ -156,6 +156,7 @@ class SQLite(Dialect):
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
        SUPPORTS_MEDIAN = False
        JSON_KEY_VALUE_PAIR_SEP = ","
+       PARSE_JSON_NAME: t.Optional[str] = None

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
sqlglot/expressions.py CHANGED
@@ -6110,6 +6110,22 @@ class LastDay(Func, TimeUnit):
      arg_types = {"this": True, "unit": False}


+ class LaxBool(Func):
+     pass
+
+
+ class LaxFloat64(Func):
+     pass
+
+
+ class LaxInt64(Func):
+     pass
+
+
+ class LaxString(Func):
+     pass
+
+
  class Extract(Func):
      arg_types = {"this": True, "expression": True}

@@ -6203,6 +6219,11 @@ class DecodeCase(Func):
      is_var_len_args = True


+ class DenseRank(AggFunc):
+     arg_types = {"expressions": False}
+     is_var_len_args = True
+
+
  class DiToDate(Func):
      pass

@@ -6438,10 +6459,20 @@ class FormatJson(Expression):
      pass


+ class Format(Func):
+     arg_types = {"this": True, "expressions": True}
+     is_var_len_args = True
+
+
  class JSONKeyValue(Expression):
      arg_types = {"this": True, "expression": True}


+ # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_keys
+ class JSONKeysAtDepth(Func):
+     arg_types = {"this": True, "expression": False, "mode": False}
+
+
  class JSONObject(Func):
      arg_types = {
          "expressions": False,
@@ -6502,6 +6533,23 @@ class JSONSchema(Expression):
      arg_types = {"expressions": True}


+ class JSONSet(Func):
+     arg_types = {"this": True, "expressions": True}
+     is_var_len_args = True
+     _sql_names = ["JSON_SET"]
+
+
+ # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_strip_nulls
+ class JSONStripNulls(Func):
+     arg_types = {
+         "this": True,
+         "expression": False,
+         "include_arrays": False,
+         "remove_empty": False,
+     }
+     _sql_names = ["JSON_STRIP_NULLS"]
+
+
  # https://dev.mysql.com/doc/refman/8.4/en/json-search-functions.html#function_json-value
  class JSONValue(Expression):
      arg_types = {
@@ -6516,6 +6564,12 @@ class JSONValueArray(Func):
      arg_types = {"this": True, "expression": False}


+ class JSONRemove(Func):
+     arg_types = {"this": True, "expressions": True}
+     is_var_len_args = True
+     _sql_names = ["JSON_REMOVE"]
+
+
  # https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/JSON_TABLE.html
  class JSONTable(Func):
      arg_types = {
@@ -6625,12 +6679,24 @@ class JSONFormat(Func):
      _sql_names = ["JSON_FORMAT"]


+ class JSONArrayAppend(Func):
+     arg_types = {"this": True, "expressions": True}
+     is_var_len_args = True
+     _sql_names = ["JSON_ARRAY_APPEND"]
+
+
  # https://dev.mysql.com/doc/refman/8.0/en/json-search-functions.html#operator_member-of
  class JSONArrayContains(Binary, Predicate, Func):
      arg_types = {"this": True, "expression": True, "json_type": False}
      _sql_names = ["JSON_ARRAY_CONTAINS"]


+ class JSONArrayInsert(Func):
+     arg_types = {"this": True, "expressions": True}
+     is_var_len_args = True
+     _sql_names = ["JSON_ARRAY_INSERT"]
+
+
  class ParseBignumeric(Func):
      pass

@@ -6796,6 +6862,10 @@ class Nvl2(Func):
      arg_types = {"this": True, "true": True, "false": False}


+ class Ntile(AggFunc):
+     arg_types = {"this": False}
+
+
  class Normalize(Func):
      arg_types = {"this": True, "form": False, "is_casefold": False}

@@ -6844,6 +6914,11 @@ class PercentileDisc(AggFunc):
      arg_types = {"this": True, "expression": False}


+ class PercentRank(AggFunc):
+     arg_types = {"expressions": False}
+     is_var_len_args = True
+
+
  class Quantile(AggFunc):
      arg_types = {"this": True, "quantile": True}

@@ -6877,6 +6952,11 @@ class RangeN(Func):
      arg_types = {"this": True, "expressions": True, "each": False}


+ class Rank(AggFunc):
+     arg_types = {"expressions": False}
+     is_var_len_args = True
+
+
  class ReadCSV(Func):
      _sql_names = ["READ_CSV"]
      is_var_len_args = True
@@ -6928,6 +7008,18 @@ class RegexpILike(Binary, Func):
      arg_types = {"this": True, "expression": True, "flag": False}


+ class RegexpInstr(Func):
+     arg_types = {
+         "this": True,
+         "expression": True,
+         "position": False,
+         "occurrence": False,
+         "option": False,
+         "parameters": False,
+         "group": False,
+     }
+
+
  # https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.split.html
  # limit is the number of times a pattern is applied
  class RegexpSplit(Func):
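These new expression classes are plain AST node declarations: arg_types names each node's arguments and is_var_len_args lets trailing positional arguments collect into expressions. A hedged sketch of building the RegexpInstr node above directly (column name and target dialect are arbitrary):

    from sqlglot import exp

    node = exp.RegexpInstr(this=exp.column("s"), expression=exp.Literal.string("a+"))
    print(node.sql(dialect="bigquery"))  # expected to render as REGEXP_INSTR(s, 'a+')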
@@ -7270,6 +7362,12 @@ class Corr(Binary, AggFunc):
      pass


+ # https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/CUME_DIST.html
+ class CumeDist(AggFunc):
+     arg_types = {"expressions": False}
+     is_var_len_args = True
+
+
  class Variance(AggFunc):
      _sql_names = ["VARIANCE", "VARIANCE_SAMP", "VAR_SAMP"]

sqlglot/generator.py CHANGED
@@ -511,6 +511,8 @@ class Generator(metaclass=_Generator):
        exp.DataType.Type.SMALLDATETIME: "TIMESTAMP",
    }

+   UNSUPPORTED_TYPES: set[exp.DataType.Type] = set()
+
    TIME_PART_SINGULARS = {
        "MICROSECONDS": "MICROSECOND",
        "SECONDS": "SECOND",
@@ -1406,6 +1408,11 @@ class Generator(metaclass=_Generator):
        interior = self.expressions(expression, flat=True)

        type_value = expression.this
+       if type_value in self.UNSUPPORTED_TYPES:
+           self.unsupported(
+               f"Data type {type_value.value} is not supported when targeting {self.dialect.__class__.__name__}"
+           )
+
        if type_value == exp.DataType.Type.USERDEFINED and expression.args.get("kind"):
            type_sql = self.sql(expression, "kind")
        else:
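A hedged sketch of how this hook surfaces (input SQL invented): SingleStore lists UUID in its new UNSUPPORTED_TYPES set, so generating it with unsupported_level=RAISE should now raise instead of silently passing the type through.

    import sqlglot
    from sqlglot.errors import ErrorLevel, UnsupportedError

    try:
        sqlglot.transpile("CAST(x AS UUID)", read="postgres", write="singlestore", unsupported_level=ErrorLevel.RAISE)
    except UnsupportedError as error:
        print(error)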
@@ -4660,9 +4667,12 @@ class Generator(metaclass=_Generator):
    def arrayconcat_sql(self, expression: exp.ArrayConcat, name: str = "ARRAY_CONCAT") -> str:
        exprs = expression.expressions
        if not self.ARRAY_CONCAT_IS_VAR_LEN:
-           rhs = reduce(lambda x, y: exp.ArrayConcat(this=x, expressions=[y]), exprs)
+           if len(exprs) == 0:
+               rhs: t.Union[str, exp.Expression] = exp.Array(expressions=[])
+           else:
+               rhs = reduce(lambda x, y: exp.ArrayConcat(this=x, expressions=[y]), exprs)
        else:
-           rhs = self.expressions(expression)
+           rhs = self.expressions(expression)  # type: ignore

        return self.func(name, expression.this, rhs or None)

sqlglot/tokens.py CHANGED
@@ -1427,7 +1427,7 @@ class Tokenizer(metaclass=_Tokenizer):
        self._advance(len(start))
        text = self._extract_string(end, raw_string=token_type == TokenType.RAW_STRING)

-       if base:
+       if base and text:
            try:
                int(text, base)
            except Exception:
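A hedged sketch of the case this guard addresses (input invented): an empty hex string literal produces empty text, and int('', base) would raise, so the base validation is now skipped when there is nothing to validate.

    import sqlglot

    # Expected to tokenize cleanly after the fix; previously the empty literal tripped the int(text, base) check.
    print(sqlglot.tokenize("SELECT x''", read="mysql"))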
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlglot
- Version: 27.10.0
+ Version: 27.11.0
  Summary: An easily customizable SQL parser and transpiler
  Author-email: Toby Mao <toby.mao@gmail.com>
  License-Expression: MIT
@@ -33,7 +33,7 @@ Requires-Dist: typing_extensions; extra == "dev"
  Requires-Dist: maturin<2.0,>=1.4; extra == "dev"
  Requires-Dist: pyperf; extra == "dev"
  Provides-Extra: rs
- Requires-Dist: sqlglotrs==0.6.1; extra == "rs"
+ Requires-Dist: sqlglotrs==0.6.2; extra == "rs"
  Dynamic: license-file
  Dynamic: provides-extra

@@ -1,11 +1,11 @@
  sqlglot/__init__.py,sha256=za08rtdPh2v7dOpGdNomttlIVGgTrKja7rPd6sQwaTg,5391
  sqlglot/__main__.py,sha256=022c173KqxsiABWTEpUIq_tJUxuNiW7a7ABsxBXqvu8,2069
  sqlglot/_typing.py,sha256=-1HPyr3w5COlSJWqlgt8jhFk2dyMvBuvVBqIX1wyVCM,642
- sqlglot/_version.py,sha256=UQWMArALhjnnjO2LJbcK8eqVDukANsZFTdd9DkZF4Vg,708
+ sqlglot/_version.py,sha256=oM65QlZoKN_nkllGdqeZbvnaN1K37Ouzx2i0_B4cs3k,708
  sqlglot/diff.py,sha256=PtOllQMQa1Sw1-V2Y8eypmDqGujXYPaTOp_WLsWkAWk,17314
  sqlglot/errors.py,sha256=QNKMr-pzLUDR-tuMmn_GK6iMHUIVdb_YSJ_BhGEvuso,2126
- sqlglot/expressions.py,sha256=kgMzkI6ypuceCguV7qw8qQY-Q2LmTAnCYa17wh0hjbQ,250479
- sqlglot/generator.py,sha256=kAU58UNnAsbuP0ualoi2n4b61c2cg3C0mJJD7IjOciU,222829
+ sqlglot/expressions.py,sha256=u7_1s9-VxVvo4SAqC84nktRr-3lw00OwU-_rmGiFm-c,252520
+ sqlglot/generator.py,sha256=69I3fUEnnE_-JPrqf56B3x12VsN99PRlWIC_QAiFo3E,223240
  sqlglot/helper.py,sha256=9nZjFVRBtMKFC3EdzpDQ6jkazFO19po6BF8xHiNGZIo,15111
  sqlglot/jsonpath.py,sha256=SQgaxzaEYBN7At9dkTK4N1Spk6xHxvHL6QtCIP6iM30,7905
  sqlglot/lineage.py,sha256=Qj5ykuDNcATppb9vOjoIKBqRVLbu3OMPiZk9f3iyv40,15312
@@ -15,17 +15,17 @@ sqlglot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sqlglot/schema.py,sha256=13H2qKQs27EKdTpDLOvcNnSTDAUbYNKjWtJs4aQCSOA,20509
  sqlglot/serde.py,sha256=DQVJ95WrIvhYfe02Ytb4NQug2aMwDCEwpMBW1LKDqzE,2031
  sqlglot/time.py,sha256=Q62gv6kL40OiRBF6BMESxKJcMVn7ZLNw7sv8H34z5FI,18400
- sqlglot/tokens.py,sha256=4U_eej87MjWiqruZrEqvVQGuDuIS7YfR99Kxk1aLu64,48993
+ sqlglot/tokens.py,sha256=gRIc8NnZJcLPdXmsjRysoGWKrM8Fse8sLF7b-uHc6Do,49002
  sqlglot/transforms.py,sha256=utNDsCBsA7hPUK3-aby3DDgiY_XVMAKQqeoLm1EyihI,41218
  sqlglot/trie.py,sha256=v27uXMrHfqrXlJ6GmeTSMovsB_3o0ctnlKhdNt7W6fI,2245
  sqlglot/dialects/__init__.py,sha256=e3K2NHrZO7oXfBzEpRsvgWAgJ_UCEyg7SlUCRqvnPj4,3799
  sqlglot/dialects/athena.py,sha256=ofArmayYLev4qZQ15GM8mevG04qqR5WGFb2ZcuYm6x4,10966
- sqlglot/dialects/bigquery.py,sha256=66hGREFUnfora2KRBw6AqZCMc8a2Xk9XbHegRE8rLPI,64753
+ sqlglot/dialects/bigquery.py,sha256=fp1uAPhRHOh5yeFtR8NbrpyJFpM-IJ8PsxzTMvSqvnk,69316
  sqlglot/dialects/clickhouse.py,sha256=OuaDWpvVrHCF9N-u3ZmHZUCs7DSKq_aaRt8yOh2CtaY,58301
  sqlglot/dialects/databricks.py,sha256=761qPlGOFHa9V8mdQYNAEw0pv4HUJYMkGEuFsdsLC7s,4714
  sqlglot/dialects/dialect.py,sha256=02W-j8nYJduA78_BMxMTMGXv2i9mcAGAmXBt1YsH0CQ,73055
  sqlglot/dialects/doris.py,sha256=NvDGHIKGJDVp0b4oOnMu1fyAprJPOiC8J6M85V75iuU,25136
- sqlglot/dialects/dremio.py,sha256=o9wqHnUXlEetOuGUWBZLER1whId9U4l1xledme7qGsg,7392
+ sqlglot/dialects/dremio.py,sha256=D2YwjqbTjJ8VZ94GQs7GBRCKUiizwggcEaR_drKpnoc,8242
  sqlglot/dialects/drill.py,sha256=FOh7_KjPx_77pv0DiHKZog0CcmzqeF9_PEmGnJ1ESSM,5825
  sqlglot/dialects/druid.py,sha256=kh3snZtneehNOWqs3XcPjsrhNaRbkCQ8E4hHbWJ1fHM,690
  sqlglot/dialects/duckdb.py,sha256=vCIjZV9lszyXt-hNylVyUdHHlD_F9zsYznzcZLaqGDg,52241
@@ -41,11 +41,11 @@ sqlglot/dialects/presto.py,sha256=XVeYr2NP86x5enlRqI7MYR6le85_ucYg_BBRocGN3jM,33
  sqlglot/dialects/prql.py,sha256=fwN-SPEGx-drwf1K0U2MByN-PkW3C_rOgQ3xeJeychg,7908
  sqlglot/dialects/redshift.py,sha256=_sQTom4CGozFDZXW9y6bHQcZ-KiQ7QJjjQqM5rVagSc,15889
  sqlglot/dialects/risingwave.py,sha256=BqWwW1iT_OIVMwfRamaww79snnBwIgCfr22Go-ggO68,3289
- sqlglot/dialects/singlestore.py,sha256=rn3a9Koom9AsS3h7anZfW6AfIEmADvAJ8EuTH9NRG9s,42616
- sqlglot/dialects/snowflake.py,sha256=XPsqYGBjn3dfddF2dcuM7Ur-4sYdthjW7cSPnptWq_s,70542
+ sqlglot/dialects/singlestore.py,sha256=YxBHLKOyinFbXSZ720XDzgA7g9KrI5aAnjEl0JBA01w,49592
+ sqlglot/dialects/snowflake.py,sha256=QvsWQzhabC6zG7QfrBBVc_Dv6E70-xAwTjMg5ExrPeM,70572
  sqlglot/dialects/spark.py,sha256=5dKMb9cGl0GMreEyFfjClvW1Cwm1JEx3W-hFnxjqW8E,9362
- sqlglot/dialects/spark2.py,sha256=aCwPqLduLRSUSPtbI1VtBjydK6haKgEy3iahmueGRo4,14742
- sqlglot/dialects/sqlite.py,sha256=XIDmiNTswWcrDwlFm8gOODCrJ_rPmXQKkm9U_-YAlVs,13183
+ sqlglot/dialects/spark2.py,sha256=qz36FT9k4iuiqboRpyG4VpKGkPR0P2fifmqgZ9gNUEU,14851
+ sqlglot/dialects/sqlite.py,sha256=UIQ66shIt2bQoLd7tYG4NVzh4HwCfERgAaLyukz8HjE,13231
  sqlglot/dialects/starrocks.py,sha256=2gav0PSNgRdAGXzawdznZliBpglJoQ0wBxPI7ZIMsRw,11314
  sqlglot/dialects/tableau.py,sha256=oIawDzUITxGCWaEMB8OaNMPWhbC3U-2y09pYPm4eazc,2190
  sqlglot/dialects/teradata.py,sha256=7LxCcRwP0Idd_OnCzA57NCdheVjHcKC2aFAKG5N49IU,18202
@@ -76,8 +76,8 @@ sqlglot/optimizer/qualify_tables.py,sha256=dA4ZazL7ShQh2JgBwpHuG-4c5lBw1TNzCnuN7
  sqlglot/optimizer/scope.py,sha256=UOTrbwqcTc5iRQf0WStgYWXpE24w6riZy-tJYA18yTw,31229
  sqlglot/optimizer/simplify.py,sha256=-_yus42OYwqjQ9a2TSGhtG2G0pSkInUry1z7hEMz2pY,51062
  sqlglot/optimizer/unnest_subqueries.py,sha256=kzWUVDlxs8z9nmRx-8U-pHXPtVZhEIwkKqmKhr2QLvc,10908
- sqlglot-27.10.0.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
- sqlglot-27.10.0.dist-info/METADATA,sha256=FYz_px3ppPANEJZGC0NthW3Gu3LCGggxS-AaHbJrDbU,20682
- sqlglot-27.10.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- sqlglot-27.10.0.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
- sqlglot-27.10.0.dist-info/RECORD,,
+ sqlglot-27.11.0.dist-info/licenses/LICENSE,sha256=p1Yk0B4oa0l8Rh-_dYyy75d8spjPd_vTloXfz4FWxys,1065
+ sqlglot-27.11.0.dist-info/METADATA,sha256=GoUJQsTzum00ctJVKTRCOnO1d19dqBDV7ptg1MKksWE,20682
+ sqlglot-27.11.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ sqlglot-27.11.0.dist-info/top_level.txt,sha256=5kRskCGA_gVADF9rSfSzPdLHXqvfMusDYeHePfNY2nQ,8
+ sqlglot-27.11.0.dist-info/RECORD,,