sqlglot 27.6.0__py3-none-any.whl → 27.8.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
sqlglot/dialects/dremio.py CHANGED
@@ -1,14 +1,22 @@
  from __future__ import annotations

+ import typing as t
+
  from sqlglot import expressions as exp
  from sqlglot import parser, generator, tokens
- from sqlglot.dialects.dialect import Dialect, build_formatted_time, unit_to_var
- import typing as t
+ from sqlglot.dialects.dialect import (
+ Dialect,
+ build_timetostr_or_tochar,
+ build_formatted_time,
+ rename_func,
+ unit_to_var,
+ )
+ from sqlglot.helper import seq_get
+
+ if t.TYPE_CHECKING:
+ from sqlglot.dialects.dialect import DialectType

- DATE_DELTA = t.Union[
- exp.DateAdd,
- exp.DateSub,
- ]
+ DATE_DELTA = t.Union[exp.DateAdd, exp.DateSub]


  def _date_delta_sql(name: str) -> t.Callable[[Dremio.Generator, DATE_DELTA], str]:
@@ -31,6 +39,17 @@ def _date_delta_sql(name: str) -> t.Callable[[Dremio.Generator, DATE_DELTA], str
  return _delta_sql


+ def to_char_is_numeric_handler(args: t.List, dialect: DialectType) -> exp.TimeToStr | exp.ToChar:
+ expression = build_timetostr_or_tochar(args, dialect)
+ fmt = seq_get(args, 1)
+
+ if fmt and isinstance(expression, exp.ToChar) and fmt.is_string and "#" in fmt.name:
+ # Only mark as numeric if format is a literal containing #
+ expression.set("is_numeric", True)
+
+ return expression
+
+
  class Dremio(Dialect):
  SUPPORTS_USER_DEFINED_TYPES = False
  CONCAT_COALESCE = True
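
Note: the new handler leans on build_timetostr_or_tochar to pick the expression type and only flags numeric formatting when the format argument is a string literal containing "#". A minimal sketch of that check (illustrative, not part of the diff):

    from sqlglot import exp

    fmt = exp.Literal.string("#,##0.00")
    # Mirrors the condition in to_char_is_numeric_handler: a literal format mask
    # containing "#" is what marks the call as numeric
    print(fmt.is_string and "#" in fmt.name)  # True -> expression.set("is_numeric", True)
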
@@ -42,30 +61,51 @@ class Dremio(Dialect):
  TIME_MAPPING = {
  # year
  "YYYY": "%Y",
+ "yyyy": "%Y",
  "YY": "%y",
+ "yy": "%y",
  # month / day
  "MM": "%m",
+ "mm": "%m",
  "MON": "%b",
+ "mon": "%b",
  "MONTH": "%B",
+ "month": "%B",
  "DDD": "%j",
+ "ddd": "%j",
  "DD": "%d",
+ "dd": "%d",
  "DY": "%a",
+ "dy": "%a",
  "DAY": "%A",
+ "day": "%A",
  # hours / minutes / seconds
  "HH24": "%H",
+ "hh24": "%H",
  "HH12": "%I",
- "HH": "%I", # 24- / 12-hour
+ "hh12": "%I",
+ "HH": "%I",
+ "hh": "%I", # 24- / 12-hour
  "MI": "%M",
+ "mi": "%M",
  "SS": "%S",
+ "ss": "%S",
  "FFF": "%f",
+ "fff": "%f",
  "AMPM": "%p",
+ "ampm": "%p",
  # ISO week / century etc.
  "WW": "%W",
+ "ww": "%W",
  "D": "%w",
+ "d": "%w",
  "CC": "%C",
+ "cc": "%C",
  # timezone
- "TZD": "%Z", # abbreviation (UTC, PST, ...)
- "TZO": "%z", # numeric offset (+0200)
+ "TZD": "%Z",
+ "tzd": "%Z", # abbreviation (UTC, PST, ...)
+ "TZO": "%z",
+ "tzo": "%z", # numeric offset (+0200)
  }

  class Parser(parser.Parser):
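
Note: the lowercase entries make Dremio's format tokens effectively case-insensitive. A quick way to see the mapping applied (a sketch; it assumes sqlglot.time.format_time and the auto-generated TIME_TRIE behave as they do for other dialects):

    from sqlglot.dialects.dremio import Dremio
    from sqlglot.time import format_time

    # Lowercase tokens now resolve the same way as their uppercase counterparts
    print(format_time("dd/mm/yyyy hh24:mi:ss", Dremio.TIME_MAPPING, Dremio.TIME_TRIE))
    # Expected: %d/%m/%Y %H:%M:%S
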
@@ -73,7 +113,9 @@ class Dremio(Dialect):

  FUNCTIONS = {
  **parser.Parser.FUNCTIONS,
- "TO_CHAR": build_formatted_time(exp.TimeToStr, "dremio"),
+ "TO_CHAR": to_char_is_numeric_handler,
+ "DATE_FORMAT": build_formatted_time(exp.TimeToStr, "dremio"),
+ "TO_DATE": build_formatted_time(exp.TsOrDsToDate, "dremio"),
  }

  class Generator(generator.Generator):
@@ -102,8 +144,8 @@ class Dremio(Dialect):

  TRANSFORMS = {
  **generator.Generator.TRANSFORMS,
+ exp.ToChar: rename_func("TO_CHAR"),
  exp.TimeToStr: lambda self, e: self.func("TO_CHAR", e.this, self.format_time(e)),
- exp.ToChar: lambda self, e: self.function_fallback_sql(e),
  exp.DateAdd: _date_delta_sql("DATE_ADD"),
  exp.DateSub: _date_delta_sql("DATE_SUB"),
  }
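
Note: with exp.ToChar routed through rename_func, a ToChar node renders as a plain TO_CHAR call instead of going through the generic fallback. A hedged sketch of the generator side (the output shown is the expected shape, not verified against this exact release):

    from sqlglot import exp
    from sqlglot.dialects.dremio import Dremio

    tochar = exp.ToChar(this=exp.column("price"), format=exp.Literal.string("#,##0.00"))
    print(Dremio().generate(tochar))  # TO_CHAR(price, '#,##0.00')
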
sqlglot/dialects/duckdb.py CHANGED
@@ -4,7 +4,6 @@ import typing as t

  from sqlglot import exp, generator, parser, tokens, transforms

- from sqlglot.expressions import DATA_TYPE
  from sqlglot.dialects.dialect import (
  Dialect,
  JSON_EXTRACT_TYPE,
@@ -16,6 +15,7 @@ from sqlglot.dialects.dialect import (
  bool_xor_sql,
  build_default_decimal_type,
  count_if_to_sum,
+ date_delta_to_binary_interval_op,
  date_trunc_to_time,
  datestrtodate_sql,
  no_datetime_sql,
@@ -32,7 +32,6 @@ from sqlglot.dialects.dialect import (
  str_to_time_sql,
  timestamptrunc_sql,
  timestrtotime_sql,
- unit_to_var,
  unit_to_str,
  sha256_sql,
  build_regexp_extract,
@@ -45,38 +44,6 @@ from sqlglot.helper import seq_get
  from sqlglot.tokens import TokenType
  from sqlglot.parser import binary_range_parser

- DATETIME_DELTA = t.Union[
- exp.DateAdd, exp.TimeAdd, exp.DatetimeAdd, exp.TsOrDsAdd, exp.DateSub, exp.DatetimeSub
- ]
-
-
- def _date_delta_sql(self: DuckDB.Generator, expression: DATETIME_DELTA) -> str:
- this = expression.this
- unit = unit_to_var(expression)
- op = (
- "+"
- if isinstance(expression, (exp.DateAdd, exp.TimeAdd, exp.DatetimeAdd, exp.TsOrDsAdd))
- else "-"
- )
-
- to_type: t.Optional[DATA_TYPE] = None
- if isinstance(expression, exp.TsOrDsAdd):
- to_type = expression.return_type
- elif this.is_string:
- # Cast string literals (i.e function parameters) to the appropriate type for +/- interval to work
- to_type = (
- exp.DataType.Type.DATETIME
- if isinstance(expression, (exp.DatetimeAdd, exp.DatetimeSub))
- else exp.DataType.Type.DATE
- )
-
- this = exp.cast(this, to_type) if to_type else this
-
- expr = expression.expression
- interval = expr if isinstance(expr, exp.Interval) else exp.Interval(this=expr, unit=unit)
-
- return f"{self.sql(this)} {op} {self.sql(interval)}"
-

  # BigQuery -> DuckDB conversion for the DATE function
  def _date_sql(self: DuckDB.Generator, expression: exp.Date) -> str:
@@ -396,6 +363,7 @@ class DuckDB(Dialect):

  FUNCTIONS = {
  **parser.Parser.FUNCTIONS,
+ "ANY_VALUE": lambda args: exp.IgnoreNulls(this=exp.AnyValue.from_arg_list(args)),
  "ARRAY_REVERSE_SORT": _build_sort_array_desc,
  "ARRAY_SORT": exp.SortArray.from_arg_list,
  "DATEDIFF": _build_date_diff,
@@ -686,14 +654,14 @@ class DuckDB(Dialect):
  exp.DayOfYear: rename_func("DAYOFYEAR"),
  exp.DataType: _datatype_sql,
  exp.Date: _date_sql,
- exp.DateAdd: _date_delta_sql,
+ exp.DateAdd: date_delta_to_binary_interval_op(),
  exp.DateFromParts: rename_func("MAKE_DATE"),
- exp.DateSub: _date_delta_sql,
+ exp.DateSub: date_delta_to_binary_interval_op(),
  exp.DateDiff: _date_diff_sql,
  exp.DateStrToDate: datestrtodate_sql,
  exp.Datetime: no_datetime_sql,
- exp.DatetimeSub: _date_delta_sql,
- exp.DatetimeAdd: _date_delta_sql,
+ exp.DatetimeSub: date_delta_to_binary_interval_op(),
+ exp.DatetimeAdd: date_delta_to_binary_interval_op(),
  exp.DateToDi: lambda self,
  e: f"CAST(STRFTIME({self.sql(e, 'this')}, {DuckDB.DATEINT_FORMAT}) AS INT)",
  exp.Decode: lambda self, e: encode_decode_sql(self, e, "DECODE", replace=False),
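
Note: the dialect-local _date_delta_sql helper removed above is replaced by the shared date_delta_to_binary_interval_op factory from dialect.py, which emits the same expr +/- INTERVAL form. Roughly (a sketch; the exact casting and interval quoting may differ):

    import sqlglot

    print(sqlglot.transpile("DATE_ADD(d, INTERVAL 1 DAY)", read="mysql", write="duckdb")[0])
    # Something like: d + INTERVAL '1' DAY
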
@@ -755,7 +723,7 @@ class DuckDB(Dialect):
  ),
  exp.Struct: _struct_sql,
  exp.Transform: rename_func("LIST_TRANSFORM"),
- exp.TimeAdd: _date_delta_sql,
+ exp.TimeAdd: date_delta_to_binary_interval_op(),
  exp.Time: no_time_sql,
  exp.TimeDiff: _timediff_sql,
  exp.Timestamp: no_timestamp_sql,
@@ -772,7 +740,7 @@ class DuckDB(Dialect):
  exp.TimeToUnix: rename_func("EPOCH"),
  exp.TsOrDiToDi: lambda self,
  e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS TEXT), '-', ''), 1, 8) AS INT)",
- exp.TsOrDsAdd: _date_delta_sql,
+ exp.TsOrDsAdd: date_delta_to_binary_interval_op(),
  exp.TsOrDsDiff: lambda self, e: self.func(
  "DATE_DIFF",
  f"'{e.args.get('unit') or 'DAY'}'",
@@ -920,6 +888,7 @@ class DuckDB(Dialect):
  PROPERTIES_LOCATION[exp.LikeProperty] = exp.Properties.Location.POST_SCHEMA
  PROPERTIES_LOCATION[exp.TemporaryProperty] = exp.Properties.Location.POST_CREATE
  PROPERTIES_LOCATION[exp.ReturnsProperty] = exp.Properties.Location.POST_ALIAS
+ PROPERTIES_LOCATION[exp.SequenceProperties] = exp.Properties.Location.POST_EXPRESSION

  IGNORE_RESPECT_NULLS_WINDOW_FUNCTIONS = (
  exp.FirstValue,
@@ -1136,9 +1105,10 @@ class DuckDB(Dialect):

  # If BQ's UNNEST is aliased, we transform it from a column alias to a table alias in DDB
  alias = expression.args.get("alias")
- if alias:
+ if isinstance(alias, exp.TableAlias):
  expression.set("alias", None)
- alias = exp.TableAlias(this=seq_get(alias.args.get("columns"), 0))
+ if alias.columns:
+ alias = exp.TableAlias(this=seq_get(alias.columns, 0))

  unnest_sql = super().unnest_sql(expression)
  select = exp.Select(expressions=[unnest_sql]).subquery(alias)
@@ -1152,7 +1122,9 @@ class DuckDB(Dialect):
  # window functions that accept it e.g. FIRST_VALUE(... IGNORE NULLS) OVER (...)
  return super().ignorenulls_sql(expression)

- self.unsupported("IGNORE NULLS is not supported for non-window functions.")
+ if not isinstance(expression.this, exp.AnyValue):
+ self.unsupported("IGNORE NULLS is not supported for non-window functions.")
+
  return self.sql(expression, "this")

  def respectnulls_sql(self, expression: exp.RespectNulls) -> str:
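
Note: parsing DuckDB's ANY_VALUE into IgnoreNulls(AnyValue) records its NULL-skipping behavior, and the relaxed ignorenulls_sql check lets that wrapper round-trip without the "not supported" warning. A hedged example:

    import sqlglot

    # Expected to round-trip cleanly, with no unsupported-IGNORE-NULLS warning
    print(sqlglot.transpile("SELECT ANY_VALUE(x) FROM t", read="duckdb", write="duckdb")[0])
    # SELECT ANY_VALUE(x) FROM t
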
@@ -1247,3 +1219,27 @@ class DuckDB(Dialect):
  return self.sql(exp.Subquery(this=exp.Select(expressions=[posexplode_sql])))

  return posexplode_sql
+
+ def addmonths_sql(self, expression: exp.AddMonths) -> str:
+ this = expression.this
+
+ if not this.type:
+ from sqlglot.optimizer.annotate_types import annotate_types
+
+ this = annotate_types(this, dialect=self.dialect)
+
+ if this.is_type(*exp.DataType.TEXT_TYPES):
+ this = exp.Cast(this=this, to=exp.DataType(this=exp.DataType.Type.TIMESTAMP))
+
+ func = self.func(
+ "DATE_ADD", this, exp.Interval(this=expression.expression, unit=exp.var("MONTH"))
+ )
+
+ # DuckDB's DATE_ADD function returns TIMESTAMP/DATETIME by default, even when the input is DATE
+ # To match for example Snowflake's ADD_MONTHS behavior (which preserves the input type)
+ # We need to cast the result back to the original type when the input is DATE or TIMESTAMPTZ
+ # Example: ADD_MONTHS('2023-01-31'::date, 1) should return DATE, not TIMESTAMP
+ if this.is_type(exp.DataType.Type.DATE, exp.DataType.Type.TIMESTAMPTZ):
+ return self.sql(exp.Cast(this=func, to=this.type))
+
+ return self.sql(func)
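
Note: per the comment in the new method, ADD_MONTHS coming from dialects that preserve the input type should now be cast back around DuckDB's DATE_ADD. A hedged sketch (it assumes Snowflake's ADD_MONTHS parses to exp.AddMonths; the output shape is illustrative):

    import sqlglot

    sql = "ADD_MONTHS(CAST('2023-01-31' AS DATE), 1)"
    print(sqlglot.transpile(sql, read="snowflake", write="duckdb")[0])
    # Roughly: CAST(DATE_ADD(CAST('2023-01-31' AS DATE), INTERVAL 1 MONTH) AS DATE)
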
sqlglot/dialects/exasol.py CHANGED
@@ -28,6 +28,16 @@ def _sha2_sql(self: Exasol.Generator, expression: exp.SHA2) -> str:
  return self.func(func_name, expression.this)


+ def _date_diff_sql(self: Exasol.Generator, expression: exp.DateDiff | exp.TsOrDsDiff) -> str:
+ unit = expression.text("unit").upper() or "DAY"
+
+ if unit not in DATE_UNITS:
+ self.unsupported(f"'{unit}' is not supported in Exasol.")
+ return self.function_fallback_sql(expression)
+
+ return self.func(f"{unit}S_BETWEEN", expression.this, expression.expression)
+
+
  # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/trunc%5Bate%5D%20(datetime).htm
  # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/trunc%5Bate%5D%20(number).htm
  def _build_trunc(args: t.List[exp.Expression], dialect: DialectType) -> exp.Expression:
@@ -59,6 +69,9 @@ def _build_nullifzero(args: t.List) -> exp.If:
  return exp.If(this=cond, true=exp.Null(), false=seq_get(args, 0))


+ DATE_UNITS = {"DAY", "WEEK", "MONTH", "YEAR", "HOUR", "MINUTE", "SECOND"}
+
+
  class Exasol(Dialect):
  TIME_MAPPING = {
  "yyyy": "%Y",
@@ -96,24 +109,19 @@ class Exasol(Dialect):
  "ENDIF": TokenType.END,
  "LONG VARCHAR": TokenType.TEXT,
  }
+ KEYWORDS.pop("DIV")

  class Parser(parser.Parser):
  FUNCTIONS = {
  **parser.Parser.FUNCTIONS,
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_days.htm
- "ADD_DAYS": build_date_delta(exp.DateAdd, default_unit="DAY"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_years.htm
- "ADD_YEARS": build_date_delta(exp.DateAdd, default_unit="YEAR"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_months.htm
- "ADD_MONTHS": build_date_delta(exp.DateAdd, default_unit="MONTH"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_weeks.htm
- "ADD_WEEKS": build_date_delta(exp.DateAdd, default_unit="WEEK"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_hour.htm
- "ADD_HOURS": build_date_delta(exp.DateAdd, default_unit="HOUR"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_minutes.htm
- "ADD_MINUTES": build_date_delta(exp.DateAdd, default_unit="MINUTE"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/add_seconds.htm
- "ADD_SECONDS": build_date_delta(exp.DateAdd, default_unit="SECOND"),
+ **{
+ f"ADD_{unit}S": build_date_delta(exp.DateAdd, default_unit=unit)
+ for unit in DATE_UNITS
+ },
+ **{
+ f"{unit}S_BETWEEN": build_date_delta(exp.DateDiff, default_unit=unit)
+ for unit in DATE_UNITS
+ },
  "BIT_AND": binary_from_function(exp.BitwiseAnd),
  "BIT_OR": binary_from_function(exp.BitwiseOr),
  "BIT_XOR": binary_from_function(exp.BitwiseXor),
@@ -124,6 +132,7 @@ class Exasol(Dialect):
  "DATE_TRUNC": lambda args: exp.TimestampTrunc(
  this=seq_get(args, 1), unit=seq_get(args, 0)
  ),
+ "DIV": binary_from_function(exp.IntDiv),
  "EVERY": lambda args: exp.All(this=seq_get(args, 0)),
  "EDIT_DISTANCE": exp.Levenshtein.from_arg_list,
  "HASH_SHA": exp.SHA.from_arg_list,
@@ -196,16 +205,6 @@ class Exasol(Dialect):
  exp.DataType.Type.DATETIME: "TIMESTAMP",
  }

- DATE_ADD_FUNCTION_BY_UNIT = {
- "DAY": "ADD_DAYS",
- "WEEK": "ADD_WEEKS",
- "MONTH": "ADD_MONTHS",
- "YEAR": "ADD_YEARS",
- "HOUR": "ADD_HOURS",
- "MINUTE": "ADD_MINUTES",
- "SECOND": "ADD_SECONDS",
- }
-
  def datatype_sql(self, expression: exp.DataType) -> str:
  # Exasol supports a fixed default precision of 3 for TIMESTAMP WITH LOCAL TIME ZONE
  # and does not allow specifying a different custom precision
@@ -230,8 +229,10 @@ class Exasol(Dialect):
  exp.BitwiseRightShift: rename_func("BIT_RSHIFT"),
  # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/bit_xor.htm
  exp.BitwiseXor: rename_func("BIT_XOR"),
- # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/every.htm
- exp.All: rename_func("EVERY"),
+ exp.DateDiff: _date_diff_sql,
+ # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/div.htm#DIV
+ exp.IntDiv: rename_func("DIV"),
+ exp.TsOrDsDiff: _date_diff_sql,
  exp.DateTrunc: lambda self, e: self.func("TRUNC", e.this, unit_to_str(e)),
  exp.DatetimeTrunc: timestamptrunc_sql(),
  # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/edit_distance.htm#EDIT_DISTANCE
@@ -283,6 +284,7 @@ class Exasol(Dialect):
  exp.MD5Digest: rename_func("HASHTYPE_MD5"),
  # https://docs.exasol.com/db/latest/sql/create_view.htm
  exp.CommentColumnConstraint: lambda self, e: f"COMMENT IS {self.sql(e, 'this')}",
+ exp.WeekOfYear: rename_func("WEEK"),
  }

  def converttimezone_sql(self, expression: exp.ConvertTimezone) -> str:
@@ -301,9 +303,8 @@ class Exasol(Dialect):

  def dateadd_sql(self, expression: exp.DateAdd) -> str:
  unit = expression.text("unit").upper() or "DAY"
- func_name = self.DATE_ADD_FUNCTION_BY_UNIT.get(unit)
- if not func_name:
+ if unit not in DATE_UNITS:
  self.unsupported(f"'{unit}' is not supported in Exasol.")
  return self.function_fallback_sql(expression)

- return self.func(func_name, expression.this, expression.expression)
+ return self.func(f"ADD_{unit}S", expression.this, expression.expression)
sqlglot/dialects/mysql.py CHANGED
@@ -709,21 +709,6 @@ class MySQL(Dialect):

  return self.expression(exp.GroupConcat, this=this, separator=separator)

- def _parse_json_value(self) -> exp.JSONValue:
- this = self._parse_bitwise()
- self._match(TokenType.COMMA)
- path = self._parse_bitwise()
-
- returning = self._match(TokenType.RETURNING) and self._parse_type()
-
- return self.expression(
- exp.JSONValue,
- this=this,
- path=self.dialect.to_json_path(path),
- returning=returning,
- on_condition=self._parse_on_condition(),
- )
-
  def _parse_alter_table_alter_index(self) -> exp.AlterIndex:
  index = self._parse_field(any_token=True)

sqlglot/dialects/presto.py CHANGED
@@ -31,7 +31,6 @@ from sqlglot.dialects.dialect import (
  sequence_sql,
  build_regexp_extract,
  explode_to_unnest_sql,
- space_sql,
  )
  from sqlglot.dialects.hive import Hive
  from sqlglot.dialects.mysql import MySQL
@@ -506,7 +505,6 @@ class Presto(Dialect):
  amend_exploded_column_table,
  ]
  ),
- exp.Space: space_sql,
  exp.SortArray: _no_sort_array,
  exp.StrPosition: lambda self, e: strposition_sql(self, e, supports_occurrence=True),
  exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)",
sqlglot/dialects/singlestore.py CHANGED
@@ -2,7 +2,13 @@ from sqlglot import TokenType
  import typing as t

  from sqlglot import exp
- from sqlglot.dialects.dialect import build_formatted_time
+ from sqlglot.dialects.dialect import (
+ build_formatted_time,
+ build_json_extract_path,
+ json_extract_segments,
+ json_path_key_only_name,
+ rename_func,
+ )
  from sqlglot.dialects.mysql import MySQL
  from sqlglot.generator import unsupported_args
  from sqlglot.helper import seq_get
@@ -64,6 +70,29 @@ class SingleStore(MySQL):
  ),
  format=MySQL.format_time(seq_get(args, 1)),
  ),
+ "UNIX_TIMESTAMP": exp.StrToUnix.from_arg_list,
+ "FROM_UNIXTIME": build_formatted_time(exp.UnixToTime, "mysql"),
+ "BSON_EXTRACT_BSON": build_json_extract_path(exp.JSONBExtract),
+ "BSON_EXTRACT_STRING": build_json_extract_path(
+ exp.JSONBExtractScalar, json_type="STRING"
+ ),
+ "BSON_EXTRACT_DOUBLE": build_json_extract_path(
+ exp.JSONBExtractScalar, json_type="DOUBLE"
+ ),
+ "BSON_EXTRACT_BIGINT": build_json_extract_path(
+ exp.JSONBExtractScalar, json_type="BIGINT"
+ ),
+ "JSON_EXTRACT_JSON": build_json_extract_path(exp.JSONExtract),
+ "JSON_EXTRACT_STRING": build_json_extract_path(
+ exp.JSONExtractScalar, json_type="STRING"
+ ),
+ "JSON_EXTRACT_DOUBLE": build_json_extract_path(
+ exp.JSONExtractScalar, json_type="DOUBLE"
+ ),
+ "JSON_EXTRACT_BIGINT": build_json_extract_path(
+ exp.JSONExtractScalar, json_type="BIGINT"
+ ),
+ "DATE": exp.Date.from_arg_list,
  }

  CAST_COLUMN_OPERATORS = {TokenType.COLON_GT, TokenType.NCOLON_GT}
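
Note: the typed BSON_/JSON_EXTRACT_* builders map SingleStore's accessors onto sqlglot's generic JSON expressions, carrying the return type in a json_type arg that the generator methods added further down use to pick the function name on the way back out. A hedged sketch (it assumes build_json_extract_path stores json_type on the node, as the new generator code implies):

    from sqlglot import parse_one

    e = parse_one("JSON_EXTRACT_DOUBLE(payload, 'metrics', 'cpu')", read="singlestore")
    print(type(e).__name__, e.args.get("json_type"))  # JSONExtractScalar DOUBLE
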
@@ -79,9 +108,24 @@ class SingleStore(MySQL):
  this=this,
  to=to,
  ),
+ TokenType.DCOLON: lambda self, this, path: build_json_extract_path(exp.JSONExtract)(
+ [this, exp.Literal.string(path.name)]
+ ),
+ TokenType.DCOLONDOLLAR: lambda self, this, path: build_json_extract_path(
+ exp.JSONExtractScalar, json_type="STRING"
+ )([this, exp.Literal.string(path.name)]),
+ TokenType.DCOLONPERCENT: lambda self, this, path: build_json_extract_path(
+ exp.JSONExtractScalar, json_type="DOUBLE"
+ )([this, exp.Literal.string(path.name)]),
  }

  class Generator(MySQL.Generator):
+ SUPPORTED_JSON_PATH_PARTS = {
+ exp.JSONPathKey,
+ exp.JSONPathRoot,
+ exp.JSONPathSubscript,
+ }
+
  TRANSFORMS = {
  **MySQL.Generator.TRANSFORMS,
  exp.TsOrDsToDate: lambda self, e: self.func("TO_DATE", e.this, self.format_time(e)),
@@ -105,13 +149,61 @@ class SingleStore(MySQL):
  inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
  ),
  ),
+ exp.Date: unsupported_args("zone", "expressions")(rename_func("DATE")),
  exp.Cast: unsupported_args("format", "action", "default")(
  lambda self, e: f"{self.sql(e, 'this')} :> {self.sql(e, 'to')}"
  ),
  exp.TryCast: unsupported_args("format", "action", "default")(
  lambda self, e: f"{self.sql(e, 'this')} !:> {self.sql(e, 'to')}"
  ),
+ exp.StrToUnix: unsupported_args("format")(rename_func("UNIX_TIMESTAMP")),
+ exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
+ exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
+ exp.UnixSeconds: rename_func("UNIX_TIMESTAMP"),
+ exp.UnixToStr: lambda self, e: self.func(
+ "FROM_UNIXTIME",
+ e.this,
+ self.format_time(
+ e,
+ inverse_time_mapping=MySQL.INVERSE_TIME_MAPPING,
+ inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
+ ),
+ ),
+ exp.UnixToTime: unsupported_args("scale", "zone", "hours", "minutes")(
+ lambda self, e: self.func(
+ "FROM_UNIXTIME",
+ e.this,
+ self.format_time(
+ e,
+ inverse_time_mapping=MySQL.INVERSE_TIME_MAPPING,
+ inverse_time_trie=MySQL.INVERSE_TIME_TRIE,
+ ),
+ ),
+ ),
+ exp.UnixToTimeStr: lambda self, e: f"FROM_UNIXTIME({self.sql(e, 'this')}) :> TEXT",
+ exp.JSONExtract: unsupported_args(
+ "only_json_types",
+ "expressions",
+ "variant_extract",
+ "json_query",
+ "option",
+ "quote",
+ "on_condition",
+ "requires_json",
+ )(json_extract_segments("JSON_EXTRACT_JSON")),
+ exp.JSONBExtract: json_extract_segments("BSON_EXTRACT_BSON"),
+ exp.JSONPathKey: json_path_key_only_name,
+ exp.JSONPathSubscript: lambda self, e: self.json_path_part(e.this),
+ exp.JSONPathRoot: lambda *_: "",
  }
+ TRANSFORMS.pop(exp.JSONExtractScalar)
+ TRANSFORMS.pop(exp.JSONPathFilter)
+ TRANSFORMS.pop(exp.JSONPathRecursive)
+ TRANSFORMS.pop(exp.JSONPathScript)
+ TRANSFORMS.pop(exp.JSONPathSelector)
+ TRANSFORMS.pop(exp.JSONPathSlice)
+ TRANSFORMS.pop(exp.JSONPathUnion)
+ TRANSFORMS.pop(exp.JSONPathWildcard)

  # https://docs.singlestore.com/cloud/reference/sql-reference/restricted-keywords/list-of-restricted-keywords/
  RESERVED_KEYWORDS = {
@@ -1166,3 +1258,31 @@ class SingleStore(MySQL):
  "zerofill",
  "zone",
  }
+
+ def jsonextractscalar_sql(self, expression: exp.JSONExtractScalar) -> str:
+ json_type = expression.args.get("json_type")
+ func_name = "JSON_EXTRACT_JSON" if json_type is None else f"JSON_EXTRACT_{json_type}"
+ return json_extract_segments(func_name)(self, expression)
+
+ def jsonbextractscalar_sql(self, expression: exp.JSONBExtractScalar) -> str:
+ json_type = expression.args.get("json_type")
+ func_name = "BSON_EXTRACT_BSON" if json_type is None else f"BSON_EXTRACT_{json_type}"
+ return json_extract_segments(func_name)(self, expression)
+
+ def jsonextractarray_sql(self, expression: exp.JSONExtractArray) -> str:
+ self.unsupported("Arrays are not supported in SingleStore")
+ return self.function_fallback_sql(expression)
+
+ @unsupported_args("on_condition")
+ def jsonvalue_sql(self, expression: exp.JSONValue) -> str:
+ res: exp.Expression = exp.JSONExtractScalar(
+ this=expression.this,
+ expression=expression.args.get("path"),
+ json_type="STRING",
+ )
+
+ returning = expression.args.get("returning")
+ if returning is not None:
+ res = exp.Cast(this=res, to=returning)
+
+ return self.sql(res)
sqlglot/dialects/snowflake.py CHANGED
@@ -461,7 +461,7 @@ def _eliminate_dot_variant_lookup(expression: exp.Expression) -> exp.Expression:
  unnest_alias = unnest.args.get("alias")
  if (
  isinstance(unnest_alias, exp.TableAlias)
- and (unnest_alias.args.get("column_only") or not unnest_alias.this)
+ and not unnest_alias.this
  and len(unnest_alias.columns) == 1
  ):
  unnest_aliases.add(unnest_alias.columns[0].name)
sqlglot/dialects/spark.py CHANGED
@@ -9,6 +9,7 @@ from sqlglot.dialects.dialect import (
  unit_to_var,
  timestampdiff_sql,
  build_date_delta,
+ date_delta_to_binary_interval_op,
  groupconcat_sql,
  )
  from sqlglot.dialects.hive import _build_with_ignore_nulls
@@ -194,13 +195,19 @@ class Spark(Spark2):
  move_partitioned_by_to_schema_columns,
  ]
  ),
+ exp.DateFromUnixDate: rename_func("DATE_FROM_UNIX_DATE"),
+ exp.DatetimeAdd: date_delta_to_binary_interval_op(cast=False),
+ exp.DatetimeSub: date_delta_to_binary_interval_op(cast=False),
  exp.GroupConcat: _groupconcat_sql,
  exp.EndsWith: rename_func("ENDSWITH"),
  exp.PartitionedByProperty: lambda self,
  e: f"PARTITIONED BY {self.wrap(self.expressions(sqls=[_normalize_partition(e) for e in e.this.expressions], skip_first=True))}",
  exp.StartsWith: rename_func("STARTSWITH"),
+ exp.TimeAdd: date_delta_to_binary_interval_op(cast=False),
+ exp.TimeSub: date_delta_to_binary_interval_op(cast=False),
  exp.TsOrDsAdd: _dateadd_sql,
  exp.TimestampAdd: _dateadd_sql,
+ exp.TimestampSub: date_delta_to_binary_interval_op(cast=False),
  exp.DatetimeDiff: timestampdiff_sql,
  exp.TimestampDiff: timestampdiff_sql,
  exp.TryCast: lambda self, e: (
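
Note: Spark's new TimeAdd/TimeSub/DatetimeAdd/DatetimeSub/TimestampSub entries reuse the same date_delta_to_binary_interval_op factory as DuckDB, with cast=False (presumably skipping the cast the old DuckDB helper applied to string operands). A hedged example (exact interval quoting may vary by version):

    import sqlglot

    print(sqlglot.transpile("DATETIME_ADD(ts, INTERVAL 2 HOUR)", read="bigquery", write="spark")[0])
    # Something like: ts + INTERVAL '2' HOUR
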