fakesnow-0.9.5-py3-none-any.whl → fakesnow-0.9.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fakesnow/fakes.py CHANGED
@@ -173,7 +173,7 @@ class FakeSnowflakeCursor:
  .transform(transforms.drop_schema_cascade)
  .transform(transforms.tag)
  .transform(transforms.semi_structured_types)
- .transform(transforms.parse_json)
+ .transform(transforms.try_parse_json)
  # indices_to_json_extract must be before regex_substr
  .transform(transforms.indices_to_json_extract)
  .transform(transforms.json_extract_cast_as_varchar)
@@ -185,6 +185,7 @@ class FakeSnowflakeCursor:
  .transform(transforms.values_columns)
  .transform(transforms.to_date)
  .transform(transforms.to_decimal)
+ .transform(transforms.try_to_decimal)
  .transform(transforms.to_timestamp_ntz)
  .transform(transforms.to_timestamp)
  .transform(transforms.object_construct)
@@ -196,11 +197,20 @@ class FakeSnowflakeCursor:
  .transform(transforms.array_size)
  .transform(transforms.random)
  .transform(transforms.identifier)
+ .transform(transforms.array_agg_within_group)
+ .transform(transforms.array_agg_to_json)
+ .transform(transforms.dateadd_date_cast)
+ .transform(transforms.dateadd_string_literal_timestamp_cast)
+ .transform(transforms.datediff_string_literal_timestamp_cast)
  .transform(lambda e: transforms.show_schemas(e, self._conn.database))
  .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
- .transform(lambda e: transforms.show_primary_keys(e, self._conn.database))
+ # TODO collapse into a single show_keys function
+ .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="PRIMARY"))
+ .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="UNIQUE"))
+ .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="FOREIGN"))
  .transform(transforms.show_users)
  .transform(transforms.create_user)
+ .transform(transforms.sha256)
  )
  sql = transformed.sql(dialect="duckdb")
  result_sql = None
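
The transforms added above are ordinary sqlglot rewrites, chained onto the parsed statement before it is re-emitted as DuckDB SQL. A minimal, hand-applied sketch of the same pattern (module and function names as they appear in this diff; exact output depends on the sqlglot version):

    import sqlglot
    from fakesnow import transforms

    e = sqlglot.parse_one("SELECT TRY_PARSE_JSON('{}')", read="snowflake")
    print(e.transform(transforms.try_parse_json).sql(dialect="duckdb"))
    # expected, per the try_parse_json docstring later in this diff:
    # SELECT TRY_CAST('{}' AS JSON)
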
@@ -614,7 +624,9 @@ class FakeSnowflakeConnection:
  # don't jsonify string
  df[col] = df[col].apply(lambda x: json.dumps(x) if isinstance(x, (dict, list)) else x)

- self._duck_conn.execute(f"INSERT INTO {table_name}({','.join(df.columns.to_list())}) SELECT * FROM df")
+ escaped_cols = ",".join(f'"{col}"' for col in df.columns.to_list())
+ self._duck_conn.execute(f"INSERT INTO {table_name}({escaped_cols}) SELECT * FROM df")
+
  return self._duck_conn.fetchall()[0][0]

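The added quoting matters when a DataFrame column name is a SQL keyword or uses mixed case. A small, self-contained illustration with hypothetical column names:

    cols = ["id", "order"]  # "order" is a reserved word in SQL
    escaped_cols = ",".join(f'"{col}"' for col in cols)
    print(f"INSERT INTO t({escaped_cols}) SELECT * FROM df")
    # INSERT INTO t("id","order") SELECT * FROM df
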
fakesnow/info_schema.py CHANGED
@@ -1,4 +1,5 @@
  """Info schema extension tables/views used for storing snowflake metadata not captured by duckdb."""
+
  from __future__ import annotations

  from string import Template
fakesnow/transforms.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations

  from pathlib import Path
  from string import Template
- from typing import cast
+ from typing import ClassVar, Literal, cast

  import sqlglot
  from sqlglot import exp
@@ -22,6 +22,39 @@ def array_size(expression: exp.Expression) -> exp.Expression:
      return expression


+ def array_agg_to_json(expression: exp.Expression) -> exp.Expression:
+     if isinstance(expression, exp.ArrayAgg):
+         return exp.Anonymous(this="TO_JSON", expressions=[expression])
+
+     return expression
+
+
+ def array_agg_within_group(expression: exp.Expression) -> exp.Expression:
+     """Convert ARRAY_AGG(<expr>) WITHIN GROUP (<order-by-clause>) to ARRAY_AGG(<expr> <order-by-clause>).
+
+     Snowflake uses ARRAY_AGG(<expr>) WITHIN GROUP (ORDER BY <order-by-clause>) to order the array,
+     but DuckDB uses ARRAY_AGG(<expr> <order-by-clause>).
+     See:
+     - https://docs.snowflake.com/en/sql-reference/functions/array_agg
+     - https://duckdb.org/docs/sql/aggregates.html#order-by-clause-in-aggregate-functions
+
+     Note: Snowflake requires that if you specify DISTINCT and WITHIN GROUP, both must refer to the
+     same column. This transformation does not handle that restriction.
+     """
+     if (
+         isinstance(expression, exp.WithinGroup)
+         and (agg := expression.find(exp.ArrayAgg))
+         and (order := expression.expression)
+     ):
+         return exp.ArrayAgg(
+             this=exp.Order(
+                 this=agg.this,
+                 expressions=order.expressions,
+             )
+         )
+
+     return expression
+
+
  # TODO: move this into a Dialect as a transpilation
  def create_database(expression: exp.Expression, db_path: Path | None = None) -> exp.Expression:
      """Transform create database to attach database.
@@ -38,7 +71,8 @@ def create_database(expression: exp.Expression, db_path: Path | None = None) ->
      """

      if isinstance(expression, exp.Create) and str(expression.args.get("kind")).upper() == "DATABASE":
-         assert (ident := expression.find(exp.Identifier)), f"No identifier in {expression.sql}"
+         ident = expression.find(exp.Identifier)
+         assert ident, f"No identifier in {expression.sql}"
          db_name = ident.this
          db_file = f"{db_path/db_name}.db" if db_path else ":memory:"

@@ -135,6 +169,98 @@ def drop_schema_cascade(expression: exp.Expression) -> exp.Expression:
      return new


+ def dateadd_date_cast(expression: exp.Expression) -> exp.Expression:
+     """Cast the result of DATEADD to DATE if the given expression is a cast to DATE
+     and the unit is DAY, WEEK, MONTH or YEAR, to mimic Snowflake's DATEADD behaviour.
+
+     Snowflake:
+         SELECT DATEADD(DAY, 3, '2023-03-03'::DATE) as D;
+         D: 2023-03-06 (DATE)
+     DuckDB:
+         SELECT CAST('2023-03-03' AS DATE) + INTERVAL 3 DAY AS D
+         D: 2023-03-06 00:00:00 (TIMESTAMP)
+     """
+
+     if not isinstance(expression, exp.DateAdd):
+         return expression
+
+     if expression.unit is None:
+         return expression
+
+     if not isinstance(expression.unit.this, str):
+         return expression
+
+     if (unit := expression.unit.this.upper()) and unit.upper() not in {"DAY", "WEEK", "MONTH", "YEAR"}:
+         return expression
+
+     if not isinstance(expression.this, exp.Cast):
+         return expression
+
+     if expression.this.to.this != exp.DataType.Type.DATE:
+         return expression
+
+     return exp.Cast(
+         this=expression,
+         to=exp.DataType(this=exp.DataType.Type.DATE, nested=False, prefix=False),
+     )
+
+
+ def dateadd_string_literal_timestamp_cast(expression: exp.Expression) -> exp.Expression:
+     """Snowflake's DATEADD function implicitly casts string literals to
+     timestamps regardless of unit.
+     """
+     if not isinstance(expression, exp.DateAdd):
+         return expression
+
+     if not isinstance(expression.this, exp.Literal) or not expression.this.is_string:
+         return expression
+
+     new_dateadd = expression.copy()
+     new_dateadd.set(
+         "this",
+         exp.Cast(
+             this=expression.this,
+             # TODO: support TIMESTAMP_TYPE_MAPPING of TIMESTAMP_LTZ/TZ
+             to=exp.DataType(this=exp.DataType.Type.TIMESTAMP, nested=False, prefix=False),
+         ),
+     )
+
+     return new_dateadd
+
+
+ def datediff_string_literal_timestamp_cast(expression: exp.Expression) -> exp.Expression:
+     """Snowflake's DATEDIFF function implicitly casts string literals to
+     timestamps regardless of unit.
+     """
+
+     if not isinstance(expression, exp.DateDiff):
+         return expression
+
+     op1 = expression.this.copy()
+     op2 = expression.expression.copy()
+
+     if isinstance(op1, exp.Literal) and op1.is_string:
+         op1 = exp.Cast(
+             this=op1,
+             # TODO: support TIMESTAMP_TYPE_MAPPING of TIMESTAMP_LTZ/TZ
+             to=exp.DataType(this=exp.DataType.Type.TIMESTAMP, nested=False, prefix=False),
+         )
+
+     if isinstance(op2, exp.Literal) and op2.is_string:
+         op2 = exp.Cast(
+             this=op2,
+             # TODO: support TIMESTAMP_TYPE_MAPPING of TIMESTAMP_LTZ/TZ
+             to=exp.DataType(this=exp.DataType.Type.TIMESTAMP, nested=False, prefix=False),
+         )
+
+     new_datediff = expression.copy()
+     new_datediff.set("this", op1)
+     new_datediff.set("expression", op2)
+
+     return new_datediff
+
+
  def extract_comment_on_columns(expression: exp.Expression) -> exp.Expression:
      """Extract column comments, removing it from the Expression.

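A rough sketch of the string-literal cast described above, applied by hand (assuming sqlglot parses Snowflake's DATEDIFF into exp.DateDiff; exact rendering depends on the sqlglot version):

    import sqlglot
    from fakesnow import transforms

    e = sqlglot.parse_one("SELECT DATEDIFF(DAY, '2023-01-01', '2023-02-01')", read="snowflake")
    print(e.transform(transforms.datediff_string_literal_timestamp_cast).sql(dialect="duckdb"))
    # roughly: SELECT DATE_DIFF('DAY', CAST('2023-01-01' AS TIMESTAMP), CAST('2023-02-01' AS TIMESTAMP))
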
@@ -507,38 +633,26 @@ def object_construct(expression: exp.Expression) -> exp.Expression:
      """

      if isinstance(expression, exp.Struct):
-         # remove expressions containing NULL
-         for enull in expression.find_all(exp.Null):
-             if enull.parent:
-                 enull.parent.pop()
+         non_null_expressions = []
+         for e in expression.expressions:
+             if not (isinstance(e, exp.PropertyEQ)):
+                 non_null_expressions.append(e)
+                 continue

-         return exp.Anonymous(this="TO_JSON", expressions=[expression])
+             left = e.left
+             right = e.right

-     return expression
+             left_is_null = isinstance(left, exp.Null)
+             right_is_null = isinstance(right, exp.Null)

+             if left_is_null or right_is_null:
+                 continue

- def parse_json(expression: exp.Expression) -> exp.Expression:
-     """Convert parse_json() to json().
+             non_null_expressions.append(e)

-     Example:
-         >>> import sqlglot
-         >>> sqlglot.parse_one("insert into table1 (name) select parse_json('{}')").transform(parse_json).sql()
-         "CREATE TABLE table1 (name JSON)"
-     Args:
-         expression (exp.Expression): the expression that will be transformed.
-
-     Returns:
-         exp.Expression: The transformed expression.
-     """
-
-     if (
-         isinstance(expression, exp.Anonymous)
-         and isinstance(expression.this, str)
-         and expression.this.upper() == "PARSE_JSON"
-     ):
-         new = expression.copy()
-         new.args["this"] = "JSON"
-         return new
+         new_struct = expression.copy()
+         new_struct.set("expressions", non_null_expressions)
+         return exp.Anonymous(this="TO_JSON", expressions=[new_struct])

      return expression

@@ -681,55 +795,61 @@ def set_schema(expression: exp.Expression, current_database: str | None) -> exp.
      return expression


- SQL_SHOW_OBJECTS = """
- select
-     to_timestamp(0)::timestamptz as 'created_on',
-     table_name as 'name',
-     case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind',
-     table_catalog as 'database_name',
-     table_schema as 'schema_name'
- from information_schema.tables
- """
-
-
  def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
      """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.

      See https://docs.snowflake.com/en/sql-reference/sql/show-objects
      https://docs.snowflake.com/en/sql-reference/sql/show-tables
      """
-     if (
+     if not (
          isinstance(expression, exp.Show)
          and isinstance(expression.this, str)
-         and expression.this.upper() in ["OBJECTS", "TABLES"]
+         and (show := expression.this.upper())
+         and show in {"OBJECTS", "TABLES"}
      ):
-         scope_kind = expression.args.get("scope_kind")
-         table = expression.find(exp.Table)
-
-         if scope_kind == "DATABASE":
-             catalog = (table and table.name) or current_database
-             schema = None
-         elif scope_kind == "SCHEMA" and table:
-             catalog = table.db or current_database
-             schema = table.name
-         else:
-             # all objects / tables
-             catalog = None
-             schema = None
-
-         tables_only = "table_type = 'BASE TABLE' and " if expression.this.upper() == "TABLES" else ""
-         exclude_fakesnow_tables = "not (table_schema == 'information_schema' and table_name like '_fs_%%')"
-         # without a database will show everything in the "account"
-         table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
-         schema = f" and table_schema = '{schema}'" if schema else ""
-         limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
+         return expression

-         return sqlglot.parse_one(
-             f"{SQL_SHOW_OBJECTS} where {tables_only}{exclude_fakesnow_tables}{table_catalog}{schema}{limit}",
-             read="duckdb",
-         )
-
-     return expression
+     scope_kind = expression.args.get("scope_kind")
+     table = expression.find(exp.Table)
+
+     if scope_kind == "DATABASE":
+         catalog = (table and table.name) or current_database
+         schema = None
+     elif scope_kind == "SCHEMA" and table:
+         catalog = table.db or current_database
+         schema = table.name
+     else:
+         # all objects / tables
+         catalog = None
+         schema = None
+
+     tables_only = "table_type = 'BASE TABLE' and " if show == "TABLES" else ""
+     exclude_fakesnow_tables = "not (table_schema == 'information_schema' and table_name like '_fs_%%')"
+     # without a database will show everything in the "account"
+     table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
+     schema = f" and table_schema = '{schema}'" if schema else ""
+     limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
+
+     columns = [
+         "to_timestamp(0)::timestamptz as 'created_on'",
+         "table_name as 'name'",
+         "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
+         "table_catalog as 'database_name'",
+         "table_schema as 'schema_name'",
+     ]
+
+     terse = expression.args["terse"]
+     if not terse:
+         columns.append('null as "comment"')
+
+     columns_str = ", ".join(columns)
+
+     query = (
+         f"SELECT {columns_str} from information_schema.tables "
+         f"where {tables_only}{exclude_fakesnow_tables}{table_catalog}{schema}{limit}"
+     )
+
+     return sqlglot.parse_one(query, read="duckdb")


  SQL_SHOW_SCHEMAS = """
@@ -822,30 +942,107 @@ def to_date(expression: exp.Expression) -> exp.Expression:
      return expression


+ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Expression | None, exp.Expression | None]:
+     arg_format = e.args.get("format")
+     arg_precision = e.args.get("precision")
+     arg_scale = e.args.get("scale")
+
+     _format = None
+     _precision = None
+     _scale = None
+
+     # to_number(value, <format>, <precision>, <scale>)
+     if arg_format:
+         if arg_format.is_string:
+             # to_number('100', 'TM9' ...)
+             _format = arg_format
+
+             # to_number('100', 'TM9', 10 ...)
+             if arg_precision:
+                 _precision = arg_precision
+
+                 # to_number('100', 'TM9', 10, 2)
+                 if arg_scale:
+                     _scale = arg_scale
+                 else:
+                     pass
+         else:
+             # to_number('100', 10, ...)
+             # arg_format is not a string, so it must be precision.
+             _precision = arg_format
+
+             # to_number('100', 10, 2)
+             # And arg_precision must be scale
+             if arg_precision:
+                 _scale = arg_precision
+     else:
+         # If format is not provided, just check for precision and scale directly
+         if arg_precision:
+             _precision = arg_precision
+         if arg_scale:
+             _scale = arg_scale
+
+     return _format, _precision, _scale
+
+
+ def _to_decimal(expression: exp.Expression, cast_node: type[exp.Cast]) -> exp.Expression:
+     expressions: list[exp.Expression] = expression.expressions
+
+     if len(expressions) > 1 and expressions[1].is_string:
+         # see https://docs.snowflake.com/en/sql-reference/functions/to_decimal#arguments
+         raise NotImplementedError(f"{expression.this} with format argument")
+
+     precision = expressions[1] if len(expressions) > 1 else exp.Literal(this="38", is_string=False)
+     scale = expressions[2] if len(expressions) > 2 else exp.Literal(this="0", is_string=False)
+
+     return cast_node(
+         this=expressions[0],
+         to=exp.DataType(this=exp.DataType.Type.DECIMAL, expressions=[precision, scale], nested=False, prefix=False),
+     )
+
+
  def to_decimal(expression: exp.Expression) -> exp.Expression:
      """Transform to_decimal, to_number, to_numeric expressions from snowflake to duckdb.

      See https://docs.snowflake.com/en/sql-reference/functions/to_decimal
      """

+     if isinstance(expression, exp.ToNumber):
+         format_, precision, scale = _get_to_number_args(expression)
+         if format_:
+             raise NotImplementedError(f"{expression.this} with format argument")
+
+         if not precision:
+             precision = exp.Literal(this="38", is_string=False)
+         if not scale:
+             scale = exp.Literal(this="0", is_string=False)
+
+         return exp.Cast(
+             this=expression.this,
+             to=exp.DataType(this=exp.DataType.Type.DECIMAL, expressions=[precision, scale], nested=False, prefix=False),
+         )
+
      if (
          isinstance(expression, exp.Anonymous)
          and isinstance(expression.this, str)
-         and expression.this.upper() in ["TO_DECIMAL", "TO_NUMBER", "TO_NUMERIC"]
+         and expression.this.upper() in ["TO_DECIMAL", "TO_NUMERIC"]
      ):
-         expressions: list[exp.Expression] = expression.expressions
+         return _to_decimal(expression, exp.Cast)

-         if len(expressions) > 1 and expressions[1].is_string:
-             # see https://docs.snowflake.com/en/sql-reference/functions/to_decimal#arguments
-             raise NotImplementedError(f"{expression.this} with format argument")
+     return expression

-         precision = expressions[1] if len(expressions) > 1 else exp.Literal(this="38", is_string=False)
-         scale = expressions[2] if len(expressions) > 2 else exp.Literal(this="0", is_string=False)

-         return exp.Cast(
-             this=expressions[0],
-             to=exp.DataType(this=exp.DataType.Type.DECIMAL, expressions=[precision, scale], nested=False, prefix=False),
-         )
+ def try_to_decimal(expression: exp.Expression) -> exp.Expression:
+     """Transform try_to_decimal, try_to_number, try_to_numeric expressions from snowflake to duckdb.
+     See https://docs.snowflake.com/en/sql-reference/functions/try_to_decimal
+     """
+
+     if (
+         isinstance(expression, exp.Anonymous)
+         and isinstance(expression.this, str)
+         and expression.this.upper() in ["TRY_TO_DECIMAL", "TRY_TO_NUMBER", "TRY_TO_NUMERIC"]
+     ):
+         return _to_decimal(expression, exp.TryCast)

      return expression

@@ -898,6 +1095,34 @@ def timestamp_ntz_ns(expression: exp.Expression) -> exp.Expression:
      return expression


+ def try_parse_json(expression: exp.Expression) -> exp.Expression:
+     """Convert TRY_PARSE_JSON() to TRY_CAST(... as JSON).
+
+     Example:
+         >>> import sqlglot
+         >>> sqlglot.parse_one("select try_parse_json('{}')").transform(try_parse_json).sql()
+         "SELECT TRY_CAST('{}' AS JSON)"
+     Args:
+         expression (exp.Expression): the expression that will be transformed.
+
+     Returns:
+         exp.Expression: The transformed expression.
+     """
+
+     if (
+         isinstance(expression, exp.Anonymous)
+         and isinstance(expression.this, str)
+         and expression.this.upper() == "TRY_PARSE_JSON"
+     ):
+         expressions = expression.expressions
+         return exp.TryCast(
+             this=expressions[0],
+             to=exp.DataType(this=exp.DataType.Type.JSON, nested=False),
+         )
+
+     return expression
+
+
  # sqlglot.parse_one("create table example(date TIMESTAMP_NTZ(9));", read="snowflake")
  def semi_structured_types(expression: exp.Expression) -> exp.Expression:
      """Convert OBJECT, ARRAY, and VARIANT types to duckdb compatible types.
@@ -999,32 +1224,70 @@ def create_user(expression: exp.Expression) -> exp.Expression:
      return expression


- def show_primary_keys(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
-     """Transform SHOW PRIMARY KEYS to a query against the duckdb_constraints table.
+ def show_keys(
+     expression: exp.Expression,
+     current_database: str | None = None,
+     *,
+     kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
+ ) -> exp.Expression:
+     """Transform SHOW <kind> KEYS to a query against the duckdb_constraints meta-table.

      https://docs.snowflake.com/en/sql-reference/sql/show-primary-keys
      """
+     snowflake_kind = kind
+     if kind == "FOREIGN":
+         snowflake_kind = "IMPORTED"
+
      if (
          isinstance(expression, exp.Show)
          and isinstance(expression.this, str)
-         and expression.this.upper() == "PRIMARY KEYS"
+         and expression.this.upper() == f"{snowflake_kind} KEYS"
      ):
-         statement = f"""
-             SELECT
-                 to_timestamp(0)::timestamptz as created_on,
-                 database_name as database_name,
-                 schema_name as schema_name,
-                 table_name as table_name,
-                 unnest(constraint_column_names) as column_name,
-                 1 as key_sequence,
-                 LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
-                 'false' as rely,
-                 null as comment
-             FROM duckdb_constraints
-             WHERE constraint_type = 'PRIMARY KEY'
-             AND database_name = '{current_database}'
-             AND table_name NOT LIKE '_fs_%'
-             """
+         if kind == "FOREIGN":
+             statement = f"""
+                 SELECT
+                     to_timestamp(0)::timestamptz as created_on,
+
+                     '' as pk_database_name,
+                     '' as pk_schema_name,
+                     '' as pk_table_name,
+                     '' as pk_column_name,
+                     unnest(constraint_column_names) as pk_column_name,
+
+                     database_name as fk_database_name,
+                     schema_name as fk_schema_name,
+                     table_name as fk_table_name,
+                     unnest(constraint_column_names) as fk_column_name,
+                     1 as key_sequence,
+                     'NO ACTION' as update_rule,
+                     'NO ACTION' as delete_rule,
+                     LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS fk_name,
+                     LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS pk_name,
+                     'NOT DEFERRABLE' as deferrability,
+                     'false' as rely,
+                     null as "comment"
+                 FROM duckdb_constraints
+                 WHERE constraint_type = 'PRIMARY KEY'
+                 AND database_name = '{current_database}'
+                 AND table_name NOT LIKE '_fs_%'
+                 """
+         else:
+             statement = f"""
+                 SELECT
+                     to_timestamp(0)::timestamptz as created_on,
+                     database_name as database_name,
+                     schema_name as schema_name,
+                     table_name as table_name,
+                     unnest(constraint_column_names) as column_name,
+                     1 as key_sequence,
+                     LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
+                     'false' as rely,
+                     null as "comment"
+                 FROM duckdb_constraints
+                 WHERE constraint_type = '{kind} KEY'
+                 AND database_name = '{current_database}'
+                 AND table_name NOT LIKE '_fs_%'
+                 """

          scope_kind = expression.args.get("scope_kind")
          if scope_kind:
@@ -1042,3 +1305,48 @@ def show_primary_keys(expression: exp.Expression, current_database: str | None =
              raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
          return sqlglot.parse_one(statement)
      return expression
+
+
+ class SHA256(exp.Func):
+     _sql_names: ClassVar = ["SHA256"]
+     arg_types: ClassVar = {"this": True}
+
+
+ def sha256(expression: exp.Expression) -> exp.Expression:
+     """Convert sha2() or sha2_hex() to sha256().
+
+     Convert sha2_binary() to unhex(sha256()).
+
+     Example:
+         >>> import sqlglot
+         >>> sqlglot.parse_one("insert into table1 (name) select sha2('foo')").transform(sha256).sql()
+         "INSERT INTO table1 (name) SELECT SHA256('foo')"
+     Args:
+         expression (exp.Expression): the expression that will be transformed.
+
+     Returns:
+         exp.Expression: The transformed expression.
+     """
+
+     if isinstance(expression, exp.SHA2) and expression.args.get("length", exp.Literal.number(256)).this == "256":
+         return SHA256(this=expression.this)
+     elif (
+         isinstance(expression, exp.Anonymous)
+         and expression.this.upper() == "SHA2_HEX"
+         and (
+             len(expression.expressions) == 1
+             or (len(expression.expressions) == 2 and expression.expressions[1].this == "256")
+         )
+     ):
+         return SHA256(this=expression.expressions[0])
+     elif (
+         isinstance(expression, exp.Anonymous)
+         and expression.this.upper() == "SHA2_BINARY"
+         and (
+             len(expression.expressions) == 1
+             or (len(expression.expressions) == 2 and expression.expressions[1].this == "256")
+         )
+     ):
+         return exp.Unhex(this=SHA256(this=expression.expressions[0]))
+
+     return expression
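
The SHA256 class above is a small custom sqlglot Func so the rewritten expression renders as a sha256(...) call, which DuckDB provides natively. An illustrative, hand-applied sketch (exact output depends on the sqlglot version):

    import sqlglot
    from fakesnow import transforms

    e = sqlglot.parse_one("SELECT SHA2_HEX('foo')", read="snowflake")
    print(e.transform(transforms.sha256).sql(dialect="duckdb"))
    # roughly: SELECT SHA256('foo')
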
{fakesnow-0.9.5.dist-info → fakesnow-0.9.7.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fakesnow
- Version: 0.9.5
+ Version: 0.9.7
  Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
  License: Apache License
  Version 2.0, January 2004
@@ -213,15 +213,15 @@ License-File: LICENSE
  Requires-Dist: duckdb ~=0.10.0
  Requires-Dist: pyarrow
  Requires-Dist: snowflake-connector-python
- Requires-Dist: sqlglot ~=21.1.0
+ Requires-Dist: sqlglot ~=23.3.0
  Provides-Extra: dev
- Requires-Dist: black ~=23.9 ; extra == 'dev'
  Requires-Dist: build ~=1.0 ; extra == 'dev'
+ Requires-Dist: pandas-stubs ; extra == 'dev'
  Requires-Dist: snowflake-connector-python[pandas,secure-local-storage] ; extra == 'dev'
  Requires-Dist: pre-commit ~=3.4 ; extra == 'dev'
- Requires-Dist: pytest ~=7.4 ; extra == 'dev'
- Requires-Dist: ruff ~=0.1.6 ; extra == 'dev'
- Requires-Dist: twine ~=4.0 ; extra == 'dev'
+ Requires-Dist: pytest ~=8.0 ; extra == 'dev'
+ Requires-Dist: ruff ~=0.3.2 ; extra == 'dev'
+ Requires-Dist: twine ~=5.0 ; extra == 'dev'
  Requires-Dist: snowflake-sqlalchemy ~=1.5.0 ; extra == 'dev'
  Provides-Extra: notebook
  Requires-Dist: duckdb-engine ; extra == 'notebook'
@@ -233,6 +233,7 @@ Requires-Dist: jupysql ; extra == 'notebook'
  [![ci](https://github.com/tekumara/fakesnow/actions/workflows/ci.yml/badge.svg)](https://github.com/tekumara/fakesnow/actions/workflows/ci.yml)
  [![release](https://github.com/tekumara/fakesnow/actions/workflows/release.yml/badge.svg)](https://github.com/tekumara/fakesnow/actions/workflows/release.yml)
  [![PyPI](https://img.shields.io/pypi/v/fakesnow?color=violet)](https://pypi.org/project/fakesnow/)
+ [![PyPI - Downloads](https://img.shields.io/pypi/dm/fakesnow?color=violet)](https://pypi.org/project/fakesnow/)

  Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.

{fakesnow-0.9.5.dist-info → fakesnow-0.9.7.dist-info}/RECORD RENAMED
@@ -3,16 +3,16 @@ fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
  fakesnow/checks.py,sha256=-QMvdcrRbhN60rnzxLBJ0IkUBWyLR8gGGKKmCS0w9mA,2383
  fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
  fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
- fakesnow/fakes.py,sha256=MhZeKSY602Pdl2XMNoHAgs_ZB5ROUJjEWlZlueNUw6Y,28205
+ fakesnow/fakes.py,sha256=3tTPaAC1vBaTLmSG92o51QA0AzIT9XDieYiZsMzvY9M,28929
  fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
  fakesnow/global_database.py,sha256=WTVIP1VhNvdCeX7TQncX1TRpGQU5rBf5Pbxim40zeSU,1399
- fakesnow/info_schema.py,sha256=9UfkGYdjJHTHSk9rbWqxlFUcLGhrS3CyGwP-gkS3A6Q,6265
+ fakesnow/info_schema.py,sha256=CdIcGXHEQ_kmEAzdQKvA-PX41LA6wlK-4p1J45qgKYA,6266
  fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
  fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
- fakesnow/transforms.py,sha256=jYzJ6EeObd-MgNdOKjkFD1usmP8wZGqT6rQd2KrtUMM,38424
- fakesnow-0.9.5.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
- fakesnow-0.9.5.dist-info/METADATA,sha256=R5uWClyMFddD_7wrovBLhw2EkgKzCLdMf8jy6TIe0pE,17719
- fakesnow-0.9.5.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- fakesnow-0.9.5.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
- fakesnow-0.9.5.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
- fakesnow-0.9.5.dist-info/RECORD,,
+ fakesnow/transforms.py,sha256=77hqWLWsZNvi6fLrn-JhIIeDy8CgiJ-zlNIAm8rQLf0,48818
+ fakesnow-0.9.7.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+ fakesnow-0.9.7.dist-info/METADATA,sha256=ISDnq1yQPohGORq0isidKp11g_vWYt37rdtWz2vaoKE,17831
+ fakesnow-0.9.7.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ fakesnow-0.9.7.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+ fakesnow-0.9.7.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
+ fakesnow-0.9.7.dist-info/RECORD,,
{fakesnow-0.9.5.dist-info → fakesnow-0.9.7.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.42.0)
+ Generator: bdist_wheel (0.43.0)
  Root-Is-Purelib: true
  Tag: py3-none-any