fakesnow 0.9.30__py3-none-any.whl → 0.9.32__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fakesnow/arrow.py CHANGED
@@ -62,7 +62,7 @@ def to_ipc(table: pa.Table) -> pa.Buffer:
62
62
 
63
63
 
64
64
  def to_sf(table: pa.Table, rowtype: list[ColumnInfo]) -> pa.Table:
65
- def to_sf_col(col: pa.Array) -> pa.Array:
65
+ def to_sf_col(col: pa.ChunkedArray) -> pa.Array | pa.ChunkedArray:
66
66
  if pa.types.is_timestamp(col.type):
67
67
  return timestamp_to_sf_struct(col)
68
68
  elif pa.types.is_time(col.type):
@@ -83,7 +83,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
83
83
 
84
84
  # Round to seconds, ie: strip subseconds
85
85
  tsa_without_us = pc.floor_temporal(ts, unit="second") # type: ignore https://github.com/zen-xu/pyarrow-stubs/issues/45
86
- epoch = pc.divide(tsa_without_us.cast(pa.int64()), 1_000_000) # type: ignore https://github.com/zen-xu/pyarrow-stubs/issues/44
86
+ epoch = pc.divide(tsa_without_us.cast(pa.int64()), 1_000_000)
87
87
 
88
88
  # Calculate fractional part as nanoseconds
89
89
  fraction = pc.multiply(pc.subsecond(ts), 1_000_000_000).cast(pa.int32()) # type: ignore
@@ -93,7 +93,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
93
93
  timezone = pa.array([1440] * len(ts), type=pa.int32())
94
94
 
95
95
  return pa.StructArray.from_arrays(
96
- arrays=[epoch, fraction, timezone], # type: ignore https://github.com/zen-xu/pyarrow-stubs/issues/42
96
+ arrays=[epoch, fraction, timezone],
97
97
  fields=[
98
98
  pa.field("epoch", nullable=False, type=pa.int64()),
99
99
  pa.field("fraction", nullable=False, type=pa.int32()),
@@ -102,7 +102,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
102
102
  )
103
103
  else:
104
104
  return pa.StructArray.from_arrays(
105
- arrays=[epoch, fraction], # type: ignore https://github.com/zen-xu/pyarrow-stubs/issues/42
105
+ arrays=[epoch, fraction],
106
106
  fields=[
107
107
  pa.field("epoch", nullable=False, type=pa.int64()),
108
108
  pa.field("fraction", nullable=False, type=pa.int32()),
fakesnow/conn.py CHANGED
@@ -62,7 +62,16 @@ class FakeSnowflakeConnection:
62
62
  where upper(catalog_name) = '{self.database}'"""
63
63
  ).fetchone()
64
64
  ):
65
- db_file = f"{self.db_path / self.database}.db" if self.db_path else ":memory:"
65
+ if self.db_path:
66
+ # raise a helpful error message when directory doesn't exist so users don't think
67
+ # they have to create the database themselves
68
+ if not os.path.isdir(self.db_path):
69
+ raise NotADirectoryError(f"No such directory: '{self.db_path}'. Please ensure db_path exists.")
70
+ db_file = f"{self.db_path / self.database}.db"
71
+ else:
72
+ db_file = ":memory:"
73
+
74
+ # creates db file if it doesn't exist
66
75
  duck_conn.execute(f"ATTACH DATABASE '{db_file}' AS {self.database}")
67
76
  duck_conn.execute(info_schema.per_db_creation_sql(self.database))
68
77
  duck_conn.execute(macros.creation_sql(self.database))
@@ -102,9 +111,6 @@ class FakeSnowflakeConnection:
102
111
  duck_conn.execute(f"SET schema='{self.database}.main'")
103
112
  self.database_set = True
104
113
 
105
- # use UTC instead of local time zone for consistent testing
106
- duck_conn.execute("SET GLOBAL TimeZone = 'UTC'")
107
-
108
114
  def __enter__(self) -> Self:
109
115
  return self
110
116
 
fakesnow/cursor.py CHANGED
@@ -14,6 +14,7 @@ import pyarrow # needed by fetch_arrow_table()
14
14
  import snowflake.connector.converter
15
15
  import snowflake.connector.errors
16
16
  import sqlglot
17
+ import sqlglot.errors
17
18
  from duckdb import DuckDBPyConnection
18
19
  from snowflake.connector.cursor import ResultMetadata
19
20
  from snowflake.connector.result_batch import ResultBatch
@@ -155,6 +156,11 @@ class FakeSnowflakeCursor:
155
156
  except snowflake.connector.errors.ProgrammingError as e:
156
157
  self._sqlstate = e.sqlstate
157
158
  raise e
159
+ except sqlglot.errors.ParseError as e:
160
+ self._sqlstate = "42000"
161
+ # strip highlight for better readability, TODO: show pointer to start of error
162
+ msg = str(e).replace("\x1b[4m", "").replace("\x1b[0m", "")
163
+ raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=1003, sqlstate="42000") from None
158
164
 
159
165
  def check_db_and_schema(self, expression: exp.Expression) -> None:
160
166
  no_database, no_schema = checks.is_unqualified_table_expression(expression)
@@ -225,8 +231,10 @@ class FakeSnowflakeCursor:
225
231
  .transform(transforms.show_databases)
226
232
  .transform(transforms.show_functions)
227
233
  .transform(transforms.show_procedures)
234
+ .transform(transforms.show_warehouses)
228
235
  .transform(lambda e: transforms.show_schemas(e, self._conn.database))
229
236
  .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
237
+ .transform(lambda e: transforms.show_columns(e, self._conn.database))
230
238
  # TODO collapse into a single show_keys function
231
239
  .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="PRIMARY"))
232
240
  .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="UNIQUE"))
@@ -254,6 +262,9 @@ class FakeSnowflakeCursor:
254
262
 
255
263
  sql = transformed.sql(dialect="duckdb")
256
264
 
265
+ if not sql:
266
+ raise NotImplementedError(transformed.sql(dialect="snowflake"))
267
+
257
268
  if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
258
269
  sql = f"SELECT setseed({seed}); {sql}"
259
270
 
@@ -287,6 +298,7 @@ class FakeSnowflakeCursor:
287
298
  if set_database := transformed.args.get("set_database"):
288
299
  self._conn.database = set_database
289
300
  self._conn.database_set = True
301
+ self._conn.schema_set = False
290
302
  result_sql = SQL_SUCCESS
291
303
 
292
304
  elif set_schema := transformed.args.get("set_schema"):
fakesnow/instance.py CHANGED
@@ -31,6 +31,9 @@ class FakeSnow:
31
31
  # create the info schema extensions
32
32
  self.duck_conn.execute(info_schema.fs_global_creation_sql(GLOBAL_DATABASE_NAME))
33
33
 
34
+ # use UTC instead of local time zone for consistent testing
35
+ self.duck_conn.execute("SET GLOBAL TimeZone = 'UTC'")
36
+
34
37
  def connect(
35
38
  self, database: str | None = None, schema: str | None = None, **kwargs: Any
36
39
  ) -> fakes.FakeSnowflakeConnection:
fakesnow/server.py CHANGED
@@ -83,6 +83,7 @@ async def query_request(request: Request) -> JSONResponse:
83
83
  rowtype = describe_as_rowtype(cur._describe_last_sql()) # noqa: SLF001
84
84
 
85
85
  except snowflake.connector.errors.ProgrammingError as e:
86
+ logger.info(f"{sql_text=} ProgrammingError {e}")
86
87
  code = f"{e.errno:06d}"
87
88
  return JSONResponse(
88
89
  {
@@ -97,7 +98,7 @@ async def query_request(request: Request) -> JSONResponse:
97
98
  )
98
99
  except Exception as e:
99
100
  # we have a bug or use of an unsupported feature
100
- msg = f"Unhandled error during query {sql_text=}"
101
+ msg = f"{sql_text=} Unhandled exception"
101
102
  logger.error(msg, exc_info=e)
102
103
  # my guess at mimicking a 500 error as per https://docs.snowflake.com/en/developer-guide/sql-api/reference
103
104
  # and https://github.com/snowflakedb/gosnowflake/blob/8ed4c75ffd707dd712ad843f40189843ace683c4/restful.go#L318
@@ -2,70 +2,25 @@ from __future__ import annotations
2
2
 
3
3
  from pathlib import Path
4
4
  from string import Template
5
- from typing import ClassVar, Literal, cast
5
+ from typing import ClassVar, cast
6
6
 
7
7
  import sqlglot
8
8
  from sqlglot import exp
9
9
 
10
- from fakesnow.transforms.merge import merge
10
+ from fakesnow.transforms.merge import merge as merge
11
+ from fakesnow.transforms.show import (
12
+ show_columns as show_columns,
13
+ show_databases as show_databases,
14
+ show_functions as show_functions,
15
+ show_keys as show_keys,
16
+ show_objects_tables as show_objects_tables,
17
+ show_procedures as show_procedures,
18
+ show_schemas as show_schemas,
19
+ show_users as show_users,
20
+ show_warehouses as show_warehouses,
21
+ )
11
22
  from fakesnow.variables import Variables
12
23
 
13
- __all__ = [
14
- "alias_in_join",
15
- "alter_table_strip_cluster_by",
16
- "array_agg",
17
- "array_agg_within_group",
18
- "array_size",
19
- "create_clone",
20
- "create_database",
21
- "create_user",
22
- "dateadd_date_cast",
23
- "dateadd_string_literal_timestamp_cast",
24
- "datediff_string_literal_timestamp_cast",
25
- "drop_schema_cascade",
26
- "extract_comment_on_columns",
27
- "extract_comment_on_table",
28
- "extract_text_length",
29
- "flatten",
30
- "flatten_value_cast_as_varchar",
31
- "float_to_double",
32
- "identifier",
33
- "indices_to_json_extract",
34
- "information_schema_databases",
35
- "information_schema_fs_tables",
36
- "information_schema_fs_views",
37
- "integer_precision",
38
- "json_extract_cased_as_varchar",
39
- "json_extract_cast_as_varchar",
40
- "json_extract_precedence",
41
- "merge",
42
- "object_construct",
43
- "random",
44
- "regex_replace",
45
- "regex_substr",
46
- "sample",
47
- "semi_structured_types",
48
- "set_schema",
49
- "sha256",
50
- "show_keys",
51
- "show_objects_tables",
52
- "show_schemas",
53
- "show_users",
54
- "split",
55
- "tag",
56
- "timestamp_ntz",
57
- "to_date",
58
- "to_decimal",
59
- "to_timestamp",
60
- "to_timestamp_ntz",
61
- "trim_cast_varchar",
62
- "try_parse_json",
63
- "try_to_decimal",
64
- "update_variables",
65
- "upper_case_unquoted_identifiers",
66
- "values_columns",
67
- ]
68
-
69
24
  SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.' as status")
70
25
 
71
26
 
@@ -621,7 +576,7 @@ def identifier(expression: exp.Expression) -> exp.Expression:
621
576
 
622
577
 
623
578
  def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
624
- """Convert indices on objects and arrays to json_extract.
579
+ """Convert indices on objects and arrays to json_extract or json_extract_string
625
580
 
626
581
  Supports Snowflake array indices, see
627
582
  https://docs.snowflake.com/en/sql-reference/data-types-semistructured#accessing-elements-of-an-array-by-index-or-by-slice
@@ -640,12 +595,16 @@ def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
640
595
  and isinstance(index, exp.Literal)
641
596
  and index.this
642
597
  ):
598
+ if isinstance(expression.parent, exp.Cast) and expression.parent.to.this == exp.DataType.Type.VARCHAR:
599
+ # If the parent is a cast to varchar, we need to use JSONExtractScalar
600
+ # to get the unquoted string value.
601
+ klass = exp.JSONExtractScalar
602
+ else:
603
+ klass = exp.JSONExtract
643
604
  if index.is_string:
644
- return exp.JSONExtract(this=expression.this, expression=exp.Literal(this=f"$.{index.this}", is_string=True))
605
+ return klass(this=expression.this, expression=exp.Literal(this=f"$.{index.this}", is_string=True))
645
606
  else:
646
- return exp.JSONExtract(
647
- this=expression.this, expression=exp.Literal(this=f"$[{index.this}]", is_string=True)
648
- )
607
+ return klass(this=expression.this, expression=exp.Literal(this=f"$[{index.this}]", is_string=True))
649
608
 
650
609
  return expression
651
610
 
@@ -720,15 +679,21 @@ def information_schema_fs_views(expression: exp.Expression) -> exp.Expression:
720
679
  return expression
721
680
 
722
681
 
682
+ NUMBER_38_0 = [
683
+ exp.DataTypeParam(this=exp.Literal(this="38", is_string=False)),
684
+ exp.DataTypeParam(this=exp.Literal(this="0", is_string=False)),
685
+ ]
686
+
687
+
723
688
  def integer_precision(expression: exp.Expression) -> exp.Expression:
724
- """Convert integers to bigint.
689
+ """Convert integers and number(38,0) to bigint.
725
690
 
726
- So dataframes will return them with a dtype of int64.
691
+ So fetch_all will return int and dataframes will return them with a dtype of int64.
727
692
  """
728
-
729
693
  if (
730
694
  isinstance(expression, exp.DataType)
731
- and (expression.this == exp.DataType.Type.DECIMAL and not expression.expressions)
695
+ and expression.this == exp.DataType.Type.DECIMAL
696
+ and (not expression.expressions or expression.expressions == NUMBER_38_0)
732
697
  ) or expression.this in (exp.DataType.Type.INT, exp.DataType.Type.SMALLINT, exp.DataType.Type.TINYINT):
733
698
  return exp.DataType(
734
699
  this=exp.DataType.Type.BIGINT,
@@ -836,29 +801,29 @@ def object_construct(expression: exp.Expression) -> exp.Expression:
836
801
  See https://docs.snowflake.com/en/sql-reference/functions/object_construct
837
802
  """
838
803
 
839
- if isinstance(expression, exp.Struct):
840
- non_null_expressions = []
841
- for e in expression.expressions:
842
- if not (isinstance(e, exp.PropertyEQ)):
843
- non_null_expressions.append(e)
844
- continue
804
+ if not isinstance(expression, exp.Struct):
805
+ return expression
845
806
 
846
- left = e.left
847
- right = e.right
807
+ non_null_expressions = []
808
+ for e in expression.expressions:
809
+ if not (isinstance(e, exp.PropertyEQ)):
810
+ non_null_expressions.append(e)
811
+ continue
848
812
 
849
- left_is_null = isinstance(left, exp.Null)
850
- right_is_null = isinstance(right, exp.Null)
813
+ left = e.left
814
+ right = e.right
851
815
 
852
- if left_is_null or right_is_null:
853
- continue
816
+ left_is_null = isinstance(left, exp.Null)
817
+ right_is_null = isinstance(right, exp.Null)
854
818
 
855
- non_null_expressions.append(e)
819
+ if left_is_null or right_is_null:
820
+ continue
856
821
 
857
- new_struct = expression.copy()
858
- new_struct.set("expressions", non_null_expressions)
859
- return exp.Anonymous(this="TO_JSON", expressions=[new_struct])
822
+ non_null_expressions.append(e)
860
823
 
861
- return expression
824
+ new_struct = expression.copy()
825
+ new_struct.set("expressions", non_null_expressions)
826
+ return exp.Anonymous(this="TO_JSON", expressions=[new_struct])
862
827
 
863
828
 
864
829
  def regex_replace(expression: exp.Expression) -> exp.Expression:
@@ -999,204 +964,6 @@ def set_schema(expression: exp.Expression, current_database: str | None) -> exp.
999
964
  return expression
1000
965
 
1001
966
 
1002
- def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
1003
- """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.
1004
-
1005
- See https://docs.snowflake.com/en/sql-reference/sql/show-objects
1006
- https://docs.snowflake.com/en/sql-reference/sql/show-tables
1007
- """
1008
- if not (
1009
- isinstance(expression, exp.Show)
1010
- and isinstance(expression.this, str)
1011
- and (show := expression.this.upper())
1012
- and show in {"OBJECTS", "TABLES"}
1013
- ):
1014
- return expression
1015
-
1016
- scope_kind = expression.args.get("scope_kind")
1017
- table = expression.find(exp.Table)
1018
-
1019
- if scope_kind == "DATABASE":
1020
- catalog = (table and table.name) or current_database
1021
- schema = None
1022
- elif scope_kind == "SCHEMA" and table:
1023
- catalog = table.db or current_database
1024
- schema = table.name
1025
- else:
1026
- # all objects / tables
1027
- catalog = None
1028
- schema = None
1029
-
1030
- tables_only = "table_type = 'BASE TABLE' and " if show == "TABLES" else ""
1031
- exclude_fakesnow_tables = "not (table_schema == '_fs_information_schema')"
1032
- # without a database will show everything in the "account"
1033
- table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
1034
- schema = f" and table_schema = '{schema}'" if schema else ""
1035
- limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
1036
-
1037
- columns = [
1038
- "to_timestamp(0)::timestamptz as 'created_on'",
1039
- "table_name as 'name'",
1040
- "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
1041
- "table_catalog as 'database_name'",
1042
- "table_schema as 'schema_name'",
1043
- ]
1044
-
1045
- terse = expression.args["terse"]
1046
- if not terse:
1047
- columns.append('null as "comment"')
1048
-
1049
- columns_str = ", ".join(columns)
1050
-
1051
- query = (
1052
- f"SELECT {columns_str} from information_schema.tables "
1053
- f"where {tables_only}{exclude_fakesnow_tables}{table_catalog}{schema}{limit}"
1054
- )
1055
-
1056
- return sqlglot.parse_one(query, read="duckdb")
1057
-
1058
-
1059
- SQL_SHOW_SCHEMAS = """
1060
- select
1061
- to_timestamp(0)::timestamptz as 'created_on',
1062
- case
1063
- when schema_name = '_fs_information_schema' then 'information_schema'
1064
- else schema_name
1065
- end as 'name',
1066
- NULL as 'kind',
1067
- catalog_name as 'database_name',
1068
- NULL as 'schema_name'
1069
- from information_schema.schemata
1070
- where not catalog_name in ('memory', 'system', 'temp', '_fs_global')
1071
- and not schema_name in ('main', 'pg_catalog')
1072
- """
1073
-
1074
-
1075
- def show_schemas(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
1076
- """Transform SHOW SCHEMAS to a query against the information_schema.schemata table.
1077
-
1078
- See https://docs.snowflake.com/en/sql-reference/sql/show-schemas
1079
- """
1080
- if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "SCHEMAS":
1081
- if (ident := expression.find(exp.Identifier)) and isinstance(ident.this, str):
1082
- database = ident.this
1083
- else:
1084
- database = current_database
1085
-
1086
- return sqlglot.parse_one(
1087
- f"{SQL_SHOW_SCHEMAS} and catalog_name = '{database}'" if database else SQL_SHOW_SCHEMAS, read="duckdb"
1088
- )
1089
-
1090
- return expression
1091
-
1092
-
1093
- SQL_SHOW_DATABASES = """
1094
- SELECT
1095
- to_timestamp(0)::timestamptz as 'created_on',
1096
- database_name as 'name',
1097
- 'N' as 'is_default',
1098
- 'N' as 'is_current',
1099
- '' as 'origin',
1100
- 'SYSADMIN' as 'owner',
1101
- comment,
1102
- '' as 'options',
1103
- 1 as 'retention_time',
1104
- 'STANDARD' as 'kind',
1105
- NULL as 'budget',
1106
- 'ROLE' as 'owner_role_type',
1107
- NULL as 'object_visibility'
1108
- FROM duckdb_databases
1109
- WHERE database_name NOT IN ('memory', '_fs_global')
1110
- """
1111
-
1112
-
1113
- def show_databases(expression: exp.Expression) -> exp.Expression:
1114
- """Transform SHOW DATABASES to a query against the information_schema.schemata table.
1115
-
1116
- See https://docs.snowflake.com/en/sql-reference/sql/show-databases
1117
- """
1118
- if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "DATABASES":
1119
- return sqlglot.parse_one(SQL_SHOW_DATABASES, read="duckdb")
1120
-
1121
- return expression
1122
-
1123
-
1124
- # returns zero rows
1125
- SQL_SHOW_FUNCTIONS = """
1126
- SELECT
1127
- '1970-01-01 00:00:00 UTC'::timestamptz as created_on,
1128
- 'SYSTIMESTAMP' as name,
1129
- '' as schema_name,
1130
- 'Y' as is_builtin,
1131
- 'N' as is_aggregate,
1132
- 'N' as is_ansi,
1133
- 0 as min_num_arguments,
1134
- 0 as max_num_arguments,
1135
- 'SYSTIMESTAMP() RETURN TIMESTAMP_LTZ' as arguments,
1136
- 'Returns the current timestamp' as description,
1137
- '' as catalog_name,
1138
- 'N' as is_table_function,
1139
- 'N' as valid_for_clustering,
1140
- NULL as is_secure,
1141
- '' as secrets,
1142
- '' as external_access_integrations,
1143
- 'N' as is_external_function,
1144
- 'SQL' as language,
1145
- 'N' as is_memoizable,
1146
- 'N' as is_data_metric
1147
- WHERE 0 = 1;
1148
- """
1149
-
1150
-
1151
- def show_functions(expression: exp.Expression) -> exp.Expression:
1152
- """Transform SHOW FUNCTIONS.
1153
-
1154
- See https://docs.snowflake.com/en/sql-reference/sql/show-functions
1155
- """
1156
- if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "FUNCTIONS":
1157
- return sqlglot.parse_one(SQL_SHOW_FUNCTIONS, read="duckdb")
1158
-
1159
- return expression
1160
-
1161
-
1162
- # returns zero rows
1163
- SQL_SHOW_PROCEDURES = """
1164
- SELECT
1165
- '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
1166
- 'SYSTEM$CLASSIFY' as 'name',
1167
- '' as 'schema_name',
1168
- 'Y' as 'is_builtin',
1169
- 'N' as 'is_aggregate',
1170
- 'N' as 'is_ansi',
1171
- 2 as 'min_num_arguments',
1172
- 2 as 'max_num_arguments',
1173
- 'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
1174
- 'classify stored proc' as 'description',
1175
- '' as 'catalog_name',
1176
- 'N' as 'is_table_function',
1177
- 'N' as 'valid_for_clustering',
1178
- NULL as 'is_secure',
1179
- '' as 'secrets',
1180
- '' as 'external_access_integrations',
1181
- WHERE 0 = 1;
1182
- """
1183
-
1184
-
1185
- def show_procedures(expression: exp.Expression) -> exp.Expression:
1186
- """Transform SHOW PROCEDURES.
1187
-
1188
- See https://docs.snowflake.com/en/sql-reference/sql/show-procedures
1189
- """
1190
- if (
1191
- isinstance(expression, exp.Show)
1192
- and isinstance(expression.this, str)
1193
- and expression.this.upper() == "PROCEDURES"
1194
- ):
1195
- return sqlglot.parse_one(SQL_SHOW_PROCEDURES, read="duckdb")
1196
-
1197
- return expression
1198
-
1199
-
1200
967
  def split(expression: exp.Expression) -> exp.Expression:
1201
968
  """
1202
969
  Convert output of duckdb str_split from varchar[] to JSON array to match Snowflake.
@@ -1296,8 +1063,6 @@ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Exp
1296
1063
  # to_number('100', 'TM9', 10, 2)
1297
1064
  if arg_scale:
1298
1065
  _scale = arg_scale
1299
- else:
1300
- pass
1301
1066
  else:
1302
1067
  # to_number('100', 10, ...)
1303
1068
  # arg_format is not a string, so it must be precision.
@@ -1307,12 +1072,10 @@ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Exp
1307
1072
  # And arg_precision must be scale
1308
1073
  if arg_precision:
1309
1074
  _scale = arg_precision
1310
- else:
1311
- # If format is not provided, just check for precision and scale directly
1312
- if arg_precision:
1313
- _precision = arg_precision
1314
- if arg_scale:
1315
- _scale = arg_scale
1075
+ elif arg_precision:
1076
+ _precision = arg_precision
1077
+ if arg_scale:
1078
+ _scale = arg_scale
1316
1079
 
1317
1080
  return _format, _precision, _scale
1318
1081
 
@@ -1537,17 +1300,6 @@ def values_columns(expression: exp.Expression) -> exp.Expression:
1537
1300
  return expression
1538
1301
 
1539
1302
 
1540
- def show_users(expression: exp.Expression) -> exp.Expression:
1541
- """Transform SHOW USERS to a query against the global database's information_schema._fs_users table.
1542
-
1543
- https://docs.snowflake.com/en/sql-reference/sql/show-users
1544
- """
1545
- if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "USERS":
1546
- return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users_ext", read="duckdb")
1547
-
1548
- return expression
1549
-
1550
-
1551
1303
  def create_user(expression: exp.Expression) -> exp.Expression:
1552
1304
  """Transform CREATE USER to a query against the global database's information_schema._fs_users table.
1553
1305
 
@@ -1568,94 +1320,6 @@ def create_user(expression: exp.Expression) -> exp.Expression:
1568
1320
  return expression
1569
1321
 
1570
1322
 
1571
- def show_keys(
1572
- expression: exp.Expression,
1573
- current_database: str | None = None,
1574
- *,
1575
- kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
1576
- ) -> exp.Expression:
1577
- """Transform SHOW <kind> KEYS to a query against the duckdb_constraints meta-table.
1578
-
1579
- https://docs.snowflake.com/en/sql-reference/sql/show-primary-keys
1580
- """
1581
- snowflake_kind = kind
1582
- if kind == "FOREIGN":
1583
- snowflake_kind = "IMPORTED"
1584
-
1585
- if (
1586
- isinstance(expression, exp.Show)
1587
- and isinstance(expression.this, str)
1588
- and expression.this.upper() == f"{snowflake_kind} KEYS"
1589
- ):
1590
- if kind == "FOREIGN":
1591
- statement = f"""
1592
- SELECT
1593
- to_timestamp(0)::timestamptz as created_on,
1594
-
1595
- '' as pk_database_name,
1596
- '' as pk_schema_name,
1597
- '' as pk_table_name,
1598
- '' as pk_column_name,
1599
- unnest(constraint_column_names) as pk_column_name,
1600
-
1601
- database_name as fk_database_name,
1602
- schema_name as fk_schema_name,
1603
- table_name as fk_table_name,
1604
- unnest(constraint_column_names) as fk_column_name,
1605
- 1 as key_sequence,
1606
- 'NO ACTION' as update_rule,
1607
- 'NO ACTION' as delete_rule,
1608
- LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS fk_name,
1609
- LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS pk_name,
1610
- 'NOT DEFERRABLE' as deferrability,
1611
- 'false' as rely,
1612
- null as "comment"
1613
- FROM duckdb_constraints
1614
- WHERE constraint_type = 'PRIMARY KEY'
1615
- AND database_name = '{current_database}'
1616
- AND table_name NOT LIKE '_fs_%'
1617
- """
1618
- else:
1619
- statement = f"""
1620
- SELECT
1621
- to_timestamp(0)::timestamptz as created_on,
1622
- database_name as database_name,
1623
- schema_name as schema_name,
1624
- table_name as table_name,
1625
- unnest(constraint_column_names) as column_name,
1626
- 1 as key_sequence,
1627
- LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
1628
- 'false' as rely,
1629
- null as "comment"
1630
- FROM duckdb_constraints
1631
- WHERE constraint_type = '{kind} KEY'
1632
- AND database_name = '{current_database}'
1633
- AND table_name NOT LIKE '_fs_%'
1634
- """
1635
-
1636
- scope_kind = expression.args.get("scope_kind")
1637
- if scope_kind:
1638
- table = expression.args["scope"]
1639
-
1640
- if scope_kind == "SCHEMA":
1641
- db = table and table.db
1642
- schema = table and table.name
1643
- if db:
1644
- statement += f"AND database_name = '{db}' "
1645
-
1646
- if schema:
1647
- statement += f"AND schema_name = '{schema}' "
1648
- elif scope_kind == "TABLE":
1649
- if not table:
1650
- raise ValueError(f"SHOW PRIMARY KEYS with {scope_kind} scope requires a table")
1651
-
1652
- statement += f"AND table_name = '{table.name}' "
1653
- else:
1654
- raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
1655
- return sqlglot.parse_one(statement)
1656
- return expression
1657
-
1658
-
1659
1323
  def update_variables(
1660
1324
  expression: exp.Expression,
1661
1325
  variables: Variables,
@@ -0,0 +1,476 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Literal
4
+
5
+ import sqlglot
6
+ from sqlglot import exp
7
+
8
+
9
def show_columns(
    expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
) -> exp.Expression:
    """Transform SHOW COLUMNS to a query against the fs global information_schema columns table.

    See https://docs.snowflake.com/en/sql-reference/sql/show-columns

    Args:
        expression: Parsed statement; returned unchanged unless it is SHOW COLUMNS.
        current_database: Fallback database when the scope doesn't specify one.
        current_schema: Fallback schema when the scope doesn't specify one.

    Returns:
        A duckdb SELECT over _fs_global._fs_information_schema._fs_columns,
        or the original expression when it isn't SHOW COLUMNS.

    Raises:
        NotImplementedError: For scope kinds other than ACCOUNT/DATABASE/SCHEMA/TABLE/VIEW.
    """
    if not (
        isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "COLUMNS"
    ):
        return expression

    scope_kind = expression.args.get("scope_kind")
    table = expression.find(exp.Table)

    # Derive the catalog/schema/table filters from the scope, widest scope first.
    # `table` is reused: it starts as the scope's exp.Table node and ends up as
    # either None or the table-name string actually used in the WHERE clause.
    if scope_kind == "ACCOUNT" or not scope_kind:
        # all columns
        catalog = None
        schema = None
        table = None
    elif scope_kind == "DATABASE" and table:
        catalog = table.name
        schema = None
        table = None
    elif scope_kind == "SCHEMA" and table:
        catalog = table.db or current_database
        schema = table.name
        table = None
    elif scope_kind in ("TABLE", "VIEW") and table:
        catalog = table.catalog or current_database
        schema = table.db or current_schema
        table = table.name
    else:
        raise NotImplementedError(f"show_object_columns: {expression.sql(dialect='snowflake')}")

    # data_type is rendered as a JSON string mimicking Snowflake's wire format;
    # {{ }} are literal braces escaped for the f-string.
    query = f"""
    SELECT
        table_name,
        table_schema as "schema_name",
        column_name,
        CASE
            WHEN data_type = 'NUMBER' THEN '{{"type":"FIXED","precision":'|| numeric_precision || ',"scale":' || numeric_scale || ',"nullable":true}}'
            WHEN data_type = 'TEXT' THEN '{{"type":"TEXT","length":' || coalesce(character_maximum_length,16777216) || ',"byteLength":' || CASE WHEN character_maximum_length = 16777216 THEN 16777216 ELSE coalesce(character_maximum_length*4,16777216) END || ',"nullable":true,"fixed":false}}'
            WHEN data_type in ('TIMESTAMP_NTZ','TIMESTAMP_TZ','TIME') THEN '{{"type":"' || data_type || '","precision":0,"scale":9,"nullable":true}}'
            WHEN data_type = 'FLOAT' THEN '{{"type":"REAL","nullable":true}}'
            WHEN data_type = 'BINARY' THEN '{{"type":"BINARY","length":8388608,"byteLength":8388608,"nullable":true,"fixed":true}}'
            ELSE '{{"type":"' || data_type || '","nullable":true}}'
        END as "data_type",
        CASE WHEN is_nullable = 'YES' THEN 'true' ELSE 'false' END as "null?",
        COALESCE(column_default, '') as "default",
        'COLUMN' as "kind",
        '' as "expression",
        COALESCE(comment, '') as "comment",
        table_catalog as "database_name",
        '' as "autoincrement",
        NULL as "schema_evolution_record"
    FROM _fs_global._fs_information_schema._fs_columns c
    WHERE 1=1
    {f"AND table_catalog = '{catalog}'" if catalog else ""}
    {f"AND table_schema = '{schema}'" if schema else ""}
    {f"AND table_name = '{table}'" if table else ""}
    ORDER BY table_name, ordinal_position
    """  # noqa: E501

    return sqlglot.parse_one(query, read="duckdb")
74
+
75
+
76
# Mimics the column set of Snowflake's SHOW DATABASES using duckdb_databases,
# with static placeholder values for attributes fakesnow doesn't track.
# duckdb's default 'memory' database and fakesnow's internal '_fs_global'
# database are excluded from the listing.
SQL_SHOW_DATABASES = """
SELECT
    to_timestamp(0)::timestamptz as 'created_on',
    database_name as 'name',
    'N' as 'is_default',
    'N' as 'is_current',
    '' as 'origin',
    'SYSADMIN' as 'owner',
    comment,
    '' as 'options',
    1 as 'retention_time',
    'STANDARD' as 'kind',
    NULL as 'budget',
    'ROLE' as 'owner_role_type',
    NULL as 'object_visibility'
FROM duckdb_databases
WHERE database_name NOT IN ('memory', '_fs_global')
"""
94
+
95
+
96
def show_databases(expression: exp.Expression) -> exp.Expression:
    """Transform SHOW DATABASES to a query against the information_schema.schemata table.

    See https://docs.snowflake.com/en/sql-reference/sql/show-databases

    Any other statement is passed through untouched.
    """
    is_show_databases = (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and expression.this.upper() == "DATABASES"
    )
    if not is_show_databases:
        return expression

    return sqlglot.parse_one(SQL_SHOW_DATABASES, read="duckdb")
105
+
106
+
107
# Returns zero rows: fakesnow doesn't track functions, but this template
# (a single dummy SYSTIMESTAMP row filtered out by WHERE 0 = 1) gives
# clients the correct column names and types for SHOW FUNCTIONS.
SQL_SHOW_FUNCTIONS = """
SELECT
    '1970-01-01 00:00:00 UTC'::timestamptz as created_on,
    'SYSTIMESTAMP' as name,
    '' as schema_name,
    'Y' as is_builtin,
    'N' as is_aggregate,
    'N' as is_ansi,
    0 as min_num_arguments,
    0 as max_num_arguments,
    'SYSTIMESTAMP() RETURN TIMESTAMP_LTZ' as arguments,
    'Returns the current timestamp' as description,
    '' as catalog_name,
    'N' as is_table_function,
    'N' as valid_for_clustering,
    NULL as is_secure,
    '' as secrets,
    '' as external_access_integrations,
    'N' as is_external_function,
    'SQL' as language,
    'N' as is_memoizable,
    'N' as is_data_metric
WHERE 0 = 1;
"""
132
+
133
+
134
def show_functions(expression: exp.Expression) -> exp.Expression:
    """Transform SHOW FUNCTIONS into a canned, zero-row result set.

    See https://docs.snowflake.com/en/sql-reference/sql/show-functions

    Any other statement is passed through untouched.
    """
    is_show_functions = (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and expression.this.upper() == "FUNCTIONS"
    )
    if not is_show_functions:
        return expression

    return sqlglot.parse_one(SQL_SHOW_FUNCTIONS, read="duckdb")
143
+
144
+
145
# Returns a single static FAKESNOW_WAREHOUSE row. NB: unlike SQL_SHOW_FUNCTIONS /
# SQL_SHOW_PROCEDURES there is no WHERE 0 = 1 here, so this does NOT return zero
# rows - clients see one fake warehouse.
SQL_SHOW_WAREHOUSES = """
SELECT
    'FAKESNOW_WAREHOUSE' as name,
    'STARTED' as state,
    'STANDARD' as type,
    'X-Small' as size,
    1 as min_cluster_count,
    1 as max_cluster_count,
    1 as started_clusters,
    0 as running,
    0 as queued,
    'N' as is_default,
    'N' as is_current,
    600 as auto_suspend,
    'true' as auto_resume,
    -- nb: deliberate space before '100' to match Snowflake's output
    ' 100' as available,
    '0' as provisioning,
    '0' as quiescing,
    '0' as other,
    '1970-01-01 00:00:00.000000 UTC'::timestamptz as created_on,
    '1970-01-01 00:00:00.000000 UTC'::timestamptz as resumed_on,
    '1970-01-01 00:00:00.000000 UTC'::timestamptz as updated_on,
    'SYSADMIN' as owner,
    '' as comment,
    'false' as enable_query_acceleration,
    8 as query_acceleration_max_scale_factor,
    'null' as resource_monitor,

    -- deprecated - these 5 cols are for internal use
    0 as actives,
    0 as pendings,
    0 as failed,
    0 as suspended,
    '123456789012' as uuid,

    'STANDARD' as scaling_policy,
    NULL as budget,
    'ROLE' as owner_role_type,
    NULL as resource_constraint;
"""
187
+
188
+
189
def show_warehouses(expression: exp.Expression) -> exp.Expression:
    """Transform SHOW WAREHOUSES into a canned single-warehouse result.

    See https://docs.snowflake.com/en/sql-reference/sql/show-warehouses

    Any other statement is passed through untouched.
    """
    is_show_warehouses = (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and expression.this.upper() == "WAREHOUSES"
    )
    if not is_show_warehouses:
        return expression

    return sqlglot.parse_one(SQL_SHOW_WAREHOUSES, read="duckdb")
202
+
203
+
204
def show_keys(
    expression: exp.Expression,
    current_database: str | None = None,
    *,
    kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
) -> exp.Expression:
    """Transform SHOW <kind> KEYS to a query against the duckdb_constraints meta-table.

    https://docs.snowflake.com/en/sql-reference/sql/show-primary-keys

    Args:
        expression: Parsed statement; returned unchanged unless it is the matching SHOW.
        current_database: Database whose constraints are listed.
        kind: Which constraint type to list.

    Returns:
        A SELECT over duckdb_constraints, or the original expression.

    Raises:
        ValueError: TABLE scope given without a table.
        NotImplementedError: Unsupported scope kinds.
    """
    # Snowflake spells SHOW FOREIGN KEYS as SHOW IMPORTED KEYS
    snowflake_kind = kind
    if kind == "FOREIGN":
        snowflake_kind = "IMPORTED"

    if (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and expression.this.upper() == f"{snowflake_kind} KEYS"
    ):
        if kind == "FOREIGN":
            # Foreign keys are faked from each table's PRIMARY KEY constraint, so
            # the pk_database/schema/table columns are empty placeholders.
            # Fix: pk_column_name was previously selected twice (once as '' and
            # once via unnest), producing a duplicate column in the result set;
            # only the unnest projection is kept.
            statement = f"""
            SELECT
                to_timestamp(0)::timestamptz as created_on,

                '' as pk_database_name,
                '' as pk_schema_name,
                '' as pk_table_name,
                unnest(constraint_column_names) as pk_column_name,

                database_name as fk_database_name,
                schema_name as fk_schema_name,
                table_name as fk_table_name,
                unnest(constraint_column_names) as fk_column_name,
                1 as key_sequence,
                'NO ACTION' as update_rule,
                'NO ACTION' as delete_rule,
                LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS fk_name,
                LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS pk_name,
                'NOT DEFERRABLE' as deferrability,
                'false' as rely,
                null as "comment"
            FROM duckdb_constraints
            WHERE constraint_type = 'PRIMARY KEY'
            AND database_name = '{current_database}'
            AND table_name NOT LIKE '_fs_%'
            """
        else:
            # NOTE(review): duckdb_constraints reports unique constraints with
            # constraint_type 'UNIQUE', so '{kind} KEY' = 'UNIQUE KEY' may match
            # no rows for kind="UNIQUE" - confirm against duckdb's metadata.
            statement = f"""
            SELECT
                to_timestamp(0)::timestamptz as created_on,
                database_name as database_name,
                schema_name as schema_name,
                table_name as table_name,
                unnest(constraint_column_names) as column_name,
                1 as key_sequence,
                LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
                'false' as rely,
                null as "comment"
            FROM duckdb_constraints
            WHERE constraint_type = '{kind} KEY'
            AND database_name = '{current_database}'
            AND table_name NOT LIKE '_fs_%'
            """

        # Narrow by scope, appending extra AND clauses to the statement.
        if scope_kind := expression.args.get("scope_kind"):
            table = expression.args["scope"]

            if scope_kind == "SCHEMA":
                db = table and table.db
                schema = table and table.name
                if db:
                    statement += f"AND database_name = '{db}' "

                if schema:
                    statement += f"AND schema_name = '{schema}' "
            elif scope_kind == "TABLE":
                if not table:
                    raise ValueError(f"SHOW PRIMARY KEYS with {scope_kind} scope requires a table")

                statement += f"AND table_name = '{table.name}' "
            else:
                raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
        return sqlglot.parse_one(statement)
    return expression
289
+
290
+
291
def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
    """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.

    See https://docs.snowflake.com/en/sql-reference/sql/show-objects
        https://docs.snowflake.com/en/sql-reference/sql/show-tables

    Args:
        expression: Parsed statement; returned unchanged unless it is SHOW OBJECTS/TABLES.
        current_database: Fallback database when the scope doesn't specify one.

    Returns:
        A duckdb SELECT over information_schema.tables, or the original expression.
    """
    if not (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and (show := expression.this.upper())
        and show in {"OBJECTS", "TABLES"}
    ):
        return expression

    scope_kind = expression.args.get("scope_kind")
    table = expression.find(exp.Table)

    # Derive catalog/schema filters from the scope.
    if scope_kind == "DATABASE":
        catalog = (table and table.name) or current_database
        schema = None
    elif scope_kind == "SCHEMA" and table:
        catalog = table.db or current_database
        schema = table.name
    else:
        # all objects / tables - will show everything in the "account"
        catalog = None
        schema = None

    # Column order matters: it must match Snowflake's output for each command.
    columns = [
        "to_timestamp(0)::timestamptz as 'created_on'",
        "table_name as 'name'",
        "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
        "table_catalog as 'database_name'",
        "table_schema as 'schema_name'",
    ]
    # Fix: use .get() so a Show node without a "terse" arg behaves like TERSE=False
    # instead of raising KeyError.
    if not expression.args.get("terse"):
        # Both SHOW OBJECTS and SHOW TABLES share the same first six non-terse columns.
        columns.extend(
            [
                "'' as 'comment'",
                "'' as 'cluster_by'",
                # TODO: implement rows and bytes as rows * 1024
                "0 as 'rows'",
                "0 as 'bytes'",
                "'SYSADMIN' as 'owner'",
                "1 as 'retention_time'",
            ]
        )
        if show == "OBJECTS":
            columns.extend(
                [
                    "'ROLE' as 'owner_role_type'",
                    "null as 'budget'",
                    "'N' as 'is_hybrid'",
                    "'N' as 'is_dynamic'",
                ]
            )
        else:
            # show == "TABLES"
            columns.extend(
                [
                    "'OFF' as 'automatic_clustering'",
                    "'OFF' as 'change_tracking'",
                    "'OFF' as 'search_optimization'",
                    "null as 'search_optimization_progress'",
                    "null as 'search_optimization_bytes'",
                    "'N' as 'is_external'",
                    "'N' as 'enable_schema_evolution'",
                    "'ROLE' as 'owner_role_type'",
                    "'N' as 'is_event'",
                    "null as 'budget'",
                    "'N' as 'is_hybrid'",
                    "'N' as 'is_iceberg'",
                    "'N' as 'is_dynamic'",
                    "'N' as 'is_immutable'",
                ]
            )

    columns_clause = ", ".join(columns)

    where = ["not (table_schema == '_fs_information_schema')"]  # exclude fakesnow's internal schemas
    if show == "TABLES":
        # SHOW TABLES lists only tables; SHOW OBJECTS includes views too.
        where.append("table_type = 'BASE TABLE'")
    if catalog:
        where.append(f"table_catalog = '{catalog}'")
    if schema:
        where.append(f"table_schema = '{schema}'")
    if (like := expression.args.get("like")) and isinstance(like, exp.Expression):
        where.append(f"table_name ilike {like.sql()}")
    where_clause = " AND ".join(where)

    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""

    query = f"""
    SELECT {columns_clause}
    from information_schema.tables
    where {where_clause}
    {limit}
    """

    return sqlglot.parse_one(query, read="duckdb")
394
+
395
+
396
# Returns zero rows: fakesnow doesn't track procedures, but this template
# (a single dummy SYSTEM$CLASSIFY row filtered out by WHERE 0 = 1) gives
# clients the correct column names and types for SHOW PROCEDURES.
# Fix: removed the trailing comma after the last select item
# ('external_access_integrations'), which made the statement invalid SQL.
SQL_SHOW_PROCEDURES = """
SELECT
    '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
    'SYSTEM$CLASSIFY' as 'name',
    '' as 'schema_name',
    'Y' as 'is_builtin',
    'N' as 'is_aggregate',
    'N' as 'is_ansi',
    2 as 'min_num_arguments',
    2 as 'max_num_arguments',
    'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
    'classify stored proc' as 'description',
    '' as 'catalog_name',
    'N' as 'is_table_function',
    'N' as 'valid_for_clustering',
    NULL as 'is_secure',
    '' as 'secrets',
    '' as 'external_access_integrations'
WHERE 0 = 1;
"""
417
+
418
+
419
def show_procedures(expression: exp.Expression) -> exp.Expression:
    """Transform SHOW PROCEDURES into a canned, zero-row result set.

    See https://docs.snowflake.com/en/sql-reference/sql/show-procedures

    Any other statement is passed through untouched.
    """
    is_show_procedures = (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and expression.this.upper() == "PROCEDURES"
    )
    if not is_show_procedures:
        return expression

    return sqlglot.parse_one(SQL_SHOW_PROCEDURES, read="duckdb")
432
+
433
+
434
# Mimics Snowflake's SHOW SCHEMAS using information_schema.schemata.
# fakesnow's internal '_fs_information_schema' schema is presented to clients
# under its Snowflake name 'information_schema'; duckdb's built-in catalogs
# and schemas (plus the internal '_fs_global' catalog) are hidden.
# NB: deliberately has no trailing newline inside the clause so callers can
# append "and catalog_name = ..." (see show_schemas).
SQL_SHOW_SCHEMAS = """
select
    to_timestamp(0)::timestamptz as 'created_on',
    case
        when schema_name = '_fs_information_schema' then 'information_schema'
        else schema_name
    end as 'name',
    NULL as 'kind',
    catalog_name as 'database_name',
    NULL as 'schema_name'
from information_schema.schemata
where not catalog_name in ('memory', 'system', 'temp', '_fs_global')
and not schema_name in ('main', 'pg_catalog')
"""
448
+
449
+
450
def show_schemas(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
    """Transform SHOW SCHEMAS to a query against the information_schema.schemata table.

    See https://docs.snowflake.com/en/sql-reference/sql/show-schemas

    An explicit database in the statement takes precedence over
    current_database; with neither, schemas of all databases are listed.
    """
    if not (
        isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "SCHEMAS"
    ):
        return expression

    ident = expression.find(exp.Identifier)
    database = ident.this if ident and isinstance(ident.this, str) else current_database

    sql = f"{SQL_SHOW_SCHEMAS} and catalog_name = '{database}'" if database else SQL_SHOW_SCHEMAS
    return sqlglot.parse_one(sql, read="duckdb")
466
+
467
+
468
def show_users(expression: exp.Expression) -> exp.Expression:
    """Transform SHOW USERS to a query against the global database's information_schema._fs_users table.

    https://docs.snowflake.com/en/sql-reference/sql/show-users

    Any other statement is passed through untouched.
    """
    is_show_users = (
        isinstance(expression, exp.Show)
        and isinstance(expression.this, str)
        and expression.this.upper() == "USERS"
    )
    if not is_show_users:
        return expression

    return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users_ext", read="duckdb")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: fakesnow
3
- Version: 0.9.30
3
+ Version: 0.9.32
4
4
  Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
5
5
  License: Apache License
6
6
  Version 2.0, January 2004
@@ -220,7 +220,7 @@ Requires-Dist: dirty-equals; extra == "dev"
220
220
  Requires-Dist: pandas-stubs; extra == "dev"
221
221
  Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
222
222
  Requires-Dist: pre-commit~=4.0; extra == "dev"
223
- Requires-Dist: pyarrow-stubs==10.0.1.9; extra == "dev"
223
+ Requires-Dist: pyarrow-stubs==17.19; extra == "dev"
224
224
  Requires-Dist: pytest~=8.0; extra == "dev"
225
225
  Requires-Dist: pytest-asyncio; extra == "dev"
226
226
  Requires-Dist: ruff~=0.11.0; extra == "dev"
@@ -1,26 +1,28 @@
1
1
  fakesnow/__init__.py,sha256=qUfgucQYPdELrJaxczalhJgWAWQ6cfTCUAHx6nUqRaI,3528
2
2
  fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
3
- fakesnow/arrow.py,sha256=MwatkdZX5AFADzXvxhBFmcRJVxbW4D39VoqLyhpTbl0,5057
3
+ fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
4
4
  fakesnow/checks.py,sha256=be-xo0oMoAUVhlMDCu1_Rkoh_L8p_p8qo9P6reJSHIQ,2874
5
5
  fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
6
- fakesnow/conn.py,sha256=HGhFKErKWvAfVEy3QSc0tfNmzGh_T7FtvRfWuDBy_CQ,5744
7
- fakesnow/cursor.py,sha256=KP8aDhq_m10ibpLjiL4retcwUCh_8PsY5sZfEFY_3No,20970
6
+ fakesnow/conn.py,sha256=2WClMmUgfQkQA2hFQjfMP3R-85TbTbZh_8Y1tCdcerA,6053
7
+ fakesnow/cursor.py,sha256=Nvr8TQmmTFs6i0sJwfgCocrEF9td0D0SdDG41quIudI,21621
8
8
  fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
9
9
  fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
10
10
  fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
11
11
  fakesnow/info_schema.py,sha256=xDhGy07fpc8bcy_VTfh54UzwNIaB4ZhGmjgJeoiZ0hQ,8744
12
- fakesnow/instance.py,sha256=vbg4XiAjpdglEqOM7X_HvCOnE-6Bf67nTYeBfGVUSNU,1889
12
+ fakesnow/instance.py,sha256=VsFbhVfy6EAJdEKykgavJwkMtrig01NehorptT51Jh8,2020
13
13
  fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
14
14
  fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
15
15
  fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
16
16
  fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
17
- fakesnow/server.py,sha256=-jKyEVuD2TEr88jUSA1Lu86MAymel7LQAiNlytHqhTg,5934
17
+ fakesnow/server.py,sha256=oLnWJgcxwhPw4sNebJF3B9uxk28A-a-AI8Pyl_lz2_E,5986
18
18
  fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
19
- fakesnow/transforms/__init__.py,sha256=ENBHnwfQHAlC9PWOq4tdz-9-YQGy2E48xJB5ce7qEA0,60345
19
+ fakesnow/transforms/__init__.py,sha256=gD8wPo9QprwHkTOEMQ0-IsXSNfUruU0kBJPjO0po-J4,49377
20
20
  fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
21
- fakesnow-0.9.30.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
22
- fakesnow-0.9.30.dist-info/METADATA,sha256=ryFSnkBU9Vb4BrYI9g9q16zN4M2env6w8-syT7LbJ1k,18109
23
- fakesnow-0.9.30.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
24
- fakesnow-0.9.30.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
25
- fakesnow-0.9.30.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
26
- fakesnow-0.9.30.dist-info/RECORD,,
21
+ fakesnow/transforms/show.py,sha256=2qfK3Fi0RLylqTnkwSVgv5JIorXYb1y0fnf5oErRZ2o,16839
22
+ tools/decode.py,sha256=kC5kUvLQxdCkMRsnH6BqCajlKxKeN77w6rwCKsY6gqU,1781
23
+ fakesnow-0.9.32.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
24
+ fakesnow-0.9.32.dist-info/METADATA,sha256=HTGvul9rrgbrcSK79gmq4KndGAga0be6WzaKSUc_3c4,18106
25
+ fakesnow-0.9.32.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
26
+ fakesnow-0.9.32.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
27
+ fakesnow-0.9.32.dist-info/top_level.txt,sha256=Yos7YveA3f03xVYuURqnBsfMV2DePXfu_yGcsj3pPzI,30
28
+ fakesnow-0.9.32.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (76.0.0)
2
+ Generator: setuptools (76.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,3 +1,4 @@
1
1
  dist
2
2
  fakesnow
3
3
  notebooks
4
+ tools
tools/decode.py ADDED
@@ -0,0 +1,63 @@
1
+ #!/usr/bin/env python
2
+ import base64
3
+ import sys
4
+
5
+ import pyarrow as pa
6
+
7
+
8
+ def dump_field_metadata(field: pa.Field, index: int) -> None:
9
+ """Dump metadata for a single field."""
10
+ print(f"Field {index}: {field.name}")
11
+ print(f" Type: {field.type}")
12
+ print(f" Nullable: {field.nullable}")
13
+ print(" Metadata:")
14
+ assert field.metadata
15
+ for key, value in field.metadata.items():
16
+ try:
17
+ print(f" {key.decode('utf-8')}: {value.decode('utf-8')}")
18
+ except UnicodeDecodeError: # noqa: PERF203
19
+ print(f" {key.decode('utf-8')}: <binary data>")
20
+ print()
21
+
22
+
23
+ def main() -> None:
24
+ if len(sys.argv) > 1:
25
+ print("Usage: python dump_rowset_metadata.py < base64_encoded_file")
26
+ print(" or: cat base64_encoded_file | python dump_rowset_metadata.py")
27
+ print()
28
+ print("Dump pyarrow metadata for a base64-encoded rowset.")
29
+ sys.exit(1)
30
+
31
+ # Read base64 input from stdin
32
+ rowset_b64 = sys.stdin.read().strip()
33
+
34
+ try:
35
+ # Decode base64
36
+ data = base64.b64decode(rowset_b64)
37
+
38
+ # Parse with PyArrow
39
+ reader = pa.ipc.open_stream(data)
40
+
41
+ except Exception as e:
42
+ full_class_name = f"{e.__module__}.{e.__class__.__name__}"
43
+ print(f"Error processing rowset: {full_class_name} {e}")
44
+ sys.exit(1)
45
+
46
+ # Get the first batch
47
+ batch = next(iter(reader))
48
+
49
+ print(f"Total fields: {batch.num_columns}")
50
+ print("=" * 50)
51
+
52
+ # Dump metadata for each field
53
+ for i, field in enumerate(batch.schema):
54
+ dump_field_metadata(field, i)
55
+
56
+ # Also print a sample of the array data
57
+ print(f" Batch data: {batch[i]}")
58
+ print(f" Batch data type: {type(batch[i])}")
59
+ print("=" * 50)
60
+
61
+
62
+ if __name__ == "__main__":
63
+ main()