fakesnow 0.9.31__py3-none-any.whl → 0.9.33__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to its public registry. It is provided for informational purposes only.
- fakesnow/conn.py +10 -4
- fakesnow/cursor.py +6 -0
- fakesnow/instance.py +3 -0
- fakesnow/server.py +2 -1
- fakesnow/transforms/__init__.py +28 -366
- fakesnow/transforms/show.py +476 -0
- {fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/METADATA +4 -3
- {fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/RECORD +13 -11
- {fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/WHEEL +1 -1
- {fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/top_level.txt +1 -0
- tools/decode.py +63 -0
- {fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/entry_points.txt +0 -0
- {fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info/licenses}/LICENSE +0 -0
fakesnow/conn.py
CHANGED
@@ -62,7 +62,16 @@ class FakeSnowflakeConnection:
                     where upper(catalog_name) = '{self.database}'"""
                 ).fetchone()
             ):
-
+                if self.db_path:
+                    # raise a helpful error message when directory doesn't exist so users don't think
+                    # they have to create the database themselves
+                    if not os.path.isdir(self.db_path):
+                        raise NotADirectoryError(f"No such directory: '{self.db_path}'. Please ensure db_path exists.")
+                    db_file = f"{self.db_path / self.database}.db"
+                else:
+                    db_file = ":memory:"
+
+                # creates db file if it doesn't exist
                 duck_conn.execute(f"ATTACH DATABASE '{db_file}' AS {self.database}")
                 duck_conn.execute(info_schema.per_db_creation_sql(self.database))
                 duck_conn.execute(macros.creation_sql(self.database))

@@ -102,9 +111,6 @@ class FakeSnowflakeConnection:
             duck_conn.execute(f"SET schema='{self.database}.main'")
             self.database_set = True
 
-        # use UTC instead of local time zone for consistent testing
-        duck_conn.execute("SET GLOBAL TimeZone = 'UTC'")
-
     def __enter__(self) -> Self:
         return self
 
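The conn.py change above fails fast when db_path points at a missing directory. A minimal sketch of how it surfaces, assuming patch() accepts the db_path argument described in fakesnow's README:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch(db_path="/tmp/no-such-dir"):
        # 0.9.33 raises NotADirectoryError here, so users aren't left
        # thinking they had to create the database file themselves
        conn = snowflake.connector.connect(database="db1", schema="schema1")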
fakesnow/cursor.py
CHANGED
@@ -231,8 +231,10 @@ class FakeSnowflakeCursor:
             .transform(transforms.show_databases)
             .transform(transforms.show_functions)
             .transform(transforms.show_procedures)
+            .transform(transforms.show_warehouses)
             .transform(lambda e: transforms.show_schemas(e, self._conn.database))
             .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
+            .transform(lambda e: transforms.show_columns(e, self._conn.database))
             # TODO collapse into a single show_keys function
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="PRIMARY"))
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="UNIQUE"))

@@ -260,6 +262,9 @@ class FakeSnowflakeCursor:
 
         sql = transformed.sql(dialect="duckdb")
 
+        if not sql:
+            raise NotImplementedError(transformed.sql(dialect="snowflake"))
+
         if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
             sql = f"SELECT setseed({seed}); {sql}"
 
@@ -293,6 +298,7 @@ class FakeSnowflakeCursor:
         if set_database := transformed.args.get("set_database"):
             self._conn.database = set_database
             self._conn.database_set = True
+            self._conn.schema_set = False
             result_sql = SQL_SUCCESS
 
         elif set_schema := transformed.args.get("set_schema"):
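The cursor builds its DuckDB SQL by chaining sqlglot transforms, each a function that rewrites the expression tree only when it recognises the statement (show_warehouses and show_columns are the new links in that chain). A minimal sketch of the pattern, using public sqlglot APIs and a made-up transform:

    import sqlglot
    from sqlglot import exp

    def uppercase_tables(node: exp.Expression) -> exp.Expression:
        # rewrite table references only; leave every other node untouched,
        # mirroring how each fakesnow transform matches specific statements
        if isinstance(node, exp.Table):
            node.set("this", exp.to_identifier(node.name.upper()))
        return node

    tree = sqlglot.parse_one("select * from users", read="snowflake")
    print(tree.transform(uppercase_tables).sql(dialect="duckdb"))  # SELECT * FROM USERS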
fakesnow/instance.py
CHANGED
@@ -31,6 +31,9 @@ class FakeSnow:
         # create the info schema extensions
         self.duck_conn.execute(info_schema.fs_global_creation_sql(GLOBAL_DATABASE_NAME))
 
+        # use UTC instead of local time zone for consistent testing
+        self.duck_conn.execute("SET GLOBAL TimeZone = 'UTC'")
+
     def connect(
         self, database: str | None = None, schema: str | None = None, **kwargs: Any
     ) -> fakes.FakeSnowflakeConnection:
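Moving the TimeZone statement into FakeSnow runs it once on the shared DuckDB connection instead of on every FakeSnowflakeConnection. A standalone sketch of the underlying DuckDB setting (assumes the standard Python wheel, which bundles ICU timezone support):

    import duckdb

    con = duckdb.connect()
    con.execute("SET GLOBAL TimeZone = 'UTC'")
    # timestamps are now reported in UTC regardless of the host's local zone
    print(con.execute("select current_timestamp").fetchone())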
fakesnow/server.py
CHANGED
@@ -83,6 +83,7 @@ async def query_request(request: Request) -> JSONResponse:
         rowtype = describe_as_rowtype(cur._describe_last_sql())  # noqa: SLF001
 
     except snowflake.connector.errors.ProgrammingError as e:
+        logger.info(f"{sql_text=} ProgrammingError {e}")
         code = f"{e.errno:06d}"
         return JSONResponse(
             {

@@ -97,7 +98,7 @@ async def query_request(request: Request) -> JSONResponse:
         )
     except Exception as e:
         # we have a bug or use of an unsupported feature
-        msg = f"
+        msg = f"{sql_text=} Unhandled exception"
         logger.error(msg, exc_info=e)
         # my guess at mimicking a 500 error as per https://docs.snowflake.com/en/developer-guide/sql-api/reference
         # and https://github.com/snowflakedb/gosnowflake/blob/8ed4c75ffd707dd712ad843f40189843ace683c4/restful.go#L318
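Both new log lines rely on the f-string debug specifier (Python 3.8+), which prints the expression text along with its value:

    sql_text = "SHOW WAREHOUSES"
    print(f"{sql_text=} Unhandled exception")
    # sql_text='SHOW WAREHOUSES' Unhandled exception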
fakesnow/transforms/__init__.py
CHANGED
@@ -2,70 +2,25 @@ from __future__ import annotations
 
 from pathlib import Path
 from string import Template
-from typing import ClassVar,
+from typing import ClassVar, cast
 
 import sqlglot
 from sqlglot import exp
 
-from fakesnow.transforms.merge import merge
+from fakesnow.transforms.merge import merge as merge
+from fakesnow.transforms.show import (
+    show_columns as show_columns,
+    show_databases as show_databases,
+    show_functions as show_functions,
+    show_keys as show_keys,
+    show_objects_tables as show_objects_tables,
+    show_procedures as show_procedures,
+    show_schemas as show_schemas,
+    show_users as show_users,
+    show_warehouses as show_warehouses,
+)
 from fakesnow.variables import Variables
 
-__all__ = [
-    "alias_in_join",
-    "alter_table_strip_cluster_by",
-    "array_agg",
-    "array_agg_within_group",
-    "array_size",
-    "create_clone",
-    "create_database",
-    "create_user",
-    "dateadd_date_cast",
-    "dateadd_string_literal_timestamp_cast",
-    "datediff_string_literal_timestamp_cast",
-    "drop_schema_cascade",
-    "extract_comment_on_columns",
-    "extract_comment_on_table",
-    "extract_text_length",
-    "flatten",
-    "flatten_value_cast_as_varchar",
-    "float_to_double",
-    "identifier",
-    "indices_to_json_extract",
-    "information_schema_databases",
-    "information_schema_fs_tables",
-    "information_schema_fs_views",
-    "integer_precision",
-    "json_extract_cased_as_varchar",
-    "json_extract_cast_as_varchar",
-    "json_extract_precedence",
-    "merge",
-    "object_construct",
-    "random",
-    "regex_replace",
-    "regex_substr",
-    "sample",
-    "semi_structured_types",
-    "set_schema",
-    "sha256",
-    "show_keys",
-    "show_objects_tables",
-    "show_schemas",
-    "show_users",
-    "split",
-    "tag",
-    "timestamp_ntz",
-    "to_date",
-    "to_decimal",
-    "to_timestamp",
-    "to_timestamp_ntz",
-    "trim_cast_varchar",
-    "try_parse_json",
-    "try_to_decimal",
-    "update_variables",
-    "upper_case_unquoted_identifiers",
-    "values_columns",
-]
-
 SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.' as status")
 
 

@@ -104,9 +59,13 @@ def alter_table_strip_cluster_by(expression: exp.Expression) -> exp.Expression:
 
 def array_size(expression: exp.Expression) -> exp.Expression:
     if isinstance(expression, exp.ArraySize):
-        #
+        # return null if not json array
         jal = exp.Anonymous(this="json_array_length", expressions=[expression.this])
-
+        is_json_array = exp.EQ(
+            this=exp.Anonymous(this="json_type", expressions=[expression.this]),
+            expression=exp.Literal(this="ARRAY", is_string=True),
+        )
+        return exp.Case(ifs=[exp.If(this=is_json_array, true=jal)])
 
     return expression
 

@@ -621,7 +580,7 @@ def identifier(expression: exp.Expression) -> exp.Expression:
 
 
 def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
-    """Convert indices on objects and arrays to json_extract
+    """Convert indices on objects and arrays to json_extract or json_extract_string
 
     Supports Snowflake array indices, see
     https://docs.snowflake.com/en/sql-reference/data-types-semistructured#accessing-elements-of-an-array-by-index-or-by-slice

@@ -640,12 +599,16 @@ def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
         and isinstance(index, exp.Literal)
         and index.this
     ):
+        if isinstance(expression.parent, exp.Cast) and expression.parent.to.this == exp.DataType.Type.VARCHAR:
+            # If the parent is a cast to varchar, we need to use JSONExtractScalar
+            # to get the unquoted string value.
+            klass = exp.JSONExtractScalar
+        else:
+            klass = exp.JSONExtract
         if index.is_string:
-            return
+            return klass(this=expression.this, expression=exp.Literal(this=f"$.{index.this}", is_string=True))
         else:
-            return exp.
-                this=expression.this, expression=exp.Literal(this=f"$[{index.this}]", is_string=True)
-            )
+            return klass(this=expression.this, expression=exp.Literal(this=f"$[{index.this}]", is_string=True))
 
     return expression
 

@@ -1005,209 +968,6 @@ def set_schema(expression: exp.Expression, current_database: str | None) -> exp.
     return expression
 
 
-def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
-    """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.
-
-    See https://docs.snowflake.com/en/sql-reference/sql/show-objects
-    https://docs.snowflake.com/en/sql-reference/sql/show-tables
-    """
-    if not (
-        isinstance(expression, exp.Show)
-        and isinstance(expression.this, str)
-        and (show := expression.this.upper())
-        and show in {"OBJECTS", "TABLES"}
-    ):
-        return expression
-
-    scope_kind = expression.args.get("scope_kind")
-    table = expression.find(exp.Table)
-
-    if scope_kind == "DATABASE":
-        catalog = (table and table.name) or current_database
-        schema = None
-    elif scope_kind == "SCHEMA" and table:
-        catalog = table.db or current_database
-        schema = table.name
-    else:
-        # all objects / tables - will show everything in the "account"
-        catalog = None
-        schema = None
-
-    columns = [
-        "to_timestamp(0)::timestamptz as 'created_on'",
-        "table_name as 'name'",
-        "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
-        "table_catalog as 'database_name'",
-        "table_schema as 'schema_name'",
-    ]
-    if not expression.args["terse"]:
-        columns.append('null as "comment"')
-    columns_clause = ", ".join(columns)
-
-    where = ["not (table_schema == '_fs_information_schema')"]  # exclude fakesnow's internal schemas
-    if show == "TABLES":
-        where.append("table_type = 'BASE TABLE'")
-    if catalog:
-        where.append(f"table_catalog = '{catalog}'")
-    if schema:
-        where.append(f"table_schema = '{schema}'")
-    if (like := expression.args.get("like")) and isinstance(like, exp.Expression):
-        where.append(f"table_name ilike {like.sql()}")
-    where_clause = " AND ".join(where)
-
-    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
-
-    query = f"""
-        SELECT {columns_clause}
-        from information_schema.tables
-        where {where_clause}
-        {limit}
-        """
-
-    return sqlglot.parse_one(query, read="duckdb")
-
-
-SQL_SHOW_SCHEMAS = """
-select
-    to_timestamp(0)::timestamptz as 'created_on',
-    case
-        when schema_name = '_fs_information_schema' then 'information_schema'
-        else schema_name
-    end as 'name',
-    NULL as 'kind',
-    catalog_name as 'database_name',
-    NULL as 'schema_name'
-from information_schema.schemata
-where not catalog_name in ('memory', 'system', 'temp', '_fs_global')
-  and not schema_name in ('main', 'pg_catalog')
-"""
-
-
-def show_schemas(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
-    """Transform SHOW SCHEMAS to a query against the information_schema.schemata table.
-
-    See https://docs.snowflake.com/en/sql-reference/sql/show-schemas
-    """
-    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "SCHEMAS":
-        if (ident := expression.find(exp.Identifier)) and isinstance(ident.this, str):
-            database = ident.this
-        else:
-            database = current_database
-
-        return sqlglot.parse_one(
-            f"{SQL_SHOW_SCHEMAS} and catalog_name = '{database}'" if database else SQL_SHOW_SCHEMAS, read="duckdb"
-        )
-
-    return expression
-
-
-SQL_SHOW_DATABASES = """
-SELECT
-    to_timestamp(0)::timestamptz as 'created_on',
-    database_name as 'name',
-    'N' as 'is_default',
-    'N' as 'is_current',
-    '' as 'origin',
-    'SYSADMIN' as 'owner',
-    comment,
-    '' as 'options',
-    1 as 'retention_time',
-    'STANDARD' as 'kind',
-    NULL as 'budget',
-    'ROLE' as 'owner_role_type',
-    NULL as 'object_visibility'
-FROM duckdb_databases
-WHERE database_name NOT IN ('memory', '_fs_global')
-"""
-
-
-def show_databases(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW DATABASES to a query against the information_schema.schemata table.
-
-    See https://docs.snowflake.com/en/sql-reference/sql/show-databases
-    """
-    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "DATABASES":
-        return sqlglot.parse_one(SQL_SHOW_DATABASES, read="duckdb")
-
-    return expression
-
-
-# returns zero rows
-SQL_SHOW_FUNCTIONS = """
-SELECT
-    '1970-01-01 00:00:00 UTC'::timestamptz as created_on,
-    'SYSTIMESTAMP' as name,
-    '' as schema_name,
-    'Y' as is_builtin,
-    'N' as is_aggregate,
-    'N' as is_ansi,
-    0 as min_num_arguments,
-    0 as max_num_arguments,
-    'SYSTIMESTAMP() RETURN TIMESTAMP_LTZ' as arguments,
-    'Returns the current timestamp' as description,
-    '' as catalog_name,
-    'N' as is_table_function,
-    'N' as valid_for_clustering,
-    NULL as is_secure,
-    '' as secrets,
-    '' as external_access_integrations,
-    'N' as is_external_function,
-    'SQL' as language,
-    'N' as is_memoizable,
-    'N' as is_data_metric
-WHERE 0 = 1;
-"""
-
-
-def show_functions(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW FUNCTIONS.
-
-    See https://docs.snowflake.com/en/sql-reference/sql/show-functions
-    """
-    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "FUNCTIONS":
-        return sqlglot.parse_one(SQL_SHOW_FUNCTIONS, read="duckdb")
-
-    return expression
-
-
-# returns zero rows
-SQL_SHOW_PROCEDURES = """
-SELECT
-    '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
-    'SYSTEM$CLASSIFY' as 'name',
-    '' as 'schema_name',
-    'Y' as 'is_builtin',
-    'N' as 'is_aggregate',
-    'N' as 'is_ansi',
-    2 as 'min_num_arguments',
-    2 as 'max_num_arguments',
-    'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
-    'classify stored proc' as 'description',
-    '' as 'catalog_name',
-    'N' as 'is_table_function',
-    'N' as 'valid_for_clustering',
-    NULL as 'is_secure',
-    '' as 'secrets',
-    '' as 'external_access_integrations',
-WHERE 0 = 1;
-"""
-
-
-def show_procedures(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW PROCEDURES.
-
-    See https://docs.snowflake.com/en/sql-reference/sql/show-procedures
-    """
-    if (
-        isinstance(expression, exp.Show)
-        and isinstance(expression.this, str)
-        and expression.this.upper() == "PROCEDURES"
-    ):
-        return sqlglot.parse_one(SQL_SHOW_PROCEDURES, read="duckdb")
-
-    return expression
-
-
 def split(expression: exp.Expression) -> exp.Expression:
     """
     Convert output of duckdb str_split from varchar[] to JSON array to match Snowflake.

@@ -1544,17 +1304,6 @@ def values_columns(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
-def show_users(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW USERS to a query against the global database's information_schema._fs_users table.
-
-    https://docs.snowflake.com/en/sql-reference/sql/show-users
-    """
-    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "USERS":
-        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users_ext", read="duckdb")
-
-    return expression
-
-
 def create_user(expression: exp.Expression) -> exp.Expression:
     """Transform CREATE USER to a query against the global database's information_schema._fs_users table.
 

@@ -1575,93 +1324,6 @@ def create_user(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
-def show_keys(
-    expression: exp.Expression,
-    current_database: str | None = None,
-    *,
-    kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
-) -> exp.Expression:
-    """Transform SHOW <kind> KEYS to a query against the duckdb_constraints meta-table.
-
-    https://docs.snowflake.com/en/sql-reference/sql/show-primary-keys
-    """
-    snowflake_kind = kind
-    if kind == "FOREIGN":
-        snowflake_kind = "IMPORTED"
-
-    if (
-        isinstance(expression, exp.Show)
-        and isinstance(expression.this, str)
-        and expression.this.upper() == f"{snowflake_kind} KEYS"
-    ):
-        if kind == "FOREIGN":
-            statement = f"""
-                SELECT
-                    to_timestamp(0)::timestamptz as created_on,
-
-                    '' as pk_database_name,
-                    '' as pk_schema_name,
-                    '' as pk_table_name,
-                    '' as pk_column_name,
-                    unnest(constraint_column_names) as pk_column_name,
-
-                    database_name as fk_database_name,
-                    schema_name as fk_schema_name,
-                    table_name as fk_table_name,
-                    unnest(constraint_column_names) as fk_column_name,
-                    1 as key_sequence,
-                    'NO ACTION' as update_rule,
-                    'NO ACTION' as delete_rule,
-                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS fk_name,
-                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS pk_name,
-                    'NOT DEFERRABLE' as deferrability,
-                    'false' as rely,
-                    null as "comment"
-                FROM duckdb_constraints
-                WHERE constraint_type = 'PRIMARY KEY'
-                  AND database_name = '{current_database}'
-                  AND table_name NOT LIKE '_fs_%'
-                """
-        else:
-            statement = f"""
-                SELECT
-                    to_timestamp(0)::timestamptz as created_on,
-                    database_name as database_name,
-                    schema_name as schema_name,
-                    table_name as table_name,
-                    unnest(constraint_column_names) as column_name,
-                    1 as key_sequence,
-                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
-                    'false' as rely,
-                    null as "comment"
-                FROM duckdb_constraints
-                WHERE constraint_type = '{kind} KEY'
-                  AND database_name = '{current_database}'
-                  AND table_name NOT LIKE '_fs_%'
-                """
-
-        if scope_kind := expression.args.get("scope_kind"):
-            table = expression.args["scope"]
-
-            if scope_kind == "SCHEMA":
-                db = table and table.db
-                schema = table and table.name
-                if db:
-                    statement += f"AND database_name = '{db}' "
-
-                if schema:
-                    statement += f"AND schema_name = '{schema}' "
-            elif scope_kind == "TABLE":
-                if not table:
-                    raise ValueError(f"SHOW PRIMARY KEYS with {scope_kind} scope requires a table")
-
-                statement += f"AND table_name = '{table.name}' "
-            else:
-                raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
-        return sqlglot.parse_one(statement)
-    return expression
-
-
 def update_variables(
     expression: exp.Expression,
     variables: Variables,
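Dropping __all__ in favour of redundant `import x as x` aliases is the explicit re-export convention from PEP 484: type checkers treat an `as`-aliased import as part of the module's public interface, so the public surface now sits on the import itself. A sketch of the equivalence, using the merge import from the diff:

    # this form marks `merge` as public for type checkers:
    from fakesnow.transforms.merge import merge as merge

    # ...and is treated the same as:
    from fakesnow.transforms.merge import merge
    __all__ = ["merge"]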
fakesnow/transforms/show.py
ADDED
@@ -0,0 +1,476 @@
+from __future__ import annotations
+
+from typing import Literal
+
+import sqlglot
+from sqlglot import exp
+
+
+def show_columns(
+    expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
+) -> exp.Expression:
+    """Transform SHOW COLUMNS to a query against the fs global information_schema columns table.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-columns
+    """
+    if not (
+        isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "COLUMNS"
+    ):
+        return expression
+
+    scope_kind = expression.args.get("scope_kind")
+    table = expression.find(exp.Table)
+
+    if scope_kind == "ACCOUNT" or not scope_kind:
+        # all columns
+        catalog = None
+        schema = None
+        table = None
+    elif scope_kind == "DATABASE" and table:
+        catalog = table.name
+        schema = None
+        table = None
+    elif scope_kind == "SCHEMA" and table:
+        catalog = table.db or current_database
+        schema = table.name
+        table = None
+    elif scope_kind in ("TABLE", "VIEW") and table:
+        catalog = table.catalog or current_database
+        schema = table.db or current_schema
+        table = table.name
+    else:
+        raise NotImplementedError(f"show_object_columns: {expression.sql(dialect='snowflake')}")
+
+    query = f"""
+        SELECT
+            table_name,
+            table_schema as "schema_name",
+            column_name,
+            CASE
+                WHEN data_type = 'NUMBER' THEN '{{"type":"FIXED","precision":'|| numeric_precision || ',"scale":' || numeric_scale || ',"nullable":true}}'
+                WHEN data_type = 'TEXT' THEN '{{"type":"TEXT","length":' || coalesce(character_maximum_length,16777216) || ',"byteLength":' || CASE WHEN character_maximum_length = 16777216 THEN 16777216 ELSE coalesce(character_maximum_length*4,16777216) END || ',"nullable":true,"fixed":false}}'
+                WHEN data_type in ('TIMESTAMP_NTZ','TIMESTAMP_TZ','TIME') THEN '{{"type":"' || data_type || '","precision":0,"scale":9,"nullable":true}}'
+                WHEN data_type = 'FLOAT' THEN '{{"type":"REAL","nullable":true}}'
+                WHEN data_type = 'BINARY' THEN '{{"type":"BINARY","length":8388608,"byteLength":8388608,"nullable":true,"fixed":true}}'
+                ELSE '{{"type":"' || data_type || '","nullable":true}}'
+            END as "data_type",
+            CASE WHEN is_nullable = 'YES' THEN 'true' ELSE 'false' END as "null?",
+            COALESCE(column_default, '') as "default",
+            'COLUMN' as "kind",
+            '' as "expression",
+            COALESCE(comment, '') as "comment",
+            table_catalog as "database_name",
+            '' as "autoincrement",
+            NULL as "schema_evolution_record"
+        FROM _fs_global._fs_information_schema._fs_columns c
+        WHERE 1=1
+        {f"AND table_catalog = '{catalog}'" if catalog else ""}
+        {f"AND table_schema = '{schema}'" if schema else ""}
+        {f"AND table_name = '{table}'" if table else ""}
+        ORDER BY table_name, ordinal_position
+    """  # noqa: E501
+
+    return sqlglot.parse_one(query, read="duckdb")
+
+
+SQL_SHOW_DATABASES = """
+SELECT
+    to_timestamp(0)::timestamptz as 'created_on',
+    database_name as 'name',
+    'N' as 'is_default',
+    'N' as 'is_current',
+    '' as 'origin',
+    'SYSADMIN' as 'owner',
+    comment,
+    '' as 'options',
+    1 as 'retention_time',
+    'STANDARD' as 'kind',
+    NULL as 'budget',
+    'ROLE' as 'owner_role_type',
+    NULL as 'object_visibility'
+FROM duckdb_databases
+WHERE database_name NOT IN ('memory', '_fs_global')
+"""
+
+
+def show_databases(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW DATABASES to a query against the information_schema.schemata table.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-databases
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "DATABASES":
+        return sqlglot.parse_one(SQL_SHOW_DATABASES, read="duckdb")
+
+    return expression
+
+
+# returns zero rows
+SQL_SHOW_FUNCTIONS = """
+SELECT
+    '1970-01-01 00:00:00 UTC'::timestamptz as created_on,
+    'SYSTIMESTAMP' as name,
+    '' as schema_name,
+    'Y' as is_builtin,
+    'N' as is_aggregate,
+    'N' as is_ansi,
+    0 as min_num_arguments,
+    0 as max_num_arguments,
+    'SYSTIMESTAMP() RETURN TIMESTAMP_LTZ' as arguments,
+    'Returns the current timestamp' as description,
+    '' as catalog_name,
+    'N' as is_table_function,
+    'N' as valid_for_clustering,
+    NULL as is_secure,
+    '' as secrets,
+    '' as external_access_integrations,
+    'N' as is_external_function,
+    'SQL' as language,
+    'N' as is_memoizable,
+    'N' as is_data_metric
+WHERE 0 = 1;
+"""
+
+
+def show_functions(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW FUNCTIONS.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-functions
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "FUNCTIONS":
+        return sqlglot.parse_one(SQL_SHOW_FUNCTIONS, read="duckdb")
+
+    return expression
+
+
+# returns zero rows
+SQL_SHOW_WAREHOUSES = """
+SELECT
+    'FAKESNOW_WAREHOUSE' as name,
+    'STARTED' as state,
+    'STANDARD' as type,
+    'X-Small' as size,
+    1 as min_cluster_count,
+    1 as max_cluster_count,
+    1 as started_clusters,
+    0 as running,
+    0 as queued,
+    'N' as is_default,
+    'N' as is_current,
+    600 as auto_suspend,
+    'true' as auto_resume,
+    -- nb: deliberate space before '100' to match Snowflake's output
+    ' 100' as available,
+    '0' as provisioning,
+    '0' as quiescing,
+    '0' as other,
+    '1970-01-01 00:00:00.000000 UTC'::timestamptz as created_on,
+    '1970-01-01 00:00:00.000000 UTC'::timestamptz as resumed_on,
+    '1970-01-01 00:00:00.000000 UTC'::timestamptz as updated_on,
+    'SYSADMIN' as owner,
+    '' as comment,
+    'false' as enable_query_acceleration,
+    8 as query_acceleration_max_scale_factor,
+    'null' as resource_monitor,
+
+    -- deprecated - these 5 cols are for internal use
+    0 as actives,
+    0 as pendings,
+    0 as failed,
+    0 as suspended,
+    '123456789012' as uuid,
+
+    'STANDARD' as scaling_policy,
+    NULL as budget,
+    'ROLE' as owner_role_type,
+    NULL as resource_constraint;
+"""
+
+
+def show_warehouses(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW WAREHOUSES.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-warehouses
+    """
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == "WAREHOUSES"
+    ):
+        return sqlglot.parse_one(SQL_SHOW_WAREHOUSES, read="duckdb")
+
+    return expression
+
+
+def show_keys(
+    expression: exp.Expression,
+    current_database: str | None = None,
+    *,
+    kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
+) -> exp.Expression:
+    """Transform SHOW <kind> KEYS to a query against the duckdb_constraints meta-table.
+
+    https://docs.snowflake.com/en/sql-reference/sql/show-primary-keys
+    """
+    snowflake_kind = kind
+    if kind == "FOREIGN":
+        snowflake_kind = "IMPORTED"
+
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == f"{snowflake_kind} KEYS"
+    ):
+        if kind == "FOREIGN":
+            statement = f"""
+                SELECT
+                    to_timestamp(0)::timestamptz as created_on,
+
+                    '' as pk_database_name,
+                    '' as pk_schema_name,
+                    '' as pk_table_name,
+                    '' as pk_column_name,
+                    unnest(constraint_column_names) as pk_column_name,
+
+                    database_name as fk_database_name,
+                    schema_name as fk_schema_name,
+                    table_name as fk_table_name,
+                    unnest(constraint_column_names) as fk_column_name,
+                    1 as key_sequence,
+                    'NO ACTION' as update_rule,
+                    'NO ACTION' as delete_rule,
+                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS fk_name,
+                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS pk_name,
+                    'NOT DEFERRABLE' as deferrability,
+                    'false' as rely,
+                    null as "comment"
+                FROM duckdb_constraints
+                WHERE constraint_type = 'PRIMARY KEY'
+                  AND database_name = '{current_database}'
+                  AND table_name NOT LIKE '_fs_%'
+                """
+        else:
+            statement = f"""
+                SELECT
+                    to_timestamp(0)::timestamptz as created_on,
+                    database_name as database_name,
+                    schema_name as schema_name,
+                    table_name as table_name,
+                    unnest(constraint_column_names) as column_name,
+                    1 as key_sequence,
+                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
+                    'false' as rely,
+                    null as "comment"
+                FROM duckdb_constraints
+                WHERE constraint_type = '{kind} KEY'
+                  AND database_name = '{current_database}'
+                  AND table_name NOT LIKE '_fs_%'
+                """
+
+        if scope_kind := expression.args.get("scope_kind"):
+            table = expression.args["scope"]
+
+            if scope_kind == "SCHEMA":
+                db = table and table.db
+                schema = table and table.name
+                if db:
+                    statement += f"AND database_name = '{db}' "
+
+                if schema:
+                    statement += f"AND schema_name = '{schema}' "
+            elif scope_kind == "TABLE":
+                if not table:
+                    raise ValueError(f"SHOW PRIMARY KEYS with {scope_kind} scope requires a table")
+
+                statement += f"AND table_name = '{table.name}' "
+            else:
+                raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
+        return sqlglot.parse_one(statement)
+    return expression
+
+
+def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
+    """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-objects
+    https://docs.snowflake.com/en/sql-reference/sql/show-tables
+    """
+    if not (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and (show := expression.this.upper())
+        and show in {"OBJECTS", "TABLES"}
+    ):
+        return expression
+
+    scope_kind = expression.args.get("scope_kind")
+    table = expression.find(exp.Table)
+
+    if scope_kind == "DATABASE":
+        catalog = (table and table.name) or current_database
+        schema = None
+    elif scope_kind == "SCHEMA" and table:
+        catalog = table.db or current_database
+        schema = table.name
+    else:
+        # all objects / tables - will show everything in the "account"
+        catalog = None
+        schema = None
+
+    columns = [
+        "to_timestamp(0)::timestamptz as 'created_on'",
+        "table_name as 'name'",
+        "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
+        "table_catalog as 'database_name'",
+        "table_schema as 'schema_name'",
+    ]
+    if not expression.args["terse"]:
+        if show == "OBJECTS":
+            columns.extend(
+                [
+                    "'' as 'comment'",
+                    "'' as 'cluster_by'",
+                    # TODO: implement rows and bytes as rows * 1024
+                    "0 as 'rows'",
+                    "0 as 'bytes'",
+                    "'SYSADMIN' as 'owner'",
+                    "1 as 'retention_time'",
+                    "'ROLE' as 'owner_role_type'",
+                    "null as 'budget'",
+                    "'N' as 'is_hybrid'",
+                    "'N' as 'is_dynamic'",
+                ]
+            )
+        else:
+            # show == "TABLES"
+            columns.extend(
+                [
+                    "'' as 'comment'",
+                    "'' as 'cluster_by'",
+                    # TODO: implement rows and bytes as rows * 1024
+                    "0 as 'rows'",
+                    "0 as 'bytes'",
+                    "'SYSADMIN' as 'owner'",
+                    "1 as 'retention_time'",
+                    "'OFF' as 'automatic_clustering'",
+                    "'OFF' as 'change_tracking'",
+                    "'OFF' as 'search_optimization'",
+                    "null as 'search_optimization_progress'",
+                    "null as 'search_optimization_bytes'",
+                    "'N' as 'is_external'",
+                    "'N' as 'enable_schema_evolution'",
+                    "'ROLE' as 'owner_role_type'",
+                    "'N' as 'is_event'",
+                    "null as 'budget'",
+                    "'N' as 'is_hybrid'",
+                    "'N' as 'is_iceberg'",
+                    "'N' as 'is_dynamic'",
+                    "'N' as 'is_immutable'",
+                ]
+            )
+
+    columns_clause = ", ".join(columns)
+
+    where = ["not (table_schema == '_fs_information_schema')"]  # exclude fakesnow's internal schemas
+    if show == "TABLES":
+        where.append("table_type = 'BASE TABLE'")
+    if catalog:
+        where.append(f"table_catalog = '{catalog}'")
+    if schema:
+        where.append(f"table_schema = '{schema}'")
+    if (like := expression.args.get("like")) and isinstance(like, exp.Expression):
+        where.append(f"table_name ilike {like.sql()}")
+    where_clause = " AND ".join(where)
+
+    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
+
+    query = f"""
+        SELECT {columns_clause}
+        from information_schema.tables
+        where {where_clause}
+        {limit}
+        """
+
+    return sqlglot.parse_one(query, read="duckdb")
+
+
+# returns zero rows
+SQL_SHOW_PROCEDURES = """
+SELECT
+    '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
+    'SYSTEM$CLASSIFY' as 'name',
+    '' as 'schema_name',
+    'Y' as 'is_builtin',
+    'N' as 'is_aggregate',
+    'N' as 'is_ansi',
+    2 as 'min_num_arguments',
+    2 as 'max_num_arguments',
+    'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
+    'classify stored proc' as 'description',
+    '' as 'catalog_name',
+    'N' as 'is_table_function',
+    'N' as 'valid_for_clustering',
+    NULL as 'is_secure',
+    '' as 'secrets',
+    '' as 'external_access_integrations',
+WHERE 0 = 1;
+"""
+
+
+def show_procedures(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW PROCEDURES.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-procedures
+    """
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == "PROCEDURES"
+    ):
+        return sqlglot.parse_one(SQL_SHOW_PROCEDURES, read="duckdb")
+
+    return expression
+
+
+SQL_SHOW_SCHEMAS = """
+select
+    to_timestamp(0)::timestamptz as 'created_on',
+    case
+        when schema_name = '_fs_information_schema' then 'information_schema'
+        else schema_name
+    end as 'name',
+    NULL as 'kind',
+    catalog_name as 'database_name',
+    NULL as 'schema_name'
+from information_schema.schemata
+where not catalog_name in ('memory', 'system', 'temp', '_fs_global')
+  and not schema_name in ('main', 'pg_catalog')
+"""
+
+
+def show_schemas(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
+    """Transform SHOW SCHEMAS to a query against the information_schema.schemata table.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-schemas
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "SCHEMAS":
+        if (ident := expression.find(exp.Identifier)) and isinstance(ident.this, str):
+            database = ident.this
+        else:
+            database = current_database
+
+        return sqlglot.parse_one(
+            f"{SQL_SHOW_SCHEMAS} and catalog_name = '{database}'" if database else SQL_SHOW_SCHEMAS, read="duckdb"
+        )
+
+    return expression
+
+
+def show_users(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW USERS to a query against the global database's information_schema._fs_users table.
+
+    https://docs.snowflake.com/en/sql-reference/sql/show-users
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "USERS":
+        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users_ext", read="duckdb")
+
+    return expression
{fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: fakesnow
-Version: 0.9.31
+Version: 0.9.33
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                                  Version 2.0, January 2004

@@ -213,7 +213,7 @@ License-File: LICENSE
 Requires-Dist: duckdb~=1.2.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=26.
+Requires-Dist: sqlglot~=26.12.1
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: dirty-equals; extra == "dev"

@@ -233,6 +233,7 @@ Requires-Dist: jupysql; extra == "notebook"
 Provides-Extra: server
 Requires-Dist: starlette; extra == "server"
 Requires-Dist: uvicorn; extra == "server"
+Dynamic: license-file
 
 # fakesnow ❄️
 
{fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/RECORD
CHANGED
@@ -3,24 +3,26 @@ fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
 fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
 fakesnow/checks.py,sha256=be-xo0oMoAUVhlMDCu1_Rkoh_L8p_p8qo9P6reJSHIQ,2874
 fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
-fakesnow/conn.py,sha256=
-fakesnow/cursor.py,sha256=
+fakesnow/conn.py,sha256=2WClMmUgfQkQA2hFQjfMP3R-85TbTbZh_8Y1tCdcerA,6053
+fakesnow/cursor.py,sha256=Nvr8TQmmTFs6i0sJwfgCocrEF9td0D0SdDG41quIudI,21621
 fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
 fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
 fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
 fakesnow/info_schema.py,sha256=xDhGy07fpc8bcy_VTfh54UzwNIaB4ZhGmjgJeoiZ0hQ,8744
-fakesnow/instance.py,sha256=
+fakesnow/instance.py,sha256=VsFbhVfy6EAJdEKykgavJwkMtrig01NehorptT51Jh8,2020
 fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
 fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
 fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
 fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
-fakesnow/server.py,sha256
+fakesnow/server.py,sha256=oLnWJgcxwhPw4sNebJF3B9uxk28A-a-AI8Pyl_lz2_E,5986
 fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
-fakesnow/transforms/__init__.py,sha256=
+fakesnow/transforms/__init__.py,sha256=xFrpw28DaHvMt6LGaRMsPqTo8PWogg10JgEu3oa6jdA,49515
 fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
-fakesnow
-fakesnow-0.9.
-
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
+fakesnow/transforms/show.py,sha256=2qfK3Fi0RLylqTnkwSVgv5JIorXYb1y0fnf5oErRZ2o,16839
+fakesnow-0.9.33.dist-info/licenses/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+tools/decode.py,sha256=kC5kUvLQxdCkMRsnH6BqCajlKxKeN77w6rwCKsY6gqU,1781
+fakesnow-0.9.33.dist-info/METADATA,sha256=xOQPPYwFTgDETx_lx1bO2nBpD7wbqWwN1rn2U5MJHFw,18128
+fakesnow-0.9.33.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+fakesnow-0.9.33.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.33.dist-info/top_level.txt,sha256=Yos7YveA3f03xVYuURqnBsfMV2DePXfu_yGcsj3pPzI,30
+fakesnow-0.9.33.dist-info/RECORD,,
tools/decode.py
ADDED
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+import base64
+import sys
+
+import pyarrow as pa
+
+
+def dump_field_metadata(field: pa.Field, index: int) -> None:
+    """Dump metadata for a single field."""
+    print(f"Field {index}: {field.name}")
+    print(f"  Type: {field.type}")
+    print(f"  Nullable: {field.nullable}")
+    print("  Metadata:")
+    assert field.metadata
+    for key, value in field.metadata.items():
+        try:
+            print(f"    {key.decode('utf-8')}: {value.decode('utf-8')}")
+        except UnicodeDecodeError:  # noqa: PERF203
+            print(f"    {key.decode('utf-8')}: <binary data>")
+    print()
+
+
+def main() -> None:
+    if len(sys.argv) > 1:
+        print("Usage: python dump_rowset_metadata.py < base64_encoded_file")
+        print("  or: cat base64_encoded_file | python dump_rowset_metadata.py")
+        print()
+        print("Dump pyarrow metadata for a base64-encoded rowset.")
+        sys.exit(1)
+
+    # Read base64 input from stdin
+    rowset_b64 = sys.stdin.read().strip()
+
+    try:
+        # Decode base64
+        data = base64.b64decode(rowset_b64)
+
+        # Parse with PyArrow
+        reader = pa.ipc.open_stream(data)
+
+    except Exception as e:
+        full_class_name = f"{e.__module__}.{e.__class__.__name__}"
+        print(f"Error processing rowset: {full_class_name} {e}")
+        sys.exit(1)
+
+    # Get the first batch
+    batch = next(iter(reader))
+
+    print(f"Total fields: {batch.num_columns}")
+    print("=" * 50)
+
+    # Dump metadata for each field
+    for i, field in enumerate(batch.schema):
+        dump_field_metadata(field, i)
+
+        # Also print a sample of the array data
+        print(f"  Batch data: {batch[i]}")
+        print(f"  Batch data type: {type(batch[i])}")
+        print("=" * 50)
+
+
+if __name__ == "__main__":
+    main()
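tools/decode.py expects a base64-encoded Arrow IPC stream on stdin, per its own usage text (cat base64_encoded_file | python dump_rowset_metadata.py). A sketch that fabricates a compatible input; the metadata key and the make_rowset.py filename are illustrative only:

    import base64
    import pyarrow as pa

    # build a one-column batch whose field carries metadata, since the
    # tool asserts that every field has some
    field = pa.field("id", pa.int64(), metadata={"logicalType": "FIXED"})
    batch = pa.RecordBatch.from_arrays([pa.array([1, 2, 3], pa.int64())],
                                       schema=pa.schema([field]))

    sink = pa.BufferOutputStream()
    with pa.ipc.new_stream(sink, batch.schema) as writer:
        writer.write_batch(batch)

    # pipe this output into the tool:  python make_rowset.py | python tools/decode.py
    print(base64.b64encode(sink.getvalue().to_pybytes()).decode())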
{fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info}/entry_points.txt
File without changes
{fakesnow-0.9.31.dist-info → fakesnow-0.9.33.dist-info/licenses}/LICENSE
File without changes