fakesnow 0.9.29__py3-none-any.whl → 0.9.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fakesnow/arrow.py +4 -4
- fakesnow/checks.py +5 -0
- fakesnow/conn.py +1 -1
- fakesnow/cursor.py +15 -2
- fakesnow/info_schema.py +60 -18
- fakesnow/instance.py +0 -44
- fakesnow/server.py +51 -7
- fakesnow/{transforms.py → transforms/__init__.py} +232 -73
- {fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/METADATA +4 -4
- fakesnow-0.9.31.dist-info/RECORD +26 -0
- {fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/WHEEL +1 -1
- fakesnow-0.9.29.dist-info/RECORD +0 -26
- /fakesnow/{transforms_merge.py → transforms/merge.py} +0 -0
- {fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/LICENSE +0 -0
- {fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/entry_points.txt +0 -0
- {fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/top_level.txt +0 -0
fakesnow/arrow.py
CHANGED
@@ -62,7 +62,7 @@ def to_ipc(table: pa.Table) -> pa.Buffer:
 
 
 def to_sf(table: pa.Table, rowtype: list[ColumnInfo]) -> pa.Table:
-    def to_sf_col(col: pa.
+    def to_sf_col(col: pa.ChunkedArray) -> pa.Array | pa.ChunkedArray:
         if pa.types.is_timestamp(col.type):
             return timestamp_to_sf_struct(col)
         elif pa.types.is_time(col.type):
@@ -83,7 +83,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
 
     # Round to seconds, ie: strip subseconds
    tsa_without_us = pc.floor_temporal(ts, unit="second")  # type: ignore https://github.com/zen-xu/pyarrow-stubs/issues/45
-    epoch = pc.divide(tsa_without_us.cast(pa.int64()), 1_000_000)
+    epoch = pc.divide(tsa_without_us.cast(pa.int64()), 1_000_000)
 
     # Calculate fractional part as nanoseconds
     fraction = pc.multiply(pc.subsecond(ts), 1_000_000_000).cast(pa.int32())  # type: ignore
@@ -93,7 +93,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
         timezone = pa.array([1440] * len(ts), type=pa.int32())
 
         return pa.StructArray.from_arrays(
-            arrays=[epoch, fraction, timezone],
+            arrays=[epoch, fraction, timezone],
             fields=[
                 pa.field("epoch", nullable=False, type=pa.int64()),
                 pa.field("fraction", nullable=False, type=pa.int32()),
@@ -102,7 +102,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
         )
     else:
         return pa.StructArray.from_arrays(
-            arrays=[epoch, fraction],
+            arrays=[epoch, fraction],
             fields=[
                 pa.field("epoch", nullable=False, type=pa.int64()),
                 pa.field("fraction", nullable=False, type=pa.int32()),
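Note: what these hunks compute — Snowflake's Arrow result format represents a timestamp as a struct of whole seconds (epoch) plus a nanosecond remainder (fraction). A minimal standalone sketch of the same arithmetic with plain pyarrow (the input value is illustrative, not from the package):

    import pyarrow as pa
    import pyarrow.compute as pc

    # one microsecond-precision timestamp (made-up value)
    ts = pa.array([1_700_000_000_123_456], type=pa.timestamp("us"))

    # whole seconds since the epoch: drop subseconds, then scale int64 microseconds to seconds
    epoch = pc.divide(pc.floor_temporal(ts, unit="second").cast(pa.int64()), 1_000_000)

    # remainder as nanoseconds, as in the hunk above
    fraction = pc.multiply(pc.subsecond(ts), 1_000_000_000).cast(pa.int32())

    print(epoch[0].as_py(), fraction[0].as_py())  # 1700000000 123456000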
fakesnow/checks.py
CHANGED
@@ -63,6 +63,11 @@ def is_unqualified_table_expression(expression: exp.Expression) -> tuple[bool, bool]:
         else:
             raise AssertionError(f"Unexpected parent kind: {parent_kind.name}")
 
+    elif node.parent.key == "show":
+        # don't require a database or schema for SHOW
+        # TODO: make this more nuanced
+        no_database = False
+        no_schema = False
     else:
         no_database = not node.args.get("catalog")
         no_schema = not node.args.get("db")
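Note: the new branch relies on sqlglot's node hierarchy — the scope table of a SHOW statement is parented by an exp.Show node, and Expression.key is the lowercased class name. A sketch (assuming a sqlglot version whose Snowflake dialect parses SHOW, as the pinned one does):

    import sqlglot
    from sqlglot import exp

    show = sqlglot.parse_one("SHOW OBJECTS IN schema1", read="snowflake")
    table = show.find(exp.Table)  # the unqualified scope table
    assert table is not None and table.parent is not None
    print(table.parent.key)  # show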
fakesnow/conn.py
CHANGED
@@ -64,7 +64,7 @@ class FakeSnowflakeConnection:
         ):
             db_file = f"{self.db_path / self.database}.db" if self.db_path else ":memory:"
             duck_conn.execute(f"ATTACH DATABASE '{db_file}' AS {self.database}")
-            duck_conn.execute(info_schema.
+            duck_conn.execute(info_schema.per_db_creation_sql(self.database))
             duck_conn.execute(macros.creation_sql(self.database))
 
             # create schema if needed
fakesnow/cursor.py
CHANGED
@@ -14,6 +14,7 @@ import pyarrow  # needed by fetch_arrow_table()
 import snowflake.connector.converter
 import snowflake.connector.errors
 import sqlglot
+import sqlglot.errors
 from duckdb import DuckDBPyConnection
 from snowflake.connector.cursor import ResultMetadata
 from snowflake.connector.result_batch import ResultBatch
@@ -155,6 +156,11 @@ class FakeSnowflakeCursor:
         except snowflake.connector.errors.ProgrammingError as e:
             self._sqlstate = e.sqlstate
             raise e
+        except sqlglot.errors.ParseError as e:
+            self._sqlstate = "42000"
+            # strip highlight for better readability, TODO: show pointer to start of error
+            msg = str(e).replace("\x1b[4m", "").replace("\x1b[0m", "")
+            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=1003, sqlstate="42000") from None
 
     def check_db_and_schema(self, expression: exp.Expression) -> None:
         no_database, no_schema = checks.is_unqualified_table_expression(expression)
@@ -182,7 +188,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.create_database, db_path=self._conn.db_path)
             .transform(transforms.extract_comment_on_table)
             .transform(transforms.extract_comment_on_columns)
-            .transform(transforms.
+            .transform(transforms.information_schema_fs_columns)
             .transform(transforms.information_schema_databases, current_schema=self._conn.schema)
             .transform(transforms.information_schema_fs_tables)
             .transform(transforms.information_schema_fs_views)
@@ -222,6 +228,9 @@ class FakeSnowflakeCursor:
             .transform(transforms.dateadd_date_cast)
             .transform(transforms.dateadd_string_literal_timestamp_cast)
             .transform(transforms.datediff_string_literal_timestamp_cast)
+            .transform(transforms.show_databases)
+            .transform(transforms.show_functions)
+            .transform(transforms.show_procedures)
             .transform(lambda e: transforms.show_schemas(e, self._conn.database))
             .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
             # TODO collapse into a single show_keys function
@@ -276,20 +285,24 @@ class FakeSnowflakeCursor:
             raise e
         except duckdb.ConnectionException as e:
             raise snowflake.connector.errors.DatabaseError(msg=e.args[0], errno=250002, sqlstate="08003") from None
+        except duckdb.ParserException as e:
+            raise snowflake.connector.errors.ProgrammingError(msg=e.args[0], errno=1003, sqlstate="42000") from None
 
         affected_count = None
 
         if set_database := transformed.args.get("set_database"):
             self._conn.database = set_database
             self._conn.database_set = True
+            result_sql = SQL_SUCCESS
 
         elif set_schema := transformed.args.get("set_schema"):
             self._conn._schema = set_schema  # noqa: SLF001
             self._conn.schema_set = True
+            result_sql = SQL_SUCCESS
 
         elif create_db_name := transformed.args.get("create_db_name"):
             # we created a new database, so create the info schema extensions
-            self._duck_conn.execute(info_schema.
+            self._duck_conn.execute(info_schema.per_db_creation_sql(create_db_name))
             result_sql = SQL_CREATED_DATABASE.substitute(name=create_db_name)
 
         elif cmd == "INSERT":
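Note: the new ParseError handler strips ANSI escapes because sqlglot underlines the offending token with \x1b[4m ... \x1b[0m, which renders as noise in a connector error message. A small sketch of the same stripping (the invalid SQL is arbitrary):

    import sqlglot
    import sqlglot.errors

    try:
        sqlglot.parse_one("SELECT * FROM (", read="snowflake")
    except sqlglot.errors.ParseError as e:
        msg = str(e).replace("\x1b[4m", "").replace("\x1b[0m", "")
        print(msg)  # plain-text message without the ANSI underline codes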
fakesnow/info_schema.py
CHANGED
@@ -4,11 +4,14 @@ from __future__ import annotations
 
 from string import Template
 
-
+SQL_CREATE_GLOBAL_FS_INFORMATION_SCHEMA = """
+create schema if not exists _fs_global._fs_information_schema
+"""
+
 
 # use ext prefix in columns to disambiguate when joining with information_schema.tables
-
-create table if not exists {GLOBAL_DATABASE_NAME}.main._fs_tables_ext (
+SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_TABLES_EXT = """
+create table if not exists _fs_global._fs_information_schema._fs_tables_ext (
     ext_table_catalog varchar,
     ext_table_schema varchar,
     ext_table_name varchar,
@@ -18,8 +21,8 @@ create table if not exists {GLOBAL_DATABASE_NAME}.main._fs_tables_ext (
 """
 
 
-
-create table if not exists {GLOBAL_DATABASE_NAME}.main._fs_columns_ext (
+SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_EXT = """
+create table if not exists _fs_global._fs_information_schema._fs_columns_ext (
     ext_table_catalog varchar,
     ext_table_schema varchar,
     ext_table_name varchar,
@@ -30,18 +33,56 @@ create table if not exists {GLOBAL_DATABASE_NAME}.main._fs_columns_ext (
 )
 """
 
+# replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
+SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE_EXT = """
+create table if not exists _fs_global._fs_information_schema._fs_users_ext (
+    name varchar,
+    created_on TIMESTAMPTZ,
+    login_name varchar,
+    display_name varchar,
+    first_name varchar,
+    last_name varchar,
+    email varchar,
+    mins_to_unlock varchar,
+    days_to_expiry varchar,
+    comment varchar,
+    disabled varchar,
+    must_change_password varchar,
+    snowflake_lock varchar,
+    default_warehouse varchar,
+    default_namespace varchar,
+    default_role varchar,
+    default_secondary_roles varchar,
+    ext_authn_duo varchar,
+    ext_authn_uid varchar,
+    mins_to_bypass_mfa varchar,
+    owner varchar,
+    last_success_login TIMESTAMPTZ,
+    expires_at_time TIMESTAMPTZ,
+    locked_until_time TIMESTAMPTZ,
+    has_password varchar,
+    has_rsa_public_key varchar,
+)
+"""
+
+
 SQL_CREATE_FS_INFORMATION_SCHEMA = Template(
     """
 create schema if not exists ${catalog}._fs_information_schema
 """
 )
 
+SQL_CREATE_INFORMATION_SCHEMA_COLUMNS_VIEW = Template(
+    """
+create view if not exists ${catalog}._fs_information_schema._fs_columns AS
+select * from _fs_global._fs_information_schema._fs_columns where table_catalog = '${catalog}'
+"""
+)
 
 # only include fields applicable to snowflake (as mentioned by describe table information_schema.columns)
 # snowflake integers are 38 digits, base 10, See https://docs.snowflake.com/en/sql-reference/data-types-numeric
-
-
-create view if not exists ${catalog}._fs_information_schema._fs_columns_snowflake AS
+SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_VIEW = """
+create view if not exists _fs_global._fs_information_schema._fs_columns AS
 select
     columns.table_catalog AS table_catalog,
     columns.table_schema AS table_schema,
@@ -71,7 +112,7 @@ collation_name, is_identity, identity_generation, identity_cycle,
     null::VARCHAR as identity_start,
     null::VARCHAR as identity_increment,
 from system.information_schema.columns columns
-left join _fs_global.
+left join _fs_global._fs_information_schema._fs_columns_ext ext
   on ext_table_catalog = columns.table_catalog
   AND ext_table_schema = columns.table_schema
   AND ext_table_name = columns.table_name
@@ -81,10 +122,8 @@ LEFT JOIN duckdb_columns ddb_columns
   AND ddb_columns.schema_name = columns.table_schema
   AND ddb_columns.table_name = columns.table_name
  AND ddb_columns.column_name = columns.column_name
-where
-  and schema_name != '_fs_information_schema'
+where schema_name != '_fs_information_schema'
 """
-)
 
 
 # replicates https://docs.snowflake.com/sql-reference/info-schema/databases
@@ -112,7 +151,7 @@ SQL_CREATE_INFORMATION_SCHEMA_TABLES_VIEW = Template(
 create view if not exists ${catalog}._fs_information_schema._fs_tables AS
 select *
 from system.information_schema.tables tables
-left join _fs_global.
+left join _fs_global._fs_information_schema._fs_tables_ext on
     tables.table_catalog = _fs_tables_ext.ext_table_catalog AND
     tables.table_schema = _fs_tables_ext.ext_table_schema AND
     tables.table_name = _fs_tables_ext.ext_table_name
@@ -147,7 +186,7 @@ where database_name = '${catalog}'
 )
 
 
-def creation_sql(catalog: str) -> str:
+def per_db_creation_sql(catalog: str) -> str:
     return f"""
         {SQL_CREATE_FS_INFORMATION_SCHEMA.substitute(catalog=catalog)};
         {SQL_CREATE_INFORMATION_SCHEMA_COLUMNS_VIEW.substitute(catalog=catalog)};
@@ -159,14 +198,17 @@ def creation_sql(catalog: str) -> str:
 
 def fs_global_creation_sql(catalog: str) -> str:
     return f"""
-        {
-        {
+        {SQL_CREATE_GLOBAL_FS_INFORMATION_SCHEMA};
+        {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_TABLES_EXT};
+        {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_EXT};
+        {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_VIEW};
+        {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE_EXT};
     """
 
 
 def insert_table_comment_sql(catalog: str, schema: str, table: str, comment: str) -> str:
     return f"""
-        INSERT INTO
+        INSERT INTO _fs_global._fs_information_schema._fs_tables_ext
        values ('{catalog}', '{schema}', '{table}', '{comment}')
        ON CONFLICT (ext_table_catalog, ext_table_schema, ext_table_name)
        DO UPDATE SET comment = excluded.comment
@@ -180,7 +222,7 @@ def insert_text_lengths_sql(catalog: str, schema: str, table: str, text_lengths:
     )
 
     return f"""
-        INSERT INTO
+        INSERT INTO _fs_global._fs_information_schema._fs_columns_ext
        values {values}
        ON CONFLICT (ext_table_catalog, ext_table_schema, ext_table_name, ext_column_name)
        DO UPDATE SET ext_character_maximum_length = excluded.ext_character_maximum_length,
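Note: the per-database statements are built with string.Template, so ${catalog} is substituted for each attached database while the _fs_global object names stay fixed. A tiny illustration of the mechanism (abbreviated, not the full view definition):

    from string import Template

    view = Template(
        "create view if not exists ${catalog}._fs_information_schema._fs_columns AS\n"
        "select * from _fs_global._fs_information_schema._fs_columns where table_catalog = '${catalog}'"
    )
    print(view.substitute(catalog="db1"))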
fakesnow/instance.py
CHANGED
@@ -9,49 +9,6 @@ import fakesnow.fakes as fakes
 from fakesnow import info_schema
 
 GLOBAL_DATABASE_NAME = "_fs_global"
-USERS_TABLE_FQ_NAME = f"{GLOBAL_DATABASE_NAME}._fs_users_ext"
-
-# replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
-SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT = f"""
-create table if not exists {USERS_TABLE_FQ_NAME} (
-    name varchar,
-    created_on TIMESTAMPTZ,
-    login_name varchar,
-    display_name varchar,
-    first_name varchar,
-    last_name varchar,
-    email varchar,
-    mins_to_unlock varchar,
-    days_to_expiry varchar,
-    comment varchar,
-    disabled varchar,
-    must_change_password varchar,
-    snowflake_lock varchar,
-    default_warehouse varchar,
-    default_namespace varchar,
-    default_role varchar,
-    default_secondary_roles varchar,
-    ext_authn_duo varchar,
-    ext_authn_uid varchar,
-    mins_to_bypass_mfa varchar,
-    owner varchar,
-    last_success_login TIMESTAMPTZ,
-    expires_at_time TIMESTAMPTZ,
-    locked_until_time TIMESTAMPTZ,
-    has_password varchar,
-    has_rsa_public_key varchar,
-)
-"""
-
-
-def create_global_database(conn: duckdb.DuckDBPyConnection) -> None:
-    """Create a "global" database for storing objects which span databases.
-
-    Including (but not limited to):
-    - Users
-    """
-    conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
-    conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
 
 
 class FakeSnow:
@@ -71,7 +28,6 @@ class FakeSnow:
 
         # create a "global" database for storing objects which span databases.
         self.duck_conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
-        self.duck_conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
         # create the info schema extensions
         self.duck_conn.execute(info_schema.fs_global_creation_sql(GLOBAL_DATABASE_NAME))
 
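Note: the surviving path depends on duckdb attaching a second in-memory catalog under a fixed name; the users table itself is now created by info_schema.fs_global_creation_sql. A standalone sketch of the attach step:

    import duckdb

    conn = duckdb.connect(":memory:")
    conn.execute("ATTACH IF NOT EXISTS ':memory:' AS _fs_global")
    # lists 'memory' and '_fs_global' along with duckdb's internal catalogs
    print(conn.execute("select database_name from duckdb_databases()").fetchall())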
fakesnow/server.py
CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import gzip
 import json
+import logging
 import secrets
 from base64 import b64encode
 from dataclasses import dataclass
@@ -19,6 +20,11 @@ from fakesnow.fakes import FakeSnowflakeConnection
 from fakesnow.instance import FakeSnow
 from fakesnow.rowtype import describe_as_rowtype
 
+logger = logging.getLogger("fakesnow.server")
+# use same format as uvicorn
+logger.handlers = logging.getLogger("uvicorn").handlers
+logger.setLevel(logging.INFO)
+
 shared_fs = FakeSnow()
 sessions: dict[str, FakeSnowflakeConnection] = {}
 
@@ -46,13 +52,22 @@ async def login_request(request: Request) -> JSONResponse:
         # share the in-memory database across connections
         fs = shared_fs
     token = secrets.token_urlsafe(32)
+    logger.info(f"Session login {database=} {schema=}")
     sessions[token] = fs.connect(database, schema)
-    return JSONResponse(
+    return JSONResponse(
+        {
+            "data": {
+                "token": token,
+                "parameters": [{"name": "AUTOCOMMIT", "value": True}],
+            },
+            "success": True,
+        }
+    )
 
 
 async def query_request(request: Request) -> JSONResponse:
     try:
-        conn = to_conn(request)
+        conn = to_conn(to_token(request))
 
         body = await request.body()
         if request.headers.get("Content-Encoding") == "gzip":
@@ -65,6 +80,8 @@ async def query_request(request: Request) -> JSONResponse:
         try:
             # only a single sql statement is sent at a time by the python snowflake connector
             cur = await run_in_threadpool(conn.cursor().execute, sql_text)
+            rowtype = describe_as_rowtype(cur._describe_last_sql())  # noqa: SLF001
+
         except snowflake.connector.errors.ProgrammingError as e:
             code = f"{e.errno:06d}"
             return JSONResponse(
@@ -78,8 +95,13 @@ async def query_request(request: Request) -> JSONResponse:
                     "success": False,
                 }
             )
-
-
+        except Exception as e:
+            # we have a bug or use of an unsupported feature
+            msg = f"Unhandled error during query {sql_text=}"
+            logger.error(msg, exc_info=e)
+            # my guess at mimicking a 500 error as per https://docs.snowflake.com/en/developer-guide/sql-api/reference
+            # and https://github.com/snowflakedb/gosnowflake/blob/8ed4c75ffd707dd712ad843f40189843ace683c4/restful.go#L318
+            raise ServerError(status_code=500, code="261000", message=msg) from None
 
         if cur._arrow_table:  # noqa: SLF001
             batch_bytes = to_ipc(to_sf(cur._arrow_table, rowtype))  # noqa: SLF001
@@ -107,24 +129,46 @@ async def query_request(request: Request) -> JSONResponse:
     )
 
 
-def to_conn(request: Request) -> FakeSnowflakeConnection:
+def to_token(request: Request) -> str:
     if not (auth := request.headers.get("Authorization")):
-        raise ServerError(status_code=401, code="
+        raise ServerError(status_code=401, code="390101", message="Authorization header not found in the request data.")
 
-
+    return auth[17:-1]
 
+
+def to_conn(token: str) -> FakeSnowflakeConnection:
     if not (conn := sessions.get(token)):
         raise ServerError(status_code=401, code="390104", message="User must login again to access the service.")
 
     return conn
 
 
+async def session(request: Request) -> JSONResponse:
+    try:
+        token = to_token(request)
+        _ = to_conn(token)
+
+        if bool(request.query_params.get("delete")):
+            del sessions[token]
+
+        return JSONResponse(
+            {"data": None, "code": None, "message": None, "success": True},
+        )
+
+    except ServerError as e:
+        return JSONResponse(
+            {"data": None, "code": e.code, "message": e.message, "success": False, "headers": None},
+            status_code=e.status_code,
+        )
+
+
 routes = [
     Route(
         "/session/v1/login-request",
         login_request,
        methods=["POST"],
    ),
+    Route("/session", session, methods=["POST"]),
    Route(
        "/queries/v1/query-request",
        query_request,
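Note: the auth[17:-1] slice in to_token works because the Python connector sends the session token as Authorization: Snowflake Token="<token>" — 17 characters of literal prefix, one trailing quote. A sketch with an illustrative header value:

    auth = 'Snowflake Token="abc123"'  # illustrative, not a real token
    assert len('Snowflake Token="') == 17
    print(auth[17:-1])  # abc123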
fakesnow/{transforms.py → transforms/__init__.py}
RENAMED
@@ -7,10 +7,65 @@ from typing import ClassVar, Literal, cast
 import sqlglot
 from sqlglot import exp
 
-from fakesnow import transforms_merge
-from fakesnow.instance import USERS_TABLE_FQ_NAME
+from fakesnow.transforms.merge import merge
 from fakesnow.variables import Variables
 
+__all__ = [
+    "alias_in_join",
+    "alter_table_strip_cluster_by",
+    "array_agg",
+    "array_agg_within_group",
+    "array_size",
+    "create_clone",
+    "create_database",
+    "create_user",
+    "dateadd_date_cast",
+    "dateadd_string_literal_timestamp_cast",
+    "datediff_string_literal_timestamp_cast",
+    "drop_schema_cascade",
+    "extract_comment_on_columns",
+    "extract_comment_on_table",
+    "extract_text_length",
+    "flatten",
+    "flatten_value_cast_as_varchar",
+    "float_to_double",
+    "identifier",
+    "indices_to_json_extract",
+    "information_schema_databases",
+    "information_schema_fs_tables",
+    "information_schema_fs_views",
+    "integer_precision",
+    "json_extract_cased_as_varchar",
+    "json_extract_cast_as_varchar",
+    "json_extract_precedence",
+    "merge",
+    "object_construct",
+    "random",
+    "regex_replace",
+    "regex_substr",
+    "sample",
+    "semi_structured_types",
+    "set_schema",
+    "sha256",
+    "show_keys",
+    "show_objects_tables",
+    "show_schemas",
+    "show_users",
+    "split",
+    "tag",
+    "timestamp_ntz",
+    "to_date",
+    "to_decimal",
+    "to_timestamp",
+    "to_timestamp_ntz",
+    "trim_cast_varchar",
+    "try_parse_json",
+    "try_to_decimal",
+    "update_variables",
+    "upper_case_unquoted_identifiers",
+    "values_columns",
+]
+
 SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.' as status")
 
 
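Note: the rename keeps the import surface stable — merge moves to its own module but is re-exported here (and listed in __all__), so call sites such as cursor.py's transforms.merge keep working. A sketch (requires fakesnow installed):

    from fakesnow import transforms

    # re-exported from fakesnow/transforms/merge.py by the package __init__
    print(callable(transforms.merge))  # True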
@@ -166,7 +221,7 @@ SELECT
     NULL::VARCHAR AS "comment",
     NULL::VARCHAR AS "policy name",
     NULL::JSON AS "privacy domain",
-FROM _fs_information_schema.
+FROM _fs_information_schema._fs_columns
 WHERE table_catalog = '${catalog}' AND table_schema = '${schema}' AND table_name = '${table}'
 ORDER BY ordinal_position
 """
@@ -195,7 +250,7 @@ FROM (DESCRIBE ${view})
 def describe_table(
     expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
 ) -> exp.Expression:
-    """Redirect to the information_schema.
+    """Redirect to the information_schema._fs_columns to match snowflake.
 
     See https://docs.snowflake.com/en/sql-reference/sql/desc-table
     """
@@ -210,17 +265,10 @@ def describe_table(
         catalog = table.catalog or current_database
         schema = table.db or current_schema
 
-
-
-            #
-            return sqlglot.parse_one(
-                SQL_DESCRIBE_INFO_SCHEMA.substitute(view=f"system.information_schema.{table.name}"), read="duckdb"
-            )
-        elif table.name.upper() == "_FS_COLUMNS_SNOWFLAKE":
-            # information schema views don't exist in _fs_columns_snowflake
-            return sqlglot.parse_one(
-                SQL_DESCRIBE_INFO_SCHEMA.substitute(view="_fs_information_schema._FS_COLUMNS_SNOWFLAKE"), read="duckdb"
-            )
+        if schema and schema.upper() == "_FS_INFORMATION_SCHEMA":
+            # describing an information_schema view
+            # (schema already transformed from information_schema -> _fs_information_schema)
+            return sqlglot.parse_one(SQL_DESCRIBE_INFO_SCHEMA.substitute(view=f"{schema}.{table.name}"), read="duckdb")
 
         return sqlglot.parse_one(
             SQL_DESCRIBE_TABLE.substitute(catalog=catalog, schema=schema, table=table.name),
@@ -230,7 +278,7 @@ def describe_table(
     return expression
 
 
-def drop_schema_cascade(expression: exp.Expression) -> exp.Expression:
+def drop_schema_cascade(expression: exp.Expression) -> exp.Expression:  #
     """Drop schema cascade.
 
     By default duckdb won't delete a schema if it contains tables, whereas snowflake will.
@@ -602,8 +650,8 @@ def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
-def information_schema_fs_columns_snowflake(expression: exp.Expression) -> exp.Expression:
-    """Redirect to the
+def information_schema_fs_columns(expression: exp.Expression) -> exp.Expression:
+    """Redirect to the _FS_COLUMNS view which has metadata that matches snowflake.
 
     Because duckdb doesn't store character_maximum_length or character_octet_length.
     """
@@ -615,7 +663,7 @@ def information_schema_fs_columns_snowflake(expression: exp.Expression) -> exp.Expression:
         and expression.name
         and expression.name.upper() == "COLUMNS"
     ):
-        expression.set("this", exp.Identifier(this="
+        expression.set("this", exp.Identifier(this="_FS_COLUMNS", quoted=False))
         expression.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))
 
     return expression
@@ -672,15 +720,21 @@ def information_schema_fs_views(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
+NUMBER_38_0 = [
+    exp.DataTypeParam(this=exp.Literal(this="38", is_string=False)),
+    exp.DataTypeParam(this=exp.Literal(this="0", is_string=False)),
+]
+
+
 def integer_precision(expression: exp.Expression) -> exp.Expression:
-    """Convert integers to bigint.
+    """Convert integers and number(38,0) to bigint.
 
-    So dataframes will return them with a dtype of int64.
+    So fetch_all will return int and dataframes will return them with a dtype of int64.
     """
-
     if (
         isinstance(expression, exp.DataType)
-        and
+        and expression.this == exp.DataType.Type.DECIMAL
+        and (not expression.expressions or expression.expressions == NUMBER_38_0)
     ) or expression.this in (exp.DataType.Type.INT, exp.DataType.Type.SMALLINT, exp.DataType.Type.TINYINT):
         return exp.DataType(
             this=exp.DataType.Type.BIGINT,
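Note: NUMBER_38_0 lets the transform treat Snowflake's default integer type NUMBER(38,0) — which sqlglot parses as DECIMAL(38,0) — like INT. A hedged usage sketch (exact output text may vary with the sqlglot version):

    import sqlglot
    from fakesnow import transforms

    ast = sqlglot.parse_one("CREATE TABLE t (i NUMBER(38,0), j INT)", read="snowflake")
    print(ast.transform(transforms.integer_precision).sql(dialect="duckdb"))
    # e.g. CREATE TABLE t (i BIGINT, j BIGINT)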
@@ -741,10 +795,6 @@ def json_extract_precedence(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
-def merge(expression: exp.Expression) -> list[exp.Expression]:
-    return transforms_merge.merge(expression)
-
-
 def random(expression: exp.Expression) -> exp.Expression:
     """Convert random() and random(seed).
 
@@ -792,29 +842,29 @@ def object_construct(expression: exp.Expression) -> exp.Expression:
     See https://docs.snowflake.com/en/sql-reference/functions/object_construct
     """
 
-    if isinstance(expression, exp.Struct):
-
-        for e in expression.expressions:
-            if not (isinstance(e, exp.PropertyEQ)):
-                non_null_expressions.append(e)
-                continue
+    if not isinstance(expression, exp.Struct):
+        return expression
 
-
-
+    non_null_expressions = []
+    for e in expression.expressions:
+        if not (isinstance(e, exp.PropertyEQ)):
+            non_null_expressions.append(e)
+            continue
 
-
-
+        left = e.left
+        right = e.right
 
-
-
+        left_is_null = isinstance(left, exp.Null)
+        right_is_null = isinstance(right, exp.Null)
 
-
+        if left_is_null or right_is_null:
+            continue
 
-
-        new_struct.set("expressions", non_null_expressions)
-        return exp.Anonymous(this="TO_JSON", expressions=[new_struct])
+        non_null_expressions.append(e)
 
-
+    new_struct = expression.copy()
+    new_struct.set("expressions", non_null_expressions)
+    return exp.Anonymous(this="TO_JSON", expressions=[new_struct])
 
 
 def regex_replace(expression: exp.Expression) -> exp.Expression:
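Note: the object_construct rewrite is a behavior-preserving restructure — guard clause first, then the same null-filtering loop — so OBJECT_CONSTRUCT still omits any pair whose key or value is NULL, as Snowflake does. A sketch (assumes sqlglot parses OBJECT_CONSTRUCT to exp.Struct, which this transform relies on):

    import sqlglot
    from fakesnow import transforms

    ast = sqlglot.parse_one("SELECT OBJECT_CONSTRUCT('a', 1, 'b', NULL)", read="snowflake")
    # the 'b' pair is dropped and the struct is wrapped in TO_JSON(...)
    print(ast.transform(transforms.object_construct).sql(dialect="duckdb"))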
@@ -979,17 +1029,10 @@ def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
         catalog = table.db or current_database
         schema = table.name
     else:
-        # all objects / tables
+        # all objects / tables - will show everything in the "account"
         catalog = None
         schema = None
 
-    tables_only = "table_type = 'BASE TABLE' and " if show == "TABLES" else ""
-    exclude_fakesnow_tables = "not (table_schema == '_fs_information_schema')"
-    # without a database will show everything in the "account"
-    table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
-    schema = f" and table_schema = '{schema}'" if schema else ""
-    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
-
     columns = [
         "to_timestamp(0)::timestamptz as 'created_on'",
         "table_name as 'name'",
@@ -997,17 +1040,29 @@ def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
         "table_catalog as 'database_name'",
         "table_schema as 'schema_name'",
     ]
-
-    terse = expression.args["terse"]
-    if not terse:
+    if not expression.args["terse"]:
         columns.append('null as "comment"')
+    columns_clause = ", ".join(columns)
+
+    where = ["not (table_schema == '_fs_information_schema')"]  # exclude fakesnow's internal schemas
+    if show == "TABLES":
+        where.append("table_type = 'BASE TABLE'")
+    if catalog:
+        where.append(f"table_catalog = '{catalog}'")
+    if schema:
+        where.append(f"table_schema = '{schema}'")
+    if (like := expression.args.get("like")) and isinstance(like, exp.Expression):
+        where.append(f"table_name ilike {like.sql()}")
+    where_clause = " AND ".join(where)
 
-
+    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
 
-    query =
-
-
-
+    query = f"""
+        SELECT {columns_clause}
+        from information_schema.tables
+        where {where_clause}
+        {limit}
+    """
 
     return sqlglot.parse_one(query, read="duckdb")
 
@@ -1023,7 +1078,7 @@
     catalog_name as 'database_name',
     NULL as 'schema_name'
 from information_schema.schemata
-where not catalog_name in ('memory', 'system', 'temp')
+where not catalog_name in ('memory', 'system', 'temp', '_fs_global')
 and not schema_name in ('main', 'pg_catalog')
 """
 
@@ -1046,6 +1101,113 @@ def show_schemas(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
     return expression
 
 
+SQL_SHOW_DATABASES = """
+SELECT
+    to_timestamp(0)::timestamptz as 'created_on',
+    database_name as 'name',
+    'N' as 'is_default',
+    'N' as 'is_current',
+    '' as 'origin',
+    'SYSADMIN' as 'owner',
+    comment,
+    '' as 'options',
+    1 as 'retention_time',
+    'STANDARD' as 'kind',
+    NULL as 'budget',
+    'ROLE' as 'owner_role_type',
+    NULL as 'object_visibility'
+FROM duckdb_databases
+WHERE database_name NOT IN ('memory', '_fs_global')
+"""
+
+
+def show_databases(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW DATABASES to a query against the information_schema.schemata table.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-databases
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "DATABASES":
+        return sqlglot.parse_one(SQL_SHOW_DATABASES, read="duckdb")
+
+    return expression
+
+
+# returns zero rows
+SQL_SHOW_FUNCTIONS = """
+SELECT
+    '1970-01-01 00:00:00 UTC'::timestamptz as created_on,
+    'SYSTIMESTAMP' as name,
+    '' as schema_name,
+    'Y' as is_builtin,
+    'N' as is_aggregate,
+    'N' as is_ansi,
+    0 as min_num_arguments,
+    0 as max_num_arguments,
+    'SYSTIMESTAMP() RETURN TIMESTAMP_LTZ' as arguments,
+    'Returns the current timestamp' as description,
+    '' as catalog_name,
+    'N' as is_table_function,
+    'N' as valid_for_clustering,
+    NULL as is_secure,
+    '' as secrets,
+    '' as external_access_integrations,
+    'N' as is_external_function,
+    'SQL' as language,
+    'N' as is_memoizable,
+    'N' as is_data_metric
+WHERE 0 = 1;
+"""
+
+
+def show_functions(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW FUNCTIONS.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-functions
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "FUNCTIONS":
+        return sqlglot.parse_one(SQL_SHOW_FUNCTIONS, read="duckdb")
+
+    return expression
+
+
+# returns zero rows
+SQL_SHOW_PROCEDURES = """
+SELECT
+    '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
+    'SYSTEM$CLASSIFY' as 'name',
+    '' as 'schema_name',
+    'Y' as 'is_builtin',
+    'N' as 'is_aggregate',
+    'N' as 'is_ansi',
+    2 as 'min_num_arguments',
+    2 as 'max_num_arguments',
+    'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
+    'classify stored proc' as 'description',
+    '' as 'catalog_name',
+    'N' as 'is_table_function',
+    'N' as 'valid_for_clustering',
+    NULL as 'is_secure',
+    '' as 'secrets',
+    '' as 'external_access_integrations',
+WHERE 0 = 1;
+"""
+
+
+def show_procedures(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW PROCEDURES.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-procedures
+    """
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == "PROCEDURES"
+    ):
+        return sqlglot.parse_one(SQL_SHOW_PROCEDURES, read="duckdb")
+
+    return expression
+
+
 def split(expression: exp.Expression) -> exp.Expression:
     """
     Convert output of duckdb str_split from varchar[] to JSON array to match Snowflake.
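Note: each of these transforms pattern-matches a top-level exp.Show node and substitutes a canned duckdb query; cursor.py wires them into the transform chain shown earlier. One in isolation (with the sqlglot version pinned below):

    import sqlglot
    from fakesnow import transforms

    ast = sqlglot.parse_one("SHOW DATABASES", read="snowflake")
    print(transforms.show_databases(ast).sql(dialect="duckdb"))
    # SELECT ... FROM duckdb_databases WHERE database_name NOT IN ('memory', '_fs_global')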
@@ -1145,8 +1307,6 @@ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Expression | None, exp.Expression | None]:
             # to_number('100', 'TM9', 10, 2)
             if arg_scale:
                 _scale = arg_scale
-            else:
-                pass
         else:
             # to_number('100', 10, ...)
             # arg_format is not a string, so it must be precision.
@@ -1156,12 +1316,10 @@ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Expression | None, exp.Expression | None]:
             # And arg_precision must be scale
             if arg_precision:
                 _scale = arg_precision
-
-
-    if
-
-        if arg_scale:
-            _scale = arg_scale
+    elif arg_precision:
+        _precision = arg_precision
+        if arg_scale:
+            _scale = arg_scale
 
     return _format, _precision, _scale
 
@@ -1392,7 +1550,7 @@ def show_users(expression: exp.Expression) -> exp.Expression:
     https://docs.snowflake.com/en/sql-reference/sql/show-users
     """
     if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "USERS":
-        return sqlglot.parse_one(
+        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users_ext", read="duckdb")
 
     return expression
 
@@ -1410,7 +1568,9 @@ def create_user(expression: exp.Expression) -> exp.Expression:
         _, name, *ignored = sub_exp.split(" ")
         if ignored:
             raise NotImplementedError(f"`CREATE USER` with {ignored} not yet supported")
-        return sqlglot.parse_one(
+        return sqlglot.parse_one(
+            f"INSERT INTO _fs_global._fs_information_schema._fs_users_ext (name) VALUES ('{name}')", read="duckdb"
+        )
 
     return expression
 
@@ -1480,8 +1640,7 @@ def show_keys(
     AND table_name NOT LIKE '_fs_%'
     """
 
-    scope_kind
-    if scope_kind:
+    if scope_kind := expression.args.get("scope_kind"):
         table = expression.args["scope"]
 
         if scope_kind == "SCHEMA":
{fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: fakesnow
-Version: 0.9.29
+Version: 0.9.31
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
        Version 2.0, January 2004
@@ -213,17 +213,17 @@ License-File: LICENSE
 Requires-Dist: duckdb~=1.2.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=26.
+Requires-Dist: sqlglot~=26.10.1
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: dirty-equals; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
 Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
 Requires-Dist: pre-commit~=4.0; extra == "dev"
-Requires-Dist: pyarrow-stubs==
+Requires-Dist: pyarrow-stubs==17.19; extra == "dev"
 Requires-Dist: pytest~=8.0; extra == "dev"
 Requires-Dist: pytest-asyncio; extra == "dev"
-Requires-Dist: ruff~=0.
+Requires-Dist: ruff~=0.11.0; extra == "dev"
 Requires-Dist: twine~=6.0; extra == "dev"
 Requires-Dist: snowflake-sqlalchemy~=1.7.0; extra == "dev"
 Provides-Extra: notebook
fakesnow-0.9.31.dist-info/RECORD
ADDED
@@ -0,0 +1,26 @@
+fakesnow/__init__.py,sha256=qUfgucQYPdELrJaxczalhJgWAWQ6cfTCUAHx6nUqRaI,3528
+fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
+fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
+fakesnow/checks.py,sha256=be-xo0oMoAUVhlMDCu1_Rkoh_L8p_p8qo9P6reJSHIQ,2874
+fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
+fakesnow/conn.py,sha256=HGhFKErKWvAfVEy3QSc0tfNmzGh_T7FtvRfWuDBy_CQ,5744
+fakesnow/cursor.py,sha256=tlcQK_w4r6BZHs3BTNHUFGk2hZz-lSnOdIrNLaJ-rBw,21349
+fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
+fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
+fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
+fakesnow/info_schema.py,sha256=xDhGy07fpc8bcy_VTfh54UzwNIaB4ZhGmjgJeoiZ0hQ,8744
+fakesnow/instance.py,sha256=vbg4XiAjpdglEqOM7X_HvCOnE-6Bf67nTYeBfGVUSNU,1889
+fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
+fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
+fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
+fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
+fakesnow/server.py,sha256=-jKyEVuD2TEr88jUSA1Lu86MAymel7LQAiNlytHqhTg,5934
+fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
+fakesnow/transforms/__init__.py,sha256=a-WNNqIo6IhY9p_m0zfdre7eb6KDmz-dbyrNkhpsHQk,60506
+fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
+fakesnow-0.9.31.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+fakesnow-0.9.31.dist-info/METADATA,sha256=RXWi0hLbvLkUtzDw0SHtcu-N3-Ebd1BC5K7JfoPnZ38,18106
+fakesnow-0.9.31.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
+fakesnow-0.9.31.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.31.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
+fakesnow-0.9.31.dist-info/RECORD,,
fakesnow-0.9.29.dist-info/RECORD
DELETED
@@ -1,26 +0,0 @@
-fakesnow/__init__.py,sha256=qUfgucQYPdELrJaxczalhJgWAWQ6cfTCUAHx6nUqRaI,3528
-fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
-fakesnow/arrow.py,sha256=MwatkdZX5AFADzXvxhBFmcRJVxbW4D39VoqLyhpTbl0,5057
-fakesnow/checks.py,sha256=N8sXldhS3u1gG32qvZ4VFlsKgavRKrQrxLiQU8am1lw,2691
-fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
-fakesnow/conn.py,sha256=da9ln_covsyKgdNdPXLzMTUBr72P0rRGadIDVt-kaeI,5737
-fakesnow/cursor.py,sha256=1BP1rZ28JfIfJkIR_8yEFDq2FrUf93JFrrYLJoKJr14,20587
-fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
-fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
-fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
-fakesnow/info_schema.py,sha256=_4YWnpuOFuyACr9k4iYdf2vLN7GDMG8X_pEBlC-8OmM,7269
-fakesnow/instance.py,sha256=7xHJv-5-KKAI3Qm7blcvkXgkGg7WYtXEm3nUS4jLyFs,3299
-fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
-fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
-fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
-fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
-fakesnow/server.py,sha256=VpM-ZjFS4JekLESEoQTndvXqnqz8bH4ZO8lq_66-c6s,4387
-fakesnow/transforms.py,sha256=dAoFFRFkJG8kcQdBPnE5w2eed4AZkh4NV3ajx1vu3A8,56444
-fakesnow/transforms_merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
-fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
-fakesnow-0.9.29.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
-fakesnow-0.9.29.dist-info/METADATA,sha256=sx4qqMsuxOaaDMKR5U8A4ZT0djn9wHSrOUjCqthUoWQ,18107
-fakesnow-0.9.29.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-fakesnow-0.9.29.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
-fakesnow-0.9.29.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
-fakesnow-0.9.29.dist-info/RECORD,,
/fakesnow/{transforms_merge.py → transforms/merge.py}
File without changes
{fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/LICENSE
File without changes
{fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/entry_points.txt
File without changes
{fakesnow-0.9.29.dist-info → fakesnow-0.9.31.dist-info}/top_level.txt
File without changes