fakesnow 0.9.4__tar.gz → 0.9.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fakesnow-0.9.4/fakesnow.egg-info → fakesnow-0.9.6}/PKG-INFO +10 -8
- {fakesnow-0.9.4 → fakesnow-0.9.6}/README.md +3 -1
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/__init__.py +4 -3
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/fakes.py +27 -15
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/info_schema.py +38 -20
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/transforms.py +141 -41
- {fakesnow-0.9.4 → fakesnow-0.9.6/fakesnow.egg-info}/PKG-INFO +10 -8
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/SOURCES.txt +1 -1
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/requires.txt +6 -6
- {fakesnow-0.9.4 → fakesnow-0.9.6}/pyproject.toml +40 -52
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_fakes.py +189 -7
- fakesnow-0.9.6/tests/test_sqlalchemy.py +46 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_transforms.py +13 -0
- fakesnow-0.9.4/MANIFEST.in +0 -1
- {fakesnow-0.9.4 → fakesnow-0.9.6}/LICENSE +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/__main__.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/checks.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/cli.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/expr.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/fixtures.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/global_database.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/macros.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/py.typed +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/dependency_links.txt +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/entry_points.txt +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/top_level.txt +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/setup.cfg +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_checks.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_cli.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_expr.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_info_schema.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_patch.py +0 -0
- {fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_users.py +0 -0
{fakesnow-0.9.4/fakesnow.egg-info → fakesnow-0.9.6}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.4
+Version: 0.9.6
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                         Version 2.0, January 2004
@@ -213,20 +213,20 @@ License-File: LICENSE
 Requires-Dist: duckdb~=0.10.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=21.
+Requires-Dist: sqlglot~=21.2.0
 Provides-Extra: dev
-Requires-Dist: black~=23.9; extra == "dev"
 Requires-Dist: build~=1.0; extra == "dev"
+Requires-Dist: pandas-stubs; extra == "dev"
 Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
 Requires-Dist: pre-commit~=3.4; extra == "dev"
-Requires-Dist: pytest~=
-Requires-Dist: ruff~=0.
-Requires-Dist: twine~=
+Requires-Dist: pytest~=8.0; extra == "dev"
+Requires-Dist: ruff~=0.3.2; extra == "dev"
+Requires-Dist: twine~=5.0; extra == "dev"
+Requires-Dist: snowflake-sqlalchemy~=1.5.0; extra == "dev"
 Provides-Extra: notebook
 Requires-Dist: duckdb-engine; extra == "notebook"
 Requires-Dist: ipykernel; extra == "notebook"
 Requires-Dist: jupysql; extra == "notebook"
-Requires-Dist: snowflake-sqlalchemy; extra == "notebook"
 
 # fakesnow ❄️
 
@@ -234,6 +234,8 @@ Requires-Dist: snowflake-sqlalchemy; extra == "notebook"
 [](https://github.com/tekumara/fakesnow/actions/workflows/release.yml)
 [](https://pypi.org/project/fakesnow/)
 
+[](../../actions/workflows/ci.yml)
+
 Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.
 
 ## Install
@@ -356,7 +358,7 @@ For more detail see [tests/test_fakes.py](tests/test_fakes.py)
 ## Caveats
 
 - The order of rows is non deterministic and may not match Snowflake unless ORDER BY is fully specified.
--
+- A more liberal Snowflake SQL dialect than a real Snowflake instance is supported, ie: some queries might pass using fakesnow that a real Snowflake instance would reject.
 
 ## Contributing
 
{fakesnow-0.9.4 → fakesnow-0.9.6}/README.md
@@ -4,6 +4,8 @@
 [](https://github.com/tekumara/fakesnow/actions/workflows/release.yml)
 [](https://pypi.org/project/fakesnow/)
 
+[](../../actions/workflows/ci.yml)
+
 Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.
 
 ## Install
@@ -126,7 +128,7 @@ For more detail see [tests/test_fakes.py](tests/test_fakes.py)
 ## Caveats
 
 - The order of rows is non deterministic and may not match Snowflake unless ORDER BY is fully specified.
--
+- A more liberal Snowflake SQL dialect than a real Snowflake instance is supported, ie: some queries might pass using fakesnow that a real Snowflake instance would reject.
 
 ## Contributing
 
{fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/fakes.py
@@ -37,6 +37,7 @@ SQL_SUCCESS = "SELECT 'Statement executed successfully.' as 'status'"
 SQL_CREATED_DATABASE = Template("SELECT 'Database ${name} successfully created.' as 'status'")
 SQL_CREATED_SCHEMA = Template("SELECT 'Schema ${name} successfully created.' as 'status'")
 SQL_CREATED_TABLE = Template("SELECT 'Table ${name} successfully created.' as 'status'")
+SQL_CREATED_VIEW = Template("SELECT 'View ${name} successfully created.' as 'status'")
 SQL_DROPPED = Template("SELECT '${name} successfully dropped.' as 'status'")
 SQL_INSERTED_ROWS = Template("SELECT ${count} as 'number of rows inserted'")
 SQL_UPDATED_ROWS = Template("SELECT ${count} as 'number of rows updated', 0 as 'number of multi-joined rows updated'")
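These status strings are plain stdlib string.Template objects, so the new SQL_CREATED_VIEW template expands like the others. A minimal sketch of the substitution (the EXAMPLE name is illustrative):

    from string import Template

    # same shape as the SQL_CREATED_VIEW template added above
    SQL_CREATED_VIEW = Template("SELECT 'View ${name} successfully created.' as 'status'")

    # substituting the view name yields the single-row result set the fake cursor returns
    sql = SQL_CREATED_VIEW.substitute(name="EXAMPLE")
    assert sql == "SELECT 'View EXAMPLE successfully created.' as 'status'"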
@@ -177,6 +178,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.indices_to_json_extract)
             .transform(transforms.json_extract_cast_as_varchar)
             .transform(transforms.json_extract_cased_as_varchar)
+            .transform(transforms.json_extract_precedence)
             .transform(transforms.flatten)
             .transform(transforms.regex_replace)
             .transform(transforms.regex_substr)
@@ -196,6 +198,10 @@ class FakeSnowflakeCursor:
             .transform(transforms.identifier)
             .transform(lambda e: transforms.show_schemas(e, self._conn.database))
             .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
+            # TODO collapse into a single show_keys function
+            .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="PRIMARY"))
+            .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="UNIQUE"))
+            .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="FOREIGN"))
             .transform(transforms.show_users)
             .transform(transforms.create_user)
         )
@@ -230,12 +236,18 @@ class FakeSnowflakeCursor:
             raise snowflake.connector.errors.DatabaseError(msg=e.args[0], errno=250002, sqlstate="08003") from None
 
         affected_count = None
-
-
+
+        if (maybe_ident := expression.find(exp.Identifier, bfs=False)) and isinstance(maybe_ident.this, str):
+            ident = maybe_ident.this if maybe_ident.quoted else maybe_ident.this.upper()
+        else:
+            ident = None
+
+        if cmd == "USE DATABASE" and ident:
+            self._conn.database = ident
             self._conn.database_set = True
 
-        elif cmd == "USE SCHEMA" and
-            self._conn.schema = ident
+        elif cmd == "USE SCHEMA" and ident:
+            self._conn.schema = ident
             self._conn.schema_set = True
 
         elif create_db_name := transformed.args.get("create_db_name"):
@@ -243,24 +255,24 @@ class FakeSnowflakeCursor:
             self._duck_conn.execute(info_schema.creation_sql(create_db_name))
             result_sql = SQL_CREATED_DATABASE.substitute(name=create_db_name)
 
-        elif cmd == "CREATE SCHEMA" and
-
-
+        elif cmd == "CREATE SCHEMA" and ident:
+            result_sql = SQL_CREATED_SCHEMA.substitute(name=ident)
+
+        elif cmd == "CREATE TABLE" and ident:
+            result_sql = SQL_CREATED_TABLE.substitute(name=ident)
 
-        elif cmd == "CREATE
-
-            result_sql = SQL_CREATED_TABLE.substitute(name=name)
+        elif cmd == "CREATE VIEW" and ident:
+            result_sql = SQL_CREATED_VIEW.substitute(name=ident)
 
-        elif cmd.startswith("DROP") and
-
-            result_sql = SQL_DROPPED.substitute(name=name)
+        elif cmd.startswith("DROP") and ident:
+            result_sql = SQL_DROPPED.substitute(name=ident)
 
             # if dropping the current database/schema then reset conn metadata
-            if cmd == "DROP DATABASE" and
+            if cmd == "DROP DATABASE" and ident == self._conn.database:
                 self._conn.database = None
                 self._conn.schema = None
 
-            elif cmd == "DROP SCHEMA" and
+            elif cmd == "DROP SCHEMA" and ident == self._conn.schema:
                 self._conn.schema = None
 
         elif cmd == "INSERT":
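The new ident extraction above mirrors Snowflake's identifier-casing rules: unquoted identifiers are uppercased, quoted identifiers keep their exact case. A standalone sketch of that behaviour, assuming only sqlglot (extract_ident is a name made up for illustration):

    from __future__ import annotations

    import sqlglot
    from sqlglot import exp

    def extract_ident(sql: str) -> str | None:
        # first identifier in the statement, matching the cursor logic above
        expression = sqlglot.parse_one(sql, read="snowflake")
        if (maybe_ident := expression.find(exp.Identifier, bfs=False)) and isinstance(maybe_ident.this, str):
            return maybe_ident.this if maybe_ident.quoted else maybe_ident.this.upper()
        return None

    assert extract_ident("USE DATABASE marts") == "MARTS"    # unquoted -> uppercased
    assert extract_ident('USE DATABASE "MiXeD"') == "MiXeD"  # quoted -> case preserved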
{fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/info_schema.py
@@ -1,4 +1,5 @@
 """Info schema extension tables/views used for storing snowflake metadata not captured by duckdb."""
+
 from __future__ import annotations
 
 from string import Template
@@ -35,28 +36,45 @@ create table if not exists ${catalog}.information_schema._fs_columns_ext (
 SQL_CREATE_INFORMATION_SCHEMA_COLUMNS_VIEW = Template(
     """
 create view if not exists ${catalog}.information_schema._fs_columns_snowflake AS
-select
-
-
-
-
-
-
-
+select
+    columns.table_catalog AS table_catalog,
+    columns.table_schema AS table_schema,
+    columns.table_name AS table_name,
+    columns.column_name AS column_name,
+    columns.ordinal_position AS ordinal_position,
+    columns.column_default AS column_default,
+    columns.is_nullable AS is_nullable,
+    case when starts_with(columns.data_type, 'DECIMAL') or columns.data_type='BIGINT' then 'NUMBER'
+         when columns.data_type='VARCHAR' then 'TEXT'
+         when columns.data_type='DOUBLE' then 'FLOAT'
+         when columns.data_type='BLOB' then 'BINARY'
+         when columns.data_type='TIMESTAMP' then 'TIMESTAMP_NTZ'
+         when columns.data_type='TIMESTAMP WITH TIME ZONE' then 'TIMESTAMP_TZ'
+         when columns.data_type='JSON' then 'VARIANT'
+         else columns.data_type end as data_type,
 ext_character_maximum_length as character_maximum_length, ext_character_octet_length as character_octet_length,
-case when data_type='BIGINT' then 38
-     when data_type='DOUBLE' then NULL
-     else numeric_precision end as numeric_precision,
-case when data_type='BIGINT' then 10
-     when data_type='DOUBLE' then NULL
-     else numeric_precision_radix end as numeric_precision_radix,
-case when data_type='DOUBLE' then NULL else numeric_scale end as numeric_scale,
-collation_name, is_identity, identity_generation, identity_cycle
-
+case when columns.data_type='BIGINT' then 38
+     when columns.data_type='DOUBLE' then NULL
+     else columns.numeric_precision end as numeric_precision,
+case when columns.data_type='BIGINT' then 10
+     when columns.data_type='DOUBLE' then NULL
+     else columns.numeric_precision_radix end as numeric_precision_radix,
+case when columns.data_type='DOUBLE' then NULL else columns.numeric_scale end as numeric_scale,
+collation_name, is_identity, identity_generation, identity_cycle,
+ddb_columns.comment as comment,
+null as identity_start,
+null as identity_increment,
+from ${catalog}.information_schema.columns columns
 left join ${catalog}.information_schema._fs_columns_ext ext
-on ext_table_catalog = table_catalog
-
+on ext_table_catalog = columns.table_catalog
+AND ext_table_schema = columns.table_schema
+AND ext_table_name = columns.table_name
+AND ext_column_name = columns.column_name
+LEFT JOIN duckdb_columns ddb_columns
+ON ddb_columns.database_name = columns.table_catalog
+AND ddb_columns.schema_name = columns.table_schema
+AND ddb_columns.table_name = columns.table_name
+AND ddb_columns.column_name = columns.column_name
 """
 )
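The practical effect of the rewritten view is that duckdb's native type names surface as Snowflake type names when clients query information_schema.columns. A hedged sketch of what that looks like through the connector, assuming fakesnow's default auto-creation of the database and schema:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch(), snowflake.connector.connect(database="db1", schema="schema1") as conn:
        with conn.cursor() as cur:
            cur.execute("create table t (id int, name varchar, raw variant)")
            cur.execute(
                "select column_name, data_type from information_schema.columns "
                "where table_name = 'T' order by ordinal_position"
            )
            # per the mapping above, duckdb's BIGINT/VARCHAR/JSON come back as
            # Snowflake's NUMBER/TEXT/VARIANT
            assert cur.fetchall() == [("ID", "NUMBER"), ("NAME", "TEXT"), ("RAW", "VARIANT")]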
{fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow/transforms.py
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 from pathlib import Path
 from string import Template
-from typing import cast
+from typing import Literal, cast
 
 import sqlglot
 from sqlglot import exp
@@ -38,7 +38,8 @@ def create_database(expression: exp.Expression, db_path: Path | None = None) ->
     """
 
     if isinstance(expression, exp.Create) and str(expression.args.get("kind")).upper() == "DATABASE":
-
+        ident = expression.find(exp.Identifier)
+        assert ident, f"No identifier in {expression.sql}"
         db_name = ident.this
         db_file = f"{db_path/db_name}.db" if db_path else ":memory:"
@@ -449,6 +450,16 @@ def json_extract_cast_as_varchar(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
+def json_extract_precedence(expression: exp.Expression) -> exp.Expression:
+    """Associate json extract operands to avoid duckdb operators of higher precedence transforming the expression.
+
+    See https://github.com/tekumara/fakesnow/issues/53
+    """
+    if isinstance(expression, exp.JSONExtract):
+        return exp.Paren(this=expression)
+    return expression
+
+
 def random(expression: exp.Expression) -> exp.Expression:
     """Convert random() and random(seed).
@@ -671,55 +682,61 @@ def set_schema(expression: exp.Expression, current_database: str | None) -> exp.
     return expression
 
 
-SQL_SHOW_OBJECTS = """
-select
-    to_timestamp(0)::timestamptz as 'created_on',
-    table_name as 'name',
-    case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind',
-    table_catalog as 'database_name',
-    table_schema as 'schema_name'
-from information_schema.tables
-"""
-
-
 def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
     """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.
 
     See https://docs.snowflake.com/en/sql-reference/sql/show-objects
     https://docs.snowflake.com/en/sql-reference/sql/show-tables
     """
-    if (
+    if not (
         isinstance(expression, exp.Show)
         and isinstance(expression.this, str)
-        and expression.this.upper()
+        and (show := expression.this.upper())
+        and show in {"OBJECTS", "TABLES"}
     ):
-
-        table = expression.find(exp.Table)
-
-        if scope_kind == "DATABASE":
-            catalog = (table and table.name) or current_database
-            schema = None
-        elif scope_kind == "SCHEMA" and table:
-            catalog = table.db or current_database
-            schema = table.name
-        else:
-            # all objects / tables
-            catalog = None
-            schema = None
-
-        tables_only = "table_type = 'BASE TABLE' and " if expression.this.upper() == "TABLES" else ""
-        exclude_fakesnow_tables = "not (table_schema == 'information_schema' and table_name like '_fs_%%')"
-        # without a database will show everything in the "account"
-        table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
-        schema = f" and table_schema = '{schema}'" if schema else ""
-        limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
-
-        return sqlglot.parse_one(
-            f"{SQL_SHOW_OBJECTS} where {tables_only}{exclude_fakesnow_tables}{table_catalog}{schema}{limit}",
-            read="duckdb",
-        )
+        return expression
 
-
+    scope_kind = expression.args.get("scope_kind")
+    table = expression.find(exp.Table)
+
+    if scope_kind == "DATABASE":
+        catalog = (table and table.name) or current_database
+        schema = None
+    elif scope_kind == "SCHEMA" and table:
+        catalog = table.db or current_database
+        schema = table.name
+    else:
+        # all objects / tables
+        catalog = None
+        schema = None
+
+    tables_only = "table_type = 'BASE TABLE' and " if show == "TABLES" else ""
+    exclude_fakesnow_tables = "not (table_schema == 'information_schema' and table_name like '_fs_%%')"
+    # without a database will show everything in the "account"
+    table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
+    schema = f" and table_schema = '{schema}'" if schema else ""
+    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
+
+    columns = [
+        "to_timestamp(0)::timestamptz as 'created_on'",
+        "table_name as 'name'",
+        "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
+        "table_catalog as 'database_name'",
+        "table_schema as 'schema_name'",
+    ]
+
+    terse = expression.args["terse"]
+    if not terse:
+        columns.append('null as "comment"')
+
+    columns_str = ", ".join(columns)
+
+    query = (
+        f"SELECT {columns_str} from information_schema.tables "
+        f"where {tables_only}{exclude_fakesnow_tables}{table_catalog}{schema}{limit}"
+    )
+
+    return sqlglot.parse_one(query, read="duckdb")
 
 
 SQL_SHOW_SCHEMAS = """
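In practice this means SHOW TERSE OBJECTS/TABLES returns the five terse columns, while the non-terse forms now append an (always-null) comment column, matching the updated tests. A hedged usage sketch via the patched connector:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch(), snowflake.connector.connect(database="db1", schema="schema1") as conn:
        with conn.cursor() as cur:
            cur.execute("create table example (x int)")
            cur.execute("show terse objects in database")
            # columns: created_on, name, kind, database_name, schema_name
            cur.execute("show objects")
            # the non-terse form adds a trailing null comment column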
@@ -987,3 +1004,86 @@ def create_user(expression: exp.Expression) -> exp.Expression:
         return sqlglot.parse_one(f"INSERT INTO {USERS_TABLE_FQ_NAME} (name) VALUES ('{name}')", read="duckdb")
 
     return expression
+
+
+def show_keys(
+    expression: exp.Expression,
+    current_database: str | None = None,
+    *,
+    kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
+) -> exp.Expression:
+    """Transform SHOW <kind> KEYS to a query against the duckdb_constraints meta-table.
+
+    https://docs.snowflake.com/en/sql-reference/sql/show-primary-keys
+    """
+    snowflake_kind = kind
+    if kind == "FOREIGN":
+        snowflake_kind = "IMPORTED"
+
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == f"{snowflake_kind} KEYS"
+    ):
+        if kind == "FOREIGN":
+            statement = f"""
+                SELECT
+                    to_timestamp(0)::timestamptz as created_on,
+
+                    '' as pk_database_name,
+                    '' as pk_schema_name,
+                    '' as pk_table_name,
+                    '' as pk_column_name,
+                    unnest(constraint_column_names) as pk_column_name,
+
+                    database_name as fk_database_name,
+                    schema_name as fk_schema_name,
+                    table_name as fk_table_name,
+                    unnest(constraint_column_names) as fk_column_name,
+                    1 as key_sequence,
+                    'NO ACTION' as update_rule,
+                    'NO ACTION' as delete_rule,
+                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS fk_name,
+                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS pk_name,
+                    'NOT DEFERRABLE' as deferrability,
+                    'false' as rely,
+                    null as "comment"
+                FROM duckdb_constraints
+                WHERE constraint_type = 'PRIMARY KEY'
+                  AND database_name = '{current_database}'
+                  AND table_name NOT LIKE '_fs_%'
+            """
+        else:
+            statement = f"""
+                SELECT
+                    to_timestamp(0)::timestamptz as created_on,
+                    database_name as database_name,
+                    schema_name as schema_name,
+                    table_name as table_name,
+                    unnest(constraint_column_names) as column_name,
+                    1 as key_sequence,
+                    LOWER(CONCAT(database_name, '_', schema_name, '_', table_name, '_pkey')) AS constraint_name,
+                    'false' as rely,
+                    null as "comment"
+                FROM duckdb_constraints
+                WHERE constraint_type = '{kind} KEY'
+                  AND database_name = '{current_database}'
+                  AND table_name NOT LIKE '_fs_%'
+            """
+
+        scope_kind = expression.args.get("scope_kind")
+        if scope_kind:
+            table = expression.args["scope"]
+
+            if scope_kind == "SCHEMA":
+                db = table and table.db
+                schema = table and table.name
+                if db:
+                    statement += f"AND database_name = '{db}' "
+
+                if schema:
+                    statement += f"AND schema_name = '{schema}' "
+            else:
+                raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
+        return sqlglot.parse_one(statement)
+    return expression
{fakesnow-0.9.4 → fakesnow-0.9.6/fakesnow.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.4
+Version: 0.9.6
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                         Version 2.0, January 2004
@@ -213,20 +213,20 @@ License-File: LICENSE
 Requires-Dist: duckdb~=0.10.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=21.
+Requires-Dist: sqlglot~=21.2.0
 Provides-Extra: dev
-Requires-Dist: black~=23.9; extra == "dev"
 Requires-Dist: build~=1.0; extra == "dev"
+Requires-Dist: pandas-stubs; extra == "dev"
 Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
 Requires-Dist: pre-commit~=3.4; extra == "dev"
-Requires-Dist: pytest~=
-Requires-Dist: ruff~=0.
-Requires-Dist: twine~=
+Requires-Dist: pytest~=8.0; extra == "dev"
+Requires-Dist: ruff~=0.3.2; extra == "dev"
+Requires-Dist: twine~=5.0; extra == "dev"
+Requires-Dist: snowflake-sqlalchemy~=1.5.0; extra == "dev"
 Provides-Extra: notebook
 Requires-Dist: duckdb-engine; extra == "notebook"
 Requires-Dist: ipykernel; extra == "notebook"
 Requires-Dist: jupysql; extra == "notebook"
-Requires-Dist: snowflake-sqlalchemy; extra == "notebook"
 
 # fakesnow ❄️
 
@@ -234,6 +234,8 @@ Requires-Dist: snowflake-sqlalchemy; extra == "notebook"
 [](https://github.com/tekumara/fakesnow/actions/workflows/release.yml)
 [](https://pypi.org/project/fakesnow/)
 
+[](../../actions/workflows/ci.yml)
+
 Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.
 
 ## Install
@@ -356,7 +358,7 @@ For more detail see [tests/test_fakes.py](tests/test_fakes.py)
 ## Caveats
 
 - The order of rows is non deterministic and may not match Snowflake unless ORDER BY is fully specified.
--
+- A more liberal Snowflake SQL dialect than a real Snowflake instance is supported, ie: some queries might pass using fakesnow that a real Snowflake instance would reject.
 
 ## Contributing
 
{fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/SOURCES.txt
@@ -1,5 +1,4 @@
 LICENSE
-MANIFEST.in
 README.md
 pyproject.toml
 fakesnow/__init__.py
@@ -26,5 +25,6 @@ tests/test_expr.py
 tests/test_fakes.py
 tests/test_info_schema.py
 tests/test_patch.py
+tests/test_sqlalchemy.py
 tests/test_transforms.py
 tests/test_users.py
{fakesnow-0.9.4 → fakesnow-0.9.6}/fakesnow.egg-info/requires.txt
@@ -1,19 +1,19 @@
 duckdb~=0.10.0
 pyarrow
 snowflake-connector-python
-sqlglot~=21.
+sqlglot~=21.2.0
 
 [dev]
-black~=23.9
 build~=1.0
+pandas-stubs
 snowflake-connector-python[pandas,secure-local-storage]
 pre-commit~=3.4
-pytest~=
-ruff~=0.
-twine~=
+pytest~=8.0
+ruff~=0.3.2
+twine~=5.0
+snowflake-sqlalchemy~=1.5.0
 
 [notebook]
 duckdb-engine
 ipykernel
 jupysql
-snowflake-sqlalchemy
{fakesnow-0.9.4 → fakesnow-0.9.6}/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "fakesnow"
 description = "Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally."
-version = "0.9.4"
+version = "0.9.6"
 readme = "README.md"
 license = { file = "LICENSE" }
 classifiers = ["License :: OSI Approved :: MIT License"]
@@ -11,7 +11,7 @@ dependencies = [
     "duckdb~=0.10.0",
     "pyarrow",
     "snowflake-connector-python",
-    "sqlglot~=21.
+    "sqlglot~=21.2.0",
 ]
 
 [project.urls]
@@ -22,28 +22,26 @@ fakesnow = "fakesnow.cli:main"
 
 [project.optional-dependencies]
 dev = [
-    "black~=23.9",
     "build~=1.0",
+    # to fix https://github.com/pandas-dev/pandas/issues/56995
+    "pandas-stubs",
     # include compatible version of pandas, and secure-local-storage for token caching
     "snowflake-connector-python[pandas, secure-local-storage]",
     "pre-commit~=3.4",
-    "pytest~=
-    "ruff~=0.
-    "twine~=
+    "pytest~=8.0",
+    "ruff~=0.3.2",
+    "twine~=5.0",
+    "snowflake-sqlalchemy~=1.5.0",
 ]
 # for debugging, see https://duckdb.org/docs/guides/python/jupyter.html
-notebook = ["duckdb-engine", "ipykernel", "jupysql"
-
-[tool.setuptools.packages.find]
-where = ["."]
-exclude = ["tests*"]
+notebook = ["duckdb-engine", "ipykernel", "jupysql"]
 
 [build-system]
-requires = ["setuptools~=
+requires = ["setuptools~=69.1", "wheel~=0.42"]
 
-[tool.
-
-
+[tool.setuptools.packages.find]
+where = ["."]
+exclude = ["tests*", "node_modules*", "build*"]
 
 [tool.pyright]
 venvPath = "."
@@ -53,63 +51,53 @@ strictListInference = true
 strictDictionaryInference = true
 strictParameterNoneValue = true
 reportTypedDictNotRequiredAccess = false
+reportIncompatibleMethodOverride = true
+reportUnnecessaryTypeIgnoreComment = true
 
 [tool.ruff]
-# Compatibility between Ruff and Black
-# https://beta.ruff.rs/docs/faq/#is-ruff-compatible-with-black
 line-length = 120
+# first-party imports for sorting
+src = ["."]
+fix = true
+show-fixes = true
 
+[tool.ruff.lint]
 # rules to enable/ignore
 select = [
-    # pyflakes
-    "
-    # pycodestyle
-    "
-    "
-    #
-    "
-    #
-    "
-    #
-    "
-    #
-
-    # flake8-self
-    "SLF",
-    # pyupgrade
-    "UP",
-    # perflint
-    "PERF",
-    # ruff-specific
-    "RUF",
-    # flake8-simplify
-    "SIM",
-    # flake8-builtins
-    "A"
+    "F",    # pyflakes
+    "E",    # pycodestyle
+    "W",    # pycodestyle
+    "ANN",  # type annotations
+    "N",    # pep8-naming
+    "B",    # bugbear
+    "I",    # isort
+    # "ARG", # flake8-unused-arguments - disabled because our fakes don't use all arguments
+    "SLF",  # flake8-self
+    "UP",   # pyupgrade
+    "PERF", # perflint
+    "RUF",  # ruff-specific
+    "SIM",  # flake8-simplify
+    "S113", # request-without-timeout
+    "A",    # flake8-builtins
 ]
 ignore = [
-    # allow untyped self and cls args
+    # allow untyped self and cls args
     "ANN101",
    "ANN102",
+    # allow no return type from dunder methods
    "ANN204",
    # allow == True because pandas dataframes overload equality
    "E712",
 ]
-# first-party imports for sorting
-src = ["."]
-fix = true
-show-fixes = true
 
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 combine-as-imports = true
 force-wrap-aliases = true
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 # test functions don't need return types
 "tests/*" = ["ANN201", "ANN202"]
 
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
 # allow *args: Any, **kwargs: Any
 allow-star-arg-any = true
{fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_fakes.py
@@ -115,17 +115,21 @@ def test_connect_different_sessions_use_database(_fakesnow_no_auto_create: None)
 
 def test_connect_reuse_db():
     with tempfile.TemporaryDirectory(prefix="fakesnow-test") as db_path:
-        with
-
-
+        with (
+            fakesnow.patch(db_path=db_path),
+            snowflake.connector.connect(database="db1", schema="schema1") as conn,
+            conn.cursor() as cur,
+        ):
             # creates db1.schema1.example
             cur.execute("create table example (x int)")
             cur.execute("insert into example values (420)")
 
         # reconnect
-        with
-
-
+        with (
+            fakesnow.patch(db_path=db_path),
+            snowflake.connector.connect(database="db1", schema="schema1") as conn,
+            conn.cursor() as cur,
+        ):
             assert cur.execute("select * from example").fetchall() == [(420,)]
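Besides adopting parenthesised context managers (Python 3.10+ syntax), the test exercises db_path persistence: with fakesnow.patch(db_path=...), each database is written as a duckdb file named `{db_path/db_name}.db` (see the create_database hunk in transforms.py above), so a second patched session sees data created by the first. A minimal sketch of that pattern, with an illustrative directory name:

    import fakesnow
    import snowflake.connector

    # databases persist under /tmp/fs as db1.db, so separate patched
    # sessions against the same db_path share state
    with fakesnow.patch(db_path="/tmp/fs"), snowflake.connector.connect(database="db1", schema="schema1") as conn:
        conn.cursor().execute("create table t (x int)")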
@@ -438,6 +442,15 @@ def test_description_create_drop_table(dcur: snowflake.connector.cursor.DictCursor):
     assert dcur.description == [ResultMetadata(name='status', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True)]  # fmt: skip
 
 
+def test_description_create_drop_view(dcur: snowflake.connector.cursor.DictCursor):
+    dcur.execute("create view example(id) as select 1")
+    assert dcur.fetchall() == [{"status": "View EXAMPLE successfully created."}]
+    assert dcur.description == [ResultMetadata(name='status', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True)]  # fmt: skip
+    dcur.execute("drop view example")
+    assert dcur.fetchall() == [{"status": "EXAMPLE successfully dropped."}]
+    assert dcur.description == [ResultMetadata(name='status', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True)]  # fmt: skip
+
+
 def test_description_insert(dcur: snowflake.connector.cursor.DictCursor):
     dcur.execute("create table example (x int)")
     dcur.execute("insert into example values (1), (2)")
@@ -649,6 +662,11 @@ def test_get_path_as_varchar(cur: snowflake.connector.cursor.SnowflakeCursor):
     assert cur.fetchall() == [("42",)]
 
 
+def test_get_path_precedence(cur: snowflake.connector.cursor.SnowflakeCursor):
+    cur.execute("select {'K1': {'K2': 1}} as col where col:K1:K2 > 0")
+    assert indent(cur.fetchall()) == [('{\n  "K1": {\n    "K2": 1\n  }\n}',)]
+
+
 def test_get_result_batches(cur: snowflake.connector.cursor.SnowflakeCursor):
     # no result set
     assert cur.get_result_batches() is None
@@ -850,6 +868,76 @@ def test_semi_structured_types(cur: snowflake.connector.cursor.SnowflakeCursor):
     ]
 
 
+@pytest.mark.xfail(
+    reason="only partial supports exists to support sqlalchemy, see test_reflect",
+)
+def test_show_keys(dcur: snowflake.connector.cursor.SnowflakeCursor):
+    dcur.execute("CREATE TABLE test_table (id INT PRIMARY KEY, name TEXT UNIQUE)")
+    dcur.execute("CREATE TABLE test_table2 (id INT, other_id INT, FOREIGN KEY (other_id) REFERENCES test_table(id))")
+
+    dcur.execute("SHOW PRIMARY KEYS")
+    primary_keys = dcur.fetchall()
+    assert primary_keys == [
+        {
+            "created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+            "database_name": "DB1",
+            "schema_name": "SCHEMA1",
+            "table_name": "TEST_TABLE",
+            "column_name": "ID",
+            "key_sequence": 1,
+            "constraint_name": "SYS_CONSTRAINT_DB1_SCHEMA1_TEST_TABLE_ID_pk",
+            "rely": "false",
+            "comment": None,
+        }
+    ]
+
+    dcur.execute("SHOW UNIQUE KEYS")
+    unique_keys = dcur.fetchall()
+    assert unique_keys == [
+        {
+            "created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+            "database_name": "DB1",
+            "schema_name": "SCHEMA1",
+            "table_name": "TEST_TABLE",
+            "column_name": "NAME",
+            "key_sequence": 1,
+            "constraint_name": "SYS_CONSTRAINT_DB1_SCHEMA1_TEST_TABLE_NAME_uk",
+            "rely": "false",
+            "comment": None,
+        }
+    ]
+
+    dcur.execute("SHOW IMPORTED KEYS")
+    foreign_keys = dcur.fetchall()
+    assert foreign_keys == [
+        {
+            "created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+            "pk_database_name": "DB1",
+            "pk_schema_name": "SCHEMA1",
+            "pk_table_name": "TEST_TABLE",
+            "pk_column_name": "ID",
+            "fk_database_name": "DB1",
+            "fk_schema_name": "SCHEMA1",
+            "fk_table_name": "TEST_TABLE2",
+            "fk_column_name": "OTHER_ID",
+            "key_sequence": 1,
+            "update_rule": "NO ACTION",
+            "delete_rule": "NO ACTION",
+            "fk_name": "SYS_CONSTRAINT_DB1_SCHEMA1_TEST_TABLE2_OTHER_ID_fk",
+            "pk_name": "SYS_CONSTRAINT_DB1_SCHEMA1_TEST_TABLE_ID_pk",
+            "deferrability": "NOT DEFERRABLE",
+            "rely": "false",
+            "comment": None,
+        }
+    ]
+
+    dcur.execute("SHOW PRIMARY KEYS IN SCHEMA")
+    assert dcur.fetchall() == primary_keys
+
+    dcur.execute("SHOW PRIMARY KEYS IN DATABASE")
+    assert dcur.fetchall() == primary_keys
+
+
 def test_show_objects(dcur: snowflake.connector.cursor.SnowflakeCursor):
     dcur.execute("create table example(x int)")
     dcur.execute("create view view1 as select * from example")
@@ -871,6 +959,7 @@ def test_show_objects(dcur: snowflake.connector.cursor.SnowflakeCursor):
         },
     ]
     assert dcur.fetchall() == objects
+
     dcur.execute("show terse objects in database")
     assert dcur.fetchall() == [
         *objects,
@@ -891,6 +980,24 @@ def test_show_objects(dcur: snowflake.connector.cursor.SnowflakeCursor):
     ]
     assert [r.name for r in dcur.description] == ["created_on", "name", "kind", "database_name", "schema_name"]
 
+    dcur.execute("show objects").fetchall()
+    assert [r.name for r in dcur.description] == [
+        "created_on",
+        "name",
+        "kind",
+        "database_name",
+        "schema_name",
+        "comment",
+        # TODO: include these columns
+        # "cluster_by",
+        # "rows",
+        # "bytes",
+        # "owner",
+        # "retention_time",
+        # "owner_role_type",
+        # "budget"
+    ]
+
 
 def test_show_schemas(dcur: snowflake.connector.cursor.SnowflakeCursor):
     dcur.execute("show terse schemas in database db1 limit 100")
@@ -929,7 +1036,82 @@ def test_show_tables(dcur: snowflake.connector.cursor.SnowflakeCursor):
     # assert dcur.fetchall() == objects
     dcur.execute("show terse tables in db1.schema1")
     assert dcur.fetchall() == objects
-    assert [r.name for r in dcur.description] == [
+    assert [r.name for r in dcur.description] == [
+        "created_on",
+        "name",
+        "kind",
+        "database_name",
+        "schema_name",
+    ]
+
+    dcur.execute("show tables in db1.schema1")
+    assert [r.name for r in dcur.description] == [
+        "created_on",
+        "name",
+        "kind",
+        "database_name",
+        "schema_name",
+        "comment",
+        # TODO: include these columns
+        # "cluster_by",
+        # "rows",
+        # "bytes",
+        # "owner",
+        # "retention_time",
+        # "automatic_clustering",
+        # "change_tracking",
+        # "search_optimization",
+        # "search_optimization_progress",
+        # "search_optimization_bytes",
+        # "is_external",
+        # "enable_schema_evolution",
+        # "owner_role_type",
+        # "is_event",
+        # "budget",
+        # "is_hybrid",
+        # "is_iceberg",
+    ]
+
+
+def test_show_primary_keys(dcur: snowflake.connector.cursor.SnowflakeCursor):
+    dcur.execute("CREATE TABLE example (id int, name varchar, PRIMARY KEY (id, name))")
+
+    dcur.execute("show primary keys")
+    result = dcur.fetchall()
+
+    assert result == [
+        {
+            "created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+            "database_name": "DB1",
+            "schema_name": "SCHEMA1",
+            "table_name": "EXAMPLE",
+            "column_name": "ID",
+            "key_sequence": 1,
+            "constraint_name": "db1_schema1_example_pkey",
+            "rely": "false",
+            "comment": None,
+        },
+        {
+            "created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+            "database_name": "DB1",
+            "schema_name": "SCHEMA1",
+            "table_name": "EXAMPLE",
+            "column_name": "NAME",
+            "key_sequence": 1,
+            "constraint_name": "db1_schema1_example_pkey",
+            "rely": "false",
+            "comment": None,
+        },
+    ]
+
+    dcur.execute("show primary keys in schema db1.schema1")
+    result2 = dcur.fetchall()
+    assert result == result2
+
+    # Assertion to sanity check that the above "in schema" filter isn't wrong, and in fact filters
+    dcur.execute("show primary keys in schema db1.information_schema")
+    result3 = dcur.fetchall()
+    assert result3 == []
 
 
 def test_sqlstate(cur: snowflake.connector.cursor.SnowflakeCursor):
fakesnow-0.9.6/tests/test_sqlalchemy.py ADDED
@@ -0,0 +1,46 @@
+from typing import cast
+
+from sqlalchemy import Column, MetaData, Table, types
+from sqlalchemy.engine import Engine
+from sqlalchemy.sql.expression import TextClause
+
+
+def test_engine(snowflake_engine: Engine):
+    # verifies cursor.description, commit, and rollback issued by SQLAlchemy
+    with snowflake_engine.connect() as conn:
+        conn.execute(TextClause("CREATE VIEW foo AS SELECT * FROM information_schema.databases"))
+
+        result = conn.execute("SELECT database_name FROM foo")
+        assert result
+        assert result.fetchall() == [("DB1",)]
+
+
+def test_metadata_create_all(snowflake_engine: Engine):
+    metadata = MetaData()
+
+    table = cast(Table, Table("foo", metadata, Column(types.Integer, name="id"), Column(types.String, name="name")))
+    metadata.create_all(bind=snowflake_engine)
+
+    with snowflake_engine.connect() as conn:
+        result = conn.execute(table.select())
+        assert result
+        assert result.fetchall() == []
+
+
+def test_reflect(snowflake_engine: Engine):
+    with snowflake_engine.connect() as conn:
+        conn.execute(TextClause("CREATE TABLE foo (id INTEGER, name VARCHAR)"))
+
+    metadata = MetaData()
+    metadata.reflect(bind=snowflake_engine, only=["foo"])
+
+    assert metadata.tables
+    foo_table: Table = metadata.tables["foo"]
+
+    with snowflake_engine.connect() as conn:
+        result = conn.execute(foo_table.insert().values(id=1, name="one"))
+
+        result = conn.execute(foo_table.select())
+
+        assert result
+        assert result.fetchall() == [(1, "one")]
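The snowflake_engine fixture these tests depend on is not part of this diff; a plausible sketch of one, assuming fakesnow's pytest fixtures are active and snowflake-sqlalchemy's URL helper (the account value is arbitrary under the fake):

    from collections.abc import Iterator

    import pytest
    import sqlalchemy
    from snowflake.sqlalchemy import URL

    @pytest.fixture
    def snowflake_engine(_fakesnow: None) -> Iterator[sqlalchemy.engine.Engine]:
        # hypothetical fixture: _fakesnow (from fakesnow.fixtures) patches the
        # connector, so the engine talks to duckdb rather than a real account
        engine = sqlalchemy.create_engine(URL(account="fakesnow", database="db1", schema="schema1"))
        yield engine
        engine.dispose()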
{fakesnow-0.9.4 → fakesnow-0.9.6}/tests/test_transforms.py
@@ -21,6 +21,7 @@ from fakesnow.transforms import (
     integer_precision,
     json_extract_cased_as_varchar,
     json_extract_cast_as_varchar,
+    json_extract_precedence,
     object_construct,
     parse_json,
     random,
@@ -237,6 +238,18 @@ def test_json_extract_cast_as_varchar() -> None:
     )
 
 
+def test_json_extract_precedence() -> None:
+    assert (
+        sqlglot.parse_one(
+            """select {'K1': {'K2': 1}} as col where col:K1:K2 > 0""",
+            read="snowflake",
+        )
+        .transform(json_extract_precedence)
+        .sql(dialect="duckdb")
+        == """SELECT {'K1': {'K2': 1}} AS col WHERE (col -> '$.K1' -> '$.K2') > 0"""
+    )
+
+
 def test_object_construct() -> None:
     assert (
         sqlglot.parse_one(
fakesnow-0.9.4/MANIFEST.in DELETED
@@ -1 +0,0 @@
-recursive-include fakesnow py.typed