fakesnow 0.9.13__tar.gz → 0.9.14__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fakesnow-0.9.13 → fakesnow-0.9.14}/PKG-INFO +3 -3
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/fakes.py +2 -1
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/transforms.py +38 -22
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/PKG-INFO +3 -3
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/requires.txt +2 -2
- {fakesnow-0.9.13 → fakesnow-0.9.14}/pyproject.toml +3 -3
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_fakes.py +29 -7
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_info_schema.py +3 -2
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_transforms.py +33 -7
- {fakesnow-0.9.13 → fakesnow-0.9.14}/LICENSE +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/README.md +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/__init__.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/__main__.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/checks.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/cli.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/expr.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/fixtures.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/global_database.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/info_schema.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/macros.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/py.typed +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/SOURCES.txt +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/dependency_links.txt +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/entry_points.txt +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/top_level.txt +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/setup.cfg +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_checks.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_cli.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_expr.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_patch.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_sqlalchemy.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_users.py +0 -0
- {fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_write_pandas.py +0 -0
{fakesnow-0.9.13 → fakesnow-0.9.14}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.13
+Version: 0.9.14
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
 Version 2.0, January 2004
@@ -210,10 +210,10 @@ Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: duckdb~=0.10.
+Requires-Dist: duckdb~=0.10.3
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=
+Requires-Dist: sqlglot~=24.1.0
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
{fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/fakes.py

@@ -175,7 +175,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.to_timestamp_ntz)
             .transform(transforms.to_timestamp)
             .transform(transforms.object_construct)
-            .transform(transforms.
+            .transform(transforms.timestamp_ntz)
             .transform(transforms.float_to_double)
             .transform(transforms.integer_precision)
             .transform(transforms.extract_text_length)
@@ -198,6 +198,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.create_user)
             .transform(transforms.sha256)
             .transform(transforms.create_clone)
+            .transform(transforms.alias_in_join)
         )

     def _execute(
{fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow/transforms.py

@@ -13,6 +13,25 @@ MISSING_DATABASE = "missing_database"
 SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.'")


+def alias_in_join(expression: exp.Expression) -> exp.Expression:
+    if (
+        isinstance(expression, exp.Select)
+        and (aliases := {e.args.get("alias"): e for e in expression.expressions if isinstance(e, exp.Alias)})
+        and (joins := expression.args.get("joins"))
+    ):
+        j: exp.Join
+        for j in joins:
+            if (
+                (on := j.args.get("on"))
+                and (col := on.this)
+                and (isinstance(col, exp.Column))
+                and (alias := aliases.get(col.this))
+            ):
+                col.args["this"] = alias.this
+
+    return expression
+
+
 def array_size(expression: exp.Expression) -> exp.Expression:
     if isinstance(expression, exp.ArraySize):
         # case is used to convert 0 to null, because null is returned by duckdb when no case matches
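The new alias_in_join transform rewrites a join condition that references a SELECT-list alias so the alias is replaced by the expression it names. A minimal sketch of applying it standalone, mirroring the test_alias_in_join added further down (it only assumes fakesnow is importable):

```python
# Sketch: rewrite an ON clause that refers to a SELECT-list alias into the aliased expression.
import sqlglot

from fakesnow.transforms import alias_in_join

sql = """
SELECT
    T.COL
    , SUBSTR(T.COL, 4) AS ALIAS
    , J.ANOTHER
FROM TEST AS T
LEFT JOIN JOINED AS J
ON ALIAS = J.COL
"""

# ON ALIAS = J.COL becomes ON SUBSTR(T.COL, 4) = J.COL
print(sqlglot.parse_one(sql).transform(alias_in_join).sql())
```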
@@ -350,17 +369,13 @@ def extract_comment_on_table(expression: exp.Expression) -> exp.Expression:
         return new
     elif (
         isinstance(expression, exp.AlterTable)
-        and (sexp := expression.find(exp.
-        and
-        and (
-        and (eid := eq.find(exp.Identifier))
-        and isinstance(eid.this, str)
-        and eid.this.upper() == "COMMENT"
-        and (lit := eq.find(exp.Literal))
+        and (sexp := expression.find(exp.AlterSet))
+        and (scp := sexp.find(exp.SchemaCommentProperty))
+        and isinstance(scp.this, exp.Literal)
         and (table := expression.find(exp.Table))
     ):
         new = SUCCESS_NOP.copy()
-        new.args["table_comment"] = (table,
+        new.args["table_comment"] = (table, scp.this.this)
         return new

     return expression
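This hunk follows the sqlglot 24.x parse shape: a table comment set via ALTER TABLE ... SET COMMENT is now found through an AlterSet node carrying a SchemaCommentProperty, instead of the EQ/Identifier/Literal walk used before. A hedged sketch of inspecting that shape (the example statement and comment text are illustrative assumptions, not taken from the diff):

```python
# Sketch of the parse shape the updated matcher walks (sqlglot ~24.1).
import sqlglot
from sqlglot import exp

e = sqlglot.parse_one("ALTER TABLE table1 SET COMMENT = 'example comment'", read="snowflake")
sexp = e.find(exp.AlterSet)
scp = sexp.find(exp.SchemaCommentProperty) if sexp else None
if isinstance(e, exp.AlterTable) and scp and isinstance(scp.this, exp.Literal):
    print(scp.this.this)  # the comment text, eg: 'example comment'
```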
@@ -596,15 +611,12 @@ def json_extract_cast_as_varchar(expression: exp.Expression) -> exp.Expression:
     """
     if (
         isinstance(expression, exp.Cast)
-        and (to := expression.to)
-        and isinstance(to, exp.DataType)
-        and to.this in {exp.DataType.Type.VARCHAR, exp.DataType.Type.TEXT}
         and (je := expression.this)
         and isinstance(je, exp.JSONExtract)
         and (path := je.expression)
         and isinstance(path, exp.JSONPath)
     ):
-
+        je.replace(exp.JSONExtractScalar(this=je.this, expression=path))
     return expression

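With the VARCHAR/TEXT guard removed, any cast wrapping a JSON extract is rewritten to duckdb's ->> (extract scalar) operator while the outer CAST is kept. A minimal sketch lifted from the updated test_json_extract_cast_as_varchar:

```python
# Sketch: a non-varchar cast of a JSON extract is now also rewritten to ->>.
import sqlglot

from fakesnow.transforms import json_extract_cast_as_varchar

out = (
    sqlglot.parse_one("""select parse_json('{"count":"9000"}'):count::number""", read="snowflake")
    .transform(json_extract_cast_as_varchar)
    .sql(dialect="duckdb")
)
print(out)  # SELECT CAST(JSON('{"count":"9000"}') ->> '$.count' AS DECIMAL(38, 0))
```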
@@ -937,7 +949,7 @@ def tag(expression: exp.Expression) -> exp.Expression:

     if isinstance(expression, exp.AlterTable) and (actions := expression.args.get("actions")):
         for a in actions:
-            if isinstance(a, exp.
+            if isinstance(a, exp.AlterSet) and a.args.get("tag"):
                 return SUCCESS_NOP
     elif (
         isinstance(expression, exp.Command)
@@ -947,6 +959,13 @@ def tag(expression: exp.Expression) -> exp.Expression:
     ):
         # alter table modify column set tag
         return SUCCESS_NOP
+    elif (
+        isinstance(expression, exp.Create)
+        and (kind := expression.args.get("kind"))
+        and isinstance(kind, str)
+        and kind.upper() == "TAG"
+    ):
+        return SUCCESS_NOP

     return expression

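Alongside matching exp.AlterSet for ALTER TABLE ... SET TAG, the tag transform now also swallows CREATE TAG statements. A minimal sketch mirroring the new test_tag assertion:

```python
# Sketch: CREATE TAG is replaced with the generic "Statement executed successfully." no-op.
import sqlglot

from fakesnow.transforms import SUCCESS_NOP, tag

e = sqlglot.parse_one("CREATE TAG cost_center COMMENT = 'cost_center tag'", read="snowflake")
assert e.transform(tag) == SUCCESS_NOP
```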
@@ -1114,17 +1133,15 @@ def to_timestamp_ntz(expression: exp.Expression) -> exp.Expression:
     return expression


-def
-    """Convert timestamp_ntz(
+def timestamp_ntz(expression: exp.Expression) -> exp.Expression:
+    """Convert timestamp_ntz (snowflake) to timestamp (duckdb).

-
+    NB: timestamp_ntz defaults to nanosecond precision (ie: NTZ(9)). The duckdb equivalent is TIMESTAMP_NS.
+    However we use TIMESTAMP (ie: microsecond precision) here rather than TIMESTAMP_NS to avoid
+    https://github.com/duckdb/duckdb/issues/7980 in test_write_pandas_timestamp_ntz.
     """

-    if (
-        isinstance(expression, exp.DataType)
-        and expression.this == exp.DataType.Type.TIMESTAMPNTZ
-        and exp.DataTypeParam(this=exp.Literal(this="9", is_string=False)) in expression.expressions
-    ):
+    if isinstance(expression, exp.DataType) and expression.this == exp.DataType.Type.TIMESTAMPNTZ:
         return exp.DataType(this=exp.DataType.Type.TIMESTAMP)

     return expression
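The check is relaxed from TIMESTAMP_NTZ(9) only to any TIMESTAMP_NTZ, now registered as timestamp_ntz; per the docstring it deliberately maps to microsecond-precision TIMESTAMP rather than TIMESTAMP_NS to sidestep duckdb/duckdb#7980. A minimal sketch mirroring test_timestamp_ntz_ns:

```python
# Sketch: TIMESTAMP_NTZ (with or without a precision argument) becomes duckdb TIMESTAMP.
import sqlglot

from fakesnow.transforms import timestamp_ntz

out = (
    sqlglot.parse_one("CREATE TABLE table1(ts TIMESTAMP_NTZ(9))", read="snowflake")
    .transform(timestamp_ntz)
    .sql(dialect="duckdb")
)
print(out)  # CREATE TABLE table1 (ts TIMESTAMP)
```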
@@ -1173,7 +1190,6 @@ def try_parse_json(expression: exp.Expression) -> exp.Expression:
     return expression


-# sqlglot.parse_one("create table example(date TIMESTAMP_NTZ(9));", read="snowflake")
 def semi_structured_types(expression: exp.Expression) -> exp.Expression:
     """Convert OBJECT, ARRAY, and VARIANT types to duckdb compatible types.

{fakesnow-0.9.13 → fakesnow-0.9.14}/fakesnow.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.13
+Version: 0.9.14
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
 Version 2.0, January 2004
@@ -210,10 +210,10 @@ Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: duckdb~=0.10.
+Requires-Dist: duckdb~=0.10.3
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=
+Requires-Dist: sqlglot~=24.1.0
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
{fakesnow-0.9.13 → fakesnow-0.9.14}/pyproject.toml

@@ -1,17 +1,17 @@
 [project]
 name = "fakesnow"
 description = "Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally."
-version = "0.9.13"
+version = "0.9.14"
 readme = "README.md"
 license = { file = "LICENSE" }
 classifiers = ["License :: OSI Approved :: MIT License"]
 keywords = ["snowflake", "snowflakedb", "fake", "local", "mock", "testing"]
 requires-python = ">=3.9"
 dependencies = [
-    "duckdb~=0.10.
+    "duckdb~=0.10.3",
     "pyarrow",
     "snowflake-connector-python",
-    "sqlglot~=
+    "sqlglot~=24.1.0",
 ]

 [project.urls]
{fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_fakes.py

@@ -20,6 +20,25 @@ import fakesnow
 from tests.utils import dindent, indent


+def test_alias_on_join(conn: snowflake.connector.SnowflakeConnection):
+    *_, cur = conn.execute_string(
+        """
+        CREATE OR REPLACE TEMPORARY TABLE TEST (COL VARCHAR);
+        INSERT INTO TEST (COL) VALUES ('VARCHAR1'), ('VARCHAR2');
+        CREATE OR REPLACE TEMPORARY TABLE JOINED (COL VARCHAR, ANOTHER VARCHAR);
+        INSERT INTO JOINED (COL, ANOTHER) VALUES ('CHAR1', 'JOIN');
+        SELECT
+            T.COL
+            , SUBSTR(T.COL, 4) AS ALIAS
+            , J.ANOTHER
+        FROM TEST AS T
+        LEFT JOIN JOINED AS J
+        ON ALIAS = J.COL;
+        """
+    )
+    assert cur.fetchall() == [("VARCHAR1", "CHAR1", "JOIN"), ("VARCHAR2", "CHAR2", None)]
+
+
 def test_alter_table(cur: snowflake.connector.cursor.SnowflakeCursor):
     cur.execute("create table table1 (id int)")
     cur.execute("alter table table1 add column name varchar(20)")
@@ -401,7 +420,7 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
             XNUMBER82 NUMBER(8,2), XNUMBER NUMBER, XDECIMAL DECIMAL, XNUMERIC NUMERIC,
             XINT INT, XINTEGER INTEGER, XBIGINT BIGINT, XSMALLINT SMALLINT, XTINYINT TINYINT, XBYTEINT BYTEINT,
             XVARCHAR20 VARCHAR(20), XVARCHAR VARCHAR, XTEXT TEXT,
-            XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XTIMESTAMP_TZ TIMESTAMP_TZ, XDATE DATE, XTIME TIME,
+            XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ TIMESTAMP_NTZ, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XTIMESTAMP_TZ TIMESTAMP_TZ, XDATE DATE, XTIME TIME,
             XBINARY BINARY, /* XARRAY ARRAY, XOBJECT OBJECT */ XVARIANT VARIANT
         )
         """
@@ -426,6 +445,7 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
         ResultMetadata(name='XVARCHAR', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
         ResultMetadata(name='XTEXT', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
         ResultMetadata(name='XTIMESTAMP', type_code=8, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
+        ResultMetadata(name='XTIMESTAMP_NTZ', type_code=8, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
         ResultMetadata(name='XTIMESTAMP_NTZ9', type_code=8, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
         ResultMetadata(name='XTIMESTAMP_TZ', type_code=7, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
         ResultMetadata(name='XDATE', type_code=3, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
@@ -469,7 +489,7 @@ def test_describe_table(dcur: snowflake.connector.cursor.DictCursor):
             XNUMBER82 NUMBER(8,2), XNUMBER NUMBER, XDECIMAL DECIMAL, XNUMERIC NUMERIC,
             XINT INT, XINTEGER INTEGER, XBIGINT BIGINT, XSMALLINT SMALLINT, XTINYINT TINYINT, XBYTEINT BYTEINT,
             XVARCHAR20 VARCHAR(20), XVARCHAR VARCHAR, XTEXT TEXT,
-            XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XTIMESTAMP_TZ TIMESTAMP_TZ, XDATE DATE, XTIME TIME,
+            XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ TIMESTAMP_NTZ, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XTIMESTAMP_TZ TIMESTAMP_TZ, XDATE DATE, XTIME TIME,
             XBINARY BINARY, /* XARRAY ARRAY, XOBJECT OBJECT */ XVARIANT VARIANT
         )
         """
@@ -507,6 +527,7 @@ def test_describe_table(dcur: snowflake.connector.cursor.DictCursor):
         {"name": "XVARCHAR", "type": "VARCHAR(16777216)", **common},
         {"name": "XTEXT", "type": "VARCHAR(16777216)", **common},
         {"name": "XTIMESTAMP", "type": "TIMESTAMP_NTZ(9)", **common},
+        {"name": "XTIMESTAMP_NTZ", "type": "TIMESTAMP_NTZ(9)", **common},
         {"name": "XTIMESTAMP_NTZ9", "type": "TIMESTAMP_NTZ(9)", **common},
         {"name": "XTIMESTAMP_TZ", "type": "TIMESTAMP_TZ(9)", **common},
         {"name": "XDATE", "type": "DATE", **common},
@@ -1325,6 +1346,7 @@ def test_tags_noop(cur: snowflake.connector.cursor.SnowflakeCursor):
     cur.execute("CREATE TABLE table1 (id int)")
     cur.execute("ALTER TABLE table1 SET TAG foo='bar'")
     cur.execute("ALTER TABLE table1 MODIFY COLUMN name1 SET TAG foo='bar'")
+    cur.execute("CREATE TAG cost_center COMMENT = 'cost_center tag'")


 def test_to_timestamp(cur: snowflake.connector.cursor.SnowflakeCursor):
@@ -1498,10 +1520,10 @@ def test_values(conn: snowflake.connector.SnowflakeConnection):

 def test_json_extract_cast_as_varchar(dcur: snowflake.connector.cursor.DictCursor):
     dcur.execute("CREATE TABLE example (j VARIANT)")
-    dcur.execute("""INSERT INTO example SELECT PARSE_JSON('{"str": "100", "
+    dcur.execute("""INSERT INTO example SELECT PARSE_JSON('{"str": "100", "num" : 200}')""")

-    dcur.execute("SELECT j:str::varchar as
-    assert dcur.fetchall() == [{"
+    dcur.execute("SELECT j:str::varchar as j_str_varchar, j:num::varchar as j_num_varchar FROM example")
+    assert dcur.fetchall() == [{"J_STR_VARCHAR": "100", "J_NUM_VARCHAR": "200"}]

-    dcur.execute("SELECT j:str::number as
-    assert dcur.fetchall() == [{"
+    dcur.execute("SELECT j:str::number as j_str_number, j:num::number as j_num_number FROM example")
+    assert dcur.fetchall() == [{"J_STR_NUMBER": 100, "J_NUM_NUMBER": 200}]
{fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_info_schema.py

@@ -79,7 +79,7 @@ def test_info_schema_columns_other(cur: snowflake.connector.cursor.SnowflakeCursor):
     cur.execute(
         """
         create or replace table example (
-            XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XTIMESTAMP_TZ TIMESTAMP_TZ, XDATE DATE, XTIME TIME,
+            XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ TIMESTAMP_NTZ, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XTIMESTAMP_TZ TIMESTAMP_TZ, XDATE DATE, XTIME TIME,
             XBINARY BINARY, /* XARRAY ARRAY, XOBJECT OBJECT */ XVARIANT VARIANT
         )
         """
@@ -94,6 +94,7 @@ def test_info_schema_columns_other(cur: snowflake.connector.cursor.SnowflakeCursor):

     assert cur.fetchall() == [
         ("XTIMESTAMP", "TIMESTAMP_NTZ"),
+        ("XTIMESTAMP_NTZ", "TIMESTAMP_NTZ"),
         ("XTIMESTAMP_NTZ9", "TIMESTAMP_NTZ"),
         ("XTIMESTAMP_TZ", "TIMESTAMP_TZ"),
         ("XDATE", "DATE"),
@@ -180,7 +181,7 @@ def test_info_schema_views_with_views(conn: snowflake.connector.SnowflakeConnection):
             "table_schema": "SCHEMA1",
             "table_name": "BAR",
             "table_owner": "SYSADMIN",
-            "view_definition": "CREATE VIEW SCHEMA1.BAR AS SELECT * FROM FOO WHERE (ID > 5)
+            "view_definition": "CREATE VIEW SCHEMA1.BAR AS SELECT * FROM FOO WHERE (ID > 5);",
             "check_option": "NONE",
             "is_updatable": "NO",
             "insertable_into": "NO",
{fakesnow-0.9.13 → fakesnow-0.9.14}/tests/test_transforms.py

@@ -7,6 +7,7 @@ from sqlglot import exp
 from fakesnow.transforms import (
     SUCCESS_NOP,
     _get_to_number_args,
+    alias_in_join,
     array_agg_within_group,
     array_size,
     create_clone,
@@ -40,7 +41,7 @@ from fakesnow.transforms import (
     show_objects_tables,
     show_schemas,
     tag,
-
+    timestamp_ntz,
     to_date,
     to_decimal,
     to_timestamp,
@@ -53,6 +54,23 @@ from fakesnow.transforms import (
 )


+def test_alias_in_join() -> None:
+    assert (
+        sqlglot.parse_one("""
+            SELECT
+                T.COL
+                , SUBSTR(T.COL, 4) AS ALIAS
+                , J.ANOTHER
+            FROM TEST AS T
+            LEFT JOIN JOINED AS J
+            ON ALIAS = J.COL
+        """)
+        .transform(alias_in_join)
+        .sql()
+        == "SELECT T.COL, SUBSTR(T.COL, 4) AS ALIAS, J.ANOTHER FROM TEST AS T LEFT JOIN JOINED AS J ON SUBSTR(T.COL, 4) = J.COL"  # noqa: E501
+    )
+
+
 def test_array_size() -> None:
     assert (
         sqlglot.parse_one("""select array_size(parse_json('["a","b"]'))""").transform(array_size).sql(dialect="duckdb")
@@ -114,7 +132,7 @@ def test_describe_table() -> None:

 def test_drop_schema_cascade() -> None:
     assert (
-        sqlglot.parse_one("drop schema schema1").transform(drop_schema_cascade).sql() == "DROP
+        sqlglot.parse_one("drop schema schema1").transform(drop_schema_cascade).sql() == "DROP SCHEMA schema1 CASCADE"
     )


@@ -422,7 +440,7 @@ def test_integer_precision() -> None:
         )
         .transform(integer_precision)
         .sql(dialect="duckdb")
-        == "CREATE TABLE example (XNUMBER82 DECIMAL(8, 2), XNUMBER
+        == "CREATE TABLE example (XNUMBER82 DECIMAL(8, 2), XNUMBER DECIMAL(38, 0), XDECIMAL DECIMAL(38, 0), XNUMERIC DECIMAL(38, 0), XINT BIGINT, XINTEGER BIGINT, XBIGINT BIGINT, XSMALLINT BIGINT, XTINYINT BIGINT, XBYTEINT BIGINT)"  # noqa: E501
     )


@@ -471,17 +489,17 @@ def test_json_extract_cast_as_varchar() -> None:
         )
         .transform(json_extract_cast_as_varchar)
         .sql(dialect="duckdb")
-        == """SELECT JSON('{"fruit":"banana"}') ->> '$.fruit'"""
+        == """SELECT CAST(JSON('{"fruit":"banana"}') ->> '$.fruit' AS TEXT)"""
     )

     assert (
         sqlglot.parse_one(
-            """select parse_json('{"
+            """select parse_json('{"count":"9000"}'):count::number""",
             read="snowflake",
         )
         .transform(json_extract_cast_as_varchar)
         .sql(dialect="duckdb")
-        == """SELECT CAST(JSON('{"
+        == """SELECT CAST(JSON('{"count":"9000"}') ->> '$.count' AS DECIMAL(38, 0))"""
     )


@@ -598,12 +616,20 @@ def test_tag() -> None:
         sqlglot.parse_one("ALTER TABLE db1.schema1.table1 SET TAG foo.bar='baz'", read="snowflake").transform(tag)
         == SUCCESS_NOP
     )
+    assert (
+        sqlglot.parse_one("ALTER TABLE table1 MODIFY COLUMN name1 SET TAG foo='bar'", read="snowflake").transform(tag)
+        == SUCCESS_NOP
+    )
+    assert (
+        sqlglot.parse_one("CREATE TAG cost_center COMMENT = 'cost_center tag'", read="snowflake").transform(tag)
+        == SUCCESS_NOP
+    )


 def test_timestamp_ntz_ns() -> None:
     assert (
         sqlglot.parse_one("CREATE TABLE table1(ts TIMESTAMP_NTZ(9))", read="snowflake")
-        .transform(
+        .transform(timestamp_ntz)
         .sql(dialect="duckdb")
         == "CREATE TABLE table1 (ts TIMESTAMP)"
     )