fakesnow 0.9.7__tar.gz → 0.9.9__tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- {fakesnow-0.9.7 → fakesnow-0.9.9}/PKG-INFO +2 -2
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/fakes.py +72 -68
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/transforms.py +41 -10
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/PKG-INFO +2 -2
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/requires.txt +1 -1
- {fakesnow-0.9.7 → fakesnow-0.9.9}/pyproject.toml +2 -2
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_fakes.py +42 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_transforms.py +47 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/LICENSE +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/README.md +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/__init__.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/__main__.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/checks.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/cli.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/expr.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/fixtures.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/global_database.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/info_schema.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/macros.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/py.typed +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/SOURCES.txt +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/dependency_links.txt +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/entry_points.txt +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/top_level.txt +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/setup.cfg +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_checks.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_cli.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_expr.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_info_schema.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_patch.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_sqlalchemy.py +0 -0
- {fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_users.py +0 -0
{fakesnow-0.9.7 → fakesnow-0.9.9}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.7
+Version: 0.9.9
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                                 Version 2.0, January 2004
@@ -213,7 +213,7 @@ License-File: LICENSE
 Requires-Dist: duckdb~=0.10.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=23.
+Requires-Dist: sqlglot~=23.12.2
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
```
{fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/fakes.py

```diff
@@ -11,6 +11,7 @@ from types import TracebackType
 from typing import TYPE_CHECKING, Any, Literal, Optional, cast

 import duckdb
+from sqlglot import exp

 if TYPE_CHECKING:
     import pandas as pd
@@ -22,7 +23,7 @@ import sqlglot
 from duckdb import DuckDBPyConnection
 from snowflake.connector.cursor import DictCursor, ResultMetadata, SnowflakeCursor
 from snowflake.connector.result_batch import ResultBatch
-from sqlglot import exp, parse_one
+from sqlglot import parse_one
 from typing_extensions import Self

 import fakesnow.checks as checks
@@ -112,7 +113,9 @@ class FakeSnowflakeCursor:
     def description(self) -> list[ResultMetadata]:
         # use a separate cursor to avoid consuming the result set on this cursor
         with self._conn.cursor() as cur:
-            cur.execute(f"DESCRIBE {self._last_sql}", self._last_params)
+            # self._duck_conn.execute(sql, params)
+            expression = sqlglot.parse_one(f"DESCRIBE {self._last_sql}", read="duckdb")
+            cur._execute(expression, self._last_params)  # noqa: SLF001
             meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall())

         return meta
```
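For context, `description` now parses a DuckDB `DESCRIBE` of the last statement and runs it through the internal `_execute` on a second cursor, so the primary cursor's pending result set isn't consumed. A minimal sketch of the same idea against plain DuckDB (table and query are illustrative):

```python
import duckdb

conn = duckdb.connect()
conn.execute("CREATE TABLE t1 (id INTEGER, name VARCHAR)")
conn.execute("SELECT * FROM t1")  # result set we don't want to consume

# DESCRIBE on a duplicate cursor returns column metadata without
# touching the original result set
meta = conn.cursor().execute("DESCRIBE SELECT * FROM t1").fetchall()
print(meta)  # rows of (column_name, column_type, null, key, default, extra)
```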
```diff
@@ -126,43 +129,20 @@ class FakeSnowflakeCursor:
     ) -> FakeSnowflakeCursor:
         try:
             self._sqlstate = None
-            return self._execute(command, params, *args, **kwargs)
+
+            if os.environ.get("FAKESNOW_DEBUG") == "snowflake":
+                print(f"{command};{params=}" if params else f"{command};", file=sys.stderr)
+
+            command, params = self._rewrite_with_params(command, params)
+            expression = parse_one(command, read="snowflake")
+            transformed = self._transform(expression)
+            return self._execute(transformed, params)
         except snowflake.connector.errors.ProgrammingError as e:
             self._sqlstate = e.sqlstate
             raise e

-    def _execute(
-        self,
-        command: str,
-        params: Sequence[Any] | dict[Any, Any] | None = None,
-        *args: Any,
-        **kwargs: Any,
-    ) -> FakeSnowflakeCursor:
-        self._arrow_table = None
-        self._arrow_table_fetch_index = None
-        self._rowcount = None
-
-        command, params = self._rewrite_with_params(command, params)
-        expression = parse_one(command, read="snowflake")
-
-        cmd = expr.key_command(expression)
-
-        no_database, no_schema = checks.is_unqualified_table_expression(expression)
-
-        if no_database and not self._conn.database_set:
-            raise snowflake.connector.errors.ProgrammingError(
-                msg=f"Cannot perform {cmd}. This session does not have a current database. Call 'USE DATABASE', or use a qualified name.",  # noqa: E501
-                errno=90105,
-                sqlstate="22000",
-            )
-        elif no_schema and not self._conn.schema_set:
-            raise snowflake.connector.errors.ProgrammingError(
-                msg=f"Cannot perform {cmd}. This session does not have a current schema. Call 'USE SCHEMA', or use a qualified name.",  # noqa: E501
-                errno=90106,
-                sqlstate="22000",
-            )
-
-        transformed = (
+    def _transform(self, expression: exp.Expression) -> exp.Expression:
+        return (
             expression.transform(transforms.upper_case_unquoted_identifiers)
             .transform(transforms.set_schema, current_database=self._conn.database)
             .transform(transforms.create_database, db_path=self._conn.db_path)
```
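The net effect of this hunk: the old do-everything `_execute` is gone, and `execute` is now a thin pipeline of parse → `_transform` → `_execute`. A rough sketch of the pipeline's shape using sqlglot directly (one fakesnow transform shown for illustration):

```python
import sqlglot
from fakesnow import transforms

# 1. parse the Snowflake SQL into a sqlglot expression tree
expression = sqlglot.parse_one("select col1 from table1", read="snowflake")

# 2. rewrite the tree; fakesnow chains many such transforms
transformed = expression.transform(transforms.upper_case_unquoted_identifiers)

# 3. render DuckDB SQL, ready for the underlying duckdb connection
print(transformed.sql(dialect="duckdb"))  # SELECT COL1 FROM TABLE1
```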
```diff
@@ -174,6 +154,8 @@ class FakeSnowflakeCursor:
             .transform(transforms.tag)
             .transform(transforms.semi_structured_types)
             .transform(transforms.try_parse_json)
+            # NOTE: trim_cast_varchar must be before json_extract_cast_as_varchar
+            .transform(transforms.trim_cast_varchar)
             # indices_to_json_extract must be before regex_substr
             .transform(transforms.indices_to_json_extract)
             .transform(transforms.json_extract_cast_as_varchar)
```
```diff
@@ -212,15 +194,40 @@ class FakeSnowflakeCursor:
             .transform(transforms.create_user)
             .transform(transforms.sha256)
         )
+
+    def _execute(
+        self, transformed: exp.Expression, params: Sequence[Any] | dict[Any, Any] | None = None
+    ) -> FakeSnowflakeCursor:
+        self._arrow_table = None
+        self._arrow_table_fetch_index = None
+        self._rowcount = None
+
+        cmd = expr.key_command(transformed)
+
+        no_database, no_schema = checks.is_unqualified_table_expression(transformed)
+
+        if no_database and not self._conn.database_set:
+            raise snowflake.connector.errors.ProgrammingError(
+                msg=f"Cannot perform {cmd}. This session does not have a current database. Call 'USE DATABASE', or use a qualified name.",  # noqa: E501
+                errno=90105,
+                sqlstate="22000",
+            )
+        elif no_schema and not self._conn.schema_set:
+            raise snowflake.connector.errors.ProgrammingError(
+                msg=f"Cannot perform {cmd}. This session does not have a current schema. Call 'USE SCHEMA', or use a qualified name.",  # noqa: E501
+                errno=90106,
+                sqlstate="22000",
+            )
+
         sql = transformed.sql(dialect="duckdb")
-        result_sql = None

         if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
             sql = f"SELECT setseed({seed}); {sql}"

-        if fs_debug := os.environ.get("FAKESNOW_DEBUG"):
-            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
-
+        if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
+            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
+
+        result_sql = None

         try:
             self._duck_conn.execute(sql, params)
```
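Note the debug split: `FAKESNOW_DEBUG=snowflake` now prints the original Snowflake statement (in `execute`, above), while any other truthy value still prints the transformed DuckDB SQL here. A small usage sketch:

```python
import os

os.environ["FAKESNOW_DEBUG"] = "snowflake"  # echo the original Snowflake SQL
# os.environ["FAKESNOW_DEBUG"] = "1"        # echo the transformed DuckDB SQL instead

import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect()
    conn.cursor().execute("SELECT 1")  # statement is printed to stderr
```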
```diff
@@ -244,17 +251,12 @@ class FakeSnowflakeCursor:

         affected_count = None

-        if (eid := transformed.find(exp.Identifier, bfs=False)) and isinstance(eid.this, str):
-            ident = eid.this if eid.quoted else eid.this.upper()
-        else:
-            ident = None
-
-        if cmd == "USE DATABASE" and ident:
-            self._conn.database = ident
+        if set_database := transformed.args.get("set_database"):
+            self._conn.database = set_database
             self._conn.database_set = True

-        elif cmd == "USE SCHEMA" and ident:
-            self._conn.schema = ident
+        elif set_schema := transformed.args.get("set_schema"):
+            self._conn.schema = set_schema
             self._conn.schema_set = True

         elif create_db_name := transformed.args.get("create_db_name"):
@@ -262,26 +264,6 @@ class FakeSnowflakeCursor:
             self._duck_conn.execute(info_schema.creation_sql(create_db_name))
             result_sql = SQL_CREATED_DATABASE.substitute(name=create_db_name)

-        elif cmd == "CREATE SCHEMA" and ident:
-            result_sql = SQL_CREATED_SCHEMA.substitute(name=ident)
-
-        elif cmd == "CREATE TABLE" and ident:
-            result_sql = SQL_CREATED_TABLE.substitute(name=ident)
-
-        elif cmd == "CREATE VIEW" and ident:
-            result_sql = SQL_CREATED_VIEW.substitute(name=ident)
-
-        elif cmd.startswith("DROP") and ident:
-            result_sql = SQL_DROPPED.substitute(name=ident)
-
-            # if dropping the current database/schema then reset conn metadata
-            if cmd == "DROP DATABASE" and ident == self._conn.database:
-                self._conn.database = None
-                self._conn.schema = None
-
-            elif cmd == "DROP SCHEMA" and ident == self._conn.schema:
-                self._conn.schema = None
-
         elif cmd == "INSERT":
             (affected_count,) = self._duck_conn.fetchall()[0]
             result_sql = SQL_INSERTED_ROWS.substitute(count=affected_count)
@@ -301,6 +283,28 @@ class FakeSnowflakeCursor:
                 lambda e: transforms.describe_table(e, self._conn.database, self._conn.schema)
             ).sql(dialect="duckdb")

+        elif (eid := transformed.find(exp.Identifier, bfs=False)) and isinstance(eid.this, str):
+            ident = eid.this if eid.quoted else eid.this.upper()
+            if cmd == "CREATE SCHEMA" and ident:
+                result_sql = SQL_CREATED_SCHEMA.substitute(name=ident)
+
+            elif cmd == "CREATE TABLE" and ident:
+                result_sql = SQL_CREATED_TABLE.substitute(name=ident)
+
+            elif cmd == "CREATE VIEW" and ident:
+                result_sql = SQL_CREATED_VIEW.substitute(name=ident)
+
+            elif cmd.startswith("DROP") and ident:
+                result_sql = SQL_DROPPED.substitute(name=ident)
+
+                # if dropping the current database/schema then reset conn metadata
+                if cmd == "DROP DATABASE" and ident == self._conn.database:
+                    self._conn.database = None
+                    self._conn.schema = None
+
+                elif cmd == "DROP SCHEMA" and ident == self._conn.schema:
+                    self._conn.schema = None
+
         if table_comment := cast(tuple[exp.Table, str], transformed.args.get("table_comment")):
             # record table comment
             table, comment = table_comment
```
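The identifier handling that used to run unconditionally now lives in a final `elif`, so it only fires when no transform-supplied arg matched. The case rule it applies, in isolation (a sketch, not fakesnow's API; `first_ident` is a hypothetical helper):

```python
import sqlglot
from sqlglot import exp

def first_ident(sql: str) -> str | None:
    # hypothetical helper illustrating the ident logic from the hunk above
    eid = sqlglot.parse_one(sql, read="snowflake").find(exp.Identifier, bfs=False)
    if not (eid and isinstance(eid.this, str)):
        return None
    # unquoted identifiers fold to upper case, quoted ones keep their case
    return eid.this if eid.quoted else eid.this.upper()

print(first_ident("CREATE SCHEMA myschema"))    # MYSCHEMA
print(first_ident('CREATE SCHEMA "mySchema"'))  # mySchema
```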
{fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow/transforms.py

```diff
@@ -309,7 +309,7 @@ def extract_comment_on_table(expression: exp.Expression) -> exp.Expression:
     if props := cast(exp.Properties, expression.args.get("properties")):
         other_props = []
         for p in props.expressions:
-            if isinstance(p, exp.SchemaCommentProperty) and (isinstance(p.this, (exp.Literal, exp.
+            if isinstance(p, exp.SchemaCommentProperty) and (isinstance(p.this, (exp.Literal, exp.Var))):
                 comment = p.this.this
             else:
                 other_props.append(p)
```
```diff
@@ -360,10 +360,19 @@ def extract_text_length(expression: exp.Expression) -> exp.Expression:

     if isinstance(expression, (exp.Create, exp.AlterTable)):
         text_lengths = []
-
-
-
-
+
+        # exp.Select is for a ctas, exp.Schema is a plain definition
+        if cols := expression.find(exp.Select, exp.Schema):
+            expressions = cols.expressions
+        else:
+            # alter table
+            expressions = expression.args.get("actions") or []
+        for e in expressions:
+            if dts := [
+                dt for dt in e.find_all(exp.DataType) if dt.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.TEXT)
+            ]:
+                col_name = e.alias if isinstance(e, exp.Alias) else e.name
+                if len(dts) == 1 and (dt_size := dts[0].find(exp.DataTypeParam)):
                     size = (
                         isinstance(dt_size.this, exp.Literal)
                         and isinstance(dt_size.this.this, str)
```
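extract_text_length now walks either the CTAS SELECT list or the plain column schema, plus ALTER TABLE actions, recording each VARCHAR/TEXT column's declared length (defaulting to Snowflake's 16777216 max when unsized) on the expression's args. A quick illustration, a sketch mirroring the tests further down:

```python
import sqlglot
from fakesnow.transforms import extract_text_length

e = sqlglot.parse_one("CREATE TABLE t1 (c1 VARCHAR(10), c2 TEXT)").transform(extract_text_length)
# declared lengths are stashed on the expression for later use
print(e.args["text_lengths"])  # [('c1', 10), ('c2', 16777216)]
```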
```diff
@@ -566,6 +575,9 @@ def json_extract_cast_as_varchar(expression: exp.Expression) -> exp.Expression:
     """
     if (
         isinstance(expression, exp.Cast)
+        and (to := expression.to)
+        and isinstance(to, exp.DataType)
+        and to.this in {exp.DataType.Type.VARCHAR, exp.DataType.Type.TEXT}
         and (je := expression.this)
         and isinstance(je, exp.JSONExtract)
         and (path := je.expression)
```
```diff
@@ -580,7 +592,7 @@ def json_extract_precedence(expression: exp.Expression) -> exp.Expression:

     See https://github.com/tekumara/fakesnow/issues/53
     """
-    if isinstance(expression, exp.JSONExtract):
+    if isinstance(expression, (exp.JSONExtract, exp.JSONExtractScalar)):
         return exp.Paren(this=expression)
     return expression

```
```diff
@@ -779,7 +791,10 @@ def set_schema(expression: exp.Expression, current_database: str | None) -> exp.Expression:

     if kind.name.upper() == "DATABASE":
         # duckdb's default schema is main
-
+        database = expression.this.name
+        return exp.Command(
+            this="SET", expression=exp.Literal.string(f"schema = '{database}.main'"), set_database=database
+        )
     else:
         # SCHEMA
         if db := expression.this.args.get("db"):  # noqa: SIM108
@@ -788,9 +803,10 @@ def set_schema(expression: exp.Expression, current_database: str | None) -> exp.Expression:
             # isn't qualified with a database
             db_name = current_database or MISSING_DATABASE

-
-
-
+        schema = expression.this.name
+        return exp.Command(
+            this="SET", expression=exp.Literal.string(f"schema = '{db_name}.{schema}'"), set_schema=schema
+        )

     return expression

```
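Instead of leaving `_execute` to sniff identifiers out of the statement, `set_schema` now attaches the resolved name to the generated command via the custom `set_database` / `set_schema` args, which `_execute` reads back (see the `set_database := transformed.args.get(...)` hunk above). Roughly:

```python
import sqlglot
from fakesnow.transforms import set_schema

cmd = sqlglot.parse_one("USE DATABASE DB1", read="snowflake").transform(set_schema, current_database=None)
print(cmd.sql())                     # SET schema = 'DB1.main'
print(cmd.args.get("set_database"))  # DB1
```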
```diff
@@ -1095,6 +1111,21 @@ def timestamp_ntz_ns(expression: exp.Expression) -> exp.Expression:
     return expression


+def trim_cast_varchar(expression: exp.Expression) -> exp.Expression:
+    """Snowflake's TRIM casts input to VARCHAR implicitly."""
+
+    if not (isinstance(expression, exp.Trim)):
+        return expression
+
+    operand = expression.this
+    if isinstance(operand, exp.Cast) and operand.to.this in [exp.DataType.Type.VARCHAR, exp.DataType.Type.TEXT]:
+        return expression
+
+    return exp.Trim(
+        this=exp.Cast(this=operand, to=exp.DataType(this=exp.DataType.Type.VARCHAR, nested=False, prefix=False))
+    )
+
+
 def try_parse_json(expression: exp.Expression) -> exp.Expression:
     """Convert TRY_PARSE_JSON() to TRY_CAST(... as JSON).

```
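The new transform wraps TRIM's operand in a cast to TEXT unless it is already cast to VARCHAR/TEXT, mirroring Snowflake's implicit coercion (so `trim(1)` returns `'1'`). For example:

```python
import sqlglot
from fakesnow.transforms import trim_cast_varchar

sql = sqlglot.parse_one("SELECT TRIM(col1) FROM table1").transform(trim_cast_varchar).sql(dialect="duckdb")
print(sql)  # SELECT TRIM(CAST(col1 AS TEXT)) FROM table1
```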
{fakesnow-0.9.7 → fakesnow-0.9.9}/fakesnow.egg-info/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.7
+Version: 0.9.9
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                                 Version 2.0, January 2004
@@ -213,7 +213,7 @@ License-File: LICENSE
 Requires-Dist: duckdb~=0.10.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=23.
+Requires-Dist: sqlglot~=23.12.2
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
```
{fakesnow-0.9.7 → fakesnow-0.9.9}/pyproject.toml

```diff
@@ -1,7 +1,7 @@
 [project]
 name = "fakesnow"
 description = "Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally."
-version = "0.9.7"
+version = "0.9.9"
 readme = "README.md"
 license = { file = "LICENSE" }
 classifiers = ["License :: OSI Approved :: MIT License"]
@@ -11,7 +11,7 @@ dependencies = [
     "duckdb~=0.10.0",
     "pyarrow",
     "snowflake-connector-python",
-    "sqlglot~=23.
+    "sqlglot~=23.12.2",
 ]

 [project.urls]
```
{fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_fakes.py

```diff
@@ -612,6 +612,17 @@ def test_description_delete(dcur: snowflake.connector.cursor.DictCursor):
     # fmt: on


+def test_description_select(dcur: snowflake.connector.cursor.DictCursor):
+    dcur.execute("SELECT DATEDIFF( DAY, '2023-04-02'::DATE, '2023-04-05'::DATE) as days")
+    assert dcur.fetchall() == [{"DAYS": 3}]
+    # TODO: Snowflake is actually precision=9, is_nullable=False
+    # fmt: off
+    assert dcur.description == [
+        ResultMetadata(name='DAYS', type_code=0, display_size=None, internal_size=None, precision=38, scale=0, is_nullable=True),
+    ]
+    # fmt: on
+
+
 def test_equal_null(cur: snowflake.connector.cursor.SnowflakeCursor):
     cur.execute("select equal_null(NULL, NULL), equal_null(1, 1), equal_null(1, 2), equal_null(1, NULL)")
     assert cur.fetchall() == [(True, True, False, False)]
@@ -792,6 +803,11 @@ def test_get_path_precedence(cur: snowflake.connector.cursor.SnowflakeCursor):
     cur.execute("select {'K1': {'K2': 1}} as col where col:K1:K2 > 0")
     assert indent(cur.fetchall()) == [('{\n  "K1": {\n    "K2": 1\n  }\n}',)]

+    cur.execute(
+        """select parse_json('{"K1": "a", "K2": "b"}') as col, case when col:K1::VARCHAR = 'a' and col:K2::VARCHAR = 'b' then 'yes' end"""
+    )
+    assert indent(cur.fetchall()) == [('{\n  "K1": "a",\n  "K2": "b"\n}', "yes")]
+

 def test_get_result_batches(cur: snowflake.connector.cursor.SnowflakeCursor):
     # no result set
@@ -1268,6 +1284,13 @@ def test_show_primary_keys(dcur: snowflake.connector.cursor.SnowflakeCursor):
     assert result3 == []


+def test_sqlglot_regression(cur: snowflake.connector.cursor.SnowflakeCursor):
+    assert cur.execute(
+        """with SOURCE_TABLE AS (SELECT '2024-01-01' AS start_date)
+        SELECT date(a.start_date) from SOURCE_TABLE AS a"""
+    ).fetchone() == (datetime.date(2024, 1, 1),)
+
+
 def test_sqlstate(cur: snowflake.connector.cursor.SnowflakeCursor):
     cur.execute("select 'hello world'")
     # sqlstate is None on success
@@ -1393,6 +1416,14 @@ def test_transactions(conn: snowflake.connector.SnowflakeConnection):
         assert cur.fetchall() == [("Statement executed successfully.",)]


+def test_trim_cast_varchar(cur: snowflake.connector.cursor.SnowflakeCursor):
+    cur.execute("select trim(1), trim(' name 1 ')")
+    assert cur.fetchall() == [("1", "name 1")]
+
+    cur.execute("""select trim(parse_json('{"k1": " v11 "}'):k1), trim(parse_json('{"k1": 21}'):k1)""")
+    assert cur.fetchall() == [("v11", "21")]
+
+
 def test_unquoted_identifiers_are_upper_cased(dcur: snowflake.connector.cursor.SnowflakeCursor):
     dcur.execute("create table customers (id int, first_name varchar, last_name varchar)")
     dcur.execute("insert into customers values (1, 'Jenny', 'P')")
@@ -1450,6 +1481,17 @@ def test_values(conn: snowflake.connector.SnowflakeConnection):
     ]


+def test_json_extract_cast_as_varchar(dcur: snowflake.connector.cursor.DictCursor):
+    dcur.execute("CREATE TABLE example (j VARIANT)")
+    dcur.execute("""INSERT INTO example SELECT PARSE_JSON('{"str": "100", "number" : 100}')""")
+
+    dcur.execute("SELECT j:str::varchar as c_str_varchar, j:number::varchar as c_num_varchar FROM example")
+    assert dcur.fetchall() == [{"C_STR_VARCHAR": "100", "C_NUM_VARCHAR": "100"}]
+
+    dcur.execute("SELECT j:str::number as c_str_number, j:number::number as c_num_number FROM example")
+    assert dcur.fetchall() == [{"C_STR_NUMBER": 100, "C_NUM_NUMBER": 100}]
+
+
 def test_write_pandas_quoted_column_names(conn: snowflake.connector.SnowflakeConnection):
     with conn.cursor(snowflake.connector.cursor.DictCursor) as dcur:
         # colunmn names with spaces
```
{fakesnow-0.9.7 → fakesnow-0.9.9}/tests/test_transforms.py

```diff
@@ -44,6 +44,7 @@ from fakesnow.transforms import (
     to_decimal,
     to_timestamp,
     to_timestamp_ntz,
+    trim_cast_varchar,
     try_parse_json,
     try_to_decimal,
     upper_case_unquoted_identifiers,
@@ -334,6 +335,25 @@ def test_extract_text_length() -> None:
     assert e.sql() == sql
     assert e.args["text_lengths"] == [("t1", 16777216), ("t2", 10), ("t3", 20)]

+    sql = "ALTER TABLE t1 ALTER COLUMN c4 SET DATA TYPE VARCHAR(50)"
+    e = sqlglot.parse_one(sql).transform(extract_text_length)
+    assert e.sql() == sql
+    assert e.args["text_lengths"] == [("c4", 50)]
+
+    # test column name is correct with alias
+    sql = """CREATE TABLE table1 AS (
+        SELECT CAST(C1 AS TEXT) AS K, CAST(C2 AS TEXT(10)) AS V
+        FROM (VALUES (1, 2)) AS T(C1, C2))"""
+    e = sqlglot.parse_one(sql).transform(extract_text_length)
+    assert e.args["text_lengths"] == [("K", 16777216), ("V", 10)]
+
+    # test ctas column name is correct for combined field
+    sql = """CREATE TABLE SOME_TABLE AS (
+        SELECT CAST(C1 AS TEXT) || '-' || CAST(C1 AS TEXT) AS K
+        FROM VALUES (1), (2) AS T (C1))"""
+    e = sqlglot.parse_one(sql).transform(extract_text_length)
+    assert e.args["text_lengths"] == [("K", 16777216)]
+

 def test_flatten() -> None:
     assert (
@@ -446,6 +466,16 @@ def test_json_extract_cast_as_varchar() -> None:
         == """SELECT JSON('{"fruit":"banana"}') ->> '$.fruit'"""
     )

+    assert (
+        sqlglot.parse_one(
+            """select parse_json('{"fruit":"9000"}'):fruit::number""",
+            read="snowflake",
+        )
+        .transform(json_extract_cast_as_varchar)
+        .sql(dialect="duckdb")
+        == """SELECT CAST(JSON('{"fruit":"9000"}') -> '$.fruit' AS DECIMAL)"""
+    )
+

 def test_json_extract_precedence() -> None:
     assert (
@@ -770,6 +800,23 @@ def test_try_parse_json() -> None:
     )


+def test_trim_cast_varchar() -> None:
+    assert (
+        sqlglot.parse_one("SELECT TRIM(col) FROM table1").transform(trim_cast_varchar).sql(dialect="duckdb")
+        == "SELECT TRIM(CAST(col AS TEXT)) FROM table1"
+    )
+
+    assert (
+        sqlglot.parse_one("SELECT TRIM(col::varchar) FROM table1").transform(trim_cast_varchar).sql(dialect="duckdb")
+        == "SELECT TRIM(CAST(col AS TEXT)) FROM table1"
+    )
+
+    assert (
+        sqlglot.parse_one("SELECT TRIM(col::TEXT) FROM table1").transform(trim_cast_varchar).sql(dialect="duckdb")
+        == "SELECT TRIM(CAST(col AS TEXT)) FROM table1"
+    )
+
+
 def test_upper_case_unquoted_identifiers() -> None:
     assert (
         sqlglot.parse_one("select name, name as fname from table1").transform(upper_case_unquoted_identifiers).sql()
```