fakesnow 0.9.6__tar.gz → 0.9.8__tar.gz
This diff compares the publicly released contents of these two package versions as they appear in their public registry. It is provided for informational purposes only.
- {fakesnow-0.9.6 → fakesnow-0.9.8}/PKG-INFO +3 -4
- {fakesnow-0.9.6 → fakesnow-0.9.8}/README.md +1 -2
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/fakes.py +83 -70
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/transforms.py +329 -44
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow.egg-info/PKG-INFO +3 -4
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow.egg-info/requires.txt +1 -1
- {fakesnow-0.9.6 → fakesnow-0.9.8}/pyproject.toml +2 -2
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_fakes.py +264 -9
- fakesnow-0.9.8/tests/test_transforms.py +892 -0
- fakesnow-0.9.6/tests/test_transforms.py +0 -426
- {fakesnow-0.9.6 → fakesnow-0.9.8}/LICENSE +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/__init__.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/__main__.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/checks.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/cli.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/expr.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/fixtures.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/global_database.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/info_schema.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/macros.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/py.typed +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow.egg-info/SOURCES.txt +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow.egg-info/dependency_links.txt +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow.egg-info/entry_points.txt +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow.egg-info/top_level.txt +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/setup.cfg +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_checks.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_cli.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_expr.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_info_schema.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_patch.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_sqlalchemy.py +0 -0
- {fakesnow-0.9.6 → fakesnow-0.9.8}/tests/test_users.py +0 -0
{fakesnow-0.9.6 → fakesnow-0.9.8}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.6
+Version: 0.9.8
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
 Version 2.0, January 2004
@@ -213,7 +213,7 @@ License-File: LICENSE
 Requires-Dist: duckdb~=0.10.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=
+Requires-Dist: sqlglot~=23.3.0
 Provides-Extra: dev
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
@@ -233,8 +233,7 @@ Requires-Dist: jupysql; extra == "notebook"
 [](https://github.com/tekumara/fakesnow/actions/workflows/ci.yml)
 [](https://github.com/tekumara/fakesnow/actions/workflows/release.yml)
 [](https://pypi.org/project/fakesnow/)
-
-[](../../actions/workflows/ci.yml)
+[](https://pypi.org/project/fakesnow/)

 Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.

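This release bumps the sqlglot pin to `~=23.3.0` (duckdb stays at `~=0.10.0`). A minimal, purely illustrative way to check that a local environment matches the new pins:

```python
# Sketch: print the installed versions of fakesnow and its pinned dependencies.
from importlib.metadata import version

for dist in ("fakesnow", "duckdb", "sqlglot", "snowflake-connector-python"):
    print(dist, version(dist))
```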
{fakesnow-0.9.6 → fakesnow-0.9.8}/README.md

@@ -3,8 +3,7 @@
 [](https://github.com/tekumara/fakesnow/actions/workflows/ci.yml)
 [](https://github.com/tekumara/fakesnow/actions/workflows/release.yml)
 [](https://pypi.org/project/fakesnow/)
-
-[](../../actions/workflows/ci.yml)
+[](https://pypi.org/project/fakesnow/)

 Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.

{fakesnow-0.9.6 → fakesnow-0.9.8}/fakesnow/fakes.py

@@ -11,6 +11,7 @@ from types import TracebackType
 from typing import TYPE_CHECKING, Any, Literal, Optional, cast

 import duckdb
+from sqlglot import exp

 if TYPE_CHECKING:
     import pandas as pd
@@ -22,7 +23,7 @@ import sqlglot
 from duckdb import DuckDBPyConnection
 from snowflake.connector.cursor import DictCursor, ResultMetadata, SnowflakeCursor
 from snowflake.connector.result_batch import ResultBatch
-from sqlglot import
+from sqlglot import parse_one
 from typing_extensions import Self

 import fakesnow.checks as checks
@@ -112,7 +113,9 @@ class FakeSnowflakeCursor:
     def description(self) -> list[ResultMetadata]:
         # use a separate cursor to avoid consuming the result set on this cursor
         with self._conn.cursor() as cur:
-
+            # self._duck_conn.execute(sql, params)
+            expression = sqlglot.parse_one(f"DESCRIBE {self._last_sql}", read="duckdb")
+            cur._execute(expression, self._last_params)  # noqa: SLF001
             meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall())

         return meta
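`description` now builds a `DESCRIBE` of the last executed statement and runs it through a second cursor, leaning on DuckDB's `DESCRIBE <query>` support. A standalone sketch of that underlying DuckDB behaviour (the table and query are illustrative):

```python
# DuckDB's DESCRIBE returns one row per result column (name, type, ...),
# which is what the cursor metadata above is derived from.
import duckdb

con = duckdb.connect()
con.execute("CREATE TABLE t (id INTEGER, name VARCHAR)")
for column_name, column_type, *_ in con.execute("DESCRIBE SELECT id, name FROM t").fetchall():
    print(column_name, column_type)  # e.g. id INTEGER, name VARCHAR
```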
@@ -126,43 +129,20 @@ class FakeSnowflakeCursor:
     ) -> FakeSnowflakeCursor:
         try:
             self._sqlstate = None
-
+
+            if os.environ.get("FAKESNOW_DEBUG") == "snowflake":
+                print(f"{command};{params=}" if params else f"{command};", file=sys.stderr)
+
+            command, params = self._rewrite_with_params(command, params)
+            expression = parse_one(command, read="snowflake")
+            transformed = self._transform(expression)
+            return self._execute(transformed, params)
         except snowflake.connector.errors.ProgrammingError as e:
             self._sqlstate = e.sqlstate
             raise e

-    def
-
-        command: str,
-        params: Sequence[Any] | dict[Any, Any] | None = None,
-        *args: Any,
-        **kwargs: Any,
-    ) -> FakeSnowflakeCursor:
-        self._arrow_table = None
-        self._arrow_table_fetch_index = None
-        self._rowcount = None
-
-        command, params = self._rewrite_with_params(command, params)
-        expression = parse_one(command, read="snowflake")
-
-        cmd = expr.key_command(expression)
-
-        no_database, no_schema = checks.is_unqualified_table_expression(expression)
-
-        if no_database and not self._conn.database_set:
-            raise snowflake.connector.errors.ProgrammingError(
-                msg=f"Cannot perform {cmd}. This session does not have a current database. Call 'USE DATABASE', or use a qualified name.",  # noqa: E501
-                errno=90105,
-                sqlstate="22000",
-            )
-        elif no_schema and not self._conn.schema_set:
-            raise snowflake.connector.errors.ProgrammingError(
-                msg=f"Cannot perform {cmd}. This session does not have a current schema. Call 'USE SCHEMA', or use a qualified name.",  # noqa: E501
-                errno=90106,
-                sqlstate="22000",
-            )
-
-        transformed = (
+    def _transform(self, expression: exp.Expression) -> exp.Expression:
+        return (
             expression.transform(transforms.upper_case_unquoted_identifiers)
             .transform(transforms.set_schema, current_database=self._conn.database)
             .transform(transforms.create_database, db_path=self._conn.db_path)
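`execute` is now a thin wrapper: it parses the incoming statement as Snowflake SQL, hands the tree to `_transform` (the chain shown here), and passes the result to `_execute`. The general sqlglot pattern looks like this; the transform below is a simplified stand-in for `upper_case_unquoted_identifiers`, not fakesnow's exact code:

```python
# Sketch of the parse -> transform -> emit pipeline that execute() delegates to.
import sqlglot
from sqlglot import exp


def upper_unquoted(node: exp.Expression) -> exp.Expression:
    # Fold unquoted identifiers to upper case, the way Snowflake resolves names.
    if isinstance(node, exp.Identifier) and not node.quoted:
        return exp.Identifier(this=node.this.upper(), quoted=False)
    return node


expression = sqlglot.parse_one('select id from my_table where "Name" = 1', read="snowflake")
print(expression.transform(upper_unquoted).sql(dialect="duckdb"))
# e.g. SELECT ID FROM MY_TABLE WHERE "Name" = 1
```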
@@ -173,7 +153,9 @@ class FakeSnowflakeCursor:
             .transform(transforms.drop_schema_cascade)
             .transform(transforms.tag)
             .transform(transforms.semi_structured_types)
-            .transform(transforms.
+            .transform(transforms.try_parse_json)
+            # NOTE: trim_cast_varchar must be before json_extract_cast_as_varchar
+            .transform(transforms.trim_cast_varchar)
             # indices_to_json_extract must be before regex_substr
             .transform(transforms.indices_to_json_extract)
             .transform(transforms.json_extract_cast_as_varchar)
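Each entry in this chain is a function from `exp.Expression` to `exp.Expression`, applied node by node via `Expression.transform`. As a toy illustration of the shape of such a rewrite (not fakesnow's actual `trim_cast_varchar` implementation), here is a transform that casts `TRIM`'s operand to text:

```python
import sqlglot
from sqlglot import exp


def trim_cast_operand(node: exp.Expression) -> exp.Expression:
    # Illustrative only: wrap TRIM's argument in a cast so the engine
    # receiving the rewritten SQL always trims a text value.
    if isinstance(node, exp.Trim) and not isinstance(node.this, exp.Cast):
        node.set("this", exp.cast(node.this, "TEXT"))
    return node


e = sqlglot.parse_one("SELECT TRIM(col1) FROM t", read="snowflake")
print(e.transform(trim_cast_operand).sql(dialect="duckdb"))
# e.g. SELECT TRIM(CAST(col1 AS TEXT)) FROM t
```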
@@ -185,6 +167,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.values_columns)
             .transform(transforms.to_date)
             .transform(transforms.to_decimal)
+            .transform(transforms.try_to_decimal)
             .transform(transforms.to_timestamp_ntz)
             .transform(transforms.to_timestamp)
             .transform(transforms.object_construct)
@@ -196,6 +179,11 @@ class FakeSnowflakeCursor:
             .transform(transforms.array_size)
             .transform(transforms.random)
             .transform(transforms.identifier)
+            .transform(transforms.array_agg_within_group)
+            .transform(transforms.array_agg_to_json)
+            .transform(transforms.dateadd_date_cast)
+            .transform(transforms.dateadd_string_literal_timestamp_cast)
+            .transform(transforms.datediff_string_literal_timestamp_cast)
             .transform(lambda e: transforms.show_schemas(e, self._conn.database))
             .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
             # TODO collapse into a single show_keys function
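The new transforms extend what the fake accepts, e.g. `ARRAY_AGG ... WITHIN GROUP`, `DATEADD`/`DATEDIFF` over string literals, and `TRY_TO_DECIMAL`. A smoke test using the patching API from the README (the queries are illustrative; results come from the DuckDB-backed fake, not a real Snowflake account):

```python
import fakesnow
import snowflake.connector

with fakesnow.patch():
    cur = snowflake.connector.connect().cursor()
    # DATEADD/DATEDIFF over string literals are handled by the new casts above.
    print(cur.execute("SELECT DATEADD(DAY, 3, '2024-04-01')").fetchone())
    print(cur.execute("SELECT DATEDIFF(DAY, '2024-04-01', '2024-04-04')").fetchone())
```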
@@ -204,16 +192,42 @@ class FakeSnowflakeCursor:
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="FOREIGN"))
             .transform(transforms.show_users)
             .transform(transforms.create_user)
+            .transform(transforms.sha256)
         )
+
+    def _execute(
+        self, transformed: exp.Expression, params: Sequence[Any] | dict[Any, Any] | None = None
+    ) -> FakeSnowflakeCursor:
+        self._arrow_table = None
+        self._arrow_table_fetch_index = None
+        self._rowcount = None
+
+        cmd = expr.key_command(transformed)
+
+        no_database, no_schema = checks.is_unqualified_table_expression(transformed)
+
+        if no_database and not self._conn.database_set:
+            raise snowflake.connector.errors.ProgrammingError(
+                msg=f"Cannot perform {cmd}. This session does not have a current database. Call 'USE DATABASE', or use a qualified name.",  # noqa: E501
+                errno=90105,
+                sqlstate="22000",
+            )
+        elif no_schema and not self._conn.schema_set:
+            raise snowflake.connector.errors.ProgrammingError(
+                msg=f"Cannot perform {cmd}. This session does not have a current schema. Call 'USE SCHEMA', or use a qualified name.",  # noqa: E501
+                errno=90106,
+                sqlstate="22000",
+            )
+
         sql = transformed.sql(dialect="duckdb")
-        result_sql = None

         if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
             sql = f"SELECT setseed({seed}); {sql}"

-        if fs_debug := os.environ.get("FAKESNOW_DEBUG"):
-
-
+        if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
+            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
+
+        result_sql = None

         try:
             self._duck_conn.execute(sql, params)
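The debug hook is now split: `FAKESNOW_DEBUG=snowflake` echoes the incoming statement before any rewriting (see the `execute` hunk earlier), while any other non-empty value echoes the DuckDB SQL that is actually run. A sketch of turning it on (the query itself is illustrative):

```python
import os

import fakesnow
import snowflake.connector

# Any non-empty value other than "snowflake" prints the generated DuckDB SQL to stderr;
# "snowflake" prints the pre-transform Snowflake SQL instead.
os.environ["FAKESNOW_DEBUG"] = "1"

with fakesnow.patch():
    cur = snowflake.connector.connect().cursor()
    cur.execute("SELECT TO_DECIMAL('3.14', 5, 2)")
```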
@@ -237,17 +251,12 @@ class FakeSnowflakeCursor:

         affected_count = None

-        if
-
-        else:
-            ident = None
-
-        if cmd == "USE DATABASE" and ident:
-            self._conn.database = ident
+        if set_database := transformed.args.get("set_database"):
+            self._conn.database = set_database
             self._conn.database_set = True

-        elif
-            self._conn.schema =
+        elif set_schema := transformed.args.get("set_schema"):
+            self._conn.schema = set_schema
             self._conn.schema_set = True

         elif create_db_name := transformed.args.get("create_db_name"):
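`USE DATABASE`/`USE SCHEMA` handling no longer re-derives the identifier here; it reads `set_database`/`set_schema` values that the transform step stashed on the expression. Attaching custom metadata like this uses sqlglot's generic `args`/`set` mechanism; a purely illustrative example (not fakesnow's actual `set_schema` transform):

```python
import sqlglot
from sqlglot import exp


def flag_use_database(node: exp.Expression) -> exp.Expression:
    # Illustrative: record the target database name on the USE node itself.
    if isinstance(node, exp.Use) and node.this:
        node.set("set_database", node.this.name)
    return node


e = sqlglot.parse_one("USE DATABASE marts", read="snowflake").transform(flag_use_database)
print(e.args.get("set_database"))  # e.g. marts
```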
@@ -255,26 +264,6 @@ class FakeSnowflakeCursor:
             self._duck_conn.execute(info_schema.creation_sql(create_db_name))
             result_sql = SQL_CREATED_DATABASE.substitute(name=create_db_name)

-        elif cmd == "CREATE SCHEMA" and ident:
-            result_sql = SQL_CREATED_SCHEMA.substitute(name=ident)
-
-        elif cmd == "CREATE TABLE" and ident:
-            result_sql = SQL_CREATED_TABLE.substitute(name=ident)
-
-        elif cmd == "CREATE VIEW" and ident:
-            result_sql = SQL_CREATED_VIEW.substitute(name=ident)
-
-        elif cmd.startswith("DROP") and ident:
-            result_sql = SQL_DROPPED.substitute(name=ident)
-
-            # if dropping the current database/schema then reset conn metadata
-            if cmd == "DROP DATABASE" and ident == self._conn.database:
-                self._conn.database = None
-                self._conn.schema = None
-
-            elif cmd == "DROP SCHEMA" and ident == self._conn.schema:
-                self._conn.schema = None
-
         elif cmd == "INSERT":
             (affected_count,) = self._duck_conn.fetchall()[0]
             result_sql = SQL_INSERTED_ROWS.substitute(count=affected_count)
@@ -294,6 +283,28 @@ class FakeSnowflakeCursor:
                 lambda e: transforms.describe_table(e, self._conn.database, self._conn.schema)
             ).sql(dialect="duckdb")

+        elif (eid := transformed.find(exp.Identifier, bfs=False)) and isinstance(eid.this, str):
+            ident = eid.this if eid.quoted else eid.this.upper()
+            if cmd == "CREATE SCHEMA" and ident:
+                result_sql = SQL_CREATED_SCHEMA.substitute(name=ident)
+
+            elif cmd == "CREATE TABLE" and ident:
+                result_sql = SQL_CREATED_TABLE.substitute(name=ident)
+
+            elif cmd == "CREATE VIEW" and ident:
+                result_sql = SQL_CREATED_VIEW.substitute(name=ident)
+
+            elif cmd.startswith("DROP") and ident:
+                result_sql = SQL_DROPPED.substitute(name=ident)
+
+                # if dropping the current database/schema then reset conn metadata
+                if cmd == "DROP DATABASE" and ident == self._conn.database:
+                    self._conn.database = None
+                    self._conn.schema = None
+
+                elif cmd == "DROP SCHEMA" and ident == self._conn.schema:
+                    self._conn.schema = None
+
         if table_comment := cast(tuple[exp.Table, str], transformed.args.get("table_comment")):
             # record table comment
             table, comment = table_comment
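The `CREATE`/`DROP` feedback messages are now derived from the first identifier in the transformed statement, folded to upper case unless it was quoted, mirroring Snowflake's identifier resolution. In isolation that logic looks like this (the table names are illustrative):

```python
import sqlglot
from sqlglot import exp

for sql in ('CREATE TABLE staging (id INT)', 'CREATE TABLE "Staging" (id INT)'):
    eid = sqlglot.parse_one(sql, read="snowflake").find(exp.Identifier, bfs=False)
    ident = eid.this if eid.quoted else eid.this.upper()
    print(ident)
# STAGING
# Staging
```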
@@ -617,7 +628,9 @@ class FakeSnowflakeConnection:
             # don't jsonify string
             df[col] = df[col].apply(lambda x: json.dumps(x) if isinstance(x, (dict, list)) else x)

-
+        escaped_cols = ",".join(f'"{col}"' for col in df.columns.to_list())
+        self._duck_conn.execute(f"INSERT INTO {table_name}({escaped_cols}) SELECT * FROM df")
+
         return self._duck_conn.fetchall()[0][0]

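`write_pandas` now names and quotes each DataFrame column explicitly when inserting, so mixed-case or otherwise awkward column names survive the round trip. It relies on DuckDB's ability to query a pandas DataFrame referenced by its Python variable name. A standalone sketch of the same idea (table and column names are illustrative):

```python
import duckdb
import pandas as pd

df = pd.DataFrame({"ID": [1, 2], "Full Name": ["ada", "grace"]})

con = duckdb.connect()
con.execute('CREATE TABLE people ("ID" INTEGER, "Full Name" VARCHAR)')

# Quote each column name so the INSERT column list matches the table exactly;
# DuckDB resolves the bare name `df` to the local DataFrame via a replacement scan.
escaped_cols = ",".join(f'"{col}"' for col in df.columns.to_list())
con.execute(f"INSERT INTO people({escaped_cols}) SELECT * FROM df")

print(con.execute("SELECT * FROM people").fetchall())
# [(1, 'ada'), (2, 'grace')]
```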