fakesnow 0.9.30__py3-none-any.whl → 0.9.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fakesnow/arrow.py +4 -4
- fakesnow/cursor.py +6 -0
- fakesnow/transforms/__init__.py +53 -47
- {fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/METADATA +2 -2
- {fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/RECORD +9 -9
- {fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/WHEEL +1 -1
- {fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/LICENSE +0 -0
- {fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/entry_points.txt +0 -0
- {fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/top_level.txt +0 -0
fakesnow/arrow.py
CHANGED
@@ -62,7 +62,7 @@ def to_ipc(table: pa.Table) -> pa.Buffer:
 
 
 def to_sf(table: pa.Table, rowtype: list[ColumnInfo]) -> pa.Table:
-    def to_sf_col(col: pa.
+    def to_sf_col(col: pa.ChunkedArray) -> pa.Array | pa.ChunkedArray:
         if pa.types.is_timestamp(col.type):
             return timestamp_to_sf_struct(col)
         elif pa.types.is_time(col.type):
@@ -83,7 +83,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
 
     # Round to seconds, ie: strip subseconds
     tsa_without_us = pc.floor_temporal(ts, unit="second")  # type: ignore https://github.com/zen-xu/pyarrow-stubs/issues/45
-    epoch = pc.divide(tsa_without_us.cast(pa.int64()), 1_000_000)
+    epoch = pc.divide(tsa_without_us.cast(pa.int64()), 1_000_000)
 
     # Calculate fractional part as nanoseconds
     fraction = pc.multiply(pc.subsecond(ts), 1_000_000_000).cast(pa.int32())  # type: ignore
@@ -93,7 +93,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
         timezone = pa.array([1440] * len(ts), type=pa.int32())
 
         return pa.StructArray.from_arrays(
-            arrays=[epoch, fraction, timezone],
+            arrays=[epoch, fraction, timezone],
             fields=[
                 pa.field("epoch", nullable=False, type=pa.int64()),
                 pa.field("fraction", nullable=False, type=pa.int32()),
@@ -102,7 +102,7 @@ def timestamp_to_sf_struct(ts: pa.Array | pa.ChunkedArray) -> pa.Array:
         )
     else:
         return pa.StructArray.from_arrays(
-            arrays=[epoch, fraction],
+            arrays=[epoch, fraction],
             fields=[
                 pa.field("epoch", nullable=False, type=pa.int64()),
                 pa.field("fraction", nullable=False, type=pa.int32()),
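For context, the struct produced by timestamp_to_sf_struct carries whole seconds plus a nanosecond fraction. A minimal standalone sketch of that split, with illustrative values rather than fakesnow's exact code:

from datetime import datetime

import pyarrow as pa
import pyarrow.compute as pc

ts = pa.array([datetime(2024, 1, 2, 3, 4, 5, 500000)], type=pa.timestamp("us"))

# whole seconds since the Unix epoch: strip subseconds, then divide the
# microsecond integer representation by 1_000_000
seconds = pc.floor_temporal(ts, unit="second")
epoch = pc.divide(seconds.cast(pa.int64()), 1_000_000)

# fractional part expressed as nanoseconds (0.5s -> 500_000_000)
fraction = pc.multiply(pc.subsecond(ts), 1_000_000_000).cast(pa.int32())

sf_struct = pa.StructArray.from_arrays(
    arrays=[epoch, fraction],
    fields=[
        pa.field("epoch", nullable=False, type=pa.int64()),
        pa.field("fraction", nullable=False, type=pa.int32()),
    ],
)
print(sf_struct)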
fakesnow/cursor.py
CHANGED
@@ -14,6 +14,7 @@ import pyarrow  # needed by fetch_arrow_table()
 import snowflake.connector.converter
 import snowflake.connector.errors
 import sqlglot
+import sqlglot.errors
 from duckdb import DuckDBPyConnection
 from snowflake.connector.cursor import ResultMetadata
 from snowflake.connector.result_batch import ResultBatch
@@ -155,6 +156,11 @@ class FakeSnowflakeCursor:
         except snowflake.connector.errors.ProgrammingError as e:
             self._sqlstate = e.sqlstate
             raise e
+        except sqlglot.errors.ParseError as e:
+            self._sqlstate = "42000"
+            # strip highlight for better readability, TODO: show pointer to start of error
+            msg = str(e).replace("\x1b[4m", "").replace("\x1b[0m", "")
+            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=1003, sqlstate="42000") from None
 
     def check_db_and_schema(self, expression: exp.Expression) -> None:
         no_database, no_schema = checks.is_unqualified_table_expression(expression)
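With the new except branch, SQL that sqlglot cannot parse surfaces as a Snowflake-style ProgrammingError (errno 1003, sqlstate 42000) instead of a raw sqlglot.errors.ParseError. A hedged usage sketch; the failing statement is illustrative:

import fakesnow
import snowflake.connector
import snowflake.connector.errors

with fakesnow.patch():
    conn = snowflake.connector.connect()
    cur = conn.cursor()
    try:
        cur.execute("SELECT * FROM")  # deliberately incomplete SQL
    except snowflake.connector.errors.ProgrammingError as e:
        # expected per the handler above: errno 1003 and sqlstate 42000
        print(e.errno, e.sqlstate)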
fakesnow/transforms/__init__.py
CHANGED
@@ -720,15 +720,21 @@ def information_schema_fs_views(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
+NUMBER_38_0 = [
+    exp.DataTypeParam(this=exp.Literal(this="38", is_string=False)),
+    exp.DataTypeParam(this=exp.Literal(this="0", is_string=False)),
+]
+
+
 def integer_precision(expression: exp.Expression) -> exp.Expression:
-    """Convert integers to bigint.
+    """Convert integers and number(38,0) to bigint.
 
-    So dataframes will return them with a dtype of int64.
+    So fetch_all will return int and dataframes will return them with a dtype of int64.
     """
-
     if (
         isinstance(expression, exp.DataType)
-        and
+        and expression.this == exp.DataType.Type.DECIMAL
+        and (not expression.expressions or expression.expressions == NUMBER_38_0)
     ) or expression.this in (exp.DataType.Type.INT, exp.DataType.Type.SMALLINT, exp.DataType.Type.TINYINT):
         return exp.DataType(
             this=exp.DataType.Type.BIGINT,
@@ -836,29 +842,29 @@ def object_construct(expression: exp.Expression) -> exp.Expression:
     See https://docs.snowflake.com/en/sql-reference/functions/object_construct
     """
 
-    if isinstance(expression, exp.Struct):
-
-        for e in expression.expressions:
-            if not (isinstance(e, exp.PropertyEQ)):
-                non_null_expressions.append(e)
-                continue
+    if not isinstance(expression, exp.Struct):
+        return expression
 
-
-
+    non_null_expressions = []
+    for e in expression.expressions:
+        if not (isinstance(e, exp.PropertyEQ)):
+            non_null_expressions.append(e)
+            continue
 
-
-
+        left = e.left
+        right = e.right
 
-
-
+        left_is_null = isinstance(left, exp.Null)
+        right_is_null = isinstance(right, exp.Null)
 
-
+        if left_is_null or right_is_null:
+            continue
 
-
-        new_struct.set("expressions", non_null_expressions)
-        return exp.Anonymous(this="TO_JSON", expressions=[new_struct])
+        non_null_expressions.append(e)
 
-
+    new_struct = expression.copy()
+    new_struct.set("expressions", non_null_expressions)
+    return exp.Anonymous(this="TO_JSON", expressions=[new_struct])
 
 
 def regex_replace(expression: exp.Expression) -> exp.Expression:
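The rewrite above is an early-return refactor; the behaviour is unchanged. A hedged usage sketch of the semantics object_construct implements (pairs with a NULL key or value are dropped, per the Snowflake docs linked in the docstring); database and schema names are illustrative:

import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="s1")
    cur = conn.cursor()
    cur.execute("SELECT OBJECT_CONSTRUCT('a', 1, 'b', NULL, 'c', 3)")
    print(cur.fetchone()[0])  # 'b' should be absent from the returned object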
@@ -1023,17 +1029,10 @@ def show_objects_tables(expression: exp.Expression, current_database: str | None
         catalog = table.db or current_database
         schema = table.name
     else:
-        # all objects / tables
+        # all objects / tables - will show everything in the "account"
         catalog = None
         schema = None
 
-    tables_only = "table_type = 'BASE TABLE' and " if show == "TABLES" else ""
-    exclude_fakesnow_tables = "not (table_schema == '_fs_information_schema')"
-    # without a database will show everything in the "account"
-    table_catalog = f" and table_catalog = '{catalog}'" if catalog else ""
-    schema = f" and table_schema = '{schema}'" if schema else ""
-    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
-
     columns = [
         "to_timestamp(0)::timestamptz as 'created_on'",
         "table_name as 'name'",
@@ -1041,17 +1040,29 @@ def show_objects_tables(expression: exp.Expression, current_database: str | None
         "table_catalog as 'database_name'",
         "table_schema as 'schema_name'",
     ]
-
-    terse = expression.args["terse"]
-    if not terse:
+    if not expression.args["terse"]:
         columns.append('null as "comment"')
+    columns_clause = ", ".join(columns)
+
+    where = ["not (table_schema == '_fs_information_schema')"]  # exclude fakesnow's internal schemas
+    if show == "TABLES":
+        where.append("table_type = 'BASE TABLE'")
+    if catalog:
+        where.append(f"table_catalog = '{catalog}'")
+    if schema:
+        where.append(f"table_schema = '{schema}'")
+    if (like := expression.args.get("like")) and isinstance(like, exp.Expression):
+        where.append(f"table_name ilike {like.sql()}")
+    where_clause = " AND ".join(where)
 
-
+    limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
 
-    query =
-
-
-
+    query = f"""
+        SELECT {columns_clause}
+        from information_schema.tables
+        where {where_clause}
+        {limit}
+        """
 
     return sqlglot.parse_one(query, read="duckdb")
 
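Building the WHERE clause as a list also adds a LIKE filter on table_name. A hedged usage sketch; the database, schema and table names are illustrative:

import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="s1")
    cur = conn.cursor()
    cur.execute("CREATE TABLE customers (id INT)")
    cur.execute("CREATE TABLE orders (id INT)")
    cur.execute("SHOW TABLES LIKE 'cust%'")
    # row[1] is the 'name' column; only CUSTOMERS is expected to match
    print([row[1] for row in cur.fetchall()])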
@@ -1296,8 +1307,6 @@ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Exp
             # to_number('100', 'TM9', 10, 2)
             if arg_scale:
                 _scale = arg_scale
-            else:
-                pass
         else:
             # to_number('100', 10, ...)
             # arg_format is not a string, so it must be precision.
@@ -1307,12 +1316,10 @@ def _get_to_number_args(e: exp.ToNumber) -> tuple[exp.Expression | None, exp.Exp
             # And arg_precision must be scale
             if arg_precision:
                 _scale = arg_precision
-
-
-    if
-
-            if arg_scale:
-                _scale = arg_scale
+    elif arg_precision:
+        _precision = arg_precision
+        if arg_scale:
+            _scale = arg_scale
 
     return _format, _precision, _scale
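For reference, a hedged sketch of the call shape this helper handles when the format is omitted and the positional arguments are precision and scale:

import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="s1")
    cur = conn.cursor()
    cur.execute("SELECT TO_NUMBER('12.34', 10, 2)")  # precision 10, scale 2
    print(cur.fetchone()[0])  # expected to come back as Decimal('12.34')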
@@ -1633,8 +1640,7 @@ def show_keys(
         AND table_name NOT LIKE '_fs_%'
         """
 
-    scope_kind
-    if scope_kind:
+    if scope_kind := expression.args.get("scope_kind"):
         table = expression.args["scope"]
 
         if scope_kind == "SCHEMA":
{fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: fakesnow
-Version: 0.9.30
+Version: 0.9.31
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                         Version 2.0, January 2004
@@ -220,7 +220,7 @@ Requires-Dist: dirty-equals; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
 Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
 Requires-Dist: pre-commit~=4.0; extra == "dev"
-Requires-Dist: pyarrow-stubs==
+Requires-Dist: pyarrow-stubs==17.19; extra == "dev"
 Requires-Dist: pytest~=8.0; extra == "dev"
 Requires-Dist: pytest-asyncio; extra == "dev"
 Requires-Dist: ruff~=0.11.0; extra == "dev"
{fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/RECORD
CHANGED
@@ -1,10 +1,10 @@
 fakesnow/__init__.py,sha256=qUfgucQYPdELrJaxczalhJgWAWQ6cfTCUAHx6nUqRaI,3528
 fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
-fakesnow/arrow.py,sha256=
+fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
 fakesnow/checks.py,sha256=be-xo0oMoAUVhlMDCu1_Rkoh_L8p_p8qo9P6reJSHIQ,2874
 fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
 fakesnow/conn.py,sha256=HGhFKErKWvAfVEy3QSc0tfNmzGh_T7FtvRfWuDBy_CQ,5744
-fakesnow/cursor.py,sha256=
+fakesnow/cursor.py,sha256=tlcQK_w4r6BZHs3BTNHUFGk2hZz-lSnOdIrNLaJ-rBw,21349
 fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
 fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
 fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
@@ -16,11 +16,11 @@ fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
 fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
 fakesnow/server.py,sha256=-jKyEVuD2TEr88jUSA1Lu86MAymel7LQAiNlytHqhTg,5934
 fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
-fakesnow/transforms/__init__.py,sha256=
+fakesnow/transforms/__init__.py,sha256=a-WNNqIo6IhY9p_m0zfdre7eb6KDmz-dbyrNkhpsHQk,60506
 fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
+fakesnow-0.9.31.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+fakesnow-0.9.31.dist-info/METADATA,sha256=RXWi0hLbvLkUtzDw0SHtcu-N3-Ebd1BC5K7JfoPnZ38,18106
+fakesnow-0.9.31.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
+fakesnow-0.9.31.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.31.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
+fakesnow-0.9.31.dist-info/RECORD,,
{fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/LICENSE
File without changes
{fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/entry_points.txt
File without changes
{fakesnow-0.9.30.dist-info → fakesnow-0.9.31.dist-info}/top_level.txt
File without changes