fakesnow 0.9.38__py3-none-any.whl → 0.9.40__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fakesnow/checks.py +2 -2
- fakesnow/copy_into.py +194 -29
- fakesnow/cursor.py +37 -10
- fakesnow/info_schema.py +45 -0
- fakesnow/macros.py +11 -0
- fakesnow/server.py +15 -0
- fakesnow/transforms/__init__.py +7 -3
- fakesnow/transforms/show.py +263 -173
- fakesnow/transforms/stage.py +163 -0
- fakesnow/transforms/transforms.py +31 -42
- fakesnow/variables.py +3 -1
- {fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/METADATA +2 -2
- {fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/RECORD +17 -16
- {fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/WHEEL +1 -1
- {fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/entry_points.txt +0 -0
- {fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/licenses/LICENSE +0 -0
- {fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/top_level.txt +0 -0
fakesnow/transforms/stage.py
ADDED
@@ -0,0 +1,163 @@
+from __future__ import annotations
+
+import datetime
+from urllib.parse import urlparse
+from urllib.request import url2pathname
+
+import snowflake.connector.errors
+import sqlglot
+from sqlglot import exp
+
+LOCAL_BUCKET_PATH = "/tmp/fakesnow_bucket"
+
+
+def create_stage(
+    expression: exp.Expression, current_database: str | None, current_schema: str | None
+) -> exp.Expression:
+    """Transform CREATE STAGE to an INSERT statement for the fake stages table."""
+    if not (
+        isinstance(expression, exp.Create)
+        and (kind := expression.args.get("kind"))
+        and isinstance(kind, str)
+        and kind.upper() == "STAGE"
+        and (table := expression.find(exp.Table))
+    ):
+        return expression
+
+    catalog = table.catalog or current_database
+    schema = table.db or current_schema
+    ident = table.this
+    if isinstance(ident, exp.Placeholder):
+        stage_name = "?"
+    elif isinstance(ident, exp.Identifier):
+        stage_name = ident.this if ident.quoted else ident.this.upper()
+    else:
+        raise ValueError(f"Invalid identifier type {ident.__class__.__name__} for stage name")
+    now = datetime.datetime.now(datetime.timezone.utc).isoformat()
+
+    is_temp = False
+    url = ""
+    properties = expression.args.get("properties") or []
+    for prop in properties:
+        if isinstance(prop, exp.TemporaryProperty):
+            is_temp = True
+        elif (
+            isinstance(prop, exp.Property)
+            and isinstance(prop.this, exp.Var)
+            and isinstance(prop.this.this, str)
+            and prop.this.this.upper() == "URL"
+        ):
+            value = prop.args.get("value")
+            if isinstance(value, exp.Literal):
+                url = value.this
+
+    # Determine cloud provider based on url
+    cloud = "AWS" if url.startswith("s3://") else None
+
+    stage_type = ("EXTERNAL" if url else "INTERNAL") + (" TEMPORARY" if is_temp else "")
+    stage_name_value = stage_name if stage_name == "?" else repr(stage_name)
+
+    insert_sql = f"""
+    INSERT INTO _fs_global._fs_information_schema._fs_stages
+    (created_on, name, database_name, schema_name, url, has_credentials, has_encryption_key, owner,
+    comment, region, type, cloud, notification_channel, storage_integration, endpoint, owner_role_type,
+    directory_enabled)
+    VALUES (
+        '{now}', {stage_name_value}, '{catalog}', '{schema}', '{url}', 'N', 'N', 'SYSADMIN',
+        '', NULL, '{stage_type}', {f"'{cloud}'" if cloud else "NULL"}, NULL, NULL, NULL, 'ROLE',
+        'N'
+    )
+    """
+    transformed = sqlglot.parse_one(insert_sql, read="duckdb")
+    transformed.args["create_stage_name"] = stage_name
+    return transformed
+
+
+# TODO: handle ?
+
+
+def put_stage(expression: exp.Expression, current_database: str | None, current_schema: str | None) -> exp.Expression:
+    """Transform PUT to a SELECT statement to locate the stage.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/put
+    """
+    if not isinstance(expression, exp.Put):
+        return expression
+
+    assert isinstance(expression.this, exp.Literal), "PUT command requires a file path as a literal"
+    src_url = urlparse(expression.this.this)
+    src_path = url2pathname(src_url.path)
+    target = expression.args["target"]
+
+    assert isinstance(target, exp.Var), f"{target} is not a exp.Var"
+    var = target.text("this")
+    if not var.startswith("@"):
+        msg = f"SQL compilation error:\n{var} does not start with @"
+        raise snowflake.connector.errors.ProgrammingError(
+            msg=msg,
+            errno=1003,
+            sqlstate="42000",
+        )
+    catalog, schema, stage_name = parts_from_var(var, current_database=current_database, current_schema=current_schema)
+
+    query = f"""
+    SELECT *
+    from _fs_global._fs_information_schema._fs_stages
+    where database_name = '{catalog}' and schema_name = '{schema}' and name = '{stage_name}'
+    """
+
+    transformed = sqlglot.parse_one(query, read="duckdb")
+    transformed.args["put_stage_name"] = f"{catalog}.{schema}.{stage_name}"
+    transformed.args["put_stage_data"] = {
+        "stageInfo": {
+            # use LOCAL_FS otherwise we need to mock S3 with HTTPS which requires a certificate
+            "locationType": "LOCAL_FS",
+            "location": f"{LOCAL_BUCKET_PATH}/{stage_name}/",
+            "creds": {},
+        },
+        "src_locations": [src_path],
+        # defaults as per https://docs.snowflake.com/en/sql-reference/sql/put TODO: support other values
+        "parallel": 4,
+        "autoCompress": True,
+        "sourceCompression": "auto_detect",
+        "overwrite": False,
+        "command": "UPLOAD",
+    }
+
+    return transformed
+
+
+def normalise_ident(name: str) -> str:
+    """
+    Strip double quotes if present else return uppercased.
+    Snowflake treats quoted identifiers as case-sensitive and un-quoted identifiers as case-insensitive
+    """
+    if name.startswith('"') and name.endswith('"'):
+        return name[1:-1]  # Strip quotes
+
+    return name.upper()
+
+
+def parts_from_var(var: str, current_database: str | None, current_schema: str | None) -> tuple[str, str, str]:
+    parts = var[1:].split(".")
+    if len(parts) == 3:
+        # Fully qualified name
+        database_name, schema_name, name = parts
+    elif len(parts) == 2:
+        # Schema + stage name
+        assert current_database, "Current database must be set when stage name is not fully qualified"
+        database_name, schema_name, name = current_database, parts[0], parts[1]
+    elif len(parts) == 1:
+        # Stage name only
+        assert current_database, "Current database must be set when stage name is not fully qualified"
+        assert current_schema, "Current schema must be set when stage name is not fully qualified"
+        database_name, schema_name, name = current_database, current_schema, parts[0]
+    else:
+        raise ValueError(f"Invalid stage name: {var}")
+
+    # Normalize names to uppercase if not wrapped in double quotes
+    database_name = normalise_ident(database_name)
+    schema_name = normalise_ident(schema_name)
+    name = normalise_ident(name)
+
+    return database_name, schema_name, name
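The stage-name resolution above follows Snowflake's identifier rules: unquoted parts are uppercased and missing parts fall back to the current database and schema, while double-quoted parts keep their case. A small illustrative check of the new helper (not part of the wheel; assumes fakesnow >= 0.9.40 is installed):

    # Illustrative usage of the new parts_from_var helper from this diff.
    from fakesnow.transforms.stage import parts_from_var

    # Unquoted names are uppercased; current database/schema fill in missing parts.
    assert parts_from_var("@mystage", current_database="db1", current_schema="s1") == ("DB1", "S1", "MYSTAGE")
    assert parts_from_var("@s2.mystage", current_database="db1", current_schema="s1") == ("DB1", "S2", "MYSTAGE")

    # Double-quoted names keep their case.
    assert parts_from_var('@"MyDb"."MySchema"."MyStage"', current_database=None, current_schema=None) == (
        "MyDb",
        "MySchema",
        "MyStage",
    )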
fakesnow/transforms/transforms.py
CHANGED
@@ -45,6 +45,24 @@ def alter_table_strip_cluster_by(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
+def array_construct_etc(expression: exp.Expression) -> exp.Expression:
+    """Handle ARRAY_CONSTRUCT_* and ARRAY_CAT
+
+    Convert ARRAY_CONSTRUCT args to json_array.
+    Convert ARRAY_CONSTRUCT_COMPACT args to a list.
+    Because the macro expects a single argument to use with UNNEST.
+
+    TODO: fix ARRAY_CONSTRUCT_COMPACT to handle args of differing types.
+    """
+    if isinstance(expression, exp.ArrayConstructCompact):
+        return exp.ArrayConstructCompact(expressions=[exp.Array(expressions=expression.expressions)])
+    elif isinstance(expression, exp.Array) and isinstance(expression.parent, exp.Select):
+        return exp.Anonymous(this="json_array", expressions=expression.expressions)
+    elif isinstance(expression, exp.ArrayConcat) and isinstance(expression.parent, exp.Select):
+        return exp.Cast(this=expression, to=exp.DataType(this=exp.DataType.Type.JSON, nested=False))
+    return expression
+
+
 def array_size(expression: exp.Expression) -> exp.Expression:
     if isinstance(expression, exp.ArraySize):
         # return null if not json array
@@ -530,8 +548,11 @@ def identifier(expression: exp.Expression) -> exp.Expression:
         and isinstance(expression.this, str)
         and expression.this.upper() == "IDENTIFIER"
     ):
-
-
+        arg = expression.expressions[0]
+        # ? is parsed as exp.Placeholder
+        if isinstance(arg, exp.Placeholder):
+            return arg
+        return exp.Identifier(this=arg.this, quoted=False)
     return expression
 
 
@@ -569,20 +590,20 @@ def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
-def
-    """
-
-
+def information_schema_fs(expression: exp.Expression) -> exp.Expression:
+    """Redirects for
+    * _FS_COLUMNS view which has character_maximum_length or character_octet_length.
+    * _FS_TABLES to access additional metadata columns (eg: comment).
+    * _FS_VIEWS to return Snowflake's version instead of duckdb's
+    * _FS_LOAD_HISTORY table which duckdb doesn't have.
     """
 
     if (
         isinstance(expression, exp.Table)
-        and expression.db
         and expression.db.upper() == "INFORMATION_SCHEMA"
-        and expression.name
-        and expression.name.upper() == "COLUMNS"
+        and expression.name.upper() in {"COLUMNS", "TABLES", "VIEWS", "LOAD_HISTORY"}
     ):
-        expression.set("this", exp.Identifier(this="
+        expression.set("this", exp.Identifier(this=f"_FS_{expression.name.upper()}", quoted=False))
         expression.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))
 
     return expression
@@ -607,38 +628,6 @@ def information_schema_databases(
     return expression
 
 
-def information_schema_fs_tables(
-    expression: exp.Expression,
-) -> exp.Expression:
-    """Use _FS_TABLES to access additional metadata columns (eg: comment)."""
-
-    if (
-        isinstance(expression, exp.Select)
-        and (tbl := expression.find(exp.Table))
-        and tbl.db.upper() == "INFORMATION_SCHEMA"
-        and tbl.name.upper() == "TABLES"
-    ):
-        tbl.set("this", exp.Identifier(this="_FS_TABLES", quoted=False))
-        tbl.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))
-
-    return expression
-
-
-def information_schema_fs_views(expression: exp.Expression) -> exp.Expression:
-    """Use _FS_VIEWS to return Snowflake's version instead of duckdb's."""
-
-    if (
-        isinstance(expression, exp.Select)
-        and (tbl := expression.find(exp.Table))
-        and tbl.db.upper() == "INFORMATION_SCHEMA"
-        and tbl.name.upper() == "VIEWS"
-    ):
-        tbl.set("this", exp.Identifier(this="_FS_VIEWS", quoted=False))
-        tbl.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))
-
-    return expression
-
-
 NUMBER_38_0 = [
     exp.DataTypeParam(this=exp.Literal(this="38", is_string=False)),
     exp.DataTypeParam(this=exp.Literal(this="0", is_string=False)),
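The COLUMNS/TABLES/VIEWS redirects that previously lived in separate transforms are now a single information_schema_fs redirect keyed off the table name. A rough sketch of the effect (illustrative only; assumes fakesnow >= 0.9.40 and its pinned sqlglot are installed, and the rendered SQL is approximate):

    import sqlglot
    from fakesnow.transforms.transforms import information_schema_fs

    expr = sqlglot.parse_one("SELECT * FROM information_schema.tables", read="snowflake")
    # transform() visits every node; the matching Table node is redirected to the _FS_ view.
    print(expr.transform(information_schema_fs).sql(dialect="duckdb"))
    # expected (approximately): SELECT * FROM _FS_INFORMATION_SCHEMA._FS_TABLES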
fakesnow/variables.py
CHANGED
@@ -62,7 +62,9 @@ class Variables:
        for name, value in self._variables.items():
            sql = re.sub(rf"\${name}", value, sql, flags=re.IGNORECASE)

-
+        # Only treat $<word> (not $<number>) as session variables,
+        # ignore identifiers containing $
+        if remaining_variables := re.search(r"(?<![\$\w])\$(?!\d+)\w+", sql):
            raise snowflake.connector.errors.ProgrammingError(
                msg=f"Session variable '{remaining_variables.group().upper()}' does not exist"
            )
{fakesnow-0.9.38.dist-info → fakesnow-0.9.40.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fakesnow
-Version: 0.9.38
+Version: 0.9.40
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
         Version 2.0, January 2004
@@ -214,7 +214,7 @@ License-File: LICENSE
 Requires-Dist: duckdb~=1.2.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot~=26.
+Requires-Dist: sqlglot~=26.24.0
 Provides-Extra: server
 Requires-Dist: starlette; extra == "server"
 Requires-Dist: uvicorn; extra == "server"
|
@@ -1,32 +1,33 @@
|
|
1
1
|
fakesnow/__init__.py,sha256=71Rk_3s_4eTDCi7-bbo-xT71WN0E0MAPf5qjsguIeJU,5117
|
2
2
|
fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
|
3
3
|
fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
|
4
|
-
fakesnow/checks.py,sha256=
|
4
|
+
fakesnow/checks.py,sha256=bOJPMp46AvjJV_bXXjx2njO2dXNjffLrznwRuKyYZ4g,2889
|
5
5
|
fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
|
6
6
|
fakesnow/conn.py,sha256=diCwcjaCBrlCn9PyjbScfIQTNQjqiPTkQanUTqcvblE,6009
|
7
7
|
fakesnow/converter.py,sha256=wPOfsFXIUJNJSx5oFNAxh13udxmAVIIHsLK8BiGkXGA,1635
|
8
|
-
fakesnow/copy_into.py,sha256=
|
9
|
-
fakesnow/cursor.py,sha256=
|
8
|
+
fakesnow/copy_into.py,sha256=YIr5Bq3JwKOPYWm5t2QXUuFGxLL-1ioEXEumNjGbBvM,13648
|
9
|
+
fakesnow/cursor.py,sha256=mOwyXnBFXp79nDh0vtbmxS_hmFNy4j7hPlskENWeIrI,23818
|
10
10
|
fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
|
11
11
|
fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
|
12
12
|
fakesnow/fixtures.py,sha256=2rj0MTZlaZc4PNWhaqC5IiiLa7E9G0QZT3g45YawsL0,633
|
13
|
-
fakesnow/info_schema.py,sha256=
|
13
|
+
fakesnow/info_schema.py,sha256=lqEYD5aWK2MamjALbj6ct7pz_1yyAq3tAk51kLa8NKk,9872
|
14
14
|
fakesnow/instance.py,sha256=OKoYXwaI6kL9HQpnHx44yzpON_xNfuIT_F4oJNF_XXQ,2114
|
15
15
|
fakesnow/logger.py,sha256=U6EjUENQuTrDeNYqER2hxazoySmXzLmZJ-t-SDZgjkg,363
|
16
|
-
fakesnow/macros.py,sha256=
|
16
|
+
fakesnow/macros.py,sha256=lxtznTCYryjecFkwswbqWMzCVamDLWyQZRKWtkWCWEk,1397
|
17
17
|
fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
|
18
18
|
fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
|
19
19
|
fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
|
20
|
-
fakesnow/server.py,sha256=
|
21
|
-
fakesnow/variables.py,sha256=
|
22
|
-
fakesnow/transforms/__init__.py,sha256=
|
20
|
+
fakesnow/server.py,sha256=PGNuYEpmI0L0ZrwBCP1pDTc_lnFrtfSnlZ6zyVuCTqk,7173
|
21
|
+
fakesnow/variables.py,sha256=BGnD4LAdVByfJ2GXL6qpGBaTF8ZJRjt3pdJsd9sIAcw,3134
|
22
|
+
fakesnow/transforms/__init__.py,sha256=OE-dunCuum8lv832s2cjEzThgBnpKELo5aaTXD_bMNg,2807
|
23
23
|
fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
|
24
|
-
fakesnow/transforms/show.py,sha256=
|
25
|
-
fakesnow/transforms/
|
26
|
-
fakesnow
|
24
|
+
fakesnow/transforms/show.py,sha256=ejvs9S2l2Wcal4fhnNSVs3JkZwKsFxMEU35ufUV3-kg,20421
|
25
|
+
fakesnow/transforms/stage.py,sha256=FSIyI5kpthD_pdbVfzYCrby5HaikMZUpdRr6oBSrgk4,6176
|
26
|
+
fakesnow/transforms/transforms.py,sha256=t99pHmNm8aG89o738zVSXCG6a0dOXpnY1OqAz28EQHc,48072
|
27
|
+
fakesnow-0.9.40.dist-info/licenses/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
|
27
28
|
tools/decode.py,sha256=kC5kUvLQxdCkMRsnH6BqCajlKxKeN77w6rwCKsY6gqU,1781
|
28
|
-
fakesnow-0.9.
|
29
|
-
fakesnow-0.9.
|
30
|
-
fakesnow-0.9.
|
31
|
-
fakesnow-0.9.
|
32
|
-
fakesnow-0.9.
|
29
|
+
fakesnow-0.9.40.dist-info/METADATA,sha256=ssyuxQf4dJU5Mt5dQYr14H52eO9r62X1NIzdfJ7k1jA,20680
|
30
|
+
fakesnow-0.9.40.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
31
|
+
fakesnow-0.9.40.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
|
32
|
+
fakesnow-0.9.40.dist-info/top_level.txt,sha256=Yos7YveA3f03xVYuURqnBsfMV2DePXfu_yGcsj3pPzI,30
|
33
|
+
fakesnow-0.9.40.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|