fakesnow 0.9.37__py3-none-any.whl → 0.9.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fakesnow/checks.py +2 -2
- fakesnow/copy_into.py +221 -27
- fakesnow/cursor.py +20 -10
- fakesnow/info_schema.py +45 -0
- fakesnow/server.py +1 -0
- fakesnow/transforms/__init__.py +3 -3
- fakesnow/transforms/show.py +263 -173
- fakesnow/transforms/stage.py +68 -0
- fakesnow/transforms/transforms.py +13 -42
- fakesnow/variables.py +3 -1
- {fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/METADATA +1 -1
- {fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/RECORD +16 -15
- {fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/WHEEL +1 -1
- {fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/entry_points.txt +0 -0
- {fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/licenses/LICENSE +0 -0
- {fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/top_level.txt +0 -0
fakesnow/checks.py
CHANGED
@@ -39,8 +39,8 @@ def is_unqualified_table_expression(expression: exp.Expression) -> tuple[bool, bool]:
         # "CREATE/DROP SCHEMA"
         no_database = not node.args.get("catalog")
         no_schema = False
-    elif parent_kind.upper() in {"TABLE", "VIEW"}:
-        # "CREATE/DROP TABLE/VIEW"
+    elif parent_kind.upper() in {"TABLE", "VIEW", "STAGE"}:
+        # "CREATE/DROP TABLE/VIEW/STAGE"
         no_database = not node.args.get("catalog")
         no_schema = not node.args.get("db")
     else:
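Note: with STAGE added to the set, unqualified CREATE STAGE statements are now flagged for database/schema resolution just like tables and views. A quick sketch of the check (assumes sqlglot parses CREATE STAGE in its snowflake dialect, which the rest of this release relies on):

    import sqlglot
    from fakesnow.checks import is_unqualified_table_expression

    # Unqualified stage name: both database and schema are unknown
    expr = sqlglot.parse_one("CREATE STAGE stage1", read="snowflake")
    print(is_unqualified_table_expression(expr))  # expected: (True, True)

    # Fully qualified: nothing left to resolve
    expr = sqlglot.parse_one("CREATE STAGE db1.schema1.stage1", read="snowflake")
    print(is_unqualified_table_expression(expr))  # expected: (False, False)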
fakesnow/copy_into.py
CHANGED
@@ -1,8 +1,9 @@
 from __future__ import annotations

+import datetime
 from collections.abc import Sequence
 from dataclasses import dataclass, field
-from typing import Any, Protocol, cast
+from typing import Any, NamedTuple, Protocol, cast
 from urllib.parse import urlparse, urlunparse

 import duckdb
@@ -13,27 +14,121 @@ from sqlglot import exp
 from fakesnow import logger


+class LoadHistoryRecord(NamedTuple):
+    """Represents a record in the INFORMATION_SCHEMA.LOAD_HISTORY table."""
+
+    schema_name: str
+    file_name: str
+    table_name: str
+    last_load_time: str  # ISO8601 datetime with timezone
+    status: str
+    row_count: int
+    row_parsed: int
+    first_error_message: str | None
+    first_error_line_number: int | None
+    first_error_character_position: int | None
+    first_error_col_name: str | None
+    error_count: int
+    error_limit: int | None
+
+
 def copy_into(
-    duck_conn: DuckDBPyConnection,
+    duck_conn: DuckDBPyConnection,
+    current_database: str | None,
+    current_schema: str | None,
+    expr: exp.Copy,
+    params: Sequence[Any] | dict[Any, Any] | None = None,
 ) -> str:
     cparams = _params(expr)
-
+    if isinstance(cparams.file_format, ReadParquet):
+        from_ = expr.args["files"][0]
+        # parquet must use MATCH_BY_COLUMN_NAME (TODO) or a copy transformation
+        # ie: the from clause in COPY INTO must be a subquery
+        if not isinstance(from_, exp.Subquery):
+            raise snowflake.connector.errors.ProgrammingError(
+                msg="SQL compilation error:\nPARQUET file format can produce one and only one column of type variant, object, or array. Load data into separate columns using the MATCH_BY_COLUMN_NAME copy option or copy with transformation.",  # noqa: E501
+                errno=2019,
+                sqlstate="0A000",
+            )
+
+    from_source = _from_source(expr)
+    source = (
+        stage_url_from_var(from_source, duck_conn, current_database, current_schema)
+        if from_source.startswith("@")
+        else from_source
+    )
+    urls = _source_urls(source, cparams.files)
+
     inserts = _inserts(expr, cparams, urls)
+    table = expr.this
+    if isinstance(expr.this, exp.Table):
+        table = expr.this
+    elif isinstance(expr.this, exp.Schema) and isinstance(expr.this.this, exp.Table):
+        table = expr.this.this
+    else:
+        raise AssertionError(f"copy into {expr.this.__class__} is not Table or Schema")
+
+    schema = table.db or current_schema
+    assert schema

-
+    histories: list[LoadHistoryRecord] = []
+    load_time = datetime.datetime.now(datetime.timezone.utc).isoformat()
     try:
-
+        check_sql = "SELECT 1 FROM _fs_information_schema._fs_load_history WHERE FILE_NAME = ? LIMIT 1"
+
         for i, url in zip(inserts, urls):
-
-
+            # Check if file has been loaded into any table before
+            duck_conn.execute(check_sql, [url])
+            if duck_conn.fetchone() and not cparams.force:
+                affected_count = 0
+                status = "LOAD_SKIPPED"
+                error_limit = None
+                error_count = 1
+                first_error_message = "File was loaded before."
+            else:
+                sql = i.sql(dialect="duckdb")
+                logger.log_sql(sql, params)
+                duck_conn.execute(sql, params)
+                (affected_count,) = duck_conn.fetchall()[0]
+                status = "LOADED"
+                error_limit = 1
+                error_count = 0
+                first_error_message = None
+
+            history = LoadHistoryRecord(
+                schema_name=schema,
+                file_name=url,
+                table_name=table.name,
+                last_load_time=load_time,
+                status=status,
+                row_count=affected_count,
+                row_parsed=affected_count,
+                first_error_message=first_error_message,
+                first_error_line_number=None,
+                first_error_character_position=None,
+                first_error_col_name=None,
+                error_count=error_count,
+                error_limit=error_limit,
+            )
+            histories.append(history)
+
+        if insert_histories := [h for h in histories if h.status != "LOAD_SKIPPED"]:
+            values = "\n ,".join(str(tuple(history)).replace("None", "NULL") for history in insert_histories)
+            sql = f"INSERT INTO _fs_information_schema._fs_load_history VALUES {values}"
             duck_conn.execute(sql, params)
-            (affected_count,) = duck_conn.fetchall()[0]
-            results.append(f"('{url}', 'LOADED', {affected_count}, {affected_count}, 1, 0, NULL, NULL, NULL, NULL)")
-
-        # TODO: update load_history with the results if loaded

-        columns =
-
+        columns = (
+            "file, status, rows_parsed, rows_loaded, error_limit, errors_seen, first_error, first_error_line, "
+            "first_error_character, first_error_column_name"
+        )
+        values = "\n, ".join(
+            f"('{h.file_name}', '{h.status}', {h.row_parsed}, {h.row_count}, "
+            f"{h.error_limit or 'NULL'}, {h.error_count}, "
+            f"{repr(h.first_error_message) if h.first_error_message else 'NULL'}, "
+            f"{h.first_error_line_number or 'NULL'}, {h.first_error_character_position or 'NULL'}, "
+            f"{h.first_error_col_name or 'NULL'})"
+            for h in histories
+        )
         sql = f"SELECT * FROM (VALUES\n {values}\n) AS t({columns})"
         duck_conn.execute(sql)
         return sql
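Note: this rewrite gives COPY INTO real load-history semantics: a file already recorded in _fs_load_history is skipped unless FORCE is set. A rough usage sketch (the bucket URL is illustrative; fakesnow's own tests back s3:// URLs with an S3 stub):

    import fakesnow
    import snowflake.connector

    with fakesnow.patch():
        # fakesnow creates db1.schema1 on connect if missing
        conn = snowflake.connector.connect(database="db1", schema="schema1")
        cur = conn.cursor()
        cur.execute("CREATE TABLE t (id INT)")
        cur.execute("COPY INTO t FROM 's3://mybucket/data.csv'")  # status LOADED
        cur.execute("COPY INTO t FROM 's3://mybucket/data.csv'")  # status LOAD_SKIPPED
        cur.execute("COPY INTO t FROM 's3://mybucket/data.csv' FORCE = TRUE")  # loaded again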
@@ -62,6 +157,8 @@ def _params(expr: exp.Copy) -> Params:

             if var_type == "CSV":
                 kwargs["file_format"] = handle_csv(param.expressions)
+            elif var_type == "PARQUET":
+                kwargs["file_format"] = ReadParquet()
             else:
                 raise NotImplementedError(f"{var_type} FILE_FORMAT is not currently implemented")
         elif var == "FORCE":
@@ -71,28 +168,97 @@ def _params(expr: exp.Copy) -> Params:
         else:
             raise ValueError(f"Unknown copy parameter: {param.this}")

-
-
+    return Params(force=force, **kwargs)
+
+
+def _from_source(expr: exp.Copy) -> str:
+    # NB: sqlglot parses the from clause as "files" strangely
+    from_ = expr.args["files"][0].this
+
+    if isinstance(from_, exp.Select):
+        from_table = from_.args["from"]
+        # if a subquery is used in the FROM clause it must be loaded from a stage not an external location
+        assert isinstance(from_table, exp.From), f"{from_table.__class__} is not a From"
+        assert isinstance(from_table.this, exp.Table), f"{from_table.__class__} is not a Table"
+        var = from_table.this.this
+        if not isinstance(var, exp.Var):
+            # not a very helpful message, but this is what Snowflake returns
+            raise snowflake.connector.errors.ProgrammingError(
+                msg=f"SQL compilation error:\ninvalid URL prefix found in: {from_table.this.this}",
+                errno=1011,
+                sqlstate="42601",
+            )
+        # return the name of the stage, eg: @stage1
+        return var.this

-
+    assert isinstance(from_, exp.Literal), f"{from_} is not a exp.Literal"
+    # return url
+    return from_.name


-def 
+def normalise_ident(name: str) -> str:
     """
-
-
+    Strip double quotes if present else return uppercased.
+    Snowflake treats quoted identifiers as case-sensitive and un-quoted identifiers as case-insensitive
     """
-
-
+    if name.startswith('"') and name.endswith('"'):
+        return name[1:-1]  # Strip quotes
+
+    return name.upper()
+
+
+def stage_url_from_var(
+    from_source: str, duck_conn: DuckDBPyConnection, current_database: str | None, current_schema: str | None
+) -> str:
+    parts = from_source[1:].split(".")
+    if len(parts) == 3:
+        # Fully qualified name
+        database_name, schema_name, name = parts
+    elif len(parts) == 2:
+        # Schema + stage name
+        assert current_database, "Current database must be set when stage name is not fully qualified"
+        database_name, schema_name, name = current_database, parts[0], parts[1]
+    elif len(parts) == 1:
+        # Stage name only
+        assert current_database, "Current database must be set when stage name is not fully qualified"
+        assert current_schema, "Current schema must be set when stage name is not fully qualified"
+        database_name, schema_name, name = current_database, current_schema, parts[0]
+    else:
+        raise ValueError(f"Invalid stage name: {from_source}")
+
+    # Normalize names to uppercase if not wrapped in double quotes
+    database_name = normalise_ident(database_name)
+    schema_name = normalise_ident(schema_name)
+    name = normalise_ident(name)
+
+    # Look up the stage URL
+    duck_conn.execute(
+        """
+        SELECT url FROM _fs_global._fs_information_schema._fs_stages
+        WHERE database_name = ? and schema_name = ? and name = ?
+        """,
+        (database_name, schema_name, name),
+    )
+    if result := duck_conn.fetchone():
+        return result[0]
+    else:
+        raise snowflake.connector.errors.ProgrammingError(
+            msg=f"SQL compilation error:\nStage '{database_name}.{schema_name}.{name}' does not exist or not authorized.",  # noqa: E501
+            errno=2003,
+            sqlstate="02000",
+        )

-
+
+def _source_urls(from_source: str, files: list[str]) -> list[str]:
+    """Convert from_source to a list of URLs."""
+    scheme, netloc, path, params, query, fragment = urlparse(from_source)
     if not scheme:
         raise snowflake.connector.errors.ProgrammingError(
-            msg=f"SQL compilation error:\ninvalid URL prefix found in: '{
+            msg=f"SQL compilation error:\ninvalid URL prefix found in: '{from_source}'", errno=1011, sqlstate="42601"
         )

     # rebuild url from components to ensure correct handling of host slash
-    return [_urlunparse(scheme, netloc, path, params, query, fragment, file) for file in files] or [
+    return [_urlunparse(scheme, netloc, path, params, query, fragment, file) for file in files] or [from_source]


 def _urlunparse(scheme: str, netloc: str, path: str, params: str, query: str, fragment: str, suffix: str) -> str:
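Note: normalise_ident is pure string handling, so its contract is easy to pin down in isolation:

    from fakesnow.copy_into import normalise_ident

    print(normalise_ident("mystage"))    # MYSTAGE - unquoted identifiers fold to uppercase
    print(normalise_ident('"MyStage"'))  # MyStage - quoted identifiers keep their case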
@@ -111,9 +277,16 @@ def _urlunparse(scheme: str, netloc: str, path: str, params: str, query: str, fragment: str, suffix: str) -> str:
 def _inserts(expr: exp.Copy, params: Params, urls: list[str]) -> list[exp.Expression]:
     # INTO expression
     target = expr.this
-
-
-
+
+    from_ = expr.args["files"][0]
+    if isinstance(from_, exp.Subquery):
+        select = from_.this
+        assert isinstance(select, exp.Select), f"{select.__class__} is not a Select"
+        columns = _strip_json_extract(select).expressions
+    else:
+        columns = [exp.Column(this=exp.Identifier(this=f"column{i}")) for i in range(len(target.expressions))] or [
+            exp.Column(this=exp.Star())
+        ]

     return [
         exp.Insert(
@@ -124,6 +297,20 @@ def _inserts(expr: exp.Copy, params: Params, urls: list[str]) -> list[exp.Expression]:
     ]


+def _strip_json_extract(expr: exp.Select) -> exp.Select:
+    """
+    Strip $1 prefix from SELECT statement columns.
+    """
+    dollar1 = exp.Parameter(this=exp.Literal(this="1", is_string=False))
+
+    for p in expr.find_all(exp.Parameter):
+        if p == dollar1 and p.parent and (key := p.parent.expression.find(exp.JSONPathKey)):
+            assert p.parent.parent, expr
+            p.parent.parent.args["this"] = exp.Identifier(this=key.this)
+
+    return expr
+
+
 def handle_csv(expressions: list[exp.Property]) -> ReadCSV:
     skip_header = ReadCSV.skip_header
     quote = ReadCSV.quote
@@ -192,8 +379,15 @@ class ReadCSV(FileTypeHandler):
         return exp.func("read_csv", exp.Literal(this=url, is_string=True), *args)


+@dataclass
+class ReadParquet(FileTypeHandler):
+    def read_expression(self, url: str) -> exp.Expression:
+        return exp.func("read_parquet", exp.Literal(this=url, is_string=True))
+
+
 @dataclass
 class Params:
     files: list[str] = field(default_factory=list)
     # Snowflake defaults to CSV when no file format is specified
     file_format: FileTypeHandler = field(default_factory=ReadCSV)
+    force: bool = False
fakesnow/cursor.py
CHANGED
@@ -43,6 +43,7 @@ SQL_CREATED_DATABASE = Template("SELECT 'Database ${name} successfully created.' as 'status'")
 SQL_CREATED_SCHEMA = Template("SELECT 'Schema ${name} successfully created.' as 'status'")
 SQL_CREATED_TABLE = Template("SELECT 'Table ${name} successfully created.' as 'status'")
 SQL_CREATED_VIEW = Template("SELECT 'View ${name} successfully created.' as 'status'")
+SQL_CREATED_STAGE = Template("SELECT 'Stage area ${name} successfully created.' as status")
 SQL_DROPPED = Template("SELECT '${name} successfully dropped.' as 'status'")
 SQL_INSERTED_ROWS = Template("SELECT ${count} as 'number of rows inserted'")
 SQL_UPDATED_ROWS = Template("SELECT ${count} as 'number of rows updated', 0 as 'number of multi-joined rows updated'")
@@ -203,10 +204,8 @@ class FakeSnowflakeCursor:
             .transform(transforms.create_database, db_path=self._conn.db_path)
             .transform(transforms.extract_comment_on_table)
             .transform(transforms.extract_comment_on_columns)
-            .transform(transforms.
+            .transform(transforms.information_schema_fs)
             .transform(transforms.information_schema_databases, current_schema=self._conn.schema)
-            .transform(transforms.information_schema_fs_tables)
-            .transform(transforms.information_schema_fs_views)
             .transform(transforms.drop_schema_cascade)
             .transform(transforms.tag)
             .transform(transforms.semi_structured_types)
@@ -248,8 +247,9 @@ class FakeSnowflakeCursor:
             .transform(transforms.show_procedures)
             .transform(transforms.show_warehouses)
             .transform(lambda e: transforms.show_schemas(e, self._conn.database))
-            .transform(lambda e: transforms.show_tables_etc(e, self._conn.database))
-            .transform(lambda e: transforms.show_columns(e, self._conn.database))
+            .transform(lambda e: transforms.show_tables_etc(e, self._conn.database, self._conn.schema))
+            .transform(lambda e: transforms.show_columns(e, self._conn.database, self._conn.schema))
+            .transform(lambda e: transforms.show_stages(e, self._conn.database, self._conn.schema))
             # TODO collapse into a single show_keys function
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="PRIMARY"))
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="UNIQUE"))
@@ -260,6 +260,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.create_clone)
             .transform(transforms.alias_in_join)
             .transform(transforms.alter_table_strip_cluster_by)
+            .transform(lambda e: transforms.create_stage(e, self._conn.database, self._conn.schema))
         )

     def _transform_explode(self, expression: exp.Expression) -> list[exp.Expression]:
@@ -287,17 +288,17 @@ class FakeSnowflakeCursor:

         try:
             if isinstance(transformed, exp.Copy):
-                sql = copy_into(self._duck_conn, transformed, params)
+                sql = copy_into(self._duck_conn, self._conn.database, self._conn.schema, transformed, params)
             else:
                 logger.log_sql(sql, params)
                 self._duck_conn.execute(sql, params)
         except duckdb.BinderException as e:
             msg = e.args[0]
-            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=2043, sqlstate="02000") from
+            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=2043, sqlstate="02000") from e
         except duckdb.CatalogException as e:
             # minimal processing to make it look like a snowflake exception, message content may differ
             msg = cast(str, e.args[0]).split("\n")[0]
-            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=2003, sqlstate="42S02") from
+            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=2003, sqlstate="42S02") from e
         except duckdb.TransactionException as e:
             if "cannot rollback - no transaction is active" in str(
                 e
@@ -307,9 +308,9 @@ class FakeSnowflakeCursor:
             else:
                 raise e
         except duckdb.ConnectionException as e:
-            raise snowflake.connector.errors.DatabaseError(msg=e.args[0], errno=250002, sqlstate="08003") from
+            raise snowflake.connector.errors.DatabaseError(msg=e.args[0], errno=250002, sqlstate="08003") from e
         except duckdb.ParserException as e:
-            raise snowflake.connector.errors.ProgrammingError(msg=e.args[0], errno=1003, sqlstate="42000") from
+            raise snowflake.connector.errors.ProgrammingError(msg=e.args[0], errno=1003, sqlstate="42000") from e

         affected_count = None

@@ -329,6 +330,15 @@ class FakeSnowflakeCursor:
             self._duck_conn.execute(info_schema.per_db_creation_sql(create_db_name))
             result_sql = SQL_CREATED_DATABASE.substitute(name=create_db_name)

+        elif stage_name := transformed.args.get("stage_name"):
+            if stage_name == "?":
+                assert isinstance(params, (tuple, list)) and len(params) == 1, (
+                    "Expected single parameter for stage name"
+                )
+                result_sql = SQL_CREATED_STAGE.substitute(name=params[0].upper())
+            else:
+                result_sql = SQL_CREATED_STAGE.substitute(name=stage_name.upper())
+
         elif cmd == "INSERT":
             (affected_count,) = self._duck_conn.fetchall()[0]
             result_sql = SQL_INSERTED_ROWS.substitute(count=affected_count)
fakesnow/info_schema.py
CHANGED
@@ -185,6 +185,49 @@ where database_name = '${catalog}'
 """
 )

+SQL_CREATE_LOAD_HISTORY_TABLE = Template(
+    """
+create table if not exists ${catalog}._fs_information_schema._fs_load_history (
+    SCHEMA_NAME VARCHAR,
+    FILE_NAME VARCHAR,
+    TABLE_NAME VARCHAR,
+    LAST_LOAD_TIME TIMESTAMPTZ,
+    STATUS VARCHAR,
+    ROW_COUNT INTEGER,
+    ROW_PARSED INTEGER,
+    FIRST_ERROR_MESSAGE VARCHAR,
+    FIRST_ERROR_LINE_NUMBER INTEGER,
+    FIRST_ERROR_CHARACTER_POSITION INTEGER,
+    FIRST_ERROR_COL_NAME VARCHAR,
+    ERROR_COUNT INTEGER,
+    ERROR_LIMIT INTEGER
+)
+"""
+)
+
+
+SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_STAGES_TABLE = """
+CREATE TABLE IF NOT EXISTS _fs_global._fs_information_schema._fs_stages (
+    created_on TIMESTAMPTZ,
+    name TEXT,
+    database_name TEXT,
+    schema_name TEXT,
+    url TEXT,
+    has_credentials TEXT,
+    has_encryption_key TEXT,
+    owner TEXT,
+    comment TEXT,
+    region TEXT,
+    type TEXT,
+    cloud TEXT,
+    notification_channel TEXT,
+    storage_integration TEXT,
+    endpoint TEXT,
+    owner_role_type TEXT,
+    directory_enabled TEXT
+);
+"""
+

 def per_db_creation_sql(catalog: str) -> str:
     return f"""
@@ -193,6 +236,7 @@ def per_db_creation_sql(catalog: str) -> str:
         {SQL_CREATE_INFORMATION_SCHEMA_DATABASES_VIEW.substitute(catalog=catalog)};
         {SQL_CREATE_INFORMATION_SCHEMA_TABLES_VIEW.substitute(catalog=catalog)};
         {SQL_CREATE_INFORMATION_SCHEMA_VIEWS_VIEW.substitute(catalog=catalog)};
+        {SQL_CREATE_LOAD_HISTORY_TABLE.substitute(catalog=catalog)};
     """

@@ -203,6 +247,7 @@ def fs_global_creation_sql() -> str:
         {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_EXT};
         {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_VIEW};
         {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE};
+        {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_STAGES_TABLE}
     """

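Note: because the information_schema_fs transform (below) redirects INFORMATION_SCHEMA.LOAD_HISTORY to this per-database table, the fake load history is queryable the way it is in Snowflake:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch():
        conn = snowflake.connector.connect(database="db1", schema="schema1")
        cur = conn.cursor()
        cur.execute("SELECT file_name, status, row_count FROM information_schema.load_history")
        print(cur.fetchall())  # [] until a COPY INTO records a load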
fakesnow/server.py
CHANGED
fakesnow/transforms/__init__.py
CHANGED
@@ -8,10 +8,12 @@ from fakesnow.transforms.show import (
     show_keys as show_keys,
     show_procedures as show_procedures,
     show_schemas as show_schemas,
+    show_stages as show_stages,
     show_tables_etc as show_tables_etc,
     show_users as show_users,
     show_warehouses as show_warehouses,
 )
+from fakesnow.transforms.stage import create_stage as create_stage
 from fakesnow.transforms.transforms import (
     SUCCESS_NOP as SUCCESS_NOP,
     alias_in_join as alias_in_join,
@@ -36,9 +38,7 @@ from fakesnow.transforms.transforms import (
     identifier as identifier,
     indices_to_json_extract as indices_to_json_extract,
     information_schema_databases as information_schema_databases,
-
-    information_schema_fs_tables as information_schema_fs_tables,
-    information_schema_fs_views as information_schema_fs_views,
+    information_schema_fs as information_schema_fs,
     integer_precision as integer_precision,
     json_extract_cased_as_varchar as json_extract_cased_as_varchar,
     json_extract_cast_as_varchar as json_extract_cast_as_varchar,
fakesnow/transforms/show.py
CHANGED
@@ -11,11 +11,52 @@ def fs_global_creation_sql() -> str:
     {SQL_CREATE_VIEW_SHOW_OBJECTS};
     {SQL_CREATE_VIEW_SHOW_TABLES};
     {SQL_CREATE_VIEW_SHOW_VIEWS};
+    {SQL_CREATE_VIEW_SHOW_COLUMNS};
+    {SQL_CREATE_VIEW_SHOW_DATABASES};
+    {SQL_CREATE_VIEW_SHOW_FUNCTIONS};
+    {SQL_CREATE_VIEW_SHOW_SCHEMAS};
+    {SQL_CREATE_VIEW_SHOW_PROCEDURES};
     """


+# see https://docs.snowflake.com/en/sql-reference/sql/show-columns
+SQL_CREATE_VIEW_SHOW_COLUMNS = """
+create view if not exists _fs_global._fs_information_schema._fs_show_columns as
+SELECT
+    table_name,
+    table_schema as "schema_name",
+    column_name,
+    CASE
+        WHEN data_type = 'NUMBER' THEN
+            '{"type":"FIXED","precision":' || numeric_precision || ',"scale":' || numeric_scale || ',"nullable":true}'
+        WHEN data_type = 'TEXT' THEN
+            '{"type":"TEXT","length":' || coalesce(character_maximum_length,16777216) || ',"byteLength":' ||
+            CASE
+                WHEN character_maximum_length = 16777216 THEN 16777216
+                ELSE coalesce(character_maximum_length*4,16777216)
+            END || ',"nullable":true,"fixed":false}'
+        WHEN data_type in ('TIMESTAMP_NTZ','TIMESTAMP_TZ','TIME') THEN
+            '{"type":"' || data_type || '","precision":0,"scale":9,"nullable":true}'
+        WHEN data_type = 'FLOAT' THEN '{"type":"REAL","nullable":true}'
+        WHEN data_type = 'BINARY' THEN
+            '{"type":"BINARY","length":8388608,"byteLength":8388608,"nullable":true,"fixed":true}'
+        ELSE '{"type":"' || data_type || '","nullable":true}'
+    END as "data_type",
+    CASE WHEN is_nullable = 'YES' THEN 'true' ELSE 'false' END as "null?",
+    COALESCE(column_default, '') as "default",
+    'COLUMN' as "kind",
+    '' as "expression",
+    COALESCE(comment, '') as "comment",
+    table_catalog as "database_name",
+    '' as "autoincrement",
+    NULL as "schema_evolution_record"
+FROM _fs_global._fs_information_schema._fs_columns
+ORDER BY table_catalog, table_schema, table_name, ordinal_position
+"""
+
+
 def show_columns(
-    expression: exp.Expression, current_database: str | None
+    expression: exp.Expression, current_database: str | None, current_schema: str | None
 ) -> exp.Expression:
     """Transform SHOW COLUMNS to a query against the fs global information_schema columns table.

@@ -29,12 +70,7 @@ def show_columns(
     scope_kind = expression.args.get("scope_kind")
     table = expression.find(exp.Table)

-    if scope_kind == "
-        # all columns
-        catalog = None
-        schema = None
-        table = None
-    elif scope_kind == "DATABASE" and table:
+    if scope_kind == "DATABASE" and table:
         catalog = table.name
         schema = None
         table = None
@@ -46,42 +82,39 @@ def show_columns(
         catalog = table.catalog or current_database
         schema = table.db or current_schema
         table = table.name
+    elif scope_kind == "ACCOUNT":
+        # all columns
+        catalog = None
+        schema = None
+        table = None
+    elif not scope_kind:
+        # no explicit scope - show current database and schema only
+        catalog = current_database
+        schema = current_schema
+        table = None
     else:
         raise NotImplementedError(f"show_object_columns: {expression.sql(dialect='snowflake')}")

+    where = ["1=1"]
+    if catalog:
+        where.append(f"database_name = '{catalog}'")
+    if schema:
+        where.append(f"schema_name = '{schema}'")
+    if table:
+        where.append(f"table_name = '{table}'")
+    where_clause = " AND ".join(where)
+
     query = f"""
-        SELECT
-
-
-
-        CASE
-            WHEN data_type = 'NUMBER' THEN '{{"type":"FIXED","precision":'|| numeric_precision || ',"scale":' || numeric_scale || ',"nullable":true}}'
-            WHEN data_type = 'TEXT' THEN '{{"type":"TEXT","length":' || coalesce(character_maximum_length,16777216) || ',"byteLength":' || CASE WHEN character_maximum_length = 16777216 THEN 16777216 ELSE coalesce(character_maximum_length*4,16777216) END || ',"nullable":true,"fixed":false}}'
-            WHEN data_type in ('TIMESTAMP_NTZ','TIMESTAMP_TZ','TIME') THEN '{{"type":"' || data_type || '","precision":0,"scale":9,"nullable":true}}'
-            WHEN data_type = 'FLOAT' THEN '{{"type":"REAL","nullable":true}}'
-            WHEN data_type = 'BINARY' THEN '{{"type":"BINARY","length":8388608,"byteLength":8388608,"nullable":true,"fixed":true}}'
-            ELSE '{{"type":"' || data_type || '","nullable":true}}'
-        END as "data_type",
-        CASE WHEN is_nullable = 'YES' THEN 'true' ELSE 'false' END as "null?",
-        COALESCE(column_default, '') as "default",
-        'COLUMN' as "kind",
-        '' as "expression",
-        COALESCE(comment, '') as "comment",
-        table_catalog as "database_name",
-        '' as "autoincrement",
-        NULL as "schema_evolution_record"
-    FROM _fs_global._fs_information_schema._fs_columns c
-    WHERE 1=1
-    {f"AND table_catalog = '{catalog}'" if catalog else ""}
-    {f"AND table_schema = '{schema}'" if schema else ""}
-    {f"AND table_name = '{table}'" if table else ""}
-    ORDER BY table_name, ordinal_position
-    """  # noqa: E501
+        SELECT *
+        FROM _fs_global._fs_information_schema._fs_show_columns
+        WHERE {where_clause}
+    """

     return sqlglot.parse_one(query, read="duckdb")


-
+SQL_CREATE_VIEW_SHOW_DATABASES = """
+create view if not exists _fs_global._fs_information_schema._fs_show_databases as
 SELECT
     to_timestamp(0)::timestamptz as 'created_on',
     database_name as 'name',
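Note: SHOW COLUMNS scoping now matches SHOW TABLES et al: no scope means the current database and schema, IN ACCOUNT means everything. A sketch:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch():
        conn = snowflake.connector.connect(database="db1", schema="schema1")
        cur = conn.cursor()
        cur.execute("CREATE TABLE t (id INT)")
        cur.execute("SHOW COLUMNS")             # scoped to db1.schema1
        cur.execute("SHOW COLUMNS IN ACCOUNT")  # every database and schema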
@@ -102,18 +135,18 @@ WHERE database_name NOT IN ('memory', '_fs_global')
 """


 def show_databases(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW DATABASES to a query against
+    """Transform SHOW DATABASES to a query against _fs_show_databases.

     See https://docs.snowflake.com/en/sql-reference/sql/show-databases
     """
     if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "DATABASES":
-        return sqlglot.parse_one(
+        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_show_databases", read="duckdb")

     return expression


-
-
+SQL_CREATE_VIEW_SHOW_FUNCTIONS = """
+create view if not exists _fs_global._fs_information_schema._fs_show_functions as
 SELECT
     '1970-01-01 00:00:00 UTC'::timestamptz as created_on,
     'SYSTIMESTAMP' as name,
@@ -145,73 +178,14 @@ def show_functions(expression: exp.Expression) -> exp.Expression:
     See https://docs.snowflake.com/en/sql-reference/sql/show-functions
     """
     if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "FUNCTIONS":
-        return sqlglot.parse_one(
-
-    return expression
-
-
-# returns zero rows
-SQL_SHOW_WAREHOUSES = """
-SELECT
-    'FAKESNOW_WAREHOUSE' as name,
-    'STARTED' as state,
-    'STANDARD' as type,
-    'X-Small' as size,
-    1 as min_cluster_count,
-    1 as max_cluster_count,
-    1 as started_clusters,
-    0 as running,
-    0 as queued,
-    'N' as is_default,
-    'N' as is_current,
-    600 as auto_suspend,
-    'true' as auto_resume,
-    -- nb: deliberate space before '100' to match Snowflake's output
-    ' 100' as available,
-    '0' as provisioning,
-    '0' as quiescing,
-    '0' as other,
-    '1970-01-01 00:00:00.000000 UTC'::timestamptz as created_on,
-    '1970-01-01 00:00:00.000000 UTC'::timestamptz as resumed_on,
-    '1970-01-01 00:00:00.000000 UTC'::timestamptz as updated_on,
-    'SYSADMIN' as owner,
-    '' as comment,
-    'false' as enable_query_acceleration,
-    8 as query_acceleration_max_scale_factor,
-    'null' as resource_monitor,
-
-    -- deprecated - these 5 cols are for internal use
-    0 as actives,
-    0 as pendings,
-    0 as failed,
-    0 as suspended,
-    '123456789012' as uuid,
-
-    'STANDARD' as scaling_policy,
-    NULL as budget,
-    'ROLE' as owner_role_type,
-    NULL as resource_constraint;
-"""
-
-
-def show_warehouses(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW WAREHOUSES.
-
-    See https://docs.snowflake.com/en/sql-reference/sql/show-warehouses
-    """
-    if (
-        isinstance(expression, exp.Show)
-        and isinstance(expression.this, str)
-        and expression.this.upper() == "WAREHOUSES"
-    ):
-        return sqlglot.parse_one(SQL_SHOW_WAREHOUSES, read="duckdb")
+        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_show_functions", read="duckdb")

     return expression


 def show_keys(
     expression: exp.Expression,
-    current_database: str | None
+    current_database: str | None,
     *,
     kind: Literal["PRIMARY", "UNIQUE", "FOREIGN"],
 ) -> exp.Expression:
@@ -296,15 +270,138 @@ def show_keys(
     return expression


+SQL_CREATE_VIEW_SHOW_PROCEDURES = """
+create view if not exists _fs_global._fs_information_schema._fs_show_procedures as
+SELECT
+    '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
+    'SYSTEM$CLASSIFY' as 'name',
+    '' as 'schema_name',
+    'Y' as 'is_builtin',
+    'N' as 'is_aggregate',
+    'N' as 'is_ansi',
+    2 as 'min_num_arguments',
+    2 as 'max_num_arguments',
+    'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
+    'classify stored proc' as 'description',
+    '' as 'catalog_name',
+    'N' as 'is_table_function',
+    'N' as 'valid_for_clustering',
+    NULL as 'is_secure',
+    '' as 'secrets',
+    '' as 'external_access_integrations',
+WHERE 0 = 1;
+"""
+
+
+def show_procedures(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW PROCEDURES.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-procedures
+    """
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == "PROCEDURES"
+    ):
+        return sqlglot.parse_one(
+            "SELECT * FROM _fs_global._fs_information_schema._fs_show_procedures",
+            read="duckdb",
+        )
+
+    return expression
+
+
+SQL_CREATE_VIEW_SHOW_SCHEMAS = """
+create view if not exists _fs_global._fs_information_schema._fs_show_schemas as
+select
+    to_timestamp(0)::timestamptz as 'created_on',
+    case
+        when schema_name = '_fs_information_schema' then 'information_schema'
+        else schema_name
+    end as 'name',
+    NULL as 'kind',
+    catalog_name as 'database_name',
+    NULL as 'schema_name'
+from information_schema.schemata
+where not catalog_name in ('memory', 'system', 'temp', '_fs_global')
+and not schema_name in ('main', 'pg_catalog')
+"""
+
+
+def show_schemas(expression: exp.Expression, current_database: str | None) -> exp.Expression:
+    """Transform SHOW SCHEMAS to a query against the _fs_show_schemas view.
+
+    See https://docs.snowflake.com/en/sql-reference/sql/show-schemas
+    """
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "SCHEMAS":
+        if (ident := expression.find(exp.Identifier)) and isinstance(ident.this, str):
+            database = ident.this
+        else:
+            database = current_database
+
+        query = "SELECT * FROM _fs_global._fs_information_schema._fs_show_schemas"
+
+        if database:
+            query += f" WHERE database_name = '{database}'"
+        return sqlglot.parse_one(query, read="duckdb")
+
+    return expression
+
+
+def show_stages(expression: exp.Expression, current_database: str | None, current_schema: str | None) -> exp.Expression:
+    """Transform SHOW STAGES to a select from the fake _fs_stages table."""
+    if not (
+        isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "STAGES"
+    ):
+        return expression
+
+    scope_kind = expression.args.get("scope_kind")
+    table = expression.find(exp.Table)
+
+    if scope_kind == "DATABASE":
+        catalog = (table and table.name) or current_database
+        schema = None
+    elif scope_kind == "SCHEMA" and table:
+        catalog = table.db or current_database
+        schema = table.name
+    elif scope_kind == "TABLE" and table:
+        catalog = table.db or current_database
+        assert isinstance(table.this, exp.Identifier)
+        schema = table.this.this
+    elif scope_kind == "ACCOUNT":
+        # show all objects / tables in the account
+        catalog = None
+        schema = None
+    else:
+        # no explicit scope - show current database and schema only
+        catalog = current_database
+        schema = current_schema
+
+    where = ["1=1"]
+    if catalog:
+        where.append(f"database_name = '{catalog}'")
+    if schema:
+        where.append(f"schema_name = '{schema}'")
+    where_clause = " AND ".join(where)
+
+    query = f"""
+        SELECT *
+        from _fs_global._fs_information_schema._fs_stages
+        where {where_clause}
+    """
+
+    return sqlglot.parse_one(query, read="duckdb")
+
+
 # see https://docs.snowflake.com/en/sql-reference/sql/show-objects
 SQL_CREATE_VIEW_SHOW_OBJECTS = """
 create view if not exists _fs_global._fs_information_schema._fs_show_objects as
 select
     to_timestamp(0)::timestamptz as created_on,
     table_name as name,
-    case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind',
     table_catalog as database_name,
     table_schema as schema_name,
+    case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind',
     '' as comment,
     '' as cluster_by,
     -- TODO: implement rows and bytes as rows * 1024
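Note: SHOW STAGES follows the same scoping rules as the other SHOW transforms. A sketch against the fake stages table:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch():
        conn = snowflake.connector.connect(database="db1", schema="schema1")
        cur = conn.cursor()
        cur.execute("CREATE STAGE stage1")
        cur.execute("SHOW STAGES")  # current database and schema only
        print([row[1] for row in cur.fetchall()])  # ['STAGE1'] - name is the second column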
@@ -313,9 +410,9 @@ select
     'SYSADMIN' as owner,
     1 as retention_time,
     'ROLE' as owner_role_type,
-    null as budget,
     'N' as is_hybrid,
-    'N' as is_dynamic
+    'N' as is_dynamic,
+    'N' as is_iceberg
 from information_schema.tables
 where not (table_schema == '_fs_information_schema')
 """
@@ -377,7 +474,9 @@ where not table_catalog in ('system')
 """


-def show_tables_etc(
+def show_tables_etc(
+    expression: exp.Expression, current_database: str | None, current_schema: str | None
+) -> exp.Expression:
     """Transform SHOW OBJECTS/TABLES/VIEWS to a query against the _fs_information_schema views."""
     if not (
         isinstance(expression, exp.Show)
@@ -396,10 +495,14 @@ def show_tables_etc(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
     elif scope_kind == "SCHEMA" and table:
         catalog = table.db or current_database
         schema = table.name
-
-        # all objects / tables
+    elif scope_kind == "ACCOUNT":
+        # show all objects / tables in the account
         catalog = None
         schema = None
+    else:
+        # no explicit scope - show current database and schema only
+        catalog = current_database
+        schema = current_schema

     if expression.args["terse"] and show == "VIEWS":
         columns = ["created_on, name, 'VIEW' as kind, database_name, schema_name"]
@@ -430,84 +533,71 @@ def show_tables_etc(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
     return sqlglot.parse_one(query, read="duckdb")


-
-
-SELECT
-    '2012-08-01 07:00:00 UTC'::timestamptz as 'created_on',
-    'SYSTEM$CLASSIFY' as 'name',
-    '' as 'schema_name',
-    'Y' as 'is_builtin',
-    'N' as 'is_aggregate',
-    'N' as 'is_ansi',
-    2 as 'min_num_arguments',
-    2 as 'max_num_arguments',
-    'SYSTEM$CLASSIFY(VARCHAR, OBJECT) RETURN OBJECT' as 'arguments',
-    'classify stored proc' as 'description',
-    '' as 'catalog_name',
-    'N' as 'is_table_function',
-    'N' as 'valid_for_clustering',
-    NULL as 'is_secure',
-    '' as 'secrets',
-    '' as 'external_access_integrations',
-WHERE 0 = 1;
-"""
-
-
-def show_procedures(expression: exp.Expression) -> exp.Expression:
-    """Transform SHOW PROCEDURES.
+def show_users(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW USERS to a query against the global database's information_schema._fs_users table.

-
+    https://docs.snowflake.com/en/sql-reference/sql/show-users
     """
-    if (
-
-        and isinstance(expression.this, str)
-        and expression.this.upper() == "PROCEDURES"
-    ):
-        return sqlglot.parse_one(SQL_SHOW_PROCEDURES, read="duckdb")
+    if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "USERS":
+        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users", read="duckdb")

     return expression


-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+# returns zero rows
+SQL_SHOW_WAREHOUSES = """
+SELECT
+    'FAKESNOW_WAREHOUSE' as name,
+    'STARTED' as state,
+    'STANDARD' as type,
+    'X-Small' as size,
+    1 as min_cluster_count,
+    1 as max_cluster_count,
+    1 as started_clusters,
+    0 as running,
+    0 as queued,
+    'N' as is_default,
+    'N' as is_current,
+    600 as auto_suspend,
+    'true' as auto_resume,
+    -- nb: deliberate space before '100' to match Snowflake's output
+    ' 100' as available,
+    '0' as provisioning,
+    '0' as quiescing,
+    '0' as other,
+    '1970-01-01 00:00:00.000000 UTC'::timestamptz as created_on,
+    '1970-01-01 00:00:00.000000 UTC'::timestamptz as resumed_on,
+    '1970-01-01 00:00:00.000000 UTC'::timestamptz as updated_on,
+    'SYSADMIN' as owner,
+    '' as comment,
+    'false' as enable_query_acceleration,
+    8 as query_acceleration_max_scale_factor,
+    'null' as resource_monitor,

-
-
-
+    -- deprecated - these 5 cols are for internal use
+    0 as actives,
+    0 as pendings,
+    0 as failed,
+    0 as suspended,
+    '123456789012' as uuid,

-
+    'STANDARD' as scaling_policy,
+    NULL as budget,
+    'ROLE' as owner_role_type,
+    NULL as resource_constraint;
+"""


-def 
-    """Transform SHOW 
+def show_warehouses(expression: exp.Expression) -> exp.Expression:
+    """Transform SHOW WAREHOUSES.

-    https://docs.snowflake.com/en/sql-reference/sql/show-
+    See https://docs.snowflake.com/en/sql-reference/sql/show-warehouses
     """
-    if 
-
+    if (
+        isinstance(expression, exp.Show)
+        and isinstance(expression.this, str)
+        and expression.this.upper() == "WAREHOUSES"
+    ):
+        return sqlglot.parse_one(SQL_SHOW_WAREHOUSES, read="duckdb")

     return expression
fakesnow/transforms/stage.py
ADDED
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+import datetime
+
+import sqlglot
+from sqlglot import exp
+
+
+def create_stage(
+    expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
+) -> exp.Expression:
+    """Transform CREATE STAGE to an INSERT statement for the fake stages table."""
+    if not (
+        isinstance(expression, exp.Create)
+        and (kind := expression.args.get("kind"))
+        and isinstance(kind, str)
+        and kind.upper() == "STAGE"
+        and (table := expression.find(exp.Table))
+    ):
+        return expression
+
+    catalog = table.catalog or current_database
+    schema = table.db or current_schema
+    ident = table.this
+    if isinstance(ident, exp.Placeholder):
+        stage_name = "?"
+    elif isinstance(ident, exp.Identifier):
+        stage_name = ident.this if ident.quoted else ident.this.upper()
+    else:
+        raise ValueError(f"Invalid identifier type {ident.__class__.__name__} for stage name")
+    now = datetime.datetime.now(datetime.timezone.utc).isoformat()
+
+    is_temp = False
+    url = ""
+    properties = expression.args.get("properties") or []
+    for prop in properties:
+        if isinstance(prop, exp.TemporaryProperty):
+            is_temp = True
+        elif (
+            isinstance(prop, exp.Property)
+            and isinstance(prop.this, exp.Var)
+            and isinstance(prop.this.this, str)
+            and prop.this.this.upper() == "URL"
+        ):
+            value = prop.args.get("value")
+            if isinstance(value, exp.Literal):
+                url = value.this
+
+    # Determine cloud provider based on url
+    cloud = "AWS" if url.startswith("s3://") else None
+
+    stage_type = ("EXTERNAL" if url else "INTERNAL") + (" TEMPORARY" if is_temp else "")
+    stage_name_value = stage_name if stage_name == "?" else repr(stage_name)
+
+    insert_sql = f"""
+    INSERT INTO _fs_global._fs_information_schema._fs_stages
+    (created_on, name, database_name, schema_name, url, has_credentials, has_encryption_key, owner,
+    comment, region, type, cloud, notification_channel, storage_integration, endpoint, owner_role_type,
+    directory_enabled)
+    VALUES (
+        '{now}', {stage_name_value}, '{catalog}', '{schema}', '{url}', 'N', 'N', 'SYSADMIN',
+        '', NULL, '{stage_type}', {f"'{cloud}'" if cloud else "NULL"}, NULL, NULL, NULL, 'ROLE',
+        'N'
+    )
+    """
+    transformed = sqlglot.parse_one(insert_sql, read="duckdb")
+    transformed.args["stage_name"] = stage_name
+    return transformed
fakesnow/transforms/transforms.py
CHANGED
@@ -530,8 +530,11 @@ def identifier(expression: exp.Expression) -> exp.Expression:
         and isinstance(expression.this, str)
         and expression.this.upper() == "IDENTIFIER"
     ):
-
-
+        arg = expression.expressions[0]
+        # ? is parsed as exp.Placeholder
+        if isinstance(arg, exp.Placeholder):
+            return arg
+        return exp.Identifier(this=arg.this, quoted=False)
     return expression

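Note: letting the placeholder pass through is what makes parameterised stage names work with the new CREATE STAGE path in cursor.py. A sketch:

    import fakesnow
    import snowflake.connector

    with fakesnow.patch():
        conn = snowflake.connector.connect(database="db1", schema="schema1")
        cur = conn.cursor()
        # ? survives the identifier() transform and binds at execute time
        cur.execute("CREATE STAGE IDENTIFIER(?)", ("stage1",))
        print(cur.fetchone())  # ('Stage area STAGE1 successfully created.',)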
@@ -569,20 +572,20 @@ def indices_to_json_extract(expression: exp.Expression) -> exp.Expression:
     return expression


-def 
-    """
-
-
+def information_schema_fs(expression: exp.Expression) -> exp.Expression:
+    """Redirects for
+    * _FS_COLUMNS view which has character_maximum_length or character_octet_length.
+    * _FS_TABLES to access additional metadata columns (eg: comment).
+    * _FS_VIEWS to return Snowflake's version instead of duckdb's
+    * _FS_LOAD_HISTORY table which duckdb doesn't have.
     """

     if (
         isinstance(expression, exp.Table)
-        and expression.db
         and expression.db.upper() == "INFORMATION_SCHEMA"
-        and expression.name
-        and expression.name.upper() == "COLUMNS"
+        and expression.name.upper() in {"COLUMNS", "TABLES", "VIEWS", "LOAD_HISTORY"}
     ):
-        expression.set("this", exp.Identifier(this="
+        expression.set("this", exp.Identifier(this=f"_FS_{expression.name.upper()}", quoted=False))
         expression.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))

     return expression
@@ -607,38 +610,6 @@ def information_schema_databases(
     return expression


-def information_schema_fs_tables(
-    expression: exp.Expression,
-) -> exp.Expression:
-    """Use _FS_TABLES to access additional metadata columns (eg: comment)."""
-
-    if (
-        isinstance(expression, exp.Select)
-        and (tbl := expression.find(exp.Table))
-        and tbl.db.upper() == "INFORMATION_SCHEMA"
-        and tbl.name.upper() == "TABLES"
-    ):
-        tbl.set("this", exp.Identifier(this="_FS_TABLES", quoted=False))
-        tbl.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))
-
-    return expression
-
-
-def information_schema_fs_views(expression: exp.Expression) -> exp.Expression:
-    """Use _FS_VIEWS to return Snowflake's version instead of duckdb's."""
-
-    if (
-        isinstance(expression, exp.Select)
-        and (tbl := expression.find(exp.Table))
-        and tbl.db.upper() == "INFORMATION_SCHEMA"
-        and tbl.name.upper() == "VIEWS"
-    ):
-        tbl.set("this", exp.Identifier(this="_FS_VIEWS", quoted=False))
-        tbl.set("db", exp.Identifier(this="_FS_INFORMATION_SCHEMA", quoted=False))
-
-    return expression
-
-
 NUMBER_38_0 = [
     exp.DataTypeParam(this=exp.Literal(this="38", is_string=False)),
     exp.DataTypeParam(this=exp.Literal(this="0", is_string=False)),
fakesnow/variables.py
CHANGED
@@ -62,7 +62,9 @@ class Variables:
         for name, value in self._variables.items():
             sql = re.sub(rf"\${name}", value, sql, flags=re.IGNORECASE)

-
+        # Only treat $<word> (not $<number>) as session variables,
+        # ignore identifiers containing $
+        if remaining_variables := re.search(r"(?<![\$\w])\$(?!\d+)\w+", sql):
             raise snowflake.connector.errors.ProgrammingError(
                 msg=f"Session variable '{remaining_variables.group().upper()}' does not exist"
             )
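Note: the tightened pattern only fires on $word session variables, which can be checked with re alone:

    import re

    pattern = r"(?<![\$\w])\$(?!\d+)\w+"
    print(re.search(pattern, "select $myvar"))           # matches $myvar
    print(re.search(pattern, "select $1 from t"))        # None: $<number> is positional
    print(re.search(pattern, "select col$name from t"))  # None: $ inside an identifier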
{fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/RECORD
CHANGED
@@ -1,32 +1,33 @@
 fakesnow/__init__.py,sha256=71Rk_3s_4eTDCi7-bbo-xT71WN0E0MAPf5qjsguIeJU,5117
 fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
 fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
-fakesnow/checks.py,sha256=
+fakesnow/checks.py,sha256=bOJPMp46AvjJV_bXXjx2njO2dXNjffLrznwRuKyYZ4g,2889
 fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
 fakesnow/conn.py,sha256=diCwcjaCBrlCn9PyjbScfIQTNQjqiPTkQanUTqcvblE,6009
 fakesnow/converter.py,sha256=wPOfsFXIUJNJSx5oFNAxh13udxmAVIIHsLK8BiGkXGA,1635
-fakesnow/copy_into.py,sha256=
-fakesnow/cursor.py,sha256=
+fakesnow/copy_into.py,sha256=CRqAK5CUz9xYSwP9PZJpVxUz8KT9ZZpsCRQEuEEVSbM,14832
+fakesnow/cursor.py,sha256=xDKvet679VnICUxbDCfRnT8K9Q03Tii948_3XD2YFnk,22954
 fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
 fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
 fakesnow/fixtures.py,sha256=2rj0MTZlaZc4PNWhaqC5IiiLa7E9G0QZT3g45YawsL0,633
-fakesnow/info_schema.py,sha256=
+fakesnow/info_schema.py,sha256=lqEYD5aWK2MamjALbj6ct7pz_1yyAq3tAk51kLa8NKk,9872
 fakesnow/instance.py,sha256=OKoYXwaI6kL9HQpnHx44yzpON_xNfuIT_F4oJNF_XXQ,2114
 fakesnow/logger.py,sha256=U6EjUENQuTrDeNYqER2hxazoySmXzLmZJ-t-SDZgjkg,363
 fakesnow/macros.py,sha256=bQfZR5ptO4Gk-8fFRK2iksqYWkJUT8e-rPp-000qzu0,999
 fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
 fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
 fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
-fakesnow/server.py,sha256=
-fakesnow/variables.py,sha256=
-fakesnow/transforms/__init__.py,sha256=
+fakesnow/server.py,sha256=WyLU_4zcqQHLcvilQbNeavwRLVRQLm7n8NmtHJHzjqY,6687
+fakesnow/variables.py,sha256=BGnD4LAdVByfJ2GXL6qpGBaTF8ZJRjt3pdJsd9sIAcw,3134
+fakesnow/transforms/__init__.py,sha256=ZT0ehX8F1PPUY1983RjVofJ5spPE2syen5henc2m1Sk,2722
 fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
-fakesnow/transforms/show.py,sha256=
-fakesnow/transforms/
-fakesnow
+fakesnow/transforms/show.py,sha256=ejvs9S2l2Wcal4fhnNSVs3JkZwKsFxMEU35ufUV3-kg,20421
+fakesnow/transforms/stage.py,sha256=jkabKkxFEYjTVuLsDH3fgjyhFdc_GGh3DavWv7G7k68,2560
+fakesnow/transforms/transforms.py,sha256=I2X7KZ5wphq1NJvfbFmY8JYE-8vpVHMn-KTXurMxouE,47149
+fakesnow-0.9.39.dist-info/licenses/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
 tools/decode.py,sha256=kC5kUvLQxdCkMRsnH6BqCajlKxKeN77w6rwCKsY6gqU,1781
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
-fakesnow-0.9.
+fakesnow-0.9.39.dist-info/METADATA,sha256=IlCT__J_g9f5ltb3eC7t0cUs5t3Aq0BaX4eNcKkxCYE,20680
+fakesnow-0.9.39.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fakesnow-0.9.39.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.39.dist-info/top_level.txt,sha256=Yos7YveA3f03xVYuURqnBsfMV2DePXfu_yGcsj3pPzI,30
+fakesnow-0.9.39.dist-info/RECORD,,
{fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/entry_points.txt
File without changes
{fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/licenses/LICENSE
File without changes
{fakesnow-0.9.37.dist-info → fakesnow-0.9.39.dist-info}/top_level.txt
File without changes