fakesnow-0.9.20-py3-none-any.whl → fakesnow-0.9.22-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
fakesnow/__init__.py CHANGED
@@ -8,12 +8,11 @@ import unittest.mock as mock
 from collections.abc import Iterator, Sequence
 from contextlib import contextmanager
 
-import duckdb
 import snowflake.connector
 import snowflake.connector.pandas_tools
 
 import fakesnow.fakes as fakes
-from fakesnow.global_database import create_global_database
+from fakesnow.instance import FakeSnow
 
 
 @contextmanager
@@ -52,20 +51,15 @@ def patch(
     # won't be able to patch extra targets
     assert not isinstance(snowflake.connector.connect, mock.MagicMock), "Snowflake connector is already patched"
 
-    duck_conn = duckdb.connect(database=":memory:")
-    create_global_database(duck_conn)
+    fs = FakeSnow(
+        create_database_on_connect=create_database_on_connect,
+        create_schema_on_connect=create_schema_on_connect,
+        db_path=db_path,
+        nop_regexes=nop_regexes,
+    )
 
     fake_fns = {
-        # every time we connect, create a new cursor (ie: connection) so we can isolate each connection's
-        # schema setting, see https://duckdb.org/docs/api/python/overview.html#startup--shutdown
-        snowflake.connector.connect: lambda **kwargs: fakes.FakeSnowflakeConnection(
-            duck_conn.cursor(),
-            create_database=create_database_on_connect,
-            create_schema=create_schema_on_connect,
-            db_path=db_path,
-            nop_regexes=nop_regexes,
-            **kwargs,
-        ),
+        snowflake.connector.connect: fs.connect,
         snowflake.connector.pandas_tools.write_pandas: fakes.write_pandas,
     }
 
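
The patch entry point now builds a single FakeSnow instance up front and maps snowflake.connector.connect directly onto fs.connect. A minimal usage sketch, mirroring the project README:

import fakesnow
import snowflake.connector

# while the patch is active, connect() is served by the shared FakeSnow
# instance rather than a real Snowflake account
with fakesnow.patch():
    conn = snowflake.connector.connect()
    print(conn.cursor().execute("SELECT 'hello fake world'").fetchall())
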
fakesnow/arrow.py ADDED
@@ -0,0 +1,32 @@
+import pyarrow as pa
+
+
+def with_sf_metadata(schema: pa.Schema) -> pa.Schema:
+    # see https://github.com/snowflakedb/snowflake-connector-python/blob/e9393a6/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/CArrowTableIterator.cpp#L32
+    # and https://github.com/snowflakedb/snowflake-connector-python/blob/e9393a6/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/SnowflakeType.cpp#L10
+    fms = []
+    for i, t in enumerate(schema.types):
+        f = schema.field(i)
+
+        if isinstance(t, pa.Decimal128Type):
+            fm = f.with_metadata({"logicalType": "FIXED", "precision": str(t.precision), "scale": str(t.scale)})
+        elif t == pa.string():
+            fm = f.with_metadata({"logicalType": "TEXT"})
+        else:
+            raise NotImplementedError(f"Unsupported Arrow type: {t}")
+        fms.append(fm)
+    return pa.schema(fms)
+
+
+def to_ipc(table: pa.Table) -> pa.Buffer:
+    batches = table.to_batches()
+    if len(batches) != 1:
+        raise NotImplementedError(f"{len(batches)} batches")
+    batch = batches[0]
+
+    sink = pa.BufferOutputStream()
+
+    with pa.ipc.new_stream(sink, with_sf_metadata(batch.schema)) as writer:
+        writer.write_batch(batch)
+
+    return sink.getvalue()
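
to_ipc serialises a single-batch table into an Arrow IPC stream whose schema carries the Snowflake logicalType metadata added by with_sf_metadata. A small round-trip sketch (the table data is illustrative, not from the package):

import pyarrow as pa

from fakesnow.arrow import to_ipc

# build a one-batch table, serialise it, then read it back and inspect the
# field metadata injected for the Snowflake client
table = pa.table({"name": pa.array(["alice", "bob"], type=pa.string())})
reader = pa.ipc.open_stream(to_ipc(table))
print(reader.schema.field("name").metadata)  # expect {b'logicalType': b'TEXT'}
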
fakesnow/fakes.py CHANGED
@@ -114,7 +114,6 @@ class FakeSnowflakeCursor:
     def description(self) -> list[ResultMetadata]:
         # use a separate cursor to avoid consuming the result set on this cursor
         with self._conn.cursor() as cur:
-            # self._duck_conn.execute(sql, params)
            expression = sqlglot.parse_one(f"DESCRIBE {self._last_sql}", read="duckdb")
            cur._execute(expression, self._last_params)  # noqa: SLF001
            meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall())
@@ -203,6 +202,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.sha256)
             .transform(transforms.create_clone)
             .transform(transforms.alias_in_join)
+            .transform(transforms.alter_table_strip_cluster_by)
         )
 
     def _execute(
@@ -234,12 +234,10 @@
         if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
             sql = f"SELECT setseed({seed}); {sql}"
 
-        if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
-            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
-
         result_sql = None
 
         try:
+            self._log_sql(sql, params)
             self._duck_conn.execute(sql, params)
         except duckdb.BinderException as e:
             msg = e.args[0]
@@ -286,9 +284,9 @@
             (affected_count,) = self._duck_conn.fetchall()[0]
             result_sql = SQL_DELETED_ROWS.substitute(count=affected_count)
 
-        elif cmd == "DESCRIBE TABLE":
-            # DESCRIBE TABLE has already been run above to detect and error if the table exists
-            # We now rerun DESCRIBE TABLE but transformed with columns to match Snowflake
+        elif cmd in ("DESCRIBE TABLE", "DESCRIBE VIEW"):
+            # DESCRIBE TABLE/VIEW has already been run above to detect and error if the table exists
+            # We now rerun DESCRIBE TABLE/VIEW but transformed with columns to match Snowflake
             result_sql = transformed.transform(
                 lambda e: transforms.describe_table(e, self._conn.database, self._conn.schema)
             ).sql(dialect="duckdb")
@@ -336,6 +334,7 @@
             self._duck_conn.execute(info_schema.insert_text_lengths_sql(catalog, schema, table.name, text_lengths))
 
         if result_sql:
+            self._log_sql(result_sql, params)
             self._duck_conn.execute(result_sql)
 
         self._arrow_table = self._duck_conn.fetch_arrow_table()
@@ -346,6 +345,10 @@
 
         return self
 
+    def _log_sql(self, sql: str, params: Sequence[Any] | dict[Any, Any] | None = None) -> None:
+        if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
+            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
+
     def executemany(
         self,
         command: str,
@@ -388,12 +391,13 @@
         if self._arrow_table is None:
             # mimic snowflake python connector error type
             raise TypeError("No open result set")
+        tslice = self._arrow_table.slice(offset=self._arrow_table_fetch_index or 0, length=size).to_pylist()
+
         if self._arrow_table_fetch_index is None:
-            self._arrow_table_fetch_index = 0
+            self._arrow_table_fetch_index = size
         else:
             self._arrow_table_fetch_index += size
 
-        tslice = self._arrow_table.slice(offset=self._arrow_table_fetch_index, length=size).to_pylist()
         return tslice if self._use_dict_result else [tuple(d.values()) for d in tslice]
 
     def get_result_batches(self) -> list[ResultBatch] | None:
@@ -522,9 +526,14 @@ class FakeSnowflakeConnection:
     ):
         self._duck_conn = duck_conn
         # upper case database and schema like snowflake unquoted identifiers
-        # NB: catalog names are not case-sensitive in duckdb but stored as cased in information_schema.schemata
+        # so they appear as upper-cased in information_schema
+        # catalog and schema names are not actually case-sensitive in duckdb even though
+        # they are as cased in information_schema.schemata, so when selecting from
+        # information_schema.schemata below we use upper-case to match any existing duckdb
+        # catalog or schemas like "information_schema"
         self.database = database and database.upper()
         self.schema = schema and schema.upper()
+
         self.database_set = False
         self.schema_set = False
         self.db_path = Path(db_path) if db_path else None
@@ -538,7 +547,7 @@
             and self.database
             and not duck_conn.execute(
                 f"""select * from information_schema.schemata
-                where catalog_name = '{self.database}'"""
+                where upper(catalog_name) = '{self.database}'"""
             ).fetchone()
         ):
             db_file = f"{self.db_path/self.database}.db" if self.db_path else ":memory:"
@@ -553,7 +562,7 @@
             and self.schema
             and not duck_conn.execute(
                 f"""select * from information_schema.schemata
-                where catalog_name = '{self.database}' and schema_name = '{self.schema}'"""
+                where upper(catalog_name) = '{self.database}' and upper(schema_name) = '{self.schema}'"""
             ).fetchone()
         ):
             duck_conn.execute(f"CREATE SCHEMA {self.database}.{self.schema}")
@@ -564,7 +573,7 @@
             and self.schema
             and duck_conn.execute(
                 f"""select * from information_schema.schemata
-                where catalog_name = '{self.database}' and schema_name = '{self.schema}'"""
+                where upper(catalog_name) = '{self.database}' and upper(schema_name) = '{self.schema}'"""
             ).fetchone()
         ):
             duck_conn.execute(f"SET schema='{self.database}.{self.schema}'")
@@ -575,7 +584,7 @@
             self.database
             and duck_conn.execute(
                 f"""select * from information_schema.schemata
-                where catalog_name = '{self.database}'"""
+                where upper(catalog_name) = '{self.database}'"""
             ).fetchone()
         ):
             duck_conn.execute(f"SET schema='{self.database}.main'")
@@ -602,6 +611,7 @@
         self.cursor().execute("COMMIT")
 
     def cursor(self, cursor_class: type[SnowflakeCursor] = SnowflakeCursor) -> FakeSnowflakeCursor:
+        # TODO: use duck_conn cursor for thread-safety
         return FakeSnowflakeCursor(conn=self, duck_conn=self._duck_conn, use_dict_result=cursor_class == DictCursor)
 
     def execute_string(
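
The fetchmany change slices the arrow table before advancing the fetch index, so the first call returns rows from offset 0 and later calls page forward without overlap. A behavioural sketch under the patched connector (table and values are illustrative):

import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="s1")
    cur = conn.cursor()
    cur.execute("CREATE TABLE t (n INT)")
    cur.execute("INSERT INTO t VALUES (1), (2), (3), (4), (5)")
    cur.execute("SELECT n FROM t ORDER BY n")
    print(cur.fetchmany(2))  # [(1,), (2,)]
    print(cur.fetchmany(2))  # [(3,), (4,)]
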
fakesnow/info_schema.py CHANGED
@@ -62,8 +62,8 @@ case when columns.data_type='BIGINT' then 10
 case when columns.data_type='DOUBLE' then NULL else columns.numeric_scale end as numeric_scale,
 collation_name, is_identity, identity_generation, identity_cycle,
 ddb_columns.comment as comment,
-null as identity_start,
-null as identity_increment,
+null::VARCHAR as identity_start,
+null::VARCHAR as identity_increment,
 from ${catalog}.information_schema.columns columns
 left join ${catalog}.information_schema._fs_columns_ext ext
 on ext_table_catalog = columns.table_catalog
@@ -78,6 +78,7 @@ LEFT JOIN duckdb_columns ddb_columns
 """
 )
 
+
 # replicates https://docs.snowflake.com/sql-reference/info-schema/databases
 SQL_CREATE_INFORMATION_SCHEMA_DATABASES_VIEW = Template(
 """
@@ -86,7 +87,7 @@ select
 catalog_name as database_name,
 'SYSADMIN' as database_owner,
 'NO' as is_transient,
-null as comment,
+null::VARCHAR as comment,
 to_timestamp(0)::timestamptz as created,
 to_timestamp(0)::timestamptz as last_altered,
 1 as retention_time,
@@ -116,7 +117,7 @@ select
 to_timestamp(0)::timestamptz as last_altered,
 to_timestamp(0)::timestamptz as last_ddl,
 'SYSADMIN' as last_ddl_by,
-null as comment
+null::VARCHAR as comment
 from duckdb_views
 where database_name = '${catalog}'
 and schema_name != 'information_schema'
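
Casting the placeholder columns to null::VARCHAR pins their type in the view definition instead of letting DuckDB infer one for a bare NULL. A quick way to see the difference (a sketch using the duckdb Python API):

import duckdb

# the cast fixes the column type; an uncast NULL leaves DuckDB to choose it
print(duckdb.sql("select null as comment").types)
print(duckdb.sql("select null::VARCHAR as comment").types)
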
fakesnow/instance.py ADDED
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import os
+from typing import Any
+
+import duckdb
+
+import fakesnow.fakes as fakes
+
+GLOBAL_DATABASE_NAME = "_fs_global"
+USERS_TABLE_FQ_NAME = f"{GLOBAL_DATABASE_NAME}._fs_users_ext"
+
+# replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
+SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT = f"""
+create table if not exists {USERS_TABLE_FQ_NAME} (
+    name varchar,
+    created_on TIMESTAMPTZ,
+    login_name varchar,
+    display_name varchar,
+    first_name varchar,
+    last_name varchar,
+    email varchar,
+    mins_to_unlock varchar,
+    days_to_expiry varchar,
+    comment varchar,
+    disabled varchar,
+    must_change_password varchar,
+    snowflake_lock varchar,
+    default_warehouse varchar,
+    default_namespace varchar,
+    default_role varchar,
+    default_secondary_roles varchar,
+    ext_authn_duo varchar,
+    ext_authn_uid varchar,
+    mins_to_bypass_mfa varchar,
+    owner varchar,
+    last_success_login TIMESTAMPTZ,
+    expires_at_time TIMESTAMPTZ,
+    locked_until_time TIMESTAMPTZ,
+    has_password varchar,
+    has_rsa_public_key varchar,
+)
+"""
+
+
+def create_global_database(conn: duckdb.DuckDBPyConnection) -> None:
+    """Create a "global" database for storing objects which span databases.
+
+    Including (but not limited to):
+     - Users
+    """
+    conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
+    conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
+
+
+class FakeSnow:
+    def __init__(
+        self,
+        create_database_on_connect: bool = True,
+        create_schema_on_connect: bool = True,
+        db_path: str | os.PathLike | None = None,
+        nop_regexes: list[str] | None = None,
+    ):
+        self.create_database_on_connect = create_database_on_connect
+        self.create_schema_on_connect = create_schema_on_connect
+        self.db_path = db_path
+        self.nop_regexes = nop_regexes
+
+        self.duck_conn = duckdb.connect(database=":memory:")
+
+        # create a "global" database for storing objects which span databases.
+        self.duck_conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
+        self.duck_conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
+
+    def connect(
+        self, database: str | None = None, schema: str | None = None, **kwargs: Any
+    ) -> fakes.FakeSnowflakeConnection:
+        # every time we connect, create a new cursor (ie: connection) so we can isolate each connection's
+        # schema setting, see
+        # https://github.com/duckdb/duckdb/blob/18254ec/tools/pythonpkg/src/pyconnection.cpp#L1440
+        # and to make connections thread-safe see
+        # https://duckdb.org/docs/api/python/overview.html#using-connections-in-parallel-python-programs
+        return fakes.FakeSnowflakeConnection(
+            self.duck_conn.cursor(),
+            database,
+            schema,
+            create_database=self.create_database_on_connect,
+            create_schema=self.create_schema_on_connect,
+            db_path=self.db_path,
+            nop_regexes=self.nop_regexes,
+            **kwargs,
+        )
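
FakeSnow owns the shared in-memory DuckDB connection, so separate connect() calls see the same databases while keeping per-connection schema state. A direct-use sketch (database and schema names are illustrative):

from fakesnow.instance import FakeSnow

fs = FakeSnow()
conn1 = fs.connect(database="db1", schema="s1")
conn1.cursor().execute("CREATE TABLE example (x INT)")

# a second connection shares the same underlying DuckDB, so the table is visible
conn2 = fs.connect(database="db1", schema="s1")
print(conn2.cursor().execute("SELECT * FROM example").fetchall())
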
fakesnow/server.py ADDED
@@ -0,0 +1,109 @@
+from __future__ import annotations
+
+import gzip
+import json
+import secrets
+from base64 import b64encode
+from dataclasses import dataclass
+
+from starlette.applications import Starlette
+from starlette.concurrency import run_in_threadpool
+from starlette.requests import Request
+from starlette.responses import JSONResponse
+from starlette.routing import Route
+
+from fakesnow.arrow import to_ipc
+from fakesnow.fakes import FakeSnowflakeConnection
+from fakesnow.instance import FakeSnow
+
+fs = FakeSnow()
+sessions = {}
+
+
+@dataclass
+class ServerError(Exception):
+    status_code: int
+    code: str
+    message: str
+
+
+def login_request(request: Request) -> JSONResponse:
+    database = request.query_params.get("databaseName")
+    schema = request.query_params.get("schemaName")
+    token = secrets.token_urlsafe(32)
+    sessions[token] = fs.connect(database, schema)
+    return JSONResponse({"data": {"token": token}, "success": True})
+
+
+async def query_request(request: Request) -> JSONResponse:
+    try:
+        conn = to_conn(request)
+
+        body = await request.body()
+        body_json = json.loads(gzip.decompress(body))
+
+        sql_text = body_json["sqlText"]
+
+        # only a single sql statement is sent at a time by the python snowflake connector
+        cur = await run_in_threadpool(conn.cursor().execute, sql_text)
+
+        assert cur._arrow_table, "No result set"  # noqa: SLF001
+
+        batch_bytes = to_ipc(cur._arrow_table)  # noqa: SLF001
+        rowset_b64 = b64encode(batch_bytes).decode("utf-8")
+
+        return JSONResponse(
+            {
+                "data": {
+                    "rowtype": [
+                        {
+                            "name": "'HELLO WORLD'",
+                            "nullable": False,
+                            "type": "text",
+                            "length": 11,
+                            "scale": None,
+                            "precision": None,
+                        }
+                    ],
+                    "rowsetBase64": rowset_b64,
+                    "total": 1,
+                    "queryResultFormat": "arrow",
+                },
+                "success": True,
+            }
+        )
+
+    except ServerError as e:
+        return JSONResponse(
+            {"data": None, "code": e.code, "message": e.message, "success": False, "headers": None},
+            status_code=e.status_code,
+        )
+
+
+def to_conn(request: Request) -> FakeSnowflakeConnection:
+    if not (auth := request.headers.get("Authorization")):
+        raise ServerError(status_code=401, code="390103", message="Session token not found in the request data.")
+
+    token = auth[17:-1]
+
+    if not (conn := sessions.get(token)):
+        raise ServerError(status_code=401, code="390104", message="User must login again to access the service.")
+
+    return conn
+
+
+routes = [
+    Route(
+        "/session/v1/login-request",
+        login_request,
+        methods=["POST"],
+    ),
+    Route(
+        "/queries/v1/query-request",
+        query_request,
+        methods=["POST"],
+    ),
+    Route("/queries/v1/abort-request", lambda _: JSONResponse({"success": True}), methods=["POST"]),
+]
+
+app = Starlette(debug=True, routes=routes)
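
The module exposes a Starlette app, so it can be served with uvicorn, which the new "server" extra pulls in. A run sketch (the port choice is arbitrary):

import uvicorn

from fakesnow.server import app

# serve the fake Snowflake HTTP endpoints locally
uvicorn.run(app, port=8000)
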
fakesnow/transforms.py CHANGED
@@ -7,11 +7,11 @@ from typing import ClassVar, Literal, cast
 import sqlglot
 from sqlglot import exp
 
-from fakesnow.global_database import USERS_TABLE_FQ_NAME
+from fakesnow.instance import USERS_TABLE_FQ_NAME
 from fakesnow.variables import Variables
 
 MISSING_DATABASE = "missing_database"
-SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.'")
+SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.' as status")
 
 
 def alias_in_join(expression: exp.Expression) -> exp.Expression:
@@ -33,6 +33,18 @@ def alias_in_join(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
+def alter_table_strip_cluster_by(expression: exp.Expression) -> exp.Expression:
+    """Turn alter table cluster by into a no-op"""
+    if (
+        isinstance(expression, exp.AlterTable)
+        and (actions := expression.args.get("actions"))
+        and len(actions) == 1
+        and (isinstance(actions[0], exp.Cluster))
+    ):
+        return SUCCESS_NOP
+    return expression
+
+
 def array_size(expression: exp.Expression) -> exp.Expression:
     if isinstance(expression, exp.ArraySize):
         # case is used to convert 0 to null, because null is returned by duckdb when no case matches
@@ -147,22 +159,41 @@ SELECT
     column_default AS "default",
     'N' AS "primary key",
     'N' AS "unique key",
-    NULL AS "check",
-    NULL AS "expression",
-    NULL AS "comment",
-    NULL AS "policy name",
-    NULL AS "privacy domain",
+    NULL::VARCHAR AS "check",
+    NULL::VARCHAR AS "expression",
+    NULL::VARCHAR AS "comment",
+    NULL::VARCHAR AS "policy name",
+    NULL::JSON AS "privacy domain",
 FROM information_schema._fs_columns_snowflake
 WHERE table_catalog = '${catalog}' AND table_schema = '${schema}' AND table_name = '${table}'
 ORDER BY ordinal_position
 """
 )
 
+SQL_DESCRIBE_INFO_SCHEMA = Template(
+    """
+SELECT
+    column_name AS "name",
+    column_type as "type",
+    'COLUMN' AS "kind",
+    CASE WHEN "null" = 'YES' THEN 'Y' ELSE 'N' END AS "null?",
+    NULL::VARCHAR AS "default",
+    'N' AS "primary key",
+    'N' AS "unique key",
+    NULL::VARCHAR AS "check",
+    NULL::VARCHAR AS "expression",
+    NULL::VARCHAR AS "comment",
+    NULL::VARCHAR AS "policy name",
+    NULL::JSON AS "privacy domain",
+FROM (DESCRIBE information_schema.${view})
+"""
+)
+
 
 def describe_table(
     expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
 ) -> exp.Expression:
-    """Redirect to the information_schema._fs_describe_table to match snowflake.
+    """Redirect to the information_schema._fs_columns_snowflake to match snowflake.
 
     See https://docs.snowflake.com/en/sql-reference/sql/desc-table
     """
@@ -171,12 +202,16 @@ def describe_table(
         isinstance(expression, exp.Describe)
         and (kind := expression.args.get("kind"))
         and isinstance(kind, str)
-        and kind.upper() == "TABLE"
+        and kind.upper() in ("TABLE", "VIEW")
         and (table := expression.find(exp.Table))
     ):
         catalog = table.catalog or current_database
         schema = table.db or current_schema
 
+        if schema and schema.upper() == "INFORMATION_SCHEMA":
+            # information schema views don't exist in _fs_columns_snowflake
+            return sqlglot.parse_one(SQL_DESCRIBE_INFO_SCHEMA.substitute(view=table.name), read="duckdb")
+
         return sqlglot.parse_one(
             SQL_DESCRIBE_TABLE.substitute(catalog=catalog, schema=schema, table=table.name),
             read="duckdb",
@@ -551,12 +586,13 @@ def information_schema_fs_columns_snowflake(expression: exp.Expression) -> exp.Expression:
     """
 
     if (
-        isinstance(expression, exp.Select)
-        and (tbl_exp := expression.find(exp.Table))
-        and tbl_exp.name.upper() == "COLUMNS"
-        and tbl_exp.db.upper() == "INFORMATION_SCHEMA"
+        isinstance(expression, exp.Table)
+        and expression.db
+        and expression.db.upper() == "INFORMATION_SCHEMA"
+        and expression.name
+        and expression.name.upper() == "COLUMNS"
     ):
-        tbl_exp.set("this", exp.Identifier(this="_FS_COLUMNS_SNOWFLAKE", quoted=False))
+        expression.set("this", exp.Identifier(this="_FS_COLUMNS_SNOWFLAKE", quoted=False))
 
     return expression
 
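
alter_table_strip_cluster_by rewrites an ALTER TABLE ... CLUSTER BY statement into SUCCESS_NOP, which now aliases its literal as status. A sketch of applying it standalone (assuming sqlglot at the pinned ~25.9 parses the Snowflake CLUSTER BY clause into a single exp.Cluster action, as the guard expects):

import sqlglot

from fakesnow import transforms

expr = sqlglot.parse_one("ALTER TABLE t1 CLUSTER BY (id)", read="snowflake")
# the guard matches, so the whole statement collapses to the no-op SELECT
print(expr.transform(transforms.alter_table_strip_cluster_by).sql())
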
fakesnow/variables.py CHANGED
@@ -5,10 +5,23 @@ from sqlglot import exp
 
 
 # Implements snowflake variables: https://docs.snowflake.com/en/sql-reference/session-variables#using-variables-in-sql
+# [ ] Add support for setting multiple variables in a single statement
 class Variables:
     @classmethod
     def is_variable_modifier(cls, expr: exp.Expression) -> bool:
-        return isinstance(expr, exp.Set) or cls._is_unset_expression(expr)
+        return cls._is_set_expression(expr) or cls._is_unset_expression(expr)
+
+    @classmethod
+    def _is_set_expression(cls, expr: exp.Expression) -> bool:
+        if isinstance(expr, exp.Set):
+            is_set = not expr.args.get("unset")
+            if is_set:  # SET varname = value;
+                set_expressions = expr.args.get("expressions")
+                assert set_expressions, "SET without values in expression(s) is unexpected."
+                # Avoids mistakenly setting variables for statements that use SET in a different context.
+                # (eg. WHEN MATCHED THEN UPDATE SET x=7)
+                return isinstance(set_expressions[0], exp.SetItem)
+        return False
 
     @classmethod
     def _is_unset_expression(cls, expr: exp.Expression) -> bool:
@@ -22,11 +35,11 @@
 
     def update_variables(self, expr: exp.Expression) -> None:
         if isinstance(expr, exp.Set):
-            unset = expr.args.get("unset")
-            if not unset:  # SET varname = value;
-                unset_expressions = expr.args.get("expressions")
-                assert unset_expressions, "SET without values in expression(s) is unexpected."
-                eq = unset_expressions[0].this
+            is_set = not expr.args.get("unset")
+            if is_set:  # SET varname = value;
+                set_expressions = expr.args.get("expressions")
+                assert set_expressions, "SET without values in expression(s) is unexpected."
+                eq = set_expressions[0].this
                 name = eq.this.sql()
                 value = eq.args.get("expression").sql()
                 self._set(name, value)
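
_is_set_expression only treats SET as a session-variable modifier when its first expression is an exp.SetItem, which keeps constructs that use SET in another context (such as MERGE's UPDATE SET) from being captured. A sketch of the positive case (assuming sqlglot parses a bare SET statement into exp.Set with SetItem expressions):

import sqlglot

from fakesnow.variables import Variables

# a session-variable SET is recognised as a variable modifier
expr = sqlglot.parse_one("SET v = 42", read="snowflake")
print(Variables.is_variable_modifier(expr))  # True
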
{fakesnow-0.9.20.dist-info → fakesnow-0.9.22.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.20
+Version: 0.9.22
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
 Version 2.0, January 2004
@@ -210,23 +210,28 @@ Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: duckdb ~=1.0.0
+Requires-Dist: duckdb~=1.0.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot ~=25.3.0
+Requires-Dist: sqlglot~=25.9.0
 Provides-Extra: dev
-Requires-Dist: build ~=1.0 ; extra == 'dev'
-Requires-Dist: pandas-stubs ; extra == 'dev'
-Requires-Dist: snowflake-connector-python[pandas,secure-local-storage] ; extra == 'dev'
-Requires-Dist: pre-commit ~=3.4 ; extra == 'dev'
-Requires-Dist: pytest ~=8.0 ; extra == 'dev'
-Requires-Dist: ruff ~=0.4.2 ; extra == 'dev'
-Requires-Dist: twine ~=5.0 ; extra == 'dev'
-Requires-Dist: snowflake-sqlalchemy ~=1.5.0 ; extra == 'dev'
+Requires-Dist: build~=1.0; extra == "dev"
+Requires-Dist: pandas-stubs; extra == "dev"
+Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
+Requires-Dist: pre-commit~=3.4; extra == "dev"
+Requires-Dist: pyarrow-stubs; extra == "dev"
+Requires-Dist: pytest~=8.0; extra == "dev"
+Requires-Dist: pytest-asyncio; extra == "dev"
+Requires-Dist: ruff~=0.5.1; extra == "dev"
+Requires-Dist: twine~=5.0; extra == "dev"
+Requires-Dist: snowflake-sqlalchemy~=1.5.0; extra == "dev"
 Provides-Extra: notebook
-Requires-Dist: duckdb-engine ; extra == 'notebook'
-Requires-Dist: ipykernel ; extra == 'notebook'
-Requires-Dist: jupysql ; extra == 'notebook'
+Requires-Dist: duckdb-engine; extra == "notebook"
+Requires-Dist: ipykernel; extra == "notebook"
+Requires-Dist: jupysql; extra == "notebook"
+Provides-Extra: server
+Requires-Dist: starlette; extra == "server"
+Requires-Dist: uvicorn; extra == "server"
 
 # fakesnow ❄️
 
fakesnow-0.9.22.dist-info/RECORD ADDED
@@ -0,0 +1,21 @@
+fakesnow/__init__.py,sha256=9tFJJKvowKNW3vfnlmza6hOLN1I52DwChgNc5Ew6CcA,3499
+fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
+fakesnow/arrow.py,sha256=1ypCsf-r2Ven6CuSm-bTLoeq1G31kBD6JnaLvDxpwhU,1218
+fakesnow/checks.py,sha256=-QMvdcrRbhN60rnzxLBJ0IkUBWyLR8gGGKKmCS0w9mA,2383
+fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
+fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
+fakesnow/fakes.py,sha256=8roPAjUiVxSZDhxnpsP85sueSa3abZhyoDwM8awZZBY,31376
+fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
+fakesnow/info_schema.py,sha256=DObVOrhzppAFHsdtj4YI9oRISn9SkJUG6ONjVleQQ_Y,6303
+fakesnow/instance.py,sha256=3cJvPRuFy19dMKXbtBLl6imzO48pEw8uTYhZyFDuwhk,3133
+fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
+fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
+fakesnow/server.py,sha256=cTuMzbYL3etm61wZJ7bcnWpcSNoCSTk31gAnl0Kxi20,3183
+fakesnow/transforms.py,sha256=ellcY5OBc7mqgL9ChNolrqcCLWXF9RH21Jt88FcFl-I,54419
+fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
+fakesnow-0.9.22.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+fakesnow-0.9.22.dist-info/METADATA,sha256=Uu-JhX3mgGrgAP3jgLCP3b8YjRhWTh4qXwx-izVDZPM,18020
+fakesnow-0.9.22.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+fakesnow-0.9.22.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.22.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
+fakesnow-0.9.22.dist-info/RECORD,,
{fakesnow-0.9.20.dist-info → fakesnow-0.9.22.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: bdist_wheel (0.44.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
fakesnow/global_database.py DELETED
@@ -1,46 +0,0 @@
-from duckdb import DuckDBPyConnection
-
-GLOBAL_DATABASE_NAME = "_fs_global"
-USERS_TABLE_FQ_NAME = f"{GLOBAL_DATABASE_NAME}._fs_users_ext"
-
-# replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
-SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT = f"""
-create table if not exists {USERS_TABLE_FQ_NAME} (
-    name varchar,
-    created_on TIMESTAMPTZ,
-    login_name varchar,
-    display_name varchar,
-    first_name varchar,
-    last_name varchar,
-    email varchar,
-    mins_to_unlock varchar,
-    days_to_expiry varchar,
-    comment varchar,
-    disabled varchar,
-    must_change_password varchar,
-    snowflake_lock varchar,
-    default_warehouse varchar,
-    default_namespace varchar,
-    default_role varchar,
-    default_secondary_roles varchar,
-    ext_authn_duo varchar,
-    ext_authn_uid varchar,
-    mins_to_bypass_mfa varchar,
-    owner varchar,
-    last_success_login TIMESTAMPTZ,
-    expires_at_time TIMESTAMPTZ,
-    locked_until_time TIMESTAMPTZ,
-    has_password varchar,
-    has_rsa_public_key varchar,
-)
-"""
-
-
-def create_global_database(conn: DuckDBPyConnection) -> None:
-    """Create a "global" database for storing objects which span database.
-
-    Including (but not limited to):
-     - Users
-    """
-    conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
-    conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
fakesnow-0.9.20.dist-info/RECORD DELETED
@@ -1,19 +0,0 @@
-fakesnow/__init__.py,sha256=gEo6Jg6f8tJUwRTsrfb9q-FpDOufelf8Y3XeWtEJ9wg,3897
-fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
-fakesnow/checks.py,sha256=-QMvdcrRbhN60rnzxLBJ0IkUBWyLR8gGGKKmCS0w9mA,2383
-fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
-fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
-fakesnow/fakes.py,sha256=MYzj3XqVokPbOdOcU4WFep1CRsuXlBIwaz9EWaNCGo4,30778
-fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
-fakesnow/global_database.py,sha256=WTVIP1VhNvdCeX7TQncX1TRpGQU5rBf5Pbxim40zeSU,1399
-fakesnow/info_schema.py,sha256=CdIcGXHEQ_kmEAzdQKvA-PX41LA6wlK-4p1J45qgKYA,6266
-fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
-fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
-fakesnow/transforms.py,sha256=7XS42Ehv3SRciQLpAL4swCFCkNpOTwarXEIX6jUzO4Y,53230
-fakesnow/variables.py,sha256=iE8fnyaMnIoWh_ftT2qUq39fCNglwtQoGbC7MkTqHGU,2341
-fakesnow-0.9.20.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
-fakesnow-0.9.20.dist-info/METADATA,sha256=RSk43Zbbb8LQsG3G03DwUO8nVbaxs5qNd5VKTHqg4Xw,17839
-fakesnow-0.9.20.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-fakesnow-0.9.20.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
-fakesnow-0.9.20.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
-fakesnow-0.9.20.dist-info/RECORD,,