fakesnow-0.9.20.tar.gz → fakesnow-0.9.22.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. {fakesnow-0.9.20 → fakesnow-0.9.22}/PKG-INFO +8 -3
  2. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/__init__.py +8 -14
  3. fakesnow-0.9.22/fakesnow/arrow.py +32 -0
  4. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/fakes.py +24 -14
  5. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/info_schema.py +5 -4
  6. fakesnow-0.9.22/fakesnow/instance.py +92 -0
  7. fakesnow-0.9.22/fakesnow/server.py +109 -0
  8. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/transforms.py +50 -14
  9. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/variables.py +19 -6
  10. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow.egg-info/PKG-INFO +8 -3
  11. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow.egg-info/SOURCES.txt +5 -1
  12. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow.egg-info/requires.txt +8 -2
  13. {fakesnow-0.9.20 → fakesnow-0.9.22}/pyproject.toml +15 -3
  14. fakesnow-0.9.22/tests/test_arrow.py +53 -0
  15. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_connect.py +9 -0
  16. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_fakes.py +58 -8
  17. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_info_schema.py +53 -0
  18. fakesnow-0.9.22/tests/test_server.py +67 -0
  19. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_transforms.py +12 -4
  20. fakesnow-0.9.20/fakesnow/global_database.py +0 -46
  21. {fakesnow-0.9.20 → fakesnow-0.9.22}/LICENSE +0 -0
  22. {fakesnow-0.9.20 → fakesnow-0.9.22}/README.md +0 -0
  23. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/__main__.py +0 -0
  24. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/checks.py +0 -0
  25. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/cli.py +0 -0
  26. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/expr.py +0 -0
  27. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/fixtures.py +0 -0
  28. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/macros.py +0 -0
  29. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow/py.typed +0 -0
  30. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow.egg-info/dependency_links.txt +0 -0
  31. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow.egg-info/entry_points.txt +0 -0
  32. {fakesnow-0.9.20 → fakesnow-0.9.22}/fakesnow.egg-info/top_level.txt +0 -0
  33. {fakesnow-0.9.20 → fakesnow-0.9.22}/setup.cfg +0 -0
  34. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_checks.py +0 -0
  35. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_cli.py +0 -0
  36. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_expr.py +0 -0
  37. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_patch.py +0 -0
  38. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_sqlalchemy.py +0 -0
  39. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_users.py +0 -0
  40. {fakesnow-0.9.20 → fakesnow-0.9.22}/tests/test_write_pandas.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fakesnow
- Version: 0.9.20
+ Version: 0.9.22
  Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
  License: Apache License
  Version 2.0, January 2004
@@ -213,20 +213,25 @@ License-File: LICENSE
  Requires-Dist: duckdb~=1.0.0
  Requires-Dist: pyarrow
  Requires-Dist: snowflake-connector-python
- Requires-Dist: sqlglot~=25.3.0
+ Requires-Dist: sqlglot~=25.9.0
  Provides-Extra: dev
  Requires-Dist: build~=1.0; extra == "dev"
  Requires-Dist: pandas-stubs; extra == "dev"
  Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
  Requires-Dist: pre-commit~=3.4; extra == "dev"
+ Requires-Dist: pyarrow-stubs; extra == "dev"
  Requires-Dist: pytest~=8.0; extra == "dev"
- Requires-Dist: ruff~=0.4.2; extra == "dev"
+ Requires-Dist: pytest-asyncio; extra == "dev"
+ Requires-Dist: ruff~=0.5.1; extra == "dev"
  Requires-Dist: twine~=5.0; extra == "dev"
  Requires-Dist: snowflake-sqlalchemy~=1.5.0; extra == "dev"
  Provides-Extra: notebook
  Requires-Dist: duckdb-engine; extra == "notebook"
  Requires-Dist: ipykernel; extra == "notebook"
  Requires-Dist: jupysql; extra == "notebook"
+ Provides-Extra: server
+ Requires-Dist: starlette; extra == "server"
+ Requires-Dist: uvicorn; extra == "server"

  # fakesnow ❄️

@@ -8,12 +8,11 @@ import unittest.mock as mock
  from collections.abc import Iterator, Sequence
  from contextlib import contextmanager

- import duckdb
  import snowflake.connector
  import snowflake.connector.pandas_tools

  import fakesnow.fakes as fakes
- from fakesnow.global_database import create_global_database
+ from fakesnow.instance import FakeSnow


  @contextmanager
@@ -52,20 +51,15 @@ def patch(
      # won't be able to patch extra targets
      assert not isinstance(snowflake.connector.connect, mock.MagicMock), "Snowflake connector is already patched"

-     duck_conn = duckdb.connect(database=":memory:")
-     create_global_database(duck_conn)
+     fs = FakeSnow(
+         create_database_on_connect=create_database_on_connect,
+         create_schema_on_connect=create_schema_on_connect,
+         db_path=db_path,
+         nop_regexes=nop_regexes,
+     )

      fake_fns = {
-         # every time we connect, create a new cursor (ie: connection) so we can isolate each connection's
-         # schema setting, see https://duckdb.org/docs/api/python/overview.html#startup--shutdown
-         snowflake.connector.connect: lambda **kwargs: fakes.FakeSnowflakeConnection(
-             duck_conn.cursor(),
-             create_database=create_database_on_connect,
-             create_schema=create_schema_on_connect,
-             db_path=db_path,
-             nop_regexes=nop_regexes,
-             **kwargs,
-         ),
+         snowflake.connector.connect: fs.connect,
          snowflake.connector.pandas_tools.write_pandas: fakes.write_pandas,
      }

@@ -0,0 +1,32 @@
+ import pyarrow as pa
+
+
+ def with_sf_metadata(schema: pa.Schema) -> pa.Schema:
+     # see https://github.com/snowflakedb/snowflake-connector-python/blob/e9393a6/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/CArrowTableIterator.cpp#L32
+     # and https://github.com/snowflakedb/snowflake-connector-python/blob/e9393a6/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/SnowflakeType.cpp#L10
+     fms = []
+     for i, t in enumerate(schema.types):
+         f = schema.field(i)
+
+         if isinstance(t, pa.Decimal128Type):
+             fm = f.with_metadata({"logicalType": "FIXED", "precision": str(t.precision), "scale": str(t.scale)})
+         elif t == pa.string():
+             fm = f.with_metadata({"logicalType": "TEXT"})
+         else:
+             raise NotImplementedError(f"Unsupported Arrow type: {t}")
+         fms.append(fm)
+     return pa.schema(fms)
+
+
+ def to_ipc(table: pa.Table) -> pa.Buffer:
+     batches = table.to_batches()
+     if len(batches) != 1:
+         raise NotImplementedError(f"{len(batches)} batches")
+     batch = batches[0]
+
+     sink = pa.BufferOutputStream()
+
+     with pa.ipc.new_stream(sink, with_sf_metadata(batch.schema)) as writer:
+         writer.write_batch(batch)
+
+     return sink.getvalue()
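The new `fakesnow/arrow.py` above attaches the per-field metadata the Snowflake connector expects (`logicalType`, plus `precision`/`scale` for decimals) and writes a single-batch table as an Arrow IPC stream. A minimal round-trip sketch (illustrative values; `tests/test_arrow.py` below exercises the same path):

```python
import pyarrow as pa

from fakesnow.arrow import to_ipc

# a single-batch, single-column table; to_ipc raises NotImplementedError for multiple batches
table = pa.table({"'HELLO WORLD'": ["hello world"]})
buf = to_ipc(table)

# reading the stream back shows the Snowflake field metadata written by with_sf_metadata
batch = next(iter(pa.ipc.open_stream(buf)))
assert batch.schema.field(0).metadata == {b"logicalType": b"TEXT"}
```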
@@ -114,7 +114,6 @@ class FakeSnowflakeCursor:
      def description(self) -> list[ResultMetadata]:
          # use a separate cursor to avoid consuming the result set on this cursor
          with self._conn.cursor() as cur:
-             # self._duck_conn.execute(sql, params)
              expression = sqlglot.parse_one(f"DESCRIBE {self._last_sql}", read="duckdb")
              cur._execute(expression, self._last_params)  # noqa: SLF001
              meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall())
@@ -203,6 +202,7 @@ class FakeSnowflakeCursor:
              .transform(transforms.sha256)
              .transform(transforms.create_clone)
              .transform(transforms.alias_in_join)
+             .transform(transforms.alter_table_strip_cluster_by)
          )

      def _execute(
@@ -234,12 +234,10 @@ class FakeSnowflakeCursor:
          if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
              sql = f"SELECT setseed({seed}); {sql}"

-         if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
-             print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
-
          result_sql = None

          try:
+             self._log_sql(sql, params)
              self._duck_conn.execute(sql, params)
          except duckdb.BinderException as e:
              msg = e.args[0]
@@ -286,9 +284,9 @@ class FakeSnowflakeCursor:
              (affected_count,) = self._duck_conn.fetchall()[0]
              result_sql = SQL_DELETED_ROWS.substitute(count=affected_count)

-         elif cmd == "DESCRIBE TABLE":
-             # DESCRIBE TABLE has already been run above to detect and error if the table exists
-             # We now rerun DESCRIBE TABLE but transformed with columns to match Snowflake
+         elif cmd in ("DESCRIBE TABLE", "DESCRIBE VIEW"):
+             # DESCRIBE TABLE/VIEW has already been run above to detect and error if the table exists
+             # We now rerun DESCRIBE TABLE/VIEW but transformed with columns to match Snowflake
              result_sql = transformed.transform(
                  lambda e: transforms.describe_table(e, self._conn.database, self._conn.schema)
              ).sql(dialect="duckdb")
@@ -336,6 +334,7 @@ class FakeSnowflakeCursor:
              self._duck_conn.execute(info_schema.insert_text_lengths_sql(catalog, schema, table.name, text_lengths))

          if result_sql:
+             self._log_sql(result_sql, params)
              self._duck_conn.execute(result_sql)

          self._arrow_table = self._duck_conn.fetch_arrow_table()
@@ -346,6 +345,10 @@ class FakeSnowflakeCursor:

          return self

+     def _log_sql(self, sql: str, params: Sequence[Any] | dict[Any, Any] | None = None) -> None:
+         if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
+             print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
+
      def executemany(
          self,
          command: str,
@@ -388,12 +391,13 @@ class FakeSnowflakeCursor:
          if self._arrow_table is None:
              # mimic snowflake python connector error type
              raise TypeError("No open result set")
+         tslice = self._arrow_table.slice(offset=self._arrow_table_fetch_index or 0, length=size).to_pylist()
+
          if self._arrow_table_fetch_index is None:
-             self._arrow_table_fetch_index = 0
+             self._arrow_table_fetch_index = size
          else:
              self._arrow_table_fetch_index += size

-         tslice = self._arrow_table.slice(offset=self._arrow_table_fetch_index, length=size).to_pylist()
          return tslice if self._use_dict_result else [tuple(d.values()) for d in tslice]

      def get_result_batches(self) -> list[ResultBatch] | None:
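The `fetchmany` rework above slices at the current fetch index before advancing it by this call's `size`; previously the index was advanced first, so rows were skipped whenever the requested size varied between calls (jupysql does exactly that). A standalone sketch of the corrected paging, with a plain list standing in for the Arrow table:

```python
# paging sketch: slice at the current index, then advance by this call's size
rows = [1, 2, 3]
index: int | None = None

def fetchmany(size: int) -> list[int]:
    global index
    tslice = rows[(index or 0) : (index or 0) + size]
    index = size if index is None else index + size
    return tslice

assert fetchmany(2) == [1, 2]
assert fetchmany(5) == [3]  # the old logic advanced to offset 5 first and returned []
assert fetchmany(5) == []
```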
@@ -522,9 +526,14 @@ class FakeSnowflakeConnection:
      ):
          self._duck_conn = duck_conn
          # upper case database and schema like snowflake unquoted identifiers
-         # NB: catalog names are not case-sensitive in duckdb but stored as cased in information_schema.schemata
+         # so they appear as upper-cased in information_schema
+         # catalog and schema names are not actually case-sensitive in duckdb even though
+         # they are as cased in information_schema.schemata, so when selecting from
+         # information_schema.schemata below we use upper-case to match any existing duckdb
+         # catalog or schemas like "information_schema"
          self.database = database and database.upper()
          self.schema = schema and schema.upper()
+
          self.database_set = False
          self.schema_set = False
          self.db_path = Path(db_path) if db_path else None
@@ -538,7 +547,7 @@ class FakeSnowflakeConnection:
              and self.database
              and not duck_conn.execute(
                  f"""select * from information_schema.schemata
-                 where catalog_name = '{self.database}'"""
+                 where upper(catalog_name) = '{self.database}'"""
              ).fetchone()
          ):
              db_file = f"{self.db_path/self.database}.db" if self.db_path else ":memory:"
@@ -553,7 +562,7 @@ class FakeSnowflakeConnection:
              and self.schema
              and not duck_conn.execute(
                  f"""select * from information_schema.schemata
-                 where catalog_name = '{self.database}' and schema_name = '{self.schema}'"""
+                 where upper(catalog_name) = '{self.database}' and upper(schema_name) = '{self.schema}'"""
              ).fetchone()
          ):
              duck_conn.execute(f"CREATE SCHEMA {self.database}.{self.schema}")
@@ -564,7 +573,7 @@ class FakeSnowflakeConnection:
              and self.schema
              and duck_conn.execute(
                  f"""select * from information_schema.schemata
-                 where catalog_name = '{self.database}' and schema_name = '{self.schema}'"""
+                 where upper(catalog_name) = '{self.database}' and upper(schema_name) = '{self.schema}'"""
              ).fetchone()
          ):
              duck_conn.execute(f"SET schema='{self.database}.{self.schema}'")
@@ -575,7 +584,7 @@ class FakeSnowflakeConnection:
              self.database
              and duck_conn.execute(
                  f"""select * from information_schema.schemata
-                 where catalog_name = '{self.database}'"""
+                 where upper(catalog_name) = '{self.database}'"""
              ).fetchone()
          ):
              duck_conn.execute(f"SET schema='{self.database}.main'")
@@ -602,6 +611,7 @@ class FakeSnowflakeConnection:
          self.cursor().execute("COMMIT")

      def cursor(self, cursor_class: type[SnowflakeCursor] = SnowflakeCursor) -> FakeSnowflakeCursor:
+         # TODO: use duck_conn cursor for thread-safety
          return FakeSnowflakeCursor(conn=self, duck_conn=self._duck_conn, use_dict_result=cursor_class == DictCursor)

      def execute_string(
@@ -62,8 +62,8 @@ case when columns.data_type='BIGINT' then 10
  case when columns.data_type='DOUBLE' then NULL else columns.numeric_scale end as numeric_scale,
  collation_name, is_identity, identity_generation, identity_cycle,
  ddb_columns.comment as comment,
- null as identity_start,
- null as identity_increment,
+ null::VARCHAR as identity_start,
+ null::VARCHAR as identity_increment,
  from ${catalog}.information_schema.columns columns
  left join ${catalog}.information_schema._fs_columns_ext ext
  on ext_table_catalog = columns.table_catalog
@@ -78,6 +78,7 @@ LEFT JOIN duckdb_columns ddb_columns
  """
  )

+
  # replicates https://docs.snowflake.com/sql-reference/info-schema/databases
  SQL_CREATE_INFORMATION_SCHEMA_DATABASES_VIEW = Template(
  """
@@ -86,7 +87,7 @@ select
  catalog_name as database_name,
  'SYSADMIN' as database_owner,
  'NO' as is_transient,
- null as comment,
+ null::VARCHAR as comment,
  to_timestamp(0)::timestamptz as created,
  to_timestamp(0)::timestamptz as last_altered,
  1 as retention_time,
@@ -116,7 +117,7 @@ select
  to_timestamp(0)::timestamptz as last_altered,
  to_timestamp(0)::timestamptz as last_ddl,
  'SYSADMIN' as last_ddl_by,
- null as comment
+ null::VARCHAR as comment
  from duckdb_views
  where database_name = '${catalog}'
  and schema_name != 'information_schema'
@@ -0,0 +1,92 @@
+ from __future__ import annotations
+
+ import os
+ from typing import Any
+
+ import duckdb
+
+ import fakesnow.fakes as fakes
+
+ GLOBAL_DATABASE_NAME = "_fs_global"
+ USERS_TABLE_FQ_NAME = f"{GLOBAL_DATABASE_NAME}._fs_users_ext"
+
+ # replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
+ SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT = f"""
+ create table if not exists {USERS_TABLE_FQ_NAME} (
+     name varchar,
+     created_on TIMESTAMPTZ,
+     login_name varchar,
+     display_name varchar,
+     first_name varchar,
+     last_name varchar,
+     email varchar,
+     mins_to_unlock varchar,
+     days_to_expiry varchar,
+     comment varchar,
+     disabled varchar,
+     must_change_password varchar,
+     snowflake_lock varchar,
+     default_warehouse varchar,
+     default_namespace varchar,
+     default_role varchar,
+     default_secondary_roles varchar,
+     ext_authn_duo varchar,
+     ext_authn_uid varchar,
+     mins_to_bypass_mfa varchar,
+     owner varchar,
+     last_success_login TIMESTAMPTZ,
+     expires_at_time TIMESTAMPTZ,
+     locked_until_time TIMESTAMPTZ,
+     has_password varchar,
+     has_rsa_public_key varchar,
+ )
+ """
+
+
+ def create_global_database(conn: duckdb.DuckDBPyConnection) -> None:
+     """Create a "global" database for storing objects which span databases.
+
+     Including (but not limited to):
+     - Users
+     """
+     conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
+     conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
+
+
+ class FakeSnow:
+     def __init__(
+         self,
+         create_database_on_connect: bool = True,
+         create_schema_on_connect: bool = True,
+         db_path: str | os.PathLike | None = None,
+         nop_regexes: list[str] | None = None,
+     ):
+         self.create_database_on_connect = create_database_on_connect
+         self.create_schema_on_connect = create_schema_on_connect
+         self.db_path = db_path
+         self.nop_regexes = nop_regexes
+
+         self.duck_conn = duckdb.connect(database=":memory:")
+
+         # create a "global" database for storing objects which span databases.
+         self.duck_conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
+         self.duck_conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)
+
+     def connect(
+         self, database: str | None = None, schema: str | None = None, **kwargs: Any
+     ) -> fakes.FakeSnowflakeConnection:
+         # every time we connect, create a new cursor (ie: connection) so we can isolate each connection's
+         # schema setting see
+         # https://github.com/duckdb/duckdb/blob/18254ec/tools/pythonpkg/src/pyconnection.cpp#L1440
+         # and to make connections thread-safe see
+         # https://duckdb.org/docs/api/python/overview.html#using-connections-in-parallel-python-programs
+         return fakes.FakeSnowflakeConnection(
+             self.duck_conn.cursor(),
+             database,
+             schema,
+             create_database=self.create_database_on_connect,
+             create_schema=self.create_schema_on_connect,
+             db_path=self.db_path,
+             nop_regexes=self.nop_regexes,
+             **kwargs,
+         )
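The new `FakeSnow` class centralizes what `fakesnow.patch` previously wired up inline: one shared in-memory DuckDB database, with each connection getting its own cursor for isolated schema state. A hedged usage sketch (argument values are illustrative):

```python
from fakesnow.instance import FakeSnow

fs = FakeSnow()

# each connect() wraps a fresh cursor over the same shared DuckDB instance,
# so connections see the same data but keep independent current database/schema
conn1 = fs.connect(database="db1", schema="schema1")
conn2 = fs.connect(database="db1", schema="schema1")
```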
@@ -0,0 +1,109 @@
+ from __future__ import annotations
+
+ import gzip
+ import json
+ import secrets
+ from base64 import b64encode
+ from dataclasses import dataclass
+
+ from starlette.applications import Starlette
+ from starlette.concurrency import run_in_threadpool
+ from starlette.requests import Request
+ from starlette.responses import JSONResponse
+ from starlette.routing import Route
+
+ from fakesnow.arrow import to_ipc
+ from fakesnow.fakes import FakeSnowflakeConnection
+ from fakesnow.instance import FakeSnow
+
+ fs = FakeSnow()
+ sessions = {}
+
+
+ @dataclass
+ class ServerError(Exception):
+     status_code: int
+     code: str
+     message: str
+
+
+ def login_request(request: Request) -> JSONResponse:
+     database = request.query_params.get("databaseName")
+     schema = request.query_params.get("schemaName")
+     token = secrets.token_urlsafe(32)
+     sessions[token] = fs.connect(database, schema)
+     return JSONResponse({"data": {"token": token}, "success": True})
+
+
+ async def query_request(request: Request) -> JSONResponse:
+     try:
+         conn = to_conn(request)
+
+         body = await request.body()
+         body_json = json.loads(gzip.decompress(body))
+
+         sql_text = body_json["sqlText"]
+
+         # only a single sql statement is sent at a time by the python snowflake connector
+         cur = await run_in_threadpool(conn.cursor().execute, sql_text)
+
+         assert cur._arrow_table, "No result set"  # noqa: SLF001
+
+         batch_bytes = to_ipc(cur._arrow_table)  # noqa: SLF001
+         rowset_b64 = b64encode(batch_bytes).decode("utf-8")
+
+         return JSONResponse(
+             {
+                 "data": {
+                     "rowtype": [
+                         {
+                             "name": "'HELLO WORLD'",
+                             "nullable": False,
+                             "type": "text",
+                             "length": 11,
+                             "scale": None,
+                             "precision": None,
+                         }
+                     ],
+                     "rowsetBase64": rowset_b64,
+                     "total": 1,
+                     "queryResultFormat": "arrow",
+                 },
+                 "success": True,
+             }
+         )
+
+     except ServerError as e:
+         return JSONResponse(
+             {"data": None, "code": e.code, "message": e.message, "success": False, "headers": None},
+             status_code=e.status_code,
+         )
+
+
+ def to_conn(request: Request) -> FakeSnowflakeConnection:
+     if not (auth := request.headers.get("Authorization")):
+         raise ServerError(status_code=401, code="390103", message="Session token not found in the request data.")
+
+     token = auth[17:-1]
+
+     if not (conn := sessions.get(token)):
+         raise ServerError(status_code=401, code="390104", message="User must login again to access the service.")
+
+     return conn
+
+
+ routes = [
+     Route(
+         "/session/v1/login-request",
+         login_request,
+         methods=["POST"],
+     ),
+     Route(
+         "/queries/v1/query-request",
+         query_request,
+         methods=["POST"],
+     ),
+     Route("/queries/v1/abort-request", lambda _: JSONResponse({"success": True}), methods=["POST"]),
+ ]
+
+ app = Starlette(debug=True, routes=routes)
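`fakesnow/server.py` serves the fake over HTTP on the `login-request`/`query-request`/`abort-request` endpoints the Snowflake Python connector uses, installable via the new `server` extra (`pip install fakesnow[server]`). A hedged sketch of running it in-process, modelled on the `server` fixture in `tests/test_server.py` below (port 8000 is arbitrary here):

```python
import threading
from time import sleep

import snowflake.connector
import uvicorn

import fakesnow.server

# run the Starlette app in a background thread, as the test fixture does
server = uvicorn.Server(uvicorn.Config(fakesnow.server.app, port=8000, log_level="info"))
threading.Thread(target=server.run, daemon=True).start()
while not server.started:
    sleep(0.1)

# a real snowflake connector can now talk to the fake over http
conn = snowflake.connector.connect(
    user="fake", password="snow", account="fakesnow", host="localhost", port=8000, protocol="http"
)
```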
@@ -7,11 +7,11 @@ from typing import ClassVar, Literal, cast
  import sqlglot
  from sqlglot import exp

- from fakesnow.global_database import USERS_TABLE_FQ_NAME
+ from fakesnow.instance import USERS_TABLE_FQ_NAME
  from fakesnow.variables import Variables

  MISSING_DATABASE = "missing_database"
- SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.'")
+ SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.' as status")


  def alias_in_join(expression: exp.Expression) -> exp.Expression:
@@ -33,6 +33,18 @@ def alias_in_join(expression: exp.Expression) -> exp.Expression:
      return expression


+ def alter_table_strip_cluster_by(expression: exp.Expression) -> exp.Expression:
+     """Turn alter table cluster by into a no-op"""
+     if (
+         isinstance(expression, exp.AlterTable)
+         and (actions := expression.args.get("actions"))
+         and len(actions) == 1
+         and (isinstance(actions[0], exp.Cluster))
+     ):
+         return SUCCESS_NOP
+     return expression
+
+
  def array_size(expression: exp.Expression) -> exp.Expression:
      if isinstance(expression, exp.ArraySize):
          # case is used to convert 0 to null, because null is returned by duckdb when no case matches
@@ -147,22 +159,41 @@ SELECT
      column_default AS "default",
      'N' AS "primary key",
      'N' AS "unique key",
-     NULL AS "check",
-     NULL AS "expression",
-     NULL AS "comment",
-     NULL AS "policy name",
-     NULL AS "privacy domain",
+     NULL::VARCHAR AS "check",
+     NULL::VARCHAR AS "expression",
+     NULL::VARCHAR AS "comment",
+     NULL::VARCHAR AS "policy name",
+     NULL::JSON AS "privacy domain",
  FROM information_schema._fs_columns_snowflake
  WHERE table_catalog = '${catalog}' AND table_schema = '${schema}' AND table_name = '${table}'
  ORDER BY ordinal_position
  """
  )

+ SQL_DESCRIBE_INFO_SCHEMA = Template(
+     """
+ SELECT
+     column_name AS "name",
+     column_type as "type",
+     'COLUMN' AS "kind",
+     CASE WHEN "null" = 'YES' THEN 'Y' ELSE 'N' END AS "null?",
+     NULL::VARCHAR AS "default",
+     'N' AS "primary key",
+     'N' AS "unique key",
+     NULL::VARCHAR AS "check",
+     NULL::VARCHAR AS "expression",
+     NULL::VARCHAR AS "comment",
+     NULL::VARCHAR AS "policy name",
+     NULL::JSON AS "privacy domain",
+ FROM (DESCRIBE information_schema.${view})
+ """
+ )
+

  def describe_table(
      expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
  ) -> exp.Expression:
-     """Redirect to the information_schema._fs_describe_table to match snowflake.
+     """Redirect to the information_schema._fs_columns_snowflake to match snowflake.

      See https://docs.snowflake.com/en/sql-reference/sql/desc-table
      """
@@ -171,12 +202,16 @@ def describe_table(
          isinstance(expression, exp.Describe)
          and (kind := expression.args.get("kind"))
          and isinstance(kind, str)
-         and kind.upper() == "TABLE"
+         and kind.upper() in ("TABLE", "VIEW")
          and (table := expression.find(exp.Table))
      ):
          catalog = table.catalog or current_database
          schema = table.db or current_schema

+         if schema and schema.upper() == "INFORMATION_SCHEMA":
+             # information schema views don't exist in _fs_columns_snowflake
+             return sqlglot.parse_one(SQL_DESCRIBE_INFO_SCHEMA.substitute(view=table.name), read="duckdb")
+
          return sqlglot.parse_one(
              SQL_DESCRIBE_TABLE.substitute(catalog=catalog, schema=schema, table=table.name),
              read="duckdb",
551
586
  """
552
587
 
553
588
  if (
554
- isinstance(expression, exp.Select)
555
- and (tbl_exp := expression.find(exp.Table))
556
- and tbl_exp.name.upper() == "COLUMNS"
557
- and tbl_exp.db.upper() == "INFORMATION_SCHEMA"
589
+ isinstance(expression, exp.Table)
590
+ and expression.db
591
+ and expression.db.upper() == "INFORMATION_SCHEMA"
592
+ and expression.name
593
+ and expression.name.upper() == "COLUMNS"
558
594
  ):
559
- tbl_exp.set("this", exp.Identifier(this="_FS_COLUMNS_SNOWFLAKE", quoted=False))
595
+ expression.set("this", exp.Identifier(this="_FS_COLUMNS_SNOWFLAKE", quoted=False))
560
596
 
561
597
  return expression
562
598
 
@@ -5,10 +5,23 @@ from sqlglot import exp


  # Implements snowflake variables: https://docs.snowflake.com/en/sql-reference/session-variables#using-variables-in-sql
+ # [ ] Add support for setting multiple variables in a single statement
  class Variables:
      @classmethod
      def is_variable_modifier(cls, expr: exp.Expression) -> bool:
-         return isinstance(expr, exp.Set) or cls._is_unset_expression(expr)
+         return cls._is_set_expression(expr) or cls._is_unset_expression(expr)
+
+     @classmethod
+     def _is_set_expression(cls, expr: exp.Expression) -> bool:
+         if isinstance(expr, exp.Set):
+             is_set = not expr.args.get("unset")
+             if is_set:  # SET varname = value;
+                 set_expressions = expr.args.get("expressions")
+                 assert set_expressions, "SET without values in expression(s) is unexpected."
+                 # Avoids mistakenly setting variables for statements that use SET in a different context.
+                 # (eg. WHEN MATCHED THEN UPDATE SET x=7)
+                 return isinstance(set_expressions[0], exp.SetItem)
+         return False

      @classmethod
      def _is_unset_expression(cls, expr: exp.Expression) -> bool:
@@ -22,11 +35,11 @@ class Variables:

      def update_variables(self, expr: exp.Expression) -> None:
          if isinstance(expr, exp.Set):
-             unset = expr.args.get("unset")
-             if not unset:  # SET varname = value;
-                 unset_expressions = expr.args.get("expressions")
-                 assert unset_expressions, "SET without values in expression(s) is unexpected."
-                 eq = unset_expressions[0].this
+             is_set = not expr.args.get("unset")
+             if is_set:  # SET varname = value;
+                 set_expressions = expr.args.get("expressions")
+                 assert set_expressions, "SET without values in expression(s) is unexpected."
+                 eq = set_expressions[0].this
                  name = eq.this.sql()
                  value = eq.args.get("expression").sql()
                  self._set(name, value)
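To illustrate the new `_is_set_expression` guard: a session-variable assignment parses to `exp.Set` whose first expression is an `exp.SetItem`, which is what the check keys on; statements that merely contain a `SET` keyword in another context (eg. `WHEN MATCHED THEN UPDATE SET x=7`, per the comment above) don't match. A sketch, assuming the sqlglot ~25.9 pinned by this release:

```python
import sqlglot
from sqlglot import exp

stmt = sqlglot.parse_one("SET var1 = 'hello'", read="snowflake")
assert isinstance(stmt, exp.Set)
# the first expression is a SetItem, so Variables treats this as a variable assignment
assert isinstance(stmt.expressions[0], exp.SetItem)
```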
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fakesnow
- Version: 0.9.20
+ Version: 0.9.22
  Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
  License: Apache License
  Version 2.0, January 2004
@@ -213,20 +213,25 @@ License-File: LICENSE
  Requires-Dist: duckdb~=1.0.0
  Requires-Dist: pyarrow
  Requires-Dist: snowflake-connector-python
- Requires-Dist: sqlglot~=25.3.0
+ Requires-Dist: sqlglot~=25.9.0
  Provides-Extra: dev
  Requires-Dist: build~=1.0; extra == "dev"
  Requires-Dist: pandas-stubs; extra == "dev"
  Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
  Requires-Dist: pre-commit~=3.4; extra == "dev"
+ Requires-Dist: pyarrow-stubs; extra == "dev"
  Requires-Dist: pytest~=8.0; extra == "dev"
- Requires-Dist: ruff~=0.4.2; extra == "dev"
+ Requires-Dist: pytest-asyncio; extra == "dev"
+ Requires-Dist: ruff~=0.5.1; extra == "dev"
  Requires-Dist: twine~=5.0; extra == "dev"
  Requires-Dist: snowflake-sqlalchemy~=1.5.0; extra == "dev"
  Provides-Extra: notebook
  Requires-Dist: duckdb-engine; extra == "notebook"
  Requires-Dist: ipykernel; extra == "notebook"
  Requires-Dist: jupysql; extra == "notebook"
+ Provides-Extra: server
+ Requires-Dist: starlette; extra == "server"
+ Requires-Dist: uvicorn; extra == "server"

  # fakesnow ❄️

@@ -3,15 +3,17 @@ README.md
  pyproject.toml
  fakesnow/__init__.py
  fakesnow/__main__.py
+ fakesnow/arrow.py
  fakesnow/checks.py
  fakesnow/cli.py
  fakesnow/expr.py
  fakesnow/fakes.py
  fakesnow/fixtures.py
- fakesnow/global_database.py
  fakesnow/info_schema.py
+ fakesnow/instance.py
  fakesnow/macros.py
  fakesnow/py.typed
+ fakesnow/server.py
  fakesnow/transforms.py
  fakesnow/variables.py
  fakesnow.egg-info/PKG-INFO
@@ -20,6 +22,7 @@ fakesnow.egg-info/dependency_links.txt
  fakesnow.egg-info/entry_points.txt
  fakesnow.egg-info/requires.txt
  fakesnow.egg-info/top_level.txt
+ tests/test_arrow.py
  tests/test_checks.py
  tests/test_cli.py
  tests/test_connect.py
@@ -27,6 +30,7 @@ tests/test_expr.py
  tests/test_fakes.py
  tests/test_info_schema.py
  tests/test_patch.py
+ tests/test_server.py
  tests/test_sqlalchemy.py
  tests/test_transforms.py
  tests/test_users.py
@@ -1,15 +1,17 @@
  duckdb~=1.0.0
  pyarrow
  snowflake-connector-python
- sqlglot~=25.3.0
+ sqlglot~=25.9.0

  [dev]
  build~=1.0
  pandas-stubs
  snowflake-connector-python[pandas,secure-local-storage]
  pre-commit~=3.4
+ pyarrow-stubs
  pytest~=8.0
- ruff~=0.4.2
+ pytest-asyncio
+ ruff~=0.5.1
  twine~=5.0
  snowflake-sqlalchemy~=1.5.0

@@ -17,3 +19,7 @@ snowflake-sqlalchemy~=1.5.0
  duckdb-engine
  ipykernel
  jupysql
+
+ [server]
+ starlette
+ uvicorn
@@ -1,7 +1,7 @@
  [project]
  name = "fakesnow"
  description = "Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally."
- version = "0.9.20"
+ version = "0.9.22"
  readme = "README.md"
  license = { file = "LICENSE" }
  classifiers = ["License :: OSI Approved :: MIT License"]
@@ -11,7 +11,7 @@ dependencies = [
      "duckdb~=1.0.0",
      "pyarrow",
      "snowflake-connector-python",
-     "sqlglot~=25.3.0",
+     "sqlglot~=25.9.0",
  ]

  [project.urls]
@@ -28,13 +28,17 @@ dev = [
      # include compatible version of pandas, and secure-local-storage for token caching
      "snowflake-connector-python[pandas, secure-local-storage]",
      "pre-commit~=3.4",
+     "pyarrow-stubs",
      "pytest~=8.0",
-     "ruff~=0.4.2",
+     "pytest-asyncio",
+     "ruff~=0.5.1",
      "twine~=5.0",
      "snowflake-sqlalchemy~=1.5.0",
  ]
  # for debugging, see https://duckdb.org/docs/guides/python/jupyter.html
  notebook = ["duckdb-engine", "ipykernel", "jupysql"]
+ # for the standalone server
+ server = ["starlette", "uvicorn"]

  [build-system]
  requires = ["setuptools~=69.1", "wheel~=0.42"]
@@ -51,9 +55,17 @@ strictListInference = true
  strictDictionaryInference = true
  strictParameterNoneValue = true
  reportTypedDictNotRequiredAccess = false
+ reportIncompatibleVariableOverride = true
  reportIncompatibleMethodOverride = true
+ reportMatchNotExhaustive = true
  reportUnnecessaryTypeIgnoreComment = true

+ [tool.pytest.ini_options]
+ asyncio_mode = "auto"
+ # error on unhandled exceptions in background threads
+ # useful for catching errors in server or snowflake connector threads
+ filterwarnings = ["error::pytest.PytestUnhandledThreadExceptionWarning"]
+
  [tool.ruff]
  line-length = 120
  # first-party imports for sorting
@@ -0,0 +1,53 @@
+ from base64 import b64decode
+
+ import pandas as pd
+ import pyarrow as pa
+
+ from fakesnow.arrow import to_ipc, with_sf_metadata
+
+
+ def test_with_sf_metadata() -> None:
+     # see https://arrow.apache.org/docs/python/api/datatypes.html
+     def f(t: pa.DataType) -> dict:
+         return with_sf_metadata(pa.schema([pa.field(str(t), t)])).field(0).metadata
+
+     assert f(pa.string()) == {b"logicalType": b"TEXT"}
+     assert f(pa.decimal128(10, 2)) == {b"logicalType": b"FIXED", b"precision": b"10", b"scale": b"2"}
+
+
+ def test_ipc_writes_sf_metadata() -> None:
+     df = pd.DataFrame.from_dict(
+         {
+             "'HELLO WORLD'": ["hello world"],
+         }
+     )
+
+     table = pa.Table.from_pandas(df)
+     table_bytes = to_ipc(table)
+
+     batch = next(iter(pa.ipc.open_stream(table_bytes)))
+
+     # field and schema metadata is ignored
+     assert pa.table(batch) == table
+     assert batch.schema.field(0).metadata == {b"logicalType": b"TEXT"}, "Missing Snowflake field metadata"
+
+
+ def test_read_base64() -> None:
+     # select to_decimal('12.3456', 10,2)
+     rowset_b64 = "/////5gBAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAYAAAAAAASABgACAAAAAcADAAAABAAFAASAAAAAAAAAhQAAAA0AQAACAAAACgAAAAAAAAAGwAAAFRPX0RFQ0lNQUwoJzEyLjM0NTYnLCAxMCwyKQAGAAAA0AAAAKAAAAB8AAAAVAAAACwAAAAEAAAAUP///xAAAAAEAAAAAQAAAFQAAAAJAAAAZmluYWxUeXBlAAAAdP///xAAAAAEAAAAAQAAADIAAAAKAAAAYnl0ZUxlbmd0aAAAmP///xAAAAAEAAAAAQAAADAAAAAKAAAAY2hhckxlbmd0aAAAvP///xAAAAAEAAAAAQAAADIAAAAFAAAAc2NhbGUAAADc////EAAAAAQAAAACAAAAMTAAAAkAAABwcmVjaXNpb24AAAAIAAwABAAIAAgAAAAUAAAABAAAAAUAAABGSVhFRAAAAAsAAABsb2dpY2FsVHlwZQAIAAwACAAHAAgAAAAAAAABEAAAAAAAAAD/////iAAAABQAAAAAAAAADAAWAAYABQAIAAwADAAAAAADBAAYAAAACAAAAAAAAAAAAAoAGAAMAAQACAAKAAAAPAAAABAAAAABAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAQAAAAEAAAAAAAAAAAAAAAAAAADTBAAAAAAAAA=="  # noqa: E501
+
+     f = b64decode(rowset_b64)
+     reader = pa.ipc.open_stream(f)
+
+     batch = next(reader)
+
+     field = batch.schema.field(0)
+     assert field == pa.field(name="TO_DECIMAL('12.3456', 10,2)", type=pa.int16(), nullable=False)
+     assert field.metadata == {
+         b"logicalType": b"FIXED",
+         b"precision": b"10",
+         b"scale": b"2",
+         b"charLength": b"0",
+         b"byteLength": b"2",
+         b"finalType": b"T",
+     }
@@ -83,6 +83,15 @@ def test_connect_db_path_reuse():
      assert cur.execute("select * from example").fetchall() == [(420,)]


+ def test_connect_information_schema():
+     with fakesnow.patch(create_schema_on_connect=False):
+         conn = snowflake.connector.connect(database="db1", schema="information_schema")
+         assert conn.schema == "INFORMATION_SCHEMA"
+         with conn, conn.cursor() as cur:
+             # shouldn't fail
+             cur.execute("SELECT * FROM databases")
+
+
  def test_connect_without_database(_fakesnow_no_auto_create: None):
      with snowflake.connector.connect() as conn, conn.cursor() as cur:
          with pytest.raises(snowflake.connector.errors.ProgrammingError) as excinfo:
@@ -41,10 +41,13 @@ def test_alias_on_join(conn: snowflake.connector.SnowflakeConnection):
      assert cur.fetchall() == [("VARCHAR1", "CHAR1", "JOIN"), ("VARCHAR2", "CHAR2", None)]


- def test_alter_table(cur: snowflake.connector.cursor.SnowflakeCursor):
-     cur.execute("create table table1 (id int)")
-     cur.execute("alter table table1 add column name varchar(20)")
-     cur.execute("select name from table1")
+ def test_alter_table(dcur: snowflake.connector.cursor.SnowflakeCursor):
+     dcur.execute("create table table1 (id int)")
+     dcur.execute("alter table table1 add column name varchar(20)")
+     dcur.execute("select name from table1")
+     assert dcur.execute("alter table table1 cluster by (name)").fetchall() == [
+         {"status": "Statement executed successfully."}
+     ]


  def test_array_size(cur: snowflake.connector.cursor.SnowflakeCursor):
@@ -433,6 +436,52 @@ def test_describe_table(dcur: snowflake.connector.cursor.DictCursor):
      assert "002003 (42S02): Catalog Error: Table with name THIS_DOES_NOT_EXIST does not exist!" in str(excinfo.value)


+ def test_describe_view(dcur: snowflake.connector.cursor.DictCursor):
+     dcur.execute(
+         """
+         create or replace table example (
+             XVARCHAR VARCHAR
+             -- ,XVARCHAR20 VARCHAR(20) -- TODO: preserve varchar size
+         )
+         """
+     )
+
+     common = {
+         "kind": "COLUMN",
+         "null?": "Y",
+         "default": None,
+         "primary key": "N",
+         "unique key": "N",
+         "check": None,
+         "expression": None,
+         "comment": None,
+         "policy name": None,
+         "privacy domain": None,
+     }
+     expected = [
+         {"name": "XVARCHAR", "type": "VARCHAR(16777216)", **common},
+         # TODO: preserve varchar size
+         # {"name": "XVARCHAR20", "type": "VARCHAR(20)", **common},
+     ]
+
+     dcur.execute("create view v1 as select * from example")
+     assert dcur.execute("describe view v1").fetchall() == expected
+     assert [r.name for r in dcur.description] == [
+         "name",
+         "type",
+         "kind",
+         "null?",
+         "default",
+         "primary key",
+         "unique key",
+         "check",
+         "expression",
+         "comment",
+         "policy name",
+         "privacy domain",
+     ]
+
+
  ## descriptions are needed for ipython-sql/jupysql which describes every statement
  def test_description_create_drop_database(dcur: snowflake.connector.cursor.DictCursor):
      dcur.execute("create database example")
@@ -602,9 +651,10 @@ def test_fetchmany(conn: snowflake.connector.SnowflakeConnection):
      cur.execute("insert into customers values (3, 'Jeremy', 'K')")
      cur.execute("select id, first_name, last_name from customers")

+     # mimic jupysql fetchmany behaviour
      assert cur.fetchmany(2) == [(1, "Jenny", "P"), (2, "Jasper", "M")]
-     assert cur.fetchmany(2) == [(3, "Jeremy", "K")]
-     assert cur.fetchmany(2) == []
+     assert cur.fetchmany(5) == [(3, "Jeremy", "K")]
+     assert cur.fetchmany(5) == []

      with conn.cursor(snowflake.connector.cursor.DictCursor) as cur:
          cur.execute("select id, first_name, last_name from customers")
@@ -612,10 +662,10 @@ def test_fetchmany(conn: snowflake.connector.SnowflakeConnection):
          {"ID": 1, "FIRST_NAME": "Jenny", "LAST_NAME": "P"},
          {"ID": 2, "FIRST_NAME": "Jasper", "LAST_NAME": "M"},
      ]
-     assert cur.fetchmany(2) == [
+     assert cur.fetchmany(5) == [
          {"ID": 3, "FIRST_NAME": "Jeremy", "LAST_NAME": "K"},
      ]
-     assert cur.fetchmany(2) == []
+     assert cur.fetchmany(5) == []


  def test_fetch_pandas_all(cur: snowflake.connector.cursor.SnowflakeCursor):
@@ -39,6 +39,46 @@ def test_info_schema_columns_describe(cur: snowflake.connector.cursor.SnowflakeC
      assert cur.description == expected_metadata


+ def test_describe_view_columns(dcur: snowflake.connector.cursor.DictCursor):
+     cols = [
+         "name",
+         "type",
+         "kind",
+         "null?",
+         "default",
+         "primary key",
+         "unique key",
+         "check",
+         "expression",
+         "comment",
+         "policy name",
+         "privacy domain",
+     ]
+     dcur.execute("describe view information_schema.columns")
+     result: list[dict] = dcur.fetchall()  # type: ignore
+     assert list(result[0].keys()) == cols
+     names = [r["name"] for r in result]
+     # should contain snowflake-specific columns (from _FS_COLUMNS_SNOWFLAKE)
+     assert "comment" in names
+     # fmt: off
+     assert dcur.description[:-1] == [
+         ResultMetadata(name='name', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='type', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='kind', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='null?', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='default', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='primary key', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='unique key', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='check', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='expression', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='comment', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         ResultMetadata(name='policy name', type_code=2, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
+         # TODO: ignore the following, see https://github.com/tekumara/fakesnow/issues/26
+         # ResultMetadata(name='privacy domain', type_code=9, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True)
+     ]
+     # fmt: on
+
+
  def test_info_schema_columns_numeric(cur: snowflake.connector.cursor.SnowflakeCursor):
      # see https://docs.snowflake.com/en/sql-reference/data-types-numeric
      cur.execute(
@@ -211,3 +251,16 @@ def test_info_schema_show_primary_keys_from_table(cur: snowflake.connector.curso

      pk_columns = [result[4] for result in pk_result]
      assert pk_columns == ["ID", "VERSION"]
+
+
+ def test_type_column_is_not_null(cur: snowflake.connector.cursor.SnowflakeCursor) -> None:
+     for table in [
+         "information_schema.databases",
+         "information_schema.views",
+         "information_schema.columns",
+     ]:
+         cur.execute(f"DESCRIBE VIEW {table}")
+         result = cur.fetchall()
+         data_types = [dt for (_, dt, *_) in result]
+         nulls = [dt for dt in data_types if "NULL" in dt]
+         assert not nulls
@@ -0,0 +1,67 @@
+ import threading
+ from collections.abc import Iterator
+ from decimal import Decimal
+ from time import sleep
+ from typing import Callable
+
+ import pytest
+ import snowflake.connector
+ import uvicorn
+
+ import fakesnow.server
+
+
+ @pytest.fixture(scope="session")
+ def unused_port(unused_tcp_port_factory: Callable[[], int]) -> int:
+     # unused_tcp_port_factory is from pytest-asyncio
+     return unused_tcp_port_factory()
+
+
+ @pytest.fixture(scope="session")
+ def server(unused_tcp_port_factory: Callable[[], int]) -> Iterator[dict]:
+     port = unused_tcp_port_factory()
+     server = uvicorn.Server(uvicorn.Config(fakesnow.server.app, port=port, log_level="info"))
+     thread = threading.Thread(target=server.run, name="Server", daemon=True)
+     thread.start()
+
+     while not server.started:
+         sleep(0.1)
+     yield dict(
+         user="fake",
+         password="snow",
+         account="fakesnow",
+         host="localhost",
+         port=port,
+         protocol="http",
+         # disable telemetry
+         session_parameters={"CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED": False},
+     )
+
+     server.should_exit = True
+     # wait for server thread to end
+     thread.join()
+
+
+ def test_server_connect(server: dict) -> None:
+     with (
+         snowflake.connector.connect(
+             **server,
+             # disable infinite retries on error
+             network_timeout=1,
+         ) as conn1,
+         conn1.cursor() as cur,
+     ):
+         cur.execute("select 'hello', to_decimal('12.3456', 10,2)")
+         assert cur.fetchall() == [("hello", Decimal("12.35"))]
+
+
+ def test_server_abort_request(server: dict) -> None:
+     with (
+         snowflake.connector.connect(
+             **server,
+             # triggers an abort request
+             network_timeout=0,
+         ) as conn1,
+         conn1.cursor() as cur,
+     ):
+         cur.execute("select 'will abort'")
@@ -8,6 +8,7 @@ from fakesnow.transforms import (
      SUCCESS_NOP,
      _get_to_number_args,
      alias_in_join,
+     alter_table_strip_cluster_by,
      array_agg,
      array_agg_within_group,
      array_size,
@@ -74,6 +75,13 @@ def test_alias_in_join() -> None:
      )


+ def test_alter_table_strip_cluster_by() -> None:
+     assert (
+         sqlglot.parse_one("alter table table1 cluster by (name)").transform(alter_table_strip_cluster_by).sql()
+         == "SELECT 'Statement executed successfully.' AS status"
+     )
+
+
  def test_array_size() -> None:
      assert (
          sqlglot.parse_one("""select array_size(parse_json('["a","b"]'))""").transform(array_size).sql(dialect="duckdb")
@@ -342,7 +350,7 @@ def test_extract_comment_on_columns() -> None:
      e = sqlglot.parse_one("ALTER TABLE ingredients ALTER amount COMMENT 'tablespoons'").transform(
          extract_comment_on_columns
      )
-     assert e.sql() == "SELECT 'Statement executed successfully.'"
+     assert e.sql() == "SELECT 'Statement executed successfully.' AS status"
      assert e.args["col_comments"] == [("amount", "tablespoons")]

      # TODO
@@ -365,19 +373,19 @@ def test_extract_comment_on_table() -> None:
      assert e.args["table_comment"] == (table1, "foobar")

      e = sqlglot.parse_one("COMMENT ON TABLE table1 IS 'comment1'").transform(extract_comment_on_table)
-     assert e.sql() == "SELECT 'Statement executed successfully.'"
+     assert e.sql() == "SELECT 'Statement executed successfully.' AS status"
      assert e.args["table_comment"] == (table1, "comment1")

      e = sqlglot.parse_one("COMMENT ON TABLE table1 IS $$comment2$$", read="snowflake").transform(
          extract_comment_on_table
      )
-     assert e.sql() == "SELECT 'Statement executed successfully.'"
+     assert e.sql() == "SELECT 'Statement executed successfully.' AS status"
      assert e.args["table_comment"] == (table1, "comment2")

      e = sqlglot.parse_one("ALTER TABLE table1 SET COMMENT = 'comment1'", read="snowflake").transform(
          extract_comment_on_table
      )
-     assert e.sql() == "SELECT 'Statement executed successfully.'"
+     assert e.sql() == "SELECT 'Statement executed successfully.' AS status"
      assert e.args["table_comment"] == (table1, "comment1")

@@ -1,46 +0,0 @@
- from duckdb import DuckDBPyConnection
-
- GLOBAL_DATABASE_NAME = "_fs_global"
- USERS_TABLE_FQ_NAME = f"{GLOBAL_DATABASE_NAME}._fs_users_ext"
-
- # replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
- SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT = f"""
- create table if not exists {USERS_TABLE_FQ_NAME} (
-     name varchar,
-     created_on TIMESTAMPTZ,
-     login_name varchar,
-     display_name varchar,
-     first_name varchar,
-     last_name varchar,
-     email varchar,
-     mins_to_unlock varchar,
-     days_to_expiry varchar,
-     comment varchar,
-     disabled varchar,
-     must_change_password varchar,
-     snowflake_lock varchar,
-     default_warehouse varchar,
-     default_namespace varchar,
-     default_role varchar,
-     default_secondary_roles varchar,
-     ext_authn_duo varchar,
-     ext_authn_uid varchar,
-     mins_to_bypass_mfa varchar,
-     owner varchar,
-     last_success_login TIMESTAMPTZ,
-     expires_at_time TIMESTAMPTZ,
-     locked_until_time TIMESTAMPTZ,
-     has_password varchar,
-     has_rsa_public_key varchar,
- )
- """
-
-
- def create_global_database(conn: DuckDBPyConnection) -> None:
-     """Create a "global" database for storing objects which span database.
-
-     Including (but not limited to):
-     - Users
-     """
-     conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
-     conn.execute(SQL_CREATE_INFORMATION_SCHEMA_USERS_TABLE_EXT)