fakesnow 0.9.34__py3-none-any.whl → 0.9.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fakesnow/__init__.py CHANGED
@@ -91,3 +91,54 @@ def patch(
     finally:
         stack.close()
         fs.duck_conn.close()
+
+
+@contextmanager
+def server(port: int | None = None, session_parameters: dict[str, str | int | bool] | None = None) -> Iterator[dict]:
+    """Start a fake snowflake server in a separate thread and yield connection kwargs.
+
+    Args:
+        port (int | None, optional): Port to run the server on. If None, an available port is chosen. Defaults to None.
+
+    Yields:
+        Iterator[dict]: Connection parameters for the fake snowflake server.
+    """
+    import socket
+    import threading
+    from time import sleep
+
+    import uvicorn
+
+    import fakesnow.server
+
+    # find an unused TCP port between 1024-65535
+    if not port:
+        with contextlib.closing(socket.socket(type=socket.SOCK_STREAM)) as sock:
+            sock.bind(("127.0.0.1", 0))
+            port = sock.getsockname()[1]
+
+    assert port
+    server = uvicorn.Server(uvicorn.Config(fakesnow.server.app, port=port, log_level="info"))
+    thread = threading.Thread(target=server.run, name="Server", daemon=True)
+    thread.start()
+
+    while not server.started:
+        sleep(0.1)
+
+    try:
+        yield dict(
+            user="fake",
+            password="snow",
+            account="fakesnow",
+            host="localhost",
+            port=port,
+            protocol="http",
+            # disable telemetry
+            session_parameters={"CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED": False} | (session_parameters or {}),
+            # disable retries on error
+            network_timeout=1,
+        )
+    finally:
+        server.should_exit = True
+        # wait for server thread to end
+        thread.join()
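
A minimal usage sketch of the new `server()` context manager above (mirroring the README section later in this diff), assuming fakesnow 0.9.36 is installed with the `server` extra:

```python
import fakesnow
import snowflake.connector

# start the fake server on a free port in a background thread
with fakesnow.server() as conn_kwargs:
    # conn_kwargs contains user, password, account, host, port, protocol, etc.
    with snowflake.connector.connect(**conn_kwargs) as conn:
        print(conn.cursor().execute("SELECT 'Hello fake server!'").fetchone())
# on exit the server is asked to stop and its thread is joined
```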
fakesnow/conn.py CHANGED
@@ -6,8 +6,7 @@ from pathlib import Path
 from types import TracebackType
 from typing import Any
 
-import snowflake.connector.converter
-import snowflake.connector.errors
+import snowflake.connector
 import sqlglot
 from duckdb import DuckDBPyConnection
 from snowflake.connector.cursor import DictCursor, SnowflakeCursor
fakesnow/converter.py CHANGED
@@ -4,7 +4,7 @@ import binascii
 import datetime
 from datetime import date, time, timezone
 
-# convert bindings provided as strings to the server into python types
+# convert server bindings from strings into python types
 
 
 def from_binding(binding: dict[str, str]) -> int | bytes | bool | date | time | datetime.datetime | str:
fakesnow/cursor.py CHANGED
@@ -46,6 +46,12 @@ SQL_DROPPED = Template("SELECT '${name} successfully dropped.' as 'status'")
 SQL_INSERTED_ROWS = Template("SELECT ${count} as 'number of rows inserted'")
 SQL_UPDATED_ROWS = Template("SELECT ${count} as 'number of rows updated', 0 as 'number of multi-joined rows updated'")
 SQL_DELETED_ROWS = Template("SELECT ${count} as 'number of rows deleted'")
+SQL_COPY_ROWS = Template(
+    "SELECT '${file}' as file, 'LOADED' as status, ${count} as rows_parsed, "
+    "${count} as rows_loaded, 1 as error_limit, 0 as errors_seen, "
+    "NULL as first_error, NULL as first_error_line, NULL as first_error_character, "
+    "NULL as first_error_column_name"
+)
 
 
 class FakeSnowflakeCursor:
@@ -166,6 +172,9 @@ class FakeSnowflakeCursor:
             # strip highlight for better readability, TODO: show pointer to start of error
             msg = str(e).replace("\x1b[4m", "").replace("\x1b[0m", "")
             raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=1003, sqlstate="42000") from None
+        except NotImplementedError as e:
+            msg = f"{e} not implemented. Please raise an issue via https://github.com/tekumara/fakesnow/issues/new"
+            raise snowflake.connector.errors.ProgrammingError(msg=msg, errno=9999, sqlstate="99999") from e
 
     def check_db_and_schema(self, expression: exp.Expression) -> None:
         no_database, no_schema = checks.is_unqualified_table_expression(expression)
@@ -238,7 +247,7 @@ class FakeSnowflakeCursor:
             .transform(transforms.show_procedures)
             .transform(transforms.show_warehouses)
            .transform(lambda e: transforms.show_schemas(e, self._conn.database))
-            .transform(lambda e: transforms.show_objects_tables(e, self._conn.database))
+            .transform(lambda e: transforms.show_tables_etc(e, self._conn.database))
            .transform(lambda e: transforms.show_columns(e, self._conn.database))
             # TODO collapse into a single show_keys function
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="PRIMARY"))
@@ -246,6 +255,7 @@ class FakeSnowflakeCursor:
             .transform(lambda e: transforms.show_keys(e, self._conn.database, kind="FOREIGN"))
             .transform(transforms.show_users)
             .transform(transforms.create_user)
+            .transform(transforms.copy_into)
             .transform(transforms.sha256)
             .transform(transforms.create_clone)
             .transform(transforms.alias_in_join)
@@ -297,6 +307,10 @@ class FakeSnowflakeCursor:
             raise snowflake.connector.errors.DatabaseError(msg=e.args[0], errno=250002, sqlstate="08003") from None
         except duckdb.ParserException as e:
             raise snowflake.connector.errors.ProgrammingError(msg=e.args[0], errno=1003, sqlstate="42000") from None
+        except duckdb.HTTPException as e:
+            raise snowflake.connector.errors.ProgrammingError(msg=e.args[0], errno=91016, sqlstate="22000") from None
+        except duckdb.ConversionException as e:
+            raise snowflake.connector.errors.ProgrammingError(msg=e.args[0], errno=100038, sqlstate="22018") from None
 
         affected_count = None
 
@@ -316,6 +330,10 @@ class FakeSnowflakeCursor:
             self._duck_conn.execute(info_schema.per_db_creation_sql(create_db_name))
             result_sql = SQL_CREATED_DATABASE.substitute(name=create_db_name)
 
+        elif copy_from := transformed.args.get("copy_from"):
+            (affected_count,) = self._duck_conn.fetchall()[0]
+            result_sql = SQL_COPY_ROWS.substitute(count=affected_count, file=copy_from)
+
         elif cmd == "INSERT":
             (affected_count,) = self._duck_conn.fetchall()[0]
             result_sql = SQL_INSERTED_ROWS.substitute(count=affected_count)
@@ -403,7 +421,7 @@ class FakeSnowflakeCursor:
     ) -> FakeSnowflakeCursor:
         if isinstance(seqparams, dict):
             # see https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-api
-            raise NotImplementedError("dict params not supported yet")
+            raise NotImplementedError("executemany dict params")
 
         # TODO: support insert optimisations
         # the snowflake connector will optimise inserts into a single query
fakesnow/fixtures.py CHANGED
@@ -1,4 +1,5 @@
 from collections.abc import Iterator
+from typing import Any
 
 import pytest
 
@@ -11,6 +12,12 @@ def _fakesnow() -> Iterator[None]:
     yield
 
 
+@pytest.fixture(scope="session")
+def fakesnow_server() -> Iterator[dict[str, Any]]:
+    with fakesnow.server() as conn_kwargs:
+        yield conn_kwargs
+
+
 @pytest.fixture
 def _fakesnow_no_auto_create() -> Iterator[None]:
     with fakesnow.patch(create_database_on_connect=False, create_schema_on_connect=False):
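
The `fakesnow_server` fixture added above yields the same connection kwargs as `fakesnow.server()`. A sketch of a test that uses it (a lightly adapted version of the README example later in this diff), assuming `pytest_plugins = "fakesnow.fixtures"` is set in conftest.py:

```python
import snowflake.connector


def test_with_server(fakesnow_server: dict) -> None:
    # fakesnow_server contains connection kwargs (host, port, etc.)
    with snowflake.connector.connect(**fakesnow_server) as conn:
        cur = conn.cursor()
        cur.execute("SELECT 1")
        assert cur.fetchone() == (1,)
```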
fakesnow/info_schema.py CHANGED
@@ -34,8 +34,8 @@ create table if not exists _fs_global._fs_information_schema._fs_columns_ext (
 """
 
 # replicates the output structure of https://docs.snowflake.com/en/sql-reference/sql/show-users
-SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE_EXT = """
-create table if not exists _fs_global._fs_information_schema._fs_users_ext (
+SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE = """
+create table if not exists _fs_global._fs_information_schema._fs_users (
     name varchar,
     created_on TIMESTAMPTZ,
     login_name varchar,
@@ -196,13 +196,13 @@ def per_db_creation_sql(catalog: str) -> str:
     """
 
 
-def fs_global_creation_sql(catalog: str) -> str:
+def fs_global_creation_sql() -> str:
     return f"""
     {SQL_CREATE_GLOBAL_FS_INFORMATION_SCHEMA};
     {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_TABLES_EXT};
     {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_EXT};
     {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_COLUMNS_VIEW};
-    {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE_EXT};
+    {SQL_CREATE_GLOBAL_INFORMATION_SCHEMA_USERS_TABLE};
     """
 
 
fakesnow/instance.py CHANGED
@@ -7,6 +7,7 @@ import duckdb
 
 import fakesnow.fakes as fakes
 from fakesnow import info_schema
+from fakesnow.transforms import show
 
 GLOBAL_DATABASE_NAME = "_fs_global"
 
@@ -28,8 +29,9 @@ class FakeSnow:
 
         # create a "global" database for storing objects which span databases.
         self.duck_conn.execute(f"ATTACH IF NOT EXISTS ':memory:' AS {GLOBAL_DATABASE_NAME}")
-        # create the info schema extensions
-        self.duck_conn.execute(info_schema.fs_global_creation_sql(GLOBAL_DATABASE_NAME))
+        # create the info schema extensions and show views
+        self.duck_conn.execute(info_schema.fs_global_creation_sql())
+        self.duck_conn.execute(show.fs_global_creation_sql())
 
         # use UTC instead of local time zone for consistent testing
         self.duck_conn.execute("SET GLOBAL TimeZone = 'UTC'")
fakesnow/server.py CHANGED
@@ -59,7 +59,10 @@ async def login_request(request: Request) -> JSONResponse:
         {
             "data": {
                 "token": token,
-                "parameters": [{"name": "AUTOCOMMIT", "value": True}],
+                "parameters": [
+                    {"name": "AUTOCOMMIT", "value": True},
+                    {"name": "CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY", "value": 3600},
+                ],
             },
             "success": True,
         }
@@ -78,12 +81,12 @@
 
     sql_text = body_json["sqlText"]
 
-    params = None
-
     if bindings := body_json.get("bindings"):
-        # Convert parameters like {'1': {'type': 'FIXED', 'value': '1'}, ...} to tuple (1, ...)
+        # Convert parameters like {'1': {'type': 'FIXED', 'value': '10'}, ...} to tuple (10, ...)
         params = tuple(from_binding(bindings[str(pos)]) for pos in range(1, len(bindings) + 1))
         logger.debug(f"Bindings: {params}")
+    else:
+        params = None
 
     try:
         # only a single sql statement is sent at a time by the python snowflake connector
@@ -121,6 +124,9 @@
     return JSONResponse(
         {
             "data": {
+                "parameters": [
+                    {"name": "TIMEZONE", "value": "Etc/UTC"},
+                ],
                 "rowtype": rowtype,
                 "rowsetBase64": rowset_b64,
                 "total": cur._rowcount,  # noqa: SLF001
fakesnow/transforms/__init__.py CHANGED
@@ -7,15 +7,16 @@ from typing import ClassVar, cast
 import sqlglot
 from sqlglot import exp
 
+from fakesnow.transforms.copy_into import copy_into as copy_into
 from fakesnow.transforms.merge import merge as merge
 from fakesnow.transforms.show import (
     show_columns as show_columns,
     show_databases as show_databases,
     show_functions as show_functions,
     show_keys as show_keys,
-    show_objects_tables as show_objects_tables,
     show_procedures as show_procedures,
     show_schemas as show_schemas,
+    show_tables_etc as show_tables_etc,
     show_users as show_users,
     show_warehouses as show_warehouses,
 )
@@ -837,7 +838,7 @@ def regex_replace(expression: exp.Expression) -> exp.Expression:
     if len(expression.args) > 3:
         # see https://docs.snowflake.com/en/sql-reference/functions/regexp_replace
         raise NotImplementedError(
-            "REGEXP_REPLACE with additional parameters (eg: <position>, <occurrence>, <parameters>) not supported"
+            "REGEXP_REPLACE with additional parameters (eg: <position>, <occurrence>, <parameters>)"
         )
 
     # pattern: snowflake requires escaping backslashes in single-quoted string constants, but duckdb doesn't
@@ -1316,9 +1317,9 @@ def create_user(expression: exp.Expression) -> exp.Expression:
         if sub_exp.upper().startswith("USER"):
             _, name, *ignored = sub_exp.split(" ")
             if ignored:
-                raise NotImplementedError(f"`CREATE USER` with {ignored} not yet supported")
+                raise NotImplementedError(f"`CREATE USER` with {ignored}")
             return sqlglot.parse_one(
-                f"INSERT INTO _fs_global._fs_information_schema._fs_users_ext (name) VALUES ('{name}')", read="duckdb"
+                f"INSERT INTO _fs_global._fs_information_schema._fs_users (name) VALUES ('{name}')", read="duckdb"
             )
 
     return expression
fakesnow/transforms/copy_into.py ADDED
@@ -0,0 +1,163 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field, replace
+from pathlib import PurePath
+from typing import Protocol
+from urllib.parse import urlparse, urlunparse
+
+import snowflake.connector.errors
+from sqlglot import exp
+from typing_extensions import Self
+
+
+def copy_into(expr: exp.Expression) -> exp.Expression:
+    if not isinstance(expr, exp.Copy):
+        return expr
+
+    schema = expr.this
+
+    columns = [exp.Column(this=exp.Identifier(this=f"column{i}")) for i in range(len(schema.expressions))] or [
+        exp.Column(this=exp.Star())
+    ]
+
+    params = expr.args.get("params", [])
+    # TODO: remove columns
+    file_type_handler = _handle_params(params, [c.name for c in columns])
+
+    # the FROM expression
+    source = expr.args["files"][0].this
+    assert isinstance(source, exp.Literal), f"{source.__class__} is not a exp.Literal"
+
+    if len(file_type_handler.files) > 1:
+        raise NotImplementedError("Multiple files not currently supported")
+    file = file_type_handler.files[0]
+
+    scheme, netloc, path, params, query, fragment = urlparse(source.name)
+    if not scheme:
+        raise snowflake.connector.errors.ProgrammingError(
+            msg=f"SQL compilation error:\ninvalid URL prefix found in: '{source.name}'", errno=1011, sqlstate="42601"
+        )
+    path = str(PurePath(path) / file.name)
+    url = urlunparse((scheme, netloc, path, params, query, fragment))
+
+    return exp.Insert(
+        this=schema,
+        expression=exp.Select(expressions=columns).from_(exp.Table(this=file_type_handler.read_expression(url))),
+        copy_from=url,
+    )
+
+
+def _handle_params(params: list[exp.CopyParameter], columns: list[str]) -> FileTypeHandler:
+    file_type_handler = None
+    force = False
+    files = []
+    for param in params:
+        var = param.this.name
+        if var == "FILE_FORMAT":
+            if file_type_handler:
+                raise ValueError(params)
+
+            var_type = next((e.args["value"].this for e in param.expressions if e.this.this == "TYPE"), None)
+            if not var_type:
+                raise NotImplementedError("FILE_FORMAT without TYPE is not currently implemented")
+
+            if var_type == "CSV":
+                file_type_handler = handle_csv(param.expressions, columns)
+            else:
+                raise NotImplementedError(f"{var_type} FILE_FORMAT is not currently implemented")
+
+        elif var == "FILES":
+            files = param.expression.expressions if isinstance(param.expression, exp.Tuple) else [param.expression.this]
+        elif var == "FORCE":
+            force = True
+            pass
+        else:
+            raise ValueError(f"Unknown copy parameter: {param.this}")
+
+    if not force:
+        raise NotImplementedError("COPY INTO with FORCE=false (default) is not currently implemented")
+
+    if not files:
+        raise NotImplementedError("COPY INTO without FILES is not currently implemented")
+
+    if not file_type_handler:
+        # default to CSV
+        file_type_handler = handle_csv([], columns)
+
+    file_type_handler = file_type_handler.with_files(files)
+    return file_type_handler
+
+
+def handle_csv(expressions: list[exp.Property], columns: list[str]) -> ReadCSV:
+    skip_header = ReadCSV.skip_header
+    quote = ReadCSV.quote
+    delimiter = ReadCSV.delimiter
+
+    for expression in expressions:
+        exp_type = expression.name
+        if exp_type in {"TYPE"}:
+            continue
+
+        elif exp_type == "SKIP_HEADER":
+            skip_header = True
+        elif exp_type == "FIELD_OPTIONALLY_ENCLOSED_BY":
+            quote = expression.args["value"].this
+        elif exp_type == "FIELD_DELIMITER":
+            delimiter = expression.args["value"].this
+        else:
+            raise NotImplementedError(f"{exp_type} is not currently implemented")
+
+    return ReadCSV(
+        skip_header=skip_header,
+        quote=quote,
+        delimiter=delimiter,
+        columns=columns,
+    )
+
+
+@dataclass
+class FileTypeHandler(Protocol):
+    files: list = field(default_factory=list)
+
+    def read_expression(self, url: str) -> exp.Expression: ...
+
+    def with_files(self, files: list) -> Self:
+        return replace(self, files=files)
+
+    @staticmethod
+    def make_eq(name: str, value: list | str | int | bool) -> exp.EQ:
+        if isinstance(value, list):
+            expression = exp.array(*[exp.Literal(this=str(v), is_string=isinstance(v, str)) for v in value])
+        elif isinstance(value, bool):
+            expression = exp.Boolean(this=value)
+        else:
+            expression = exp.Literal(this=str(value), is_string=isinstance(value, str))
+
+        return exp.EQ(this=exp.Literal(this=name, is_string=False), expression=expression)
+
+
+@dataclass
+class ReadCSV(FileTypeHandler):
+    skip_header: bool = False
+    quote: str | None = None
+    delimiter: str = ","
+    columns: list[str] = field(default_factory=list)
+
+    def read_expression(self, url: str) -> exp.Expression:
+        args = []
+
+        # don't parse header and use as column names, keep them as column0, column1, etc
+        args.append(self.make_eq("header", False))
+
+        if self.skip_header:
+            args.append(self.make_eq("skip", 1))
+
+        if self.quote:
+            quote = self.quote.replace("'", "''")
+            args.append(self.make_eq("quote", quote))
+
+        if self.delimiter and self.delimiter != ",":
+            delimiter = self.delimiter.replace("'", "''")
+            args.append(self.make_eq("sep", delimiter))
+
+        return exp.func("read_csv", exp.Literal(this=url, is_string=True), *args)
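
To make the rewrite concrete: `copy_into` replaces a `COPY INTO` with an `INSERT ... SELECT ... FROM read_csv(...)` that duckdb can run, and `ReadCSV.read_expression` builds the `read_csv` call. A hedged sketch of that helper in isolation; the handler arguments stand in for a `FILE_FORMAT` with `SKIP_HEADER`, `FIELD_OPTIONALLY_ENCLOSED_BY` and `FIELD_DELIMITER` set, and the rendered SQL in the comment is approximate, not verbatim output:

```python
from fakesnow.transforms.copy_into import ReadCSV

# a handler like the one handle_csv builds from the CSV FILE_FORMAT options
handler = ReadCSV(skip_header=True, quote='"', delimiter="|", columns=["column0", "column1"])

read_expr = handler.read_expression("s3://mybucket/data/file1.csv")
print(read_expr.sql(dialect="duckdb"))
# roughly: READ_CSV('s3://mybucket/data/file1.csv', header = FALSE, skip = 1, quote = '"', sep = '|')
```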
fakesnow/transforms/show.py CHANGED
@@ -6,6 +6,14 @@ import sqlglot
 from sqlglot import exp
 
 
+def fs_global_creation_sql() -> str:
+    return f"""
+    {SQL_CREATE_VIEW_SHOW_OBJECTS};
+    {SQL_CREATE_VIEW_SHOW_TABLES};
+    {SQL_CREATE_VIEW_SHOW_VIEWS};
+    """
+
+
 def show_columns(
     expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
 ) -> exp.Expression:
@@ -283,22 +291,99 @@ def show_keys(
 
             statement += f"AND table_name = '{table.name}' "
         else:
-            raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind} not yet supported")
+            raise NotImplementedError(f"SHOW PRIMARY KEYS with {scope_kind}")
         return sqlglot.parse_one(statement)
     return expression
 
 
-def show_objects_tables(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
-    """Transform SHOW OBJECTS/TABLES to a query against the information_schema.tables table.
+# see https://docs.snowflake.com/en/sql-reference/sql/show-objects
+SQL_CREATE_VIEW_SHOW_OBJECTS = """
+create view if not exists _fs_global._fs_information_schema._fs_show_objects as
+select
+    to_timestamp(0)::timestamptz as created_on,
+    table_name as name,
+    case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind',
+    table_catalog as database_name,
+    table_schema as schema_name,
+    '' as comment,
+    '' as cluster_by,
+    -- TODO: implement rows and bytes as rows * 1024
+    0 as rows,
+    0 as bytes,
+    'SYSADMIN' as owner,
+    1 as retention_time,
+    'ROLE' as owner_role_type,
+    null as budget,
+    'N' as is_hybrid,
+    'N' as is_dynamic
+from information_schema.tables
+where not (table_schema == '_fs_information_schema')
+"""
 
-    See https://docs.snowflake.com/en/sql-reference/sql/show-objects
-    https://docs.snowflake.com/en/sql-reference/sql/show-tables
-    """
+# see https://docs.snowflake.com/en/sql-reference/sql/show-tables
+SQL_CREATE_VIEW_SHOW_TABLES = """
+create view if not exists _fs_global._fs_information_schema._fs_show_tables as
+select
+    to_timestamp(0)::timestamptz as created_on,
+    table_name as name,
+    'TABLE' as kind,
+    table_catalog as database_name,
+    table_schema as schema_name,
+    '' as comment,
+    '' as cluster_by,
+    -- TODO: implement rows and bytes as rows * 1024
+    0 as rows,
+    0 as bytes,
+    'SYSADMIN' as owner,
+    1 as retention_time,
+    'OFF' as automatic_clustering,
+    'OFF' as change_tracking,
+    'OFF' as search_optimization,
+    null as search_optimization_progress,
+    null as search_optimization_bytes,
+    'N' as is_external,
+    'N' as enable_schema_evolution,
+    'ROLE' as owner_role_type,
+    'N' as is_event,
+    null as budget,
+    'N' as is_hybrid,
+    'N' as is_iceberg,
+    'N' as is_dynamic,
+    'N' as is_immutable
+from information_schema.tables
+where not (table_schema == '_fs_information_schema')
+and table_type = 'BASE TABLE'
+"""
+
+# see https://docs.snowflake.com/en/sql-reference/sql/show-views
+SQL_CREATE_VIEW_SHOW_VIEWS = """
+create view if not exists _fs_global._fs_information_schema._fs_show_views as
+select
+    to_timestamp(0)::timestamptz as created_on,
+    table_name as name,
+    '' as reserved,
+    table_catalog as database_name,
+    table_schema as schema_name,
+    'SYSADMIN' as owner,
+    '' as comment,
+    view_definition as text,
+    false as is_secure,
+    false as is_materialized,
+    'ROLE' as owner_role_type,
+    'OFF' as change_tracking
+from information_schema.views
+where not table_catalog in ('system')
+and not table_schema in ('main', '_fs_information_schema')
+"""
+
+
+def show_tables_etc(expression: exp.Expression, current_database: str | None = None) -> exp.Expression:
+    """Transform SHOW OBJECTS/TABLES/VIEWS to a query against the _fs_information_schema views."""
     if not (
         isinstance(expression, exp.Show)
         and isinstance(expression.this, str)
         and (show := expression.this.upper())
-        and show in {"OBJECTS", "TABLES"}
+        and show in {"OBJECTS", "TABLES", "VIEWS"}
     ):
         return expression
 
@@ -316,76 +401,28 @@ def show_objects_tables(expression: exp.Expression, current_database: str | None
         catalog = None
         schema = None
 
-    columns = [
-        "to_timestamp(0)::timestamptz as 'created_on'",
-        "table_name as 'name'",
-        "case when table_type='BASE TABLE' then 'TABLE' else table_type end as 'kind'",
-        "table_catalog as 'database_name'",
-        "table_schema as 'schema_name'",
-    ]
-    if not expression.args["terse"]:
-        if show == "OBJECTS":
-            columns.extend(
-                [
-                    "'' as 'comment'",
-                    "'' as 'cluster_by'",
-                    # TODO: implement rows and bytes as rows * 1024
-                    "0 as 'rows'",
-                    "0 as 'bytes'",
-                    "'SYSADMIN' as 'owner'",
-                    "1 as 'retention_time'",
-                    "'ROLE' as 'owner_role_type'",
-                    "null as 'budget'",
-                    "'N' as 'is_hybrid'",
-                    "'N' as 'is_dynamic'",
-                ]
-            )
-        else:
-            # show == "TABLES"
-            columns.extend(
-                [
-                    "'' as 'comment'",
-                    "'' as 'cluster_by'",
-                    # TODO: implement rows and bytes as rows * 1024
-                    "0 as 'rows'",
-                    "0 as 'bytes'",
-                    "'SYSADMIN' as 'owner'",
-                    "1 as 'retention_time'",
-                    "'OFF' as 'automatic_clustering'",
-                    "'OFF' as 'change_tracking'",
-                    "'OFF' as 'search_optimization'",
-                    "null as 'search_optimization_progress'",
-                    "null as 'search_optimization_bytes'",
-                    "'N' as 'is_external'",
-                    "'N' as 'enable_schema_evolution'",
-                    "'ROLE' as 'owner_role_type'",
-                    "'N' as 'is_event'",
-                    "null as 'budget'",
-                    "'N' as 'is_hybrid'",
-                    "'N' as 'is_iceberg'",
-                    "'N' as 'is_dynamic'",
-                    "'N' as 'is_immutable'",
-                ]
-            )
-
+    if expression.args["terse"] and show == "VIEWS":
+        columns = ["created_on, name, 'VIEW' as kind, database_name, schema_name"]
+    elif expression.args["terse"]:
+        columns = ["created_on, name, kind, database_name, schema_name"]
+    else:
+        columns = ["*"]
     columns_clause = ", ".join(columns)
 
-    where = ["not (table_schema == '_fs_information_schema')"]  # exclude fakesnow's internal schemas
-    if show == "TABLES":
-        where.append("table_type = 'BASE TABLE'")
+    where = ["1=1"]
     if catalog:
-        where.append(f"table_catalog = '{catalog}'")
+        where.append(f"database_name = '{catalog}'")
    if schema:
-        where.append(f"table_schema = '{schema}'")
+        where.append(f"schema_name = '{schema}'")
     if (like := expression.args.get("like")) and isinstance(like, exp.Expression):
-        where.append(f"table_name ilike {like.sql()}")
+        where.append(f"name ilike {like.sql()}")
     where_clause = " AND ".join(where)
 
     limit = limit.sql() if (limit := expression.args.get("limit")) and isinstance(limit, exp.Expression) else ""
 
     query = f"""
     SELECT {columns_clause}
-    from information_schema.tables
+    from _fs_global._fs_information_schema._fs_show_{show.lower()}
     where {where_clause}
     {limit}
     """
@@ -471,6 +508,6 @@ def show_users(expression: exp.Expression) -> exp.Expression:
     https://docs.snowflake.com/en/sql-reference/sql/show-users
     """
     if isinstance(expression, exp.Show) and isinstance(expression.this, str) and expression.this.upper() == "USERS":
-        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users_ext", read="duckdb")
+        return sqlglot.parse_one("SELECT * FROM _fs_global._fs_information_schema._fs_users", read="duckdb")
 
     return expression
fakesnow/variables.py CHANGED
@@ -45,7 +45,7 @@ class Variables:
                 self._set(name, value)
             else:
                 # Haven't been able to produce this in tests yet due to UNSET being parsed as an Alias expression.
-                raise NotImplementedError("UNSET not supported yet")
+                raise NotImplementedError("UNSET")
         elif self._is_unset_expression(expr):  # Unfortunately UNSET varname; is parsed as an Alias expression :(
             alias = expr.args.get("alias")
             assert alias, "UNSET without value in alias attribute is unexpected."
fakesnow-0.9.36.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fakesnow
-Version: 0.9.34
+Version: 0.9.36
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
                            Version 2.0, January 2004
@@ -215,6 +215,7 @@ Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
 Requires-Dist: sqlglot~=26.12.1
 Provides-Extra: dev
+Requires-Dist: boto3-stubs[s3,sts]; extra == "dev"
 Requires-Dist: build~=1.0; extra == "dev"
 Requires-Dist: dirty-equals; extra == "dev"
 Requires-Dist: pandas-stubs; extra == "dev"
@@ -222,10 +223,11 @@ Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra ==
 Requires-Dist: pre-commit~=4.0; extra == "dev"
 Requires-Dist: pyarrow-stubs==17.19; extra == "dev"
 Requires-Dist: pytest~=8.0; extra == "dev"
-Requires-Dist: pytest-asyncio; extra == "dev"
 Requires-Dist: ruff~=0.11.0; extra == "dev"
 Requires-Dist: twine~=6.0; extra == "dev"
 Requires-Dist: snowflake-sqlalchemy~=1.7.0; extra == "dev"
+Requires-Dist: boto3; extra == "dev"
+Requires-Dist: moto[server]>=5; extra == "dev"
 Provides-Extra: notebook
 Requires-Dist: duckdb-engine; extra == "notebook"
 Requires-Dist: ipykernel; extra == "notebook"
@@ -242,7 +244,7 @@ Dynamic: license-file
 [![PyPI](https://img.shields.io/pypi/v/fakesnow?color=violet)](https://pypi.org/project/fakesnow/)
 [![PyPI - Downloads](https://img.shields.io/pypi/dm/fakesnow?color=violet)](https://pypi.org/project/fakesnow/)
 
-Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector). Run and mock Snowflake DB locally.
+Run, mock and test fake Snowflake databases locally.
 
 ## Install
 
@@ -250,9 +252,21 @@ Fake [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/p
 pip install fakesnow
 ```
 
+Or to install with the server:
+
+```
+pip install fakesnow[server]
+```
+
 ## Usage
 
-Run script.py with fakesnow:
+fakesnow offers two main approaches for faking Snowflake: in-process patching of the [Snowflake Connector for Python](https://docs.snowflake.com/en/user-guide/python-connector) or a standalone HTTP server.
+
+Patching only applies to the current Python process. If a subprocess is spawned it won't be patched. For subprocesses, or for non-Python clients, use the server instead.
+
+### In-process patching
+
+To run script.py with patching:
 
 ```shell
 fakesnow script.py
@@ -266,9 +280,9 @@ fakesnow -m pytest
 
 `fakesnow` executes `fakesnow.patch` before running the script or module.
 
-### fakesnow.patch
+#### Use fakesnow.patch in your code
 
-To use fakesnow within your code:
+Alternatively, use fakesnow.patch in your code:
 
 ```python
 import fakesnow
@@ -280,12 +294,16 @@ with fakesnow.patch():
     print(conn.cursor().execute("SELECT 'Hello fake world!'").fetchone())
 ```
 
-The following imports are automatically patched:
+#### What gets patched
+
+The following standard imports are automatically patched:
 
 - `import snowflake.connector.connect`
 - `import snowflake.connector.pandas_tools.write_pandas`
 
-To patch modules that use the `from ... import` syntax, manually specify them, eg: if _mymodule.py_ has the import:
+#### Handling "from ... import" Statements
+
+To patch modules that use the `from ... import` syntax, you need to manually specify them, eg: if _mymodule.py_ contains:
 
 ```python
 from snowflake.connector.pandas_tools import write_pandas
@@ -298,33 +316,77 @@ with fakesnow.patch("mymodule.write_pandas"):
     ...
 ```
 
-By default databases are in-memory. To persist databases between processes, specify a databases path:
+#### Database Persistence
+
+By default, databases are in-memory and will be lost when the process ends. To persist databases between processes, specify a databases path:
 
 ```python
 with fakesnow.patch(db_path="databases/"):
     ...
 ```
 
+### Run fakesnow as a server
+
+For scenarios where patching won't work (like subprocesses or non-Python clients), you can run fakesnow as an HTTP server:
+
+```python
+import fakesnow
+import snowflake.connector
+
+# Start the fakesnow server in a context manager
+# This yields connection kwargs (host, port, etc.)
+with fakesnow.server() as conn_kwargs:
+    # Connect to the fakesnow server using the yielded kwargs
+    with snowflake.connector.connect(**conn_kwargs) as conn:
+        print(conn.cursor().execute("SELECT 'Hello fake server!'").fetchone())
+
+# The server is automatically stopped when exiting the context manager
+```
+
+This starts an HTTP server in its own thread listening for requests on localhost on an available random port.
+The server accepts any username/password combination.
+
+#### Server Configuration Options
+
+By default, the server uses a single in-memory database for its lifetime. To configure database persistence or isolation:
+
+```python
+# Databases will be saved to the "databases/" directory
+with fakesnow.server(session_parameters={"FAKESNOW_DB_PATH": "databases/"}):
+    ...
+
+# Each connection gets its own isolated in-memory database
+with fakesnow.server(session_parameters={"FAKESNOW_DB_PATH": ":isolated:"}):
+    ...
+```
+
+To specify a port for the server:
+
+```python
+with fakesnow.server(port=12345) as conn_kwargs:
+    ...
+```
+
 ### pytest fixtures
 
-pytest [fixtures](fakesnow/fixtures.py) are provided for testing. Example _conftest.py_:
+fakesnow provides [fixtures](fakesnow/fixtures.py) for easier test integration. Here's an example _conftest.py_ using them:
 
 ```python
 from typing import Iterator
 
-import fakesnow.fixtures
 import pytest
 
-pytest_plugins = fakesnow.fixtures.__name__
+pytest_plugins = "fakesnow.fixtures"
 
 @pytest.fixture(scope="session", autouse=True)
 def setup(_fakesnow_session: None) -> Iterator[None]:
     # the standard imports are now patched
-    ...
+    # Add any additional setup here
     yield
+    # Add any teardown here
 ```
 
-Or with `from ... import` patch targets:
+For code that uses `from ... import` statements:
 
 ```python
 from typing import Iterator
@@ -338,34 +400,56 @@ def _fakesnow_session() -> Iterator[None]:
     yield
 ```
 
+To start a fakesnow server instance, use the `fakesnow_server` session fixture:
+
+```python
+import snowflake.connector
+
+def test_with_server(fakesnow_server: dict):
+    # fakesnow_server contains connection kwargs (host, port, etc.)
+    with snowflake.connector.connect(**fakesnow_server) as conn:
+        conn.cursor().execute("SELECT 1")
+        assert conn.cursor().fetchone() == (1,)
+```
+
 ## Implementation coverage
 
-- [x] cursors and standard SQL
-- [x] [get_result_batches()](https://docs.snowflake.com/en/user-guide/python-connector-api#get_result_batches)
-- [x] information schema
-- [x] multiple databases
-- [x] [parameter binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
-- [x] table comments
-- [x] [write_pandas(..)](https://docs.snowflake.com/en/user-guide/python-connector-api#write_pandas)
-- [ ] [access control](https://docs.snowflake.com/en/user-guide/security-access-control-overview)
-- [ ] standalone/out of process api/support for faking non-python connectors
-- [ ] [stored procedures](https://docs.snowflake.com/en/sql-reference/stored-procedures)
+Fully supported:
 
-Partial support
+- Standard SQL operations and cursors
+- Information schema queries
+- Multiple databases
+- [Parameter binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data) in queries
+- Table comments
+- Pandas integration including [write_pandas(..)](https://docs.snowflake.com/en/user-guide/python-connector-api#write_pandas) (not available via the server yet)
+- Result batch retrieval via [get_result_batches()](https://docs.snowflake.com/en/user-guide/python-connector-api#get_result_batches)
+- HTTP server for non-Python connectors
 
-- [x] date functions
-- [x] regex functions
-- [x] semi-structured data
-- [x] tags
-- [x] user management (See [tests/test_users.py](tests/test_users.py))
+Partially supported:
 
-For more detail see [tests/test_fakes.py](tests/test_fakes.py)
+- Date functions
+- Regular expression functions
+- Semi-structured data operations
+- Tags
+- User management
+- `COPY INTO` from S3 sources, see [COPY INTO](#copy-into)
+
+Not yet implemented:
+
+- [Access control](https://docs.snowflake.com/en/user-guide/security-access-control-overview)
+- [Stored procedures](https://docs.snowflake.com/en/sql-reference/stored-procedures)
+
+For more detail see the [test suite](tests/).
 
 ## Caveats
 
-- The order of rows is non deterministic and may not match Snowflake unless ORDER BY is fully specified.
-- A more liberal Snowflake SQL dialect than used by a real Snowflake instance is supported, ie: some queries might pass using fakesnow that a real Snowflake instance would reject.
+- Row ordering is non-deterministic and may differ from Snowflake unless you fully specify the ORDER BY clause.
+- fakesnow supports a more liberal SQL dialect than actual Snowflake. This means some queries that work with fakesnow might not work with a real Snowflake instance.
+
+## COPY INTO
+
+`COPY INTO` can be used from S3 sources. By default the standard AWS credential chain will be used. If you are getting an HTTP 403 or need to provide alternative S3 credentials you can use the duckdb [CREATE SECRET](https://duckdb.org/docs/stable/extensions/httpfs/s3api) statement. For an example of creating a secret to use a moto S3 endpoint see `s3_client` in [conftest.py](tests/conftest.py#L80)
 
 ## Contributing
 
-See [CONTRIBUTING.md](CONTRIBUTING.md) to get started and develop in this repo.
+See [CONTRIBUTING.md](CONTRIBUTING.md) for instructions on getting started with development and contributing to this project.
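
The COPY INTO section above refers to duckdb's CREATE SECRET without showing one. A hedged sketch of such a statement, using parameter names from the duckdb S3 api docs linked above; the credentials and endpoint are placeholders, and issuing it on the connection that fakesnow's duckdb instance uses (as the referenced conftest.py appears to do) is an assumption here, not something this diff confirms:

```python
# duckdb-style S3 secret for COPY INTO sources, e.g. a local moto endpoint (placeholder values)
CREATE_SECRET = """
CREATE SECRET my_s3_secret (
    TYPE S3,
    KEY_ID 'test',
    SECRET 'test',
    REGION 'us-east-1',
    ENDPOINT 'localhost:5000',
    URL_STYLE 'path',
    USE_SSL false
)
"""
```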
fakesnow-0.9.36.dist-info/RECORD ADDED
@@ -0,0 +1,30 @@
+fakesnow/__init__.py,sha256=It-8mTZWBaVi4suZjL7UJlJBGFhLWmPnI-THX02XRJU,5108
+fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
+fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
+fakesnow/checks.py,sha256=be-xo0oMoAUVhlMDCu1_Rkoh_L8p_p8qo9P6reJSHIQ,2874
+fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
+fakesnow/conn.py,sha256=diCwcjaCBrlCn9PyjbScfIQTNQjqiPTkQanUTqcvblE,6009
+fakesnow/converter.py,sha256=xoBFnfBbGWQyUQAVr6zi-RyglU8A7A3GSlwLPkH1dzI,1621
+fakesnow/cursor.py,sha256=3JCxSoBJ2g6bndIGQnJnTAWu8Ad7zK_6kwmAY_b0VKE,22949
+fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
+fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
+fakesnow/fixtures.py,sha256=2rj0MTZlaZc4PNWhaqC5IiiLa7E9G0QZT3g45YawsL0,633
+fakesnow/info_schema.py,sha256=AYmTIHxk5Y6xdMTgttgBL1V0VO8qiM2T1-gKwkLmWDs,8720
+fakesnow/instance.py,sha256=OKoYXwaI6kL9HQpnHx44yzpON_xNfuIT_F4oJNF_XXQ,2114
+fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
+fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
+fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
+fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
+fakesnow/server.py,sha256=RHf7ffKYi5xBH9fh8wZr3tEPmnzFWuvUbziCC8UwTh4,6652
+fakesnow/variables.py,sha256=C3y_9u7LuVtARkpcim3ihgVWg6KKdz1hSVeW4YI7oL4,3014
+fakesnow/transforms/__init__.py,sha256=RcVGkp95yKByluQ5O6RALJTiRlox8FK4pMl1rt_gJPc,49536
+fakesnow/transforms/copy_into.py,sha256=QJ1hh3hVi9kPJgyQHlGO3Vi8sP3qnWmvY4JWO--HWl0,5565
+fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
+fakesnow/transforms/show.py,sha256=0NjuLQjodrukfUw8mcxcAmtBkV_6r02mA3nuE3ad3rE,17458
+fakesnow-0.9.36.dist-info/licenses/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+tools/decode.py,sha256=kC5kUvLQxdCkMRsnH6BqCajlKxKeN77w6rwCKsY6gqU,1781
+fakesnow-0.9.36.dist-info/METADATA,sha256=Nu1iV0SOnWCNeQCVJXbwC2PVxx7mkdQ6YDfDVkfTmI4,21160
+fakesnow-0.9.36.dist-info/WHEEL,sha256=lTU6B6eIfYoiQJTZNc-fyaR6BpL6ehTzU3xGYxn2n8k,91
+fakesnow-0.9.36.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.36.dist-info/top_level.txt,sha256=Yos7YveA3f03xVYuURqnBsfMV2DePXfu_yGcsj3pPzI,30
+fakesnow-0.9.36.dist-info/RECORD,,
fakesnow-0.9.36.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (78.1.0)
+Generator: setuptools (78.1.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
fakesnow-0.9.34.dist-info/RECORD DELETED
@@ -1,29 +0,0 @@
-fakesnow/__init__.py,sha256=qUfgucQYPdELrJaxczalhJgWAWQ6cfTCUAHx6nUqRaI,3528
-fakesnow/__main__.py,sha256=GDrGyNTvBFuqn_UfDjKs7b3LPtU6gDv1KwosVDrukIM,76
-fakesnow/arrow.py,sha256=XjTpFyLrD9jULWOtPgpr0RyNMmO6a5yi82y6ivi2CCI,4884
-fakesnow/checks.py,sha256=be-xo0oMoAUVhlMDCu1_Rkoh_L8p_p8qo9P6reJSHIQ,2874
-fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
-fakesnow/conn.py,sha256=2WClMmUgfQkQA2hFQjfMP3R-85TbTbZh_8Y1tCdcerA,6053
-fakesnow/converter.py,sha256=7YlASaMomzchMZoorTH3KtVmgBakaHrF5fAl5VP747I,1635
-fakesnow/cursor.py,sha256=mK4nC1iucON1MohicTugJqUOfRsx5c8ToUJgnCfUSbs,21813
-fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
-fakesnow/fakes.py,sha256=JQTiUkkwPeQrJ8FDWhPFPK6pGwd_aR2oiOrNzCWznlM,187
-fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
-fakesnow/info_schema.py,sha256=xDhGy07fpc8bcy_VTfh54UzwNIaB4ZhGmjgJeoiZ0hQ,8744
-fakesnow/instance.py,sha256=VsFbhVfy6EAJdEKykgavJwkMtrig01NehorptT51Jh8,2020
-fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
-fakesnow/pandas_tools.py,sha256=wI203UQHC8JvDzxE_VjE1NeV4rThek2P-u52oTg2foo,3481
-fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
-fakesnow/rowtype.py,sha256=QUp8EaXD5LT0Xv8BXk5ze4WseEn52xoJ6R05pJjs5mM,2729
-fakesnow/server.py,sha256=4DgZUTd-G_usjSqy6NdUqd2fWUw2a-wHSSeJt3cdneA,6375
-fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
-fakesnow/transforms/__init__.py,sha256=xFrpw28DaHvMt6LGaRMsPqTo8PWogg10JgEu3oa6jdA,49515
-fakesnow/transforms/merge.py,sha256=Pg7_rwbAT_vr1U4ocBofUSyqaK8_e3qdIz_2SDm2S3s,8320
-fakesnow/transforms/show.py,sha256=2qfK3Fi0RLylqTnkwSVgv5JIorXYb1y0fnf5oErRZ2o,16839
-fakesnow-0.9.34.dist-info/licenses/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
-tools/decode.py,sha256=kC5kUvLQxdCkMRsnH6BqCajlKxKeN77w6rwCKsY6gqU,1781
-fakesnow-0.9.34.dist-info/METADATA,sha256=Hqkb8CT1-QTNzQqRMVhEHWGwx3gJORv9YHy7wGSoBgQ,18128
-fakesnow-0.9.34.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-fakesnow-0.9.34.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
-fakesnow-0.9.34.dist-info/top_level.txt,sha256=Yos7YveA3f03xVYuURqnBsfMV2DePXfu_yGcsj3pPzI,30
-fakesnow-0.9.34.dist-info/RECORD,,