fakesnow 0.5.1__tar.gz → 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. {fakesnow-0.5.1/fakesnow.egg-info → fakesnow-0.7.0}/PKG-INFO +6 -2
  2. {fakesnow-0.5.1 → fakesnow-0.7.0}/README.md +5 -1
  3. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/__init__.py +5 -6
  4. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/fakes.py +32 -18
  5. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/info_schema.py +21 -0
  6. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/transforms.py +28 -17
  7. {fakesnow-0.5.1 → fakesnow-0.7.0/fakesnow.egg-info}/PKG-INFO +6 -2
  8. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow.egg-info/SOURCES.txt +0 -1
  9. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow.egg-info/requires.txt +1 -1
  10. {fakesnow-0.5.1 → fakesnow-0.7.0}/pyproject.toml +13 -10
  11. {fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_fakes.py +56 -8
  12. {fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_transforms.py +8 -8
  13. fakesnow-0.5.1/setup.py +0 -4
  14. {fakesnow-0.5.1 → fakesnow-0.7.0}/LICENSE +0 -0
  15. {fakesnow-0.5.1 → fakesnow-0.7.0}/MANIFEST.in +0 -0
  16. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/checks.py +0 -0
  17. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/expr.py +0 -0
  18. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/fixtures.py +0 -0
  19. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/py.typed +0 -0
  20. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow.egg-info/dependency_links.txt +0 -0
  21. {fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow.egg-info/top_level.txt +0 -0
  22. {fakesnow-0.5.1 → fakesnow-0.7.0}/setup.cfg +0 -0
  23. {fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_checks.py +0 -0
  24. {fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_expr.py +0 -0
  25. {fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_patch.py +0 -0
{fakesnow-0.5.1/fakesnow.egg-info → fakesnow-0.7.0}/PKG-INFO RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fakesnow
- Version: 0.5.1
+ Version: 0.7.0
  Summary: Fake Snowflake Connector for Python. Run Snowflake DB locally.
  License: MIT License

@@ -113,7 +113,7 @@ def _fakesnow_session() -> Iterator[None]:
  - [x] [get_result_batches()](https://docs.snowflake.com/en/user-guide/python-connector-api#get_result_batches)
  - [x] information schema
  - [x] multiple databases
- - [x] [qmark binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
+ - [x] [parameter binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
  - [x] table comments
  - [x] [write_pandas(..)](https://docs.snowflake.com/en/user-guide/python-connector-api#write_pandas)
  - [ ] [access control](https://docs.snowflake.com/en/user-guide/security-access-control-overview)
@@ -129,6 +129,10 @@ Partial support

  For more detail see [tests/test_fakes.py](tests/test_fakes.py)

+ ## Caveats
+
+ - VARCHAR field sizes are not enforced unlike Snowflake which will error with "User character length limit (xxx) exceeded by string" when you try to insert a string longer than the column limit.
+
  ## Contributing

  See [CONTRIBUTING.md](CONTRIBUTING.md) to get started and develop in this repo.
{fakesnow-0.5.1 → fakesnow-0.7.0}/README.md RENAMED
@@ -79,7 +79,7 @@ def _fakesnow_session() -> Iterator[None]:
  - [x] [get_result_batches()](https://docs.snowflake.com/en/user-guide/python-connector-api#get_result_batches)
  - [x] information schema
  - [x] multiple databases
- - [x] [qmark binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
+ - [x] [parameter binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
  - [x] table comments
  - [x] [write_pandas(..)](https://docs.snowflake.com/en/user-guide/python-connector-api#write_pandas)
  - [ ] [access control](https://docs.snowflake.com/en/user-guide/security-access-control-overview)
@@ -95,6 +95,10 @@ Partial support

  For more detail see [tests/test_fakes.py](tests/test_fakes.py)

+ ## Caveats
+
+ - VARCHAR field sizes are not enforced unlike Snowflake which will error with "User character length limit (xxx) exceeded by string" when you try to insert a string longer than the column limit.
+
  ## Contributing

  See [CONTRIBUTING.md](CONTRIBUTING.md) to get started and develop in this repo.
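To illustrate the VARCHAR caveat added to the README above, a minimal sketch (assuming fakesnow is installed and used via `fakesnow.patch()` as the README describes); under fakesnow the oversized insert succeeds, whereas real Snowflake would raise the character length error:

```python
# Minimal sketch of the VARCHAR caveat: the declared length is not enforced by fakesnow.
import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="schema1")
    cur = conn.cursor()
    cur.execute("create table t (c varchar(1))")
    # Succeeds under fakesnow; real Snowflake would error with
    # "User character length limit (1) exceeded by string ..."
    cur.execute("insert into t values ('this string is longer than one character')")
    cur.execute("select c from t")
    print(cur.fetchall())
```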
{fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/__init__.py RENAMED
@@ -30,8 +30,9 @@ def patch(
  - snowflake.connector.pandas_tools.write_pandas

  Args:
- extra_targets (Sequence[types.ModuleType], optional): Extra targets to patch. Defaults to [].
- auto_create_database
+ extra_targets (str | Sequence[str], optional): Extra targets to patch. Defaults to [].
+ create_database_on_connect (bool, optional): Create database if provided in connection. Defaults to True.
+ create_schema_on_connect (bool, optional): Create schema if provided in connection. Defaults to True.

  Allows extra targets beyond the standard snowflake.connector targets to be patched. Needed because we cannot
  patch definitions, only usages, see https://docs.python.org/3/library/unittest.mock.html#where-to-patch
@@ -65,10 +66,8 @@ def patch(
  for im in std_targets + list([extra_targets] if isinstance(extra_targets, str) else extra_targets):
  module_name = ".".join(im.split(".")[:-1])
  fn_name = im.split(".")[-1]
- module = sys.modules.get(module_name)
- if not module:
- # module may not be loaded yet, try to import it
- module = importlib.import_module(module_name)
+ # get module or try to import it if not loaded yet
+ module = sys.modules.get(module_name) or importlib.import_module(module_name)
  fn = module.__dict__.get(fn_name)
  assert fn, f"No module var {im}"

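The reworked `patch()` docstring above documents string-valued `extra_targets` (a dotted path such as a hypothetical `"myapp.db.write_pandas"`) plus the new `create_database_on_connect` / `create_schema_on_connect` flags. A usage sketch under those assumptions:

```python
# Usage sketch for the 0.7.0 patch() signature.
import fakesnow
import snowflake.connector

# create_database_on_connect / create_schema_on_connect default to True,
# so db1 and schema1 are created automatically when the connection is made.
with fakesnow.patch(create_database_on_connect=True, create_schema_on_connect=True):
    conn = snowflake.connector.connect(database="db1", schema="schema1")
    cur = conn.cursor()
    cur.execute("select 'hello fakesnow'")
    print(cur.fetchall())
```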
{fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/fakes.py RENAMED
@@ -10,6 +10,7 @@ if TYPE_CHECKING:
  import pandas as pd
  import pyarrow.lib
  import pyarrow
+ import snowflake.connector.converter
  import snowflake.connector.errors
  import sqlglot
  from duckdb import DuckDBPyConnection
@@ -47,6 +48,7 @@ class FakeSnowflakeCursor:
  self._last_sql = None
  self._last_params = None
  self._sqlstate = None
+ self._converter = snowflake.connector.converter.SnowflakeConverter()

  def __enter__(self) -> Self:
  return self
@@ -68,9 +70,9 @@ class FakeSnowflakeCursor:
  list[ResultMetadata]: _description_
  """

- describe = transforms.as_describe(parse_one(command, read="snowflake"))
+ describe = f"DESCRIBE {command}"
  self.execute(describe, *args, **kwargs)
- return FakeSnowflakeCursor._describe_as_result_metadata(self._duck_conn.fetchall()) # noqa: SLF001
+ return FakeSnowflakeCursor._describe_as_result_metadata(self._duck_conn.fetchall())

  @property
  def description(self) -> list[ResultMetadata]:
@@ -82,13 +84,13 @@ class FakeSnowflakeCursor:
  # match database and schema used on the main connection
  cur.execute(f"SET SCHEMA = '{self._conn.database}.{self._conn.schema}'")
  cur.execute(f"DESCRIBE {self._last_sql}", self._last_params)
- meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall()) # noqa: SLF001
+ meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall())

  return meta # type: ignore see https://github.com/duckdb/duckdb/issues/7816

  def execute(
  self,
- command: str | exp.Expression,
+ command: str,
  params: Sequence[Any] | dict[Any, Any] | None = None,
  *args: Any,
  **kwargs: Any,
@@ -102,17 +104,15 @@ class FakeSnowflakeCursor:

  def _execute(
  self,
- command: str | exp.Expression,
+ command: str,
  params: Sequence[Any] | dict[Any, Any] | None = None,
  *args: Any,
  **kwargs: Any,
  ) -> FakeSnowflakeCursor:
  self._arrow_table = None

- if isinstance(command, exp.Expression):
- expression = command
- else:
- expression = parse_one(self._rewrite_params(command, params), read="snowflake")
+ command, params = self._rewrite_with_params(command, params)
+ expression = parse_one(command, read="snowflake")

  cmd = expr.key_command(expression)

@@ -148,6 +148,7 @@ class FakeSnowflakeCursor:
  .transform(transforms.regex_substr)
  .transform(transforms.values_columns)
  .transform(transforms.to_date)
+ .transform(transforms.to_decimal)
  .transform(transforms.object_construct)
  .transform(transforms.timestamp_ntz_ns)
  .transform(transforms.float_to_double)
@@ -319,6 +320,14 @@ class FakeSnowflakeCursor:
  return ResultMetadata(
  name=column_name, type_code=12, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True # noqa: E501
  )
+ elif column_type == "JSON[]":
+ return ResultMetadata(
+ name=column_name, type_code=10, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True # noqa: E501
+ )
+ elif column_type == "JSON":
+ return ResultMetadata(
+ name=column_name, type_code=9, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True # noqa: E501
+ )
  else:
  # TODO handle more types
  raise NotImplementedError(f"for column type {column_type}")
@@ -331,20 +340,25 @@ class FakeSnowflakeCursor:
  ]
  return meta

- def _rewrite_params(
+ def _rewrite_with_params(
  self,
  command: str,
  params: Sequence[Any] | dict[Any, Any] | None = None,
- ) -> str:
- if isinstance(params, dict):
- # see https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-api
- raise NotImplementedError("dict params not supported yet")
-
+ ) -> tuple[str, Sequence[Any] | dict[Any, Any] | None]:
  if params and self._conn._paramstyle in ("pyformat", "format"): # noqa: SLF001
- # duckdb uses question mark style params
- return command.replace("%s", "?")
+ # handle client-side in the same manner as the snowflake python connector
+
+ def convert(param: Any) -> Any: # noqa: ANN401
+ return self._converter.quote(self._converter.escape(self._converter.to_snowflake(param)))
+
+ if isinstance(params, dict):
+ params = {k: convert(v) for k, v in params.items()}
+ else:
+ params = tuple(convert(v) for v in params)
+
+ return command % params, None

- return command
+ return command, params


  class FakeSnowflakeConnection:
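With `_rewrite_with_params` above, pyformat bindings (`%s` and `%(name)s`) are converted and interpolated client-side, mirroring the real connector. A sketch in the spirit of the new binding tests further down this diff:

```python
# Sketch of pyformat parameter binding under fakesnow (the default paramstyle);
# values are escaped/quoted client-side and interpolated into the SQL text.
import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="schema1")
    with conn.cursor() as cur:
        cur.execute("create table customers (ID int, FIRST_NAME varchar, ACTIVE boolean)")
        # sequence params with %s placeholders
        cur.execute("insert into customers values (%s, %s, %s)", (1, "Jenny", True))
        # dict params with %(name)s placeholders, newly supported in 0.7.0
        cur.execute(
            "insert into customers values (%(id)s, %(name)s, %(active)s)",
            {"id": 2, "name": "Jasper", "active": False},
        )
        cur.execute("select * from customers")
        print(cur.fetchall())  # [(1, 'Jenny', True), (2, 'Jasper', False)]
```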
{fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/info_schema.py RENAMED
@@ -40,6 +40,8 @@ case when starts_with(data_type, 'DECIMAL') or data_type='BIGINT' then 'NUMBER'
  when data_type='DOUBLE' then 'FLOAT'
  when data_type='BLOB' then 'BINARY'
  when data_type='TIMESTAMP' then 'TIMESTAMP_NTZ'
+ when data_type='JSON[]' then 'ARRAY'
+ when data_type='JSON' then 'OBJECT'
  else data_type end as data_type,
  ext_character_maximum_length as character_maximum_length, ext_character_octet_length as character_octet_length,
  case when data_type='BIGINT' then 38
@@ -57,12 +59,31 @@ AND ext_table_name = table_name AND ext_column_name = column_name
  """
  )

+ # replicates https://docs.snowflake.com/sql-reference/info-schema/databases
+ SQL_CREATE_INFORMATION_SCHEMA_DATABASES_VIEW = Template(
+ """
+ create view ${catalog}.information_schema.databases AS
+ select
+ catalog_name as database_name,
+ 'SYSADMIN' as database_owner,
+ 'NO' as is_transient,
+ null as comment,
+ to_timestamp(0)::timestamptz as created,
+ to_timestamp(0)::timestamptz as last_altered,
+ 1 as retention_time,
+ 'STANDARD' as type
+ from information_schema.schemata
+ where catalog_name not in ('memory', 'system', 'temp') and schema_name = 'information_schema'
+ """
+ )
+

  def creation_sql(catalog: str) -> str:
  return f"""
  {SQL_CREATE_INFORMATION_SCHEMA_TABLES_EXT.substitute(catalog=catalog)};
  {SQL_CREATE_INFORMATION_SCHEMA_COLUMNS_EXT.substitute(catalog=catalog)};
  {SQL_CREATE_INFORMATION_SCHEMA_COLUMNS_VIEW.substitute(catalog=catalog)};
+ {SQL_CREATE_INFORMATION_SCHEMA_DATABASES_VIEW.substitute(catalog=catalog)};
  """

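The new `information_schema.databases` view above can be queried like its Snowflake counterpart; a sketch based on the test added in tests/test_fakes.py later in this diff:

```python
# Sketch of querying the new information_schema.databases view.
import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="schema1")
    with conn.cursor(snowflake.connector.cursor.DictCursor) as cur:
        cur.execute("select database_name, database_owner, type from information_schema.databases")
        print(cur.fetchall())
        # e.g. [{'database_name': 'DB1', 'database_owner': 'SYSADMIN', 'type': 'STANDARD'}]
```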
{fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow/transforms.py RENAMED
@@ -10,23 +10,6 @@ MISSING_DATABASE = "missing_database"
  SUCCESS_NOP = sqlglot.parse_one("SELECT 'Statement executed successfully.'")


- def as_describe(expression: exp.Expression) -> exp.Expression:
- """Prepend describe to the expression.
-
- Example:
- >>> import sqlglot
- >>> sqlglot.parse_one("SELECT name FROM CUSTOMERS").transform(as_describe).sql()
- 'describe SELECT name FROM CUSTOMERS'
- Args:
- expression (exp.Expression): the expression that will be transformed.
-
- Returns:
- exp.Expression: The transformed expression.
- """
-
- return exp.Describe(this=expression)
-
-
  # TODO: move this into a Dialect as a transpilation
  def create_database(expression: exp.Expression) -> exp.Expression:
  """Transform create database to attach database.
@@ -535,6 +518,34 @@ def to_date(expression: exp.Expression) -> exp.Expression:
  return expression


+ def to_decimal(expression: exp.Expression) -> exp.Expression:
+ """Transform to_decimal, to_number, to_numeric expressions from snowflake to duckdb.
+
+ See https://docs.snowflake.com/en/sql-reference/functions/to_decimal
+ """
+
+ if (
+ isinstance(expression, exp.Anonymous)
+ and isinstance(expression.this, str)
+ and expression.this.upper() in ["TO_DECIMAL", "TO_NUMBER", "TO_NUMERIC"]
+ ):
+ expressions: list[exp.Expression] = expression.expressions
+
+ if len(expressions) > 1 and expressions[1].is_string:
+ # see https://docs.snowflake.com/en/sql-reference/functions/to_decimal#arguments
+ raise NotImplementedError(f"{expression.this} with format argument")
+
+ precision = expressions[1] if len(expressions) > 1 else exp.Literal(this="38", is_string=False)
+ scale = expressions[2] if len(expressions) > 2 else exp.Literal(this="0", is_string=False)
+
+ return exp.Cast(
+ this=expressions[0],
+ to=exp.DataType(this=exp.DataType.Type.DECIMAL, expressions=[precision, scale], nested=False, prefix=False),
+ )
+
+ return expression
+
+
  def timestamp_ntz_ns(expression: exp.Expression) -> exp.Expression:
  """Convert timestamp_ntz(9) to timestamp_ntz.

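The `to_decimal` transform added above rewrites Snowflake's TO_DECIMAL / TO_NUMBER / TO_NUMERIC into a plain CAST that DuckDB can execute. A sketch that mirrors the test added in tests/test_transforms.py below:

```python
# Sketch of applying the to_decimal transform with sqlglot (mirrors test_to_decimal below).
import sqlglot
from fakesnow.transforms import to_decimal

sql = sqlglot.parse_one("SELECT to_decimal('1.245',10,2)").transform(to_decimal).sql()
assert sql == "SELECT CAST('1.245' AS DECIMAL(10, 2))"
print(sql)
```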
{fakesnow-0.5.1 → fakesnow-0.7.0/fakesnow.egg-info}/PKG-INFO RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fakesnow
- Version: 0.5.1
+ Version: 0.7.0
  Summary: Fake Snowflake Connector for Python. Run Snowflake DB locally.
  License: MIT License

@@ -113,7 +113,7 @@ def _fakesnow_session() -> Iterator[None]:
  - [x] [get_result_batches()](https://docs.snowflake.com/en/user-guide/python-connector-api#get_result_batches)
  - [x] information schema
  - [x] multiple databases
- - [x] [qmark binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
+ - [x] [parameter binding](https://docs.snowflake.com/en/user-guide/python-connector-example#binding-data)
  - [x] table comments
  - [x] [write_pandas(..)](https://docs.snowflake.com/en/user-guide/python-connector-api#write_pandas)
  - [ ] [access control](https://docs.snowflake.com/en/user-guide/security-access-control-overview)
@@ -129,6 +129,10 @@ Partial support

  For more detail see [tests/test_fakes.py](tests/test_fakes.py)

+ ## Caveats
+
+ - VARCHAR field sizes are not enforced unlike Snowflake which will error with "User character length limit (xxx) exceeded by string" when you try to insert a string longer than the column limit.
+
  ## Contributing

  See [CONTRIBUTING.md](CONTRIBUTING.md) to get started and develop in this repo.
{fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow.egg-info/SOURCES.txt RENAMED
@@ -2,7 +2,6 @@ LICENSE
  MANIFEST.in
  README.md
  pyproject.toml
- setup.py
  fakesnow/__init__.py
  fakesnow/checks.py
  fakesnow/expr.py
{fakesnow-0.5.1 → fakesnow-0.7.0}/fakesnow.egg-info/requires.txt RENAMED
@@ -9,7 +9,7 @@ build~=0.10
  snowflake-connector-python[pandas,secure-local-storage]
  pre-commit~=3.2
  pytest~=7.3
- ruff~=0.0.263
+ ruff~=0.0.285
  twine~=4.0

  [notebook]
{fakesnow-0.5.1 → fakesnow-0.7.0}/pyproject.toml RENAMED
@@ -1,7 +1,7 @@
  [project]
  name = "fakesnow"
  description = "Fake Snowflake Connector for Python. Run Snowflake DB locally."
- version = "0.5.1"
+ version = "0.7.0"
  readme = "README.md"
  license = { file = "LICENSE" }
  classifiers = ["License :: OSI Approved :: MIT License"]
@@ -25,14 +25,14 @@ dev = [
  "snowflake-connector-python[pandas, secure-local-storage]",
  "pre-commit~=3.2",
  "pytest~=7.3",
- "ruff~=0.0.263",
+ "ruff~=0.0.285",
  "twine~=4.0",
  ]
  # for debugging, see https://duckdb.org/docs/guides/python/jupyter.html
  notebook = ["duckdb-engine", "ipykernel", "jupysql", "snowflake-sqlalchemy"]

  [build-system]
- requires = ["setuptools~=67.6", "wheel~=0.40"]
+ requires = ["setuptools~=67.7", "wheel~=0.40"]

  [tool.setuptools.packages.find]
  where = ["."]
@@ -42,11 +42,14 @@ exclude = ["tests*"]
  [tool.black]
  line-length = 120

- [tool.docformatter]
- pre-summary-newline = true
- recursive = true
- wrap-descriptions = 120
- wrap-summaries = 120
+ [tool.pyright]
+ venvPath = "."
+ venv = ".venv"
+ exclude = ["**/node_modules", "**/__pycache__", "**/.*", "build"]
+ strictListInference = true
+ strictDictionaryInference = true
+ strictParameterNoneValue = true
+ reportTypedDictNotRequiredAccess = false

  [tool.ruff]
  line-length = 120
@@ -80,8 +83,6 @@ ignore = [
  "ANN204",
  # allow == True because pandas dataframes overload equality
  "E712",
- # only relevant for python >= 3.10
- "B905",
  ]
  # first-party imports for sorting
  src = ["."]
@@ -89,6 +90,8 @@ fix = true
  show-fixes = true

  [tool.ruff.per-file-ignores]
+ # imports in __init__.py don't need to be used in __init__.py
+ "__init__.py" = ["F401"]
  # test functions don't need return types
  "tests/*" = ["ANN201", "ANN202"]

{fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_fakes.py RENAMED
@@ -2,6 +2,7 @@

  import datetime
  import json
+ from decimal import Decimal

  import pandas as pd
  import pytest
@@ -28,6 +29,17 @@ def test_binding_default_paramstyle(conn: snowflake.connector.SnowflakeConnectio
  assert cur.fetchall() == [(1, "Jenny", True)]


+ def test_binding_default_paramstyle_dict(conn: snowflake.connector.SnowflakeConnection):
+ assert conn._paramstyle == "pyformat" # noqa: SLF001
+ with conn.cursor() as cur:
+ cur.execute("create table customers (ID int, FIRST_NAME varchar, ACTIVE boolean)")
+ cur.execute(
+ "insert into customers values (%(id)s, %(name)s, %(active)s)", {"id": 1, "name": "Jenny", "active": True}
+ )
+ cur.execute("select * from customers")
+ assert cur.fetchall() == [(1, "Jenny", True)]
+
+
  def test_binding_qmark(conn: snowflake.connector.SnowflakeConnection):
  conn._paramstyle = "qmark" # noqa: SLF001
  with conn.cursor() as cur:
@@ -39,7 +51,7 @@ def test_binding_qmark(conn: snowflake.connector.SnowflakeConnection):

  def test_connect_auto_create(_fakesnow: None):
  with snowflake.connector.connect(database="db1", schema="schema1"):
- # creates db2 and schema1
+ # creates db1 and schema1
  pass

  with snowflake.connector.connect(database="db1", schema="schema1"):
@@ -193,7 +205,7 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
  XINT INT, XINTEGER INTEGER, XBIGINT BIGINT, XSMALLINT SMALLINT, XTINYINT TINYINT, XBYTEINT BYTEINT,
  XVARCHAR20 VARCHAR(20), XVARCHAR VARCHAR, XTEXT TEXT,
  XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XDATE DATE, XTIME TIME,
- XBINARY BINARY
+ XBINARY BINARY, XARRAY ARRAY, XOBJECT OBJECT
  )
  """
  )
@@ -220,7 +232,9 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
  ResultMetadata(name='XTIMESTAMP_NTZ9', type_code=8, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
  ResultMetadata(name='XDATE', type_code=3, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
  ResultMetadata(name='XTIME', type_code=12, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
- ResultMetadata(name='XBINARY', type_code=11, display_size=None, internal_size=8388608, precision=None, scale=None, is_nullable=True)
+ ResultMetadata(name='XBINARY', type_code=11, display_size=None, internal_size=8388608, precision=None, scale=None, is_nullable=True),
+ ResultMetadata(name='XARRAY', type_code=10, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
+ ResultMetadata(name='XOBJECT', type_code=9, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
  ]
  # fmt: on

@@ -229,13 +243,13 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
  assert cur.description == expected_metadata

  # test with params
- assert cur.describe("select * from example where XNUMBER = ?", (1,)) == expected_metadata
- cur.execute("select * from example where XNUMBER = ?", (1,))
+ assert cur.describe("select * from example where XNUMBER = %s", (1,)) == expected_metadata
+ cur.execute("select * from example where XNUMBER = %s", (1,))
  assert cur.description == expected_metadata


- def test_describe_info_schema(cur: snowflake.connector.cursor.SnowflakeCursor):
- # tests we can handle the column types returned from the info schema, which are created by duckdb
+ def test_describe_info_schema_columns(cur: snowflake.connector.cursor.SnowflakeCursor):
+ # test we can handle the column types returned from the info schema, which are created by duckdb
  # and so don't go through our transforms
  cur.execute("select column_name, ordinal_position from information_schema.columns")
  # fmt: off
@@ -406,7 +420,7 @@ def test_information_schema_columns_other(cur: snowflake.connector.cursor.Snowfl
  """
  create or replace table example (
  XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XDATE DATE, XTIME TIME,
- XBINARY BINARY
+ XBINARY BINARY, XARRAY ARRAY, XOBJECT OBJECT
  )
  """
  )
@@ -424,6 +438,8 @@ def test_information_schema_columns_other(cur: snowflake.connector.cursor.Snowfl
  ("XDATE", "DATE"),
  ("XTIME", "TIME"),
  ("XBINARY", "BINARY"),
+ ("XARRAY", "ARRAY"),
+ ("XOBJECT", "OBJECT"),
  ]


@@ -451,6 +467,26 @@ def test_information_schema_columns_text(cur: snowflake.connector.cursor.Snowfla
  ]


+ def test_information_schema_databases(conn: snowflake.connector.SnowflakeConnection):
+ # see https://docs.snowflake.com/en/sql-reference/info-schema/databases
+
+ with conn.cursor(snowflake.connector.cursor.DictCursor) as cur:
+ cur.execute("select * from information_schema.databases")
+
+ assert cur.fetchall() == [
+ {
+ "database_name": "DB1",
+ "database_owner": "SYSADMIN",
+ "is_transient": "NO",
+ "comment": None,
+ "created": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+ "last_altered": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
+ "retention_time": 1,
+ "type": "STANDARD",
+ },
+ ]
+
+
  def test_non_existent_table_throws_snowflake_exception(cur: snowflake.connector.cursor.SnowflakeCursor):
  with pytest.raises(snowflake.connector.errors.ProgrammingError) as _:
  cur.execute("select * from this_table_does_not_exist")
@@ -580,6 +616,18 @@ def test_timestamp_to_date(cur: snowflake.connector.cursor.SnowflakeCursor):
  assert cur.fetchall() == [(datetime.date(1970, 1, 1), datetime.date(1970, 1, 1))]


+ def test_to_decimal(cur: snowflake.connector.cursor.SnowflakeCursor):
+ # see https://docs.snowflake.com/en/sql-reference/functions/to_decimal#examples
+ cur.execute("create or replace table number_conv(expr varchar);")
+ cur.execute("insert into number_conv values ('12.3456'), ('98.76546');")
+ cur.execute("select expr, to_decimal(expr), to_number(expr, 10, 1), to_numeric(expr, 10, 8) from number_conv;")
+
+ assert cur.fetchall() == [
+ ("12.3456", 12, Decimal("12.3"), Decimal("12.34560000")),
+ ("98.76546", 99, Decimal("98.8"), Decimal("98.76546000")),
+ ]
+
+
  def test_write_pandas_timestamp_ntz(conn: snowflake.connector.SnowflakeConnection):
  # compensate for https://github.com/duckdb/duckdb/issues/7980
  with conn.cursor() as cur:
{fakesnow-0.5.1 → fakesnow-0.7.0}/tests/test_transforms.py RENAMED
@@ -3,7 +3,6 @@ from sqlglot import exp

  from fakesnow.transforms import (
  SUCCESS_NOP,
- as_describe,
  create_database,
  drop_schema_cascade,
  extract_comment,
@@ -22,18 +21,12 @@ from fakesnow.transforms import (
  tag,
  timestamp_ntz_ns,
  to_date,
+ to_decimal,
  upper_case_unquoted_identifiers,
  values_columns,
  )


- def test_as_describe() -> None:
- assert (
- sqlglot.parse_one("SELECT name FROM CUSTOMERS").transform(as_describe).sql()
- == "DESCRIBE SELECT name FROM CUSTOMERS"
- )
-
-
  def test_create_database() -> None:
  e = sqlglot.parse_one("create database foobar").transform(create_database)
  assert e.sql() == "ATTACH DATABASE ':memory:' AS foobar"
@@ -196,6 +189,13 @@ def test_to_date() -> None:
  )


+ def test_to_decimal() -> None:
+ assert (
+ sqlglot.parse_one("SELECT to_decimal('1.245',10,2)").transform(to_decimal).sql()
+ == "SELECT CAST('1.245' AS DECIMAL(10, 2))"
+ )
+
+
  def test_use() -> None:
  assert (
  sqlglot.parse_one("use database marts").transform(set_schema, current_database=None).sql()
fakesnow-0.5.1/setup.py DELETED
@@ -1,4 +0,0 @@
- # minimal setup.py so pip install -e works
- from setuptools import setup
-
- setup()