fakesnow 0.7.0__tar.gz → 0.8.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {fakesnow-0.7.0/fakesnow.egg-info → fakesnow-0.8.0}/PKG-INFO +17 -2
  2. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/__init__.py +1 -4
  3. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/fakes.py +19 -20
  4. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/fixtures.py +1 -1
  5. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/info_schema.py +1 -2
  6. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/transforms.py +46 -48
  7. {fakesnow-0.7.0 → fakesnow-0.8.0/fakesnow.egg-info}/PKG-INFO +17 -2
  8. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow.egg-info/requires.txt +7 -7
  9. {fakesnow-0.7.0 → fakesnow-0.8.0}/pyproject.toml +22 -14
  10. {fakesnow-0.7.0 → fakesnow-0.8.0}/tests/test_fakes.py +43 -14
  11. {fakesnow-0.7.0 → fakesnow-0.8.0}/tests/test_transforms.py +13 -3
  12. {fakesnow-0.7.0 → fakesnow-0.8.0}/LICENSE +0 -0
  13. {fakesnow-0.7.0 → fakesnow-0.8.0}/MANIFEST.in +0 -0
  14. {fakesnow-0.7.0 → fakesnow-0.8.0}/README.md +0 -0
  15. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/checks.py +0 -0
  16. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/expr.py +0 -0
  17. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow/py.typed +0 -0
  18. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow.egg-info/SOURCES.txt +0 -0
  19. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow.egg-info/dependency_links.txt +0 -0
  20. {fakesnow-0.7.0 → fakesnow-0.8.0}/fakesnow.egg-info/top_level.txt +0 -0
  21. {fakesnow-0.7.0 → fakesnow-0.8.0}/setup.cfg +0 -0
  22. {fakesnow-0.7.0 → fakesnow-0.8.0}/tests/test_checks.py +0 -0
  23. {fakesnow-0.7.0 → fakesnow-0.8.0}/tests/test_expr.py +0 -0
  24. {fakesnow-0.7.0 → fakesnow-0.8.0}/tests/test_patch.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fakesnow
3
- Version: 0.7.0
3
+ Version: 0.8.0
4
4
  Summary: Fake Snowflake Connector for Python. Run Snowflake DB locally.
5
5
  License: MIT License
6
6
 
@@ -28,9 +28,24 @@ Keywords: snowflake,snowflakedb,fake,local,mock,testing
28
28
  Classifier: License :: OSI Approved :: MIT License
29
29
  Requires-Python: >=3.9
30
30
  Description-Content-Type: text/markdown
31
+ License-File: LICENSE
32
+ Requires-Dist: duckdb~=0.9.2
33
+ Requires-Dist: pyarrow
34
+ Requires-Dist: snowflake-connector-python
35
+ Requires-Dist: sqlglot~=19.5.1
31
36
  Provides-Extra: dev
37
+ Requires-Dist: black~=23.9; extra == "dev"
38
+ Requires-Dist: build~=1.0; extra == "dev"
39
+ Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
40
+ Requires-Dist: pre-commit~=3.4; extra == "dev"
41
+ Requires-Dist: pytest~=7.4; extra == "dev"
42
+ Requires-Dist: ruff~=0.1.6; extra == "dev"
43
+ Requires-Dist: twine~=4.0; extra == "dev"
32
44
  Provides-Extra: notebook
33
- License-File: LICENSE
45
+ Requires-Dist: duckdb-engine; extra == "notebook"
46
+ Requires-Dist: ipykernel; extra == "notebook"
47
+ Requires-Dist: jupysql; extra == "notebook"
48
+ Requires-Dist: snowflake-sqlalchemy; extra == "notebook"
34
49
 
35
50
  # fakesnow ❄️
36
51
 
@@ -4,11 +4,8 @@ import contextlib
4
4
  import importlib
5
5
  import sys
6
6
  import unittest.mock as mock
7
+ from collections.abc import Iterator, Sequence
7
8
  from contextlib import contextmanager
8
- from typing import (
9
- Iterator,
10
- Sequence,
11
- )
12
9
 
13
10
  import duckdb
14
11
  import snowflake.connector
@@ -1,8 +1,9 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import re
4
+ from collections.abc import Iterable, Iterator, Sequence
4
5
  from types import TracebackType
5
- from typing import TYPE_CHECKING, Any, Iterable, Iterator, Literal, Optional, Sequence, Type, Union, cast
6
+ from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast
6
7
 
7
8
  import duckdb
8
9
 
@@ -55,9 +56,9 @@ class FakeSnowflakeCursor:
55
56
 
56
57
  def __exit__(
57
58
  self,
58
- exc_type: Optional[Type[BaseException]] = ...,
59
- exc_value: Optional[BaseException] = ...,
60
- traceback: Optional[TracebackType] = ...,
59
+ exc_type: type[BaseException] | None,
60
+ exc_value: BaseException | None,
61
+ traceback: TracebackType | None,
61
62
  ) -> bool:
62
63
  return False
63
64
 
@@ -149,6 +150,7 @@ class FakeSnowflakeCursor:
149
150
  .transform(transforms.values_columns)
150
151
  .transform(transforms.to_date)
151
152
  .transform(transforms.to_decimal)
153
+ .transform(transforms.to_timestamp)
152
154
  .transform(transforms.object_construct)
153
155
  .transform(transforms.timestamp_ntz_ns)
154
156
  .transform(transforms.float_to_double)
@@ -250,11 +252,11 @@ class FakeSnowflakeCursor:
250
252
  reader = self._duck_conn.fetch_record_batch(rows_per_batch=1000)
251
253
 
252
254
  batches = []
253
- while True:
254
- try:
255
+ try:
256
+ while True:
255
257
  batches.append(FakeResultBatch(self._use_dict_result, reader.read_next_batch()))
256
- except StopIteration:
257
- break
258
+ except StopIteration:
259
+ pass
258
260
 
259
261
  return batches
260
262
 
@@ -320,13 +322,10 @@ class FakeSnowflakeCursor:
320
322
  return ResultMetadata(
321
323
  name=column_name, type_code=12, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True # noqa: E501
322
324
  )
323
- elif column_type == "JSON[]":
324
- return ResultMetadata(
325
- name=column_name, type_code=10, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True # noqa: E501
326
- )
327
325
  elif column_type == "JSON":
326
+ # TODO: correctly map OBJECT and ARRAY see https://github.com/tekumara/fakesnow/issues/26
328
327
  return ResultMetadata(
329
- name=column_name, type_code=9, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True # noqa: E501
328
+ name=column_name, type_code=5, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True # noqa: E501
330
329
  )
331
330
  else:
332
331
  # TODO handle more types
@@ -365,8 +364,8 @@ class FakeSnowflakeConnection:
365
364
  def __init__(
366
365
  self,
367
366
  duck_conn: DuckDBPyConnection,
368
- database: Optional[str] = None,
369
- schema: Optional[str] = None,
367
+ database: str | None = None,
368
+ schema: str | None = None,
370
369
  create_database: bool = True,
371
370
  create_schema: bool = True,
372
371
  *args: Any,
@@ -435,16 +434,16 @@ class FakeSnowflakeConnection:
435
434
 
436
435
  def __exit__(
437
436
  self,
438
- exc_type: Optional[Type[BaseException]] = ...,
439
- exc_value: Optional[BaseException] = ...,
440
- traceback: Optional[TracebackType] = ...,
437
+ exc_type: type[BaseException] | None,
438
+ exc_value: BaseException | None,
439
+ traceback: TracebackType | None,
441
440
  ) -> bool:
442
441
  return False
443
442
 
444
443
  def commit(self) -> None:
445
444
  self.cursor().execute("COMMIT")
446
445
 
447
- def cursor(self, cursor_class: Type[SnowflakeCursor] = SnowflakeCursor) -> FakeSnowflakeCursor:
446
+ def cursor(self, cursor_class: type[SnowflakeCursor] = SnowflakeCursor) -> FakeSnowflakeCursor:
448
447
  return FakeSnowflakeCursor(conn=self, duck_conn=self._duck_conn, use_dict_result=cursor_class == DictCursor)
449
448
 
450
449
  def execute_string(
@@ -452,7 +451,7 @@ class FakeSnowflakeConnection:
452
451
  sql_text: str,
453
452
  remove_comments: bool = False,
454
453
  return_cursors: bool = True,
455
- cursor_class: Type[SnowflakeCursor] = SnowflakeCursor,
454
+ cursor_class: type[SnowflakeCursor] = SnowflakeCursor,
456
455
  **kwargs: dict[str, Any],
457
456
  ) -> Iterable[FakeSnowflakeCursor]:
458
457
  cursors = [
@@ -1,4 +1,4 @@
1
- from typing import Iterator
1
+ from collections.abc import Iterator
2
2
 
3
3
  import pytest
4
4
 
@@ -40,8 +40,7 @@ case when starts_with(data_type, 'DECIMAL') or data_type='BIGINT' then 'NUMBER'
40
40
  when data_type='DOUBLE' then 'FLOAT'
41
41
  when data_type='BLOB' then 'BINARY'
42
42
  when data_type='TIMESTAMP' then 'TIMESTAMP_NTZ'
43
- when data_type='JSON[]' then 'ARRAY'
44
- when data_type='JSON' then 'OBJECT'
43
+ when data_type='JSON' then 'VARIANT'
45
44
  else data_type end as data_type,
46
45
  ext_character_maximum_length as character_maximum_length, ext_character_octet_length as character_octet_length,
47
46
  case when data_type='BIGINT' then 38
@@ -2,7 +2,6 @@ from __future__ import annotations
2
2
 
3
3
  from typing import cast
4
4
 
5
- import snowflake.connector
6
5
  import sqlglot
7
6
  from sqlglot import exp
8
7
 
@@ -140,7 +139,7 @@ def extract_text_length(expression: exp.Expression) -> exp.Expression:
140
139
  for dt in expression.find_all(exp.DataType):
141
140
  if dt.this in (exp.DataType.Type.VARCHAR, exp.DataType.Type.TEXT):
142
141
  col_name = dt.parent and dt.parent.this and dt.parent.this.this
143
- if dt_size := dt.find(exp.DataTypeSize):
142
+ if dt_size := dt.find(exp.DataTypeParam):
144
143
  size = (
145
144
  isinstance(dt_size.this, exp.Literal)
146
145
  and isinstance(dt_size.this.this, str)
@@ -311,14 +310,8 @@ def parse_json(expression: exp.Expression) -> exp.Expression:
311
310
  def regex_replace(expression: exp.Expression) -> exp.Expression:
312
311
  """Transform regex_replace expressions from snowflake to duckdb."""
313
312
 
314
- if (
315
- isinstance(expression, exp.Anonymous)
316
- and isinstance(expression.this, str)
317
- and expression.this.upper() == "REGEXP_REPLACE"
318
- ):
319
- expressions = expression.expressions
320
-
321
- if len(expressions) > 3:
313
+ if isinstance(expression, exp.RegexpReplace) and isinstance(expression.expression, exp.Literal):
314
+ if len(expression.args) > 3:
322
315
  # see https://docs.snowflake.com/en/sql-reference/functions/regexp_replace
323
316
  raise NotImplementedError(
324
317
  "REGEXP_REPLACE with additional parameters (eg: <position>, <occurrence>, <parameters>) not supported"
@@ -326,64 +319,56 @@ def regex_replace(expression: exp.Expression) -> exp.Expression:
326
319
 
327
320
  # pattern: snowflake requires escaping backslashes in single-quoted string constants, but duckdb doesn't
328
321
  # see https://docs.snowflake.com/en/sql-reference/functions-regexp#label-regexp-escape-character-caveats
329
- expressions[1].args["this"] = expressions[1].this.replace("\\\\", "\\")
322
+ expression.args["expression"] = exp.Literal(
323
+ this=expression.expression.this.replace("\\\\", "\\"), is_string=True
324
+ )
330
325
 
331
- if len(expressions) == 2:
326
+ if not expression.args.get("replacement"):
332
327
  # if no replacement string, the snowflake default is ''
333
- expressions.append(exp.Literal(this="", is_string=True))
328
+ expression.args["replacement"] = exp.Literal(this="", is_string=True)
334
329
 
335
330
  # snowflake regex replacements are global
336
- expressions.append(exp.Literal(this="g", is_string=True))
331
+ expression.args["modifiers"] = exp.Literal(this="g", is_string=True)
337
332
 
338
333
  return expression
339
334
 
340
335
 
341
336
  def regex_substr(expression: exp.Expression) -> exp.Expression:
342
- """Transform regex_substr expressions from snowflake to duckdb."""
343
-
344
- if (
345
- isinstance(expression, exp.Anonymous)
346
- and isinstance(expression.this, str)
347
- and expression.this.upper() == "REGEXP_SUBSTR"
348
- ):
349
- expressions = expression.expressions
337
+ """Transform regex_substr expressions from snowflake to duckdb.
350
338
 
351
- if len(expressions) < 2:
352
- raise snowflake.connector.errors.ProgrammingError(
353
- msg=f"SQL compilation error:\nnot enough arguments for function [{expression.sql()}], expected 2, got {len(expressions)}", # noqa: E501
354
- errno=938,
355
- sqlstate="22023",
356
- )
339
+ See https://docs.snowflake.com/en/sql-reference/functions/regexp_substr
340
+ """
357
341
 
358
- subject = expressions[0]
342
+ if isinstance(expression, exp.RegexpExtract):
343
+ subject = expression.this
359
344
 
360
345
  # pattern: snowflake requires escaping backslashes in single-quoted string constants, but duckdb doesn't
361
346
  # see https://docs.snowflake.com/en/sql-reference/functions-regexp#label-regexp-escape-character-caveats
362
- pattern = expressions[1]
347
+ pattern = expression.expression
363
348
  pattern.args["this"] = pattern.this.replace("\\\\", "\\")
364
349
 
365
350
  # number of characters from the beginning of the string where the function starts searching for matches
366
351
  try:
367
- position = expressions[2]
368
- except IndexError:
352
+ position = expression.args["position"]
353
+ except KeyError:
369
354
  position = exp.Literal(this="1", is_string=False)
370
355
 
371
356
  # which occurrence of the pattern to match
372
357
  try:
373
- occurrence = expressions[3]
374
- except IndexError:
358
+ occurrence = expression.args["occurrence"]
359
+ except KeyError:
375
360
  occurrence = exp.Literal(this="1", is_string=False)
376
361
 
377
362
  try:
378
- regex_parameters_value = str(expressions[4].this)
363
+ regex_parameters_value = str(expression.args["parameters"].this)
379
364
  # 'e' parameter doesn't make sense for duckdb
380
365
  regex_parameters = exp.Literal(this=regex_parameters_value.replace("e", ""), is_string=True)
381
- except IndexError:
366
+ except KeyError:
382
367
  regex_parameters = exp.Literal(is_string=True)
383
368
 
384
369
  try:
385
- group_num = expressions[5]
386
- except IndexError:
370
+ group_num = expression.args["group"]
371
+ except KeyError:
387
372
  if isinstance(regex_parameters.this, str) and "e" in regex_parameters.this:
388
373
  group_num = exp.Literal(this="1", is_string=False)
389
374
  else:
@@ -546,6 +531,20 @@ def to_decimal(expression: exp.Expression) -> exp.Expression:
546
531
  return expression
547
532
 
548
533
 
534
+ def to_timestamp(expression: exp.Expression) -> exp.Expression:
535
+ """Convert to_timestamp(seconds) to timestamp without timezone (ie: TIMESTAMP_NTZ).
536
+
537
+ See https://docs.snowflake.com/en/sql-reference/functions/to_timestamp
538
+ """
539
+
540
+ if isinstance(expression, exp.UnixToTime):
541
+ return exp.Cast(
542
+ this=expression,
543
+ to=exp.DataType(this=exp.DataType.Type.TIMESTAMP, nested=False, prefix=False),
544
+ )
545
+ return expression
546
+
547
+
549
548
  def timestamp_ntz_ns(expression: exp.Expression) -> exp.Expression:
550
549
  """Convert timestamp_ntz(9) to timestamp_ntz.
551
550
 
@@ -555,7 +554,7 @@ def timestamp_ntz_ns(expression: exp.Expression) -> exp.Expression:
555
554
  if (
556
555
  isinstance(expression, exp.DataType)
557
556
  and expression.this == exp.DataType.Type.TIMESTAMP
558
- and exp.DataTypeSize(this=exp.Literal(this="9", is_string=False)) in expression.expressions
557
+ and exp.DataTypeParam(this=exp.Literal(this="9", is_string=False)) in expression.expressions
559
558
  ):
560
559
  new = expression.copy()
561
560
  del new.args["expressions"]
@@ -579,15 +578,14 @@ def semi_structured_types(expression: exp.Expression) -> exp.Expression:
579
578
  exp.Expression: The transformed expression.
580
579
  """
581
580
 
582
- if isinstance(expression, exp.DataType):
583
- if expression.this in [exp.DataType.Type.OBJECT, exp.DataType.Type.VARIANT]:
584
- new = expression.copy()
585
- new.args["this"] = exp.DataType.Type.JSON
586
- return new
587
- elif expression.this == exp.DataType.Type.ARRAY:
588
- new = expression.copy()
589
- new.set("expressions", [exp.DataType(this=exp.DataType.Type.JSON)])
590
- return new
581
+ if isinstance(expression, exp.DataType) and expression.this in [
582
+ exp.DataType.Type.ARRAY,
583
+ exp.DataType.Type.OBJECT,
584
+ exp.DataType.Type.VARIANT,
585
+ ]:
586
+ new = expression.copy()
587
+ new.args["this"] = exp.DataType.Type.JSON
588
+ return new
591
589
 
592
590
  return expression
593
591
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fakesnow
3
- Version: 0.7.0
3
+ Version: 0.8.0
4
4
  Summary: Fake Snowflake Connector for Python. Run Snowflake DB locally.
5
5
  License: MIT License
6
6
 
@@ -28,9 +28,24 @@ Keywords: snowflake,snowflakedb,fake,local,mock,testing
28
28
  Classifier: License :: OSI Approved :: MIT License
29
29
  Requires-Python: >=3.9
30
30
  Description-Content-Type: text/markdown
31
+ License-File: LICENSE
32
+ Requires-Dist: duckdb~=0.9.2
33
+ Requires-Dist: pyarrow
34
+ Requires-Dist: snowflake-connector-python
35
+ Requires-Dist: sqlglot~=19.5.1
31
36
  Provides-Extra: dev
37
+ Requires-Dist: black~=23.9; extra == "dev"
38
+ Requires-Dist: build~=1.0; extra == "dev"
39
+ Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
40
+ Requires-Dist: pre-commit~=3.4; extra == "dev"
41
+ Requires-Dist: pytest~=7.4; extra == "dev"
42
+ Requires-Dist: ruff~=0.1.6; extra == "dev"
43
+ Requires-Dist: twine~=4.0; extra == "dev"
32
44
  Provides-Extra: notebook
33
- License-File: LICENSE
45
+ Requires-Dist: duckdb-engine; extra == "notebook"
46
+ Requires-Dist: ipykernel; extra == "notebook"
47
+ Requires-Dist: jupysql; extra == "notebook"
48
+ Requires-Dist: snowflake-sqlalchemy; extra == "notebook"
34
49
 
35
50
  # fakesnow ❄️
36
51
 
@@ -1,15 +1,15 @@
1
- duckdb~=0.8.0
1
+ duckdb~=0.9.2
2
2
  pyarrow
3
3
  snowflake-connector-python
4
- sqlglot~=16.8.1
4
+ sqlglot~=19.5.1
5
5
 
6
6
  [dev]
7
- black~=23.3
8
- build~=0.10
7
+ black~=23.9
8
+ build~=1.0
9
9
  snowflake-connector-python[pandas,secure-local-storage]
10
- pre-commit~=3.2
11
- pytest~=7.3
12
- ruff~=0.0.285
10
+ pre-commit~=3.4
11
+ pytest~=7.4
12
+ ruff~=0.1.6
13
13
  twine~=4.0
14
14
 
15
15
  [notebook]
@@ -1,17 +1,17 @@
1
1
  [project]
2
2
  name = "fakesnow"
3
3
  description = "Fake Snowflake Connector for Python. Run Snowflake DB locally."
4
- version = "0.7.0"
4
+ version = "0.8.0"
5
5
  readme = "README.md"
6
6
  license = { file = "LICENSE" }
7
7
  classifiers = ["License :: OSI Approved :: MIT License"]
8
8
  keywords = ["snowflake", "snowflakedb", "fake", "local", "mock", "testing"]
9
9
  requires-python = ">=3.9"
10
10
  dependencies = [
11
- "duckdb~=0.8.0",
11
+ "duckdb~=0.9.2",
12
12
  "pyarrow",
13
13
  "snowflake-connector-python",
14
- "sqlglot~=16.8.1",
14
+ "sqlglot~=19.5.1",
15
15
  ]
16
16
 
17
17
  [project.urls]
@@ -19,27 +19,27 @@ homepage = "https://github.com/tekumara/fakesnow"
19
19
 
20
20
  [project.optional-dependencies]
21
21
  dev = [
22
- "black~=23.3",
23
- "build~=0.10",
22
+ "black~=23.9",
23
+ "build~=1.0",
24
24
  # include compatible version of pandas, and secure-local-storage for token caching
25
25
  "snowflake-connector-python[pandas, secure-local-storage]",
26
- "pre-commit~=3.2",
27
- "pytest~=7.3",
28
- "ruff~=0.0.285",
26
+ "pre-commit~=3.4",
27
+ "pytest~=7.4",
28
+ "ruff~=0.1.6",
29
29
  "twine~=4.0",
30
30
  ]
31
31
  # for debugging, see https://duckdb.org/docs/guides/python/jupyter.html
32
32
  notebook = ["duckdb-engine", "ipykernel", "jupysql", "snowflake-sqlalchemy"]
33
33
 
34
- [build-system]
35
- requires = ["setuptools~=67.7", "wheel~=0.40"]
36
-
37
34
  [tool.setuptools.packages.find]
38
35
  where = ["."]
39
36
  exclude = ["tests*"]
40
37
 
41
- # use PyCharm default line length of 120
38
+ [build-system]
39
+ requires = ["setuptools~=68.2", "wheel~=0.40"]
40
+
42
41
  [tool.black]
42
+ # use PyCharm default line length of 120
43
43
  line-length = 120
44
44
 
45
45
  [tool.pyright]
@@ -52,6 +52,8 @@ strictParameterNoneValue = true
52
52
  reportTypedDictNotRequiredAccess = false
53
53
 
54
54
  [tool.ruff]
55
+ # Compatibility between Ruff and Black
56
+ # https://beta.ruff.rs/docs/faq/#is-ruff-compatible-with-black
55
57
  line-length = 120
56
58
 
57
59
  # rules to enable/ignore
@@ -73,6 +75,10 @@ select = [
73
75
  # "ARG",
74
76
  # flake8-self
75
77
  "SLF",
78
+ # pyupgrade
79
+ "UP",
80
+ # perflint
81
+ "PERF",
76
82
  # ruff-specific
77
83
  "RUF",
78
84
  ]
@@ -89,9 +95,11 @@ src = ["."]
89
95
  fix = true
90
96
  show-fixes = true
91
97
 
98
+ [tool.ruff.isort]
99
+ combine-as-imports = true
100
+ force-wrap-aliases = true
101
+
92
102
  [tool.ruff.per-file-ignores]
93
- # imports in __init__.py don't need to be used in __init__.py
94
- "__init__.py" = ["F401"]
95
103
  # test functions don't need return types
96
104
  "tests/*" = ["ANN201", "ANN202"]
97
105
 
@@ -2,6 +2,7 @@
2
2
 
3
3
  import datetime
4
4
  import json
5
+ from collections.abc import Sequence
5
6
  from decimal import Decimal
6
7
 
7
8
  import pandas as pd
@@ -205,7 +206,7 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
205
206
  XINT INT, XINTEGER INTEGER, XBIGINT BIGINT, XSMALLINT SMALLINT, XTINYINT TINYINT, XBYTEINT BYTEINT,
206
207
  XVARCHAR20 VARCHAR(20), XVARCHAR VARCHAR, XTEXT TEXT,
207
208
  XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XDATE DATE, XTIME TIME,
208
- XBINARY BINARY, XARRAY ARRAY, XOBJECT OBJECT
209
+ XBINARY BINARY, /* XARRAY ARRAY, XOBJECT OBJECT */ XVARIANT VARIANT
209
210
  )
210
211
  """
211
212
  )
@@ -233,8 +234,10 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
233
234
  ResultMetadata(name='XDATE', type_code=3, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
234
235
  ResultMetadata(name='XTIME', type_code=12, display_size=None, internal_size=None, precision=0, scale=9, is_nullable=True),
235
236
  ResultMetadata(name='XBINARY', type_code=11, display_size=None, internal_size=8388608, precision=None, scale=None, is_nullable=True),
236
- ResultMetadata(name='XARRAY', type_code=10, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
237
- ResultMetadata(name='XOBJECT', type_code=9, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
237
+ # TODO: handle ARRAY and OBJECT see https://github.com/tekumara/fakesnow/issues/26
238
+ # ResultMetadata(name='XARRAY', type_code=10, display_size=None, internal_size=16777216, precision=None, scale=None, is_nullable=True),
239
+ # ResultMetadata(name='XOBJECT', type_code=9, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
240
+ ResultMetadata(name='XVARIANT', type_code=5, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
238
241
  ]
239
242
  # fmt: on
240
243
 
@@ -247,6 +250,19 @@ def test_describe(cur: snowflake.connector.cursor.SnowflakeCursor):
247
250
  cur.execute("select * from example where XNUMBER = %s", (1,))
248
251
  assert cur.description == expected_metadata
249
252
 
253
+ # test semi-structured ops return variant ie: type_code=5
254
+ # fmt: off
255
+ assert (
256
+ cur.describe("SELECT ['A', 'B'][0] as array_index, OBJECT_CONSTRUCT('k','v1')['k'] as object_key, ARRAY_CONSTRUCT('foo')::VARIANT[0] as variant_key")
257
+ == [
258
+ # NB: snowflake returns internal_size = 16777216 for all columns
259
+ ResultMetadata(name="ARRAY_INDEX", type_code=5, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
260
+ ResultMetadata(name="OBJECT_KEY", type_code=5, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True),
261
+ ResultMetadata(name="VARIANT_KEY", type_code=5, display_size=None, internal_size=None, precision=None, scale=None, is_nullable=True)
262
+ ]
263
+ )
264
+ # fmt: on
265
+
250
266
 
251
267
  def test_describe_info_schema_columns(cur: snowflake.connector.cursor.SnowflakeCursor):
252
268
  # test we can handle the column types returned from the info schema, which are created by duckdb
@@ -420,7 +436,7 @@ def test_information_schema_columns_other(cur: snowflake.connector.cursor.Snowfl
420
436
  """
421
437
  create or replace table example (
422
438
  XTIMESTAMP TIMESTAMP, XTIMESTAMP_NTZ9 TIMESTAMP_NTZ(9), XDATE DATE, XTIME TIME,
423
- XBINARY BINARY, XARRAY ARRAY, XOBJECT OBJECT
439
+ XBINARY BINARY, /* XARRAY ARRAY, XOBJECT OBJECT */ XVARIANT VARIANT
424
440
  )
425
441
  """
426
442
  )
@@ -438,8 +454,10 @@ def test_information_schema_columns_other(cur: snowflake.connector.cursor.Snowfl
438
454
  ("XDATE", "DATE"),
439
455
  ("XTIME", "TIME"),
440
456
  ("XBINARY", "BINARY"),
441
- ("XARRAY", "ARRAY"),
442
- ("XOBJECT", "OBJECT"),
457
+ # TODO: support these types https://github.com/tekumara/fakesnow/issues/27
458
+ # ("XARRAY", "ARRAY"),
459
+ # ("XOBJECT", "OBJECT"),
460
+ ("XVARIANT", "VARIANT"),
443
461
  ]
444
462
 
445
463
 
@@ -547,18 +565,25 @@ def test_schema_drop(cur: snowflake.connector.cursor.SnowflakeCursor):
547
565
 
548
566
 
549
567
  def test_semi_structured_types(cur: snowflake.connector.cursor.SnowflakeCursor):
550
- cur.execute("create table semis (emails array, name object, notes variant)")
568
+ def indent(rows: Sequence[tuple]) -> list[tuple]:
569
+ # indent duckdb json strings to match snowflake json strings
570
+ return [(json.dumps(json.loads(r[0]), indent=2), *r[1:]) for r in rows]
571
+
572
+ cur.execute("create or replace table semis (emails array, name object, notes variant)")
551
573
  cur.execute(
552
- """insert into semis(emails, name, notes) SELECT [1, 2], parse_json('{"k": "v1"}'), parse_json('["foo"]')"""
574
+ """insert into semis(emails, name, notes) SELECT ['A', 'B'], OBJECT_CONSTRUCT('k','v1'), ARRAY_CONSTRUCT('foo')::VARIANT"""
553
575
  )
554
576
  cur.execute(
555
- """insert into semis(emails, name, notes) VALUES ([3,4], parse_json('{"k": "v2"}'), parse_json('{"b": "ar"}'))"""
577
+ """insert into semis(emails, name, notes) SELECT ['C','D'], parse_json('{"k": "v2"}'), parse_json('{"b": "ar"}')"""
556
578
  )
557
579
 
558
580
  # results are returned as strings, because the underlying type is JSON (duckdb) / VARIANT (snowflake)
559
581
 
582
+ cur.execute("select emails from semis")
583
+ assert indent(cur.fetchall()) == [('[\n "A",\n "B"\n]',), ('[\n "C",\n "D"\n]',)] # type: ignore
584
+
560
585
  cur.execute("select emails[0] from semis")
561
- assert cur.fetchall() == [("1",), ("3",)]
586
+ assert cur.fetchall() == [('"A"',), ('"C"',)]
562
587
 
563
588
  cur.execute("select name['k'] from semis")
564
589
  assert cur.fetchall() == [('"v1"',), ('"v2"',)]
@@ -606,10 +631,14 @@ def test_tags_noop(cur: snowflake.connector.cursor.SnowflakeCursor):
606
631
  cur.execute("ALTER TABLE table1 MODIFY COLUMN name1 SET TAG foo='bar'")
607
632
 
608
633
 
609
- def test_timestamp(cur: snowflake.connector.cursor.SnowflakeCursor):
634
+ def test_to_timestamp(cur: snowflake.connector.cursor.SnowflakeCursor):
635
+ # snowflake returns naive timestamps (ie: no timezone)
610
636
  cur.execute("SELECT to_timestamp(0)")
611
637
  assert cur.fetchall() == [(datetime.datetime(1970, 1, 1, 0, 0),)]
612
638
 
639
+ cur.execute("SELECT to_timestamp('2013-04-05 01:02:03')")
640
+ assert cur.fetchall() == [(datetime.datetime(2013, 4, 5, 1, 2, 3),)]
641
+
613
642
 
614
643
  def test_timestamp_to_date(cur: snowflake.connector.cursor.SnowflakeCursor):
615
644
  cur.execute("SELECT to_date(to_timestamp(0)), to_date(cast(to_timestamp(0) as timestamp(9)))")
@@ -724,12 +753,12 @@ def test_values(conn: snowflake.connector.SnowflakeConnection):
724
753
 
725
754
  def test_write_pandas(conn: snowflake.connector.SnowflakeConnection):
726
755
  with conn.cursor() as cur:
727
- cur.execute("create table customers (ID int, FIRST_NAME varchar, LAST_NAME varchar)")
756
+ cur.execute("create table customers (ID int, FIRST_NAME varchar, LAST_NAME varchar, ORDERS array)")
728
757
 
729
758
  df = pd.DataFrame.from_records(
730
759
  [
731
- {"ID": 1, "FIRST_NAME": "Jenny", "LAST_NAME": "P"},
732
- {"ID": 2, "FIRST_NAME": "Jasper", "LAST_NAME": "M"},
760
+ {"ID": 1, "FIRST_NAME": "Jenny", "LAST_NAME": "P", "ORDERS": ["A", "B"]},
761
+ {"ID": 2, "FIRST_NAME": "Jasper", "LAST_NAME": "M", "ORDERS": ["C", "D"]},
733
762
  ]
734
763
  )
735
764
  snowflake.connector.pandas_tools.write_pandas(conn, df, "customers")
@@ -22,6 +22,7 @@ from fakesnow.transforms import (
22
22
  timestamp_ntz_ns,
23
23
  to_date,
24
24
  to_decimal,
25
+ to_timestamp,
25
26
  upper_case_unquoted_identifiers,
26
27
  values_columns,
27
28
  )
@@ -133,7 +134,7 @@ def test_parse_json() -> None:
133
134
  assert (
134
135
  sqlglot.parse_one("""insert into table1 (name) select parse_json('{"first":"foo", "last":"bar"}')""")
135
136
  .transform(parse_json)
136
- .sql()
137
+ .sql(dialect="duckdb")
137
138
  == """INSERT INTO table1 (name) SELECT JSON('{"first":"foo", "last":"bar"}')"""
138
139
  )
139
140
 
@@ -147,7 +148,9 @@ def test_regex_replace() -> None:
147
148
 
148
149
  def test_regex_substr() -> None:
149
150
  assert (
150
- sqlglot.parse_one("SELECT regexp_substr(string1, 'the\\\\W+\\\\w+')").transform(regex_substr).sql()
151
+ sqlglot.parse_one("SELECT regexp_substr(string1, 'the\\\\W+\\\\w+')", read="snowflake")
152
+ .transform(regex_substr)
153
+ .sql()
151
154
  == "SELECT REGEXP_EXTRACT_ALL(string1[1 : ], 'the\\W+\\w+', 0, '')[1]"
152
155
  )
153
156
 
@@ -160,7 +163,7 @@ def test_semi_structured_types() -> None:
160
163
 
161
164
  assert (
162
165
  sqlglot.parse_one("CREATE TABLE table1 (name array)").transform(semi_structured_types).sql(dialect="duckdb")
163
- == "CREATE TABLE table1 (name JSON[])"
166
+ == "CREATE TABLE table1 (name JSON)"
164
167
  )
165
168
 
166
169
  assert (
@@ -196,6 +199,13 @@ def test_to_decimal() -> None:
196
199
  )
197
200
 
198
201
 
202
+ def test_to_timestamp() -> None:
203
+ assert (
204
+ sqlglot.parse_one("SELECT to_timestamp(0)", read="snowflake").transform(to_timestamp).sql(dialect="duckdb")
205
+ == "SELECT CAST(TO_TIMESTAMP(0) AS TIMESTAMP)"
206
+ )
207
+
208
+
199
209
  def test_use() -> None:
200
210
  assert (
201
211
  sqlglot.parse_one("use database marts").transform(set_schema, current_database=None).sql()
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes