fakesnow 0.9.15__tar.gz → 0.9.17__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {fakesnow-0.9.15 → fakesnow-0.9.17}/PKG-INFO +3 -3
  2. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/fakes.py +5 -3
  3. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/transforms.py +35 -3
  4. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow.egg-info/PKG-INFO +3 -3
  5. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow.egg-info/requires.txt +2 -2
  6. {fakesnow-0.9.15 → fakesnow-0.9.17}/pyproject.toml +3 -3
  7. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_fakes.py +41 -1
  8. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_transforms.py +41 -0
  9. {fakesnow-0.9.15 → fakesnow-0.9.17}/LICENSE +0 -0
  10. {fakesnow-0.9.15 → fakesnow-0.9.17}/README.md +0 -0
  11. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/__init__.py +0 -0
  12. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/__main__.py +0 -0
  13. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/checks.py +0 -0
  14. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/cli.py +0 -0
  15. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/expr.py +0 -0
  16. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/fixtures.py +0 -0
  17. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/global_database.py +0 -0
  18. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/info_schema.py +0 -0
  19. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/macros.py +0 -0
  20. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow/py.typed +0 -0
  21. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow.egg-info/SOURCES.txt +0 -0
  22. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow.egg-info/dependency_links.txt +0 -0
  23. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow.egg-info/entry_points.txt +0 -0
  24. {fakesnow-0.9.15 → fakesnow-0.9.17}/fakesnow.egg-info/top_level.txt +0 -0
  25. {fakesnow-0.9.15 → fakesnow-0.9.17}/setup.cfg +0 -0
  26. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_checks.py +0 -0
  27. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_cli.py +0 -0
  28. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_expr.py +0 -0
  29. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_info_schema.py +0 -0
  30. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_patch.py +0 -0
  31. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_sqlalchemy.py +0 -0
  32. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_users.py +0 -0
  33. {fakesnow-0.9.15 → fakesnow-0.9.17}/tests/test_write_pandas.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fakesnow
3
- Version: 0.9.15
3
+ Version: 0.9.17
4
4
  Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
5
5
  License: Apache License
6
6
  Version 2.0, January 2004
@@ -210,10 +210,10 @@ Classifier: License :: OSI Approved :: MIT License
210
210
  Requires-Python: >=3.9
211
211
  Description-Content-Type: text/markdown
212
212
  License-File: LICENSE
213
- Requires-Dist: duckdb~=0.10.3
213
+ Requires-Dist: duckdb~=1.0.0
214
214
  Requires-Dist: pyarrow
215
215
  Requires-Dist: snowflake-connector-python
216
- Requires-Dist: sqlglot~=24.1.0
216
+ Requires-Dist: sqlglot~=25.3.0
217
217
  Provides-Extra: dev
218
218
  Requires-Dist: build~=1.0; extra == "dev"
219
219
  Requires-Dist: pandas-stubs; extra == "dev"
@@ -158,6 +158,7 @@ class FakeSnowflakeCursor:
158
158
  .transform(transforms.tag)
159
159
  .transform(transforms.semi_structured_types)
160
160
  .transform(transforms.try_parse_json)
161
+ .transform(transforms.split)
161
162
  # NOTE: trim_cast_varchar must be before json_extract_cast_as_varchar
162
163
  .transform(transforms.trim_cast_varchar)
163
164
  # indices_to_json_extract must be before regex_substr
@@ -165,6 +166,7 @@ class FakeSnowflakeCursor:
165
166
  .transform(transforms.json_extract_cast_as_varchar)
166
167
  .transform(transforms.json_extract_cased_as_varchar)
167
168
  .transform(transforms.json_extract_precedence)
169
+ .transform(transforms.flatten_value_cast_as_varchar)
168
170
  .transform(transforms.flatten)
169
171
  .transform(transforms.regex_replace)
170
172
  .transform(transforms.regex_substr)
@@ -184,7 +186,7 @@ class FakeSnowflakeCursor:
184
186
  .transform(transforms.random)
185
187
  .transform(transforms.identifier)
186
188
  .transform(transforms.array_agg_within_group)
187
- .transform(transforms.array_agg_to_json)
189
+ .transform(transforms.array_agg)
188
190
  .transform(transforms.dateadd_date_cast)
189
191
  .transform(transforms.dateadd_string_literal_timestamp_cast)
190
192
  .transform(transforms.datediff_string_literal_timestamp_cast)
@@ -520,7 +522,7 @@ class FakeSnowflakeConnection:
520
522
  self.schema = schema and schema.upper()
521
523
  self.database_set = False
522
524
  self.schema_set = False
523
- self.db_path = db_path
525
+ self.db_path = Path(db_path) if db_path else None
524
526
  self.nop_regexes = nop_regexes
525
527
  self._paramstyle = snowflake.connector.paramstyle
526
528
 
@@ -535,7 +537,7 @@ class FakeSnowflakeConnection:
535
537
  where catalog_name = '{self.database}'"""
536
538
  ).fetchone()
537
539
  ):
538
- db_file = f"{Path(db_path)/self.database}.db" if db_path else ":memory:"
540
+ db_file = f"{self.db_path/self.database}.db" if self.db_path else ":memory:"
539
541
  duck_conn.execute(f"ATTACH DATABASE '{db_file}' AS {self.database}")
540
542
  duck_conn.execute(info_schema.creation_sql(self.database))
541
543
  duck_conn.execute(macros.creation_sql(self.database))
@@ -41,8 +41,11 @@ def array_size(expression: exp.Expression) -> exp.Expression:
41
41
  return expression
42
42
 
43
43
 
44
- def array_agg_to_json(expression: exp.Expression) -> exp.Expression:
45
- if isinstance(expression, exp.ArrayAgg):
44
+ def array_agg(expression: exp.Expression) -> exp.Expression:
45
+ if isinstance(expression, exp.ArrayAgg) and not isinstance(expression.parent, exp.Window):
46
+ return exp.Anonymous(this="TO_JSON", expressions=[expression])
47
+
48
+ if isinstance(expression, exp.Window) and isinstance(expression.this, exp.ArrayAgg):
46
49
  return exp.Anonymous(this="TO_JSON", expressions=[expression])
47
50
 
48
51
  return expression
@@ -436,7 +439,7 @@ def flatten(expression: exp.Expression) -> exp.Expression:
436
439
  isinstance(expression, exp.Lateral)
437
440
  and isinstance(expression.this, exp.Explode)
438
441
  and (alias := expression.args.get("alias"))
439
- # always true; when no explicit alias provided this will be _flattened
442
+ # always true; when no explicit alias provided this will be flattened
440
443
  and isinstance(alias, exp.TableAlias)
441
444
  ):
442
445
  explode_expression = expression.this.this.expression
@@ -460,6 +463,25 @@ def flatten(expression: exp.Expression) -> exp.Expression:
460
463
  return expression
461
464
 
462
465
 
466
+ def flatten_value_cast_as_varchar(expression: exp.Expression) -> exp.Expression:
467
+ """Return raw unquoted string when flatten VALUE is cast to varchar.
468
+
469
+ Returns a raw string using the Duckdb ->> operator, aka the json_extract_string function, see
470
+ https://duckdb.org/docs/extensions/json#json-extraction-functions
471
+ """
472
+ if (
473
+ isinstance(expression, exp.Cast)
474
+ and isinstance(expression.this, exp.Column)
475
+ and expression.this.name.upper() == "VALUE"
476
+ and expression.to.this in [exp.DataType.Type.VARCHAR, exp.DataType.Type.TEXT]
477
+ and (select := expression.find_ancestor(exp.Select))
478
+ and select.find(exp.Explode)
479
+ ):
480
+ return exp.JSONExtractScalar(this=expression.this, expression=exp.JSONPath(expressions=[exp.JSONPathRoot()]))
481
+
482
+ return expression
483
+
484
+
463
485
  def float_to_double(expression: exp.Expression) -> exp.Expression:
464
486
  """Convert float to double for 64 bit precision.
465
487
 
@@ -931,6 +953,16 @@ def show_schemas(expression: exp.Expression, current_database: str | None = None
931
953
  return expression
932
954
 
933
955
 
956
+ def split(expression: exp.Expression) -> exp.Expression:
957
+ """
958
+ Convert output of duckdb str_split from varchar[] to JSON array to match Snowflake.
959
+ """
960
+ if isinstance(expression, exp.Split):
961
+ return exp.Anonymous(this="to_json", expressions=[expression])
962
+
963
+ return expression
964
+
965
+
934
966
  def tag(expression: exp.Expression) -> exp.Expression:
935
967
  """Handle tags. Transfer tags into upserts of the tag table.
936
968
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fakesnow
3
- Version: 0.9.15
3
+ Version: 0.9.17
4
4
  Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
5
5
  License: Apache License
6
6
  Version 2.0, January 2004
@@ -210,10 +210,10 @@ Classifier: License :: OSI Approved :: MIT License
210
210
  Requires-Python: >=3.9
211
211
  Description-Content-Type: text/markdown
212
212
  License-File: LICENSE
213
- Requires-Dist: duckdb~=0.10.3
213
+ Requires-Dist: duckdb~=1.0.0
214
214
  Requires-Dist: pyarrow
215
215
  Requires-Dist: snowflake-connector-python
216
- Requires-Dist: sqlglot~=24.1.0
216
+ Requires-Dist: sqlglot~=25.3.0
217
217
  Provides-Extra: dev
218
218
  Requires-Dist: build~=1.0; extra == "dev"
219
219
  Requires-Dist: pandas-stubs; extra == "dev"
@@ -1,7 +1,7 @@
1
- duckdb~=0.10.3
1
+ duckdb~=1.0.0
2
2
  pyarrow
3
3
  snowflake-connector-python
4
- sqlglot~=24.1.0
4
+ sqlglot~=25.3.0
5
5
 
6
6
  [dev]
7
7
  build~=1.0
@@ -1,17 +1,17 @@
1
1
  [project]
2
2
  name = "fakesnow"
3
3
  description = "Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally."
4
- version = "0.9.15"
4
+ version = "0.9.17"
5
5
  readme = "README.md"
6
6
  license = { file = "LICENSE" }
7
7
  classifiers = ["License :: OSI Approved :: MIT License"]
8
8
  keywords = ["snowflake", "snowflakedb", "fake", "local", "mock", "testing"]
9
9
  requires-python = ">=3.9"
10
10
  dependencies = [
11
- "duckdb~=0.10.3",
11
+ "duckdb~=1.0.0",
12
12
  "pyarrow",
13
13
  "snowflake-connector-python",
14
- "sqlglot~=24.1.0",
14
+ "sqlglot~=25.3.0",
15
15
  ]
16
16
 
17
17
  [project.urls]
@@ -54,7 +54,7 @@ def test_array_size(cur: snowflake.connector.cursor.SnowflakeCursor):
54
54
  assert cur.fetchall() == [(None,)]
55
55
 
56
56
 
57
- def test_array_agg_to_json(dcur: snowflake.connector.cursor.DictCursor):
57
+ def test_array_agg(dcur: snowflake.connector.cursor.DictCursor):
58
58
  dcur.execute("create table table1 (id number, name varchar)")
59
59
  values = [(1, "foo"), (2, "bar"), (1, "baz"), (2, "qux")]
60
60
 
@@ -63,6 +63,24 @@ def test_array_agg_to_json(dcur: snowflake.connector.cursor.DictCursor):
63
63
  dcur.execute("select array_agg(name) as names from table1")
64
64
  assert dindent(dcur.fetchall()) == [{"NAMES": '[\n "foo",\n "bar",\n "baz",\n "qux"\n]'}]
65
65
 
66
+ # using over
67
+
68
+ dcur.execute(
69
+ """
70
+ SELECT DISTINCT
71
+ ID
72
+ , ANOTHER
73
+ , ARRAY_AGG(DISTINCT COL) OVER(PARTITION BY ID) AS COLS
74
+ FROM (select column1 as ID, column2 as COL, column3 as ANOTHER from
75
+ (VALUES (1, 's1', 'c1'),(1, 's2', 'c1'),(1, 's3', 'c1'),(2, 's1', 'c2'), (2,'s2','c2')))
76
+ ORDER BY ID
77
+ """
78
+ )
79
+ assert dindent(dcur.fetchall()) == [
80
+ {"ID": 1, "ANOTHER": "c1", "COLS": '[\n "s1",\n "s2",\n "s3"\n]'},
81
+ {"ID": 2, "ANOTHER": "c2", "COLS": '[\n "s1",\n "s2"\n]'},
82
+ ]
83
+
66
84
 
67
85
  def test_array_agg_within_group(dcur: snowflake.connector.cursor.DictCursor):
68
86
  dcur.execute("CREATE TABLE table1 (ID INT, amount INT)")
@@ -202,6 +220,12 @@ def test_connect_reuse_db():
202
220
  assert cur.execute("select * from example").fetchall() == [(420,)]
203
221
 
204
222
 
223
+ def test_connect_db_path_can_create_database() -> None:
224
+ with tempfile.TemporaryDirectory(prefix="fakesnow-test") as db_path, fakesnow.patch(db_path=db_path):
225
+ cursor = snowflake.connector.connect().cursor()
226
+ cursor.execute("CREATE DATABASE db2")
227
+
228
+
205
229
  def test_connect_without_database(_fakesnow_no_auto_create: None):
206
230
  with snowflake.connector.connect() as conn, conn.cursor() as cur:
207
231
  with pytest.raises(snowflake.connector.errors.ProgrammingError) as excinfo:
@@ -790,6 +814,18 @@ def test_flatten(cur: snowflake.connector.cursor.SnowflakeCursor):
790
814
  assert cur.fetchall() == [(1, '"banana"'), (2, '"coconut"'), (2, '"durian"')]
791
815
 
792
816
 
817
+ def test_flatten_value_cast_as_varchar(cur: snowflake.connector.cursor.SnowflakeCursor):
818
+ cur.execute(
819
+ """
820
+ select id, f.value::varchar as v
821
+ from (select column1 as id, column2 as col from (values (1, 's1,s2,s3'), (2, 's1,s2'))) as t
822
+ , lateral flatten(input => split(t.col, ',')) as f order by id
823
+ """
824
+ )
825
+ # should be raw string not json string with double quotes
826
+ assert cur.fetchall() == [(1, "s1"), (1, "s2"), (1, "s3"), (2, "s1"), (2, "s2")]
827
+
828
+
793
829
  def test_floats_are_64bit(cur: snowflake.connector.cursor.SnowflakeCursor):
794
830
  cur.execute("create or replace table example (f float, f4 float4, f8 float8, d double, r real)")
795
831
  cur.execute("insert into example values (1.23, 1.23, 1.23, 1.23, 1.23)")
@@ -1323,6 +1359,10 @@ def test_show_primary_keys(dcur: snowflake.connector.cursor.SnowflakeCursor):
1323
1359
  assert result3 == []
1324
1360
 
1325
1361
 
1362
+ def test_split(cur: snowflake.connector.cursor.SnowflakeCursor):
1363
+ assert indent(cur.execute("select split('a,b,c', ',')").fetchall()) == [('[\n "a",\n "b",\n "c"\n]',)]
1364
+
1365
+
1326
1366
  def test_sqlglot_regression(cur: snowflake.connector.cursor.SnowflakeCursor):
1327
1367
  assert cur.execute(
1328
1368
  """with SOURCE_TABLE AS (SELECT '2024-01-01' AS start_date)
@@ -8,6 +8,7 @@ from fakesnow.transforms import (
8
8
  SUCCESS_NOP,
9
9
  _get_to_number_args,
10
10
  alias_in_join,
11
+ array_agg,
11
12
  array_agg_within_group,
12
13
  array_size,
13
14
  create_clone,
@@ -21,6 +22,7 @@ from fakesnow.transforms import (
21
22
  extract_comment_on_table,
22
23
  extract_text_length,
23
24
  flatten,
25
+ flatten_value_cast_as_varchar,
24
26
  float_to_double,
25
27
  identifier,
26
28
  indices_to_json_extract,
@@ -40,6 +42,7 @@ from fakesnow.transforms import (
40
42
  sha256,
41
43
  show_objects_tables,
42
44
  show_schemas,
45
+ split,
43
46
  tag,
44
47
  timestamp_ntz,
45
48
  to_date,
@@ -78,6 +81,22 @@ def test_array_size() -> None:
78
81
  )
79
82
 
80
83
 
84
+ def test_array_agg() -> None:
85
+ assert (
86
+ sqlglot.parse_one("SELECT ARRAY_AGG(name) AS names FROM table1").transform(array_agg).sql(dialect="duckdb")
87
+ == "SELECT TO_JSON(ARRAY_AGG(name)) AS names FROM table1"
88
+ )
89
+
90
+ assert (
91
+ sqlglot.parse_one(
92
+ "SELECT DISTINCT ID, ANOTHER, ARRAY_AGG(DISTINCT COL) OVER(PARTITION BY ID) AS COLS FROM TEST"
93
+ )
94
+ .transform(array_agg)
95
+ .sql(dialect="duckdb")
96
+ == "SELECT DISTINCT ID, ANOTHER, TO_JSON(ARRAY_AGG(DISTINCT COL) OVER (PARTITION BY ID)) AS COLS FROM TEST"
97
+ )
98
+
99
+
81
100
  def test_array_agg_within_group() -> None:
82
101
  assert (
83
102
  sqlglot.parse_one(
@@ -400,6 +419,22 @@ def test_flatten() -> None:
400
419
  )
401
420
 
402
421
 
422
+ def test_flatten_value_cast_as_varchar() -> None:
423
+ assert (
424
+ sqlglot.parse_one(
425
+ """
426
+ SELECT ID , F.VALUE::varchar as V
427
+ FROM TEST AS T
428
+ , LATERAL FLATTEN(input => SPLIT(T.COL, ',')) AS F;
429
+ """,
430
+ read="snowflake",
431
+ )
432
+ .transform(flatten_value_cast_as_varchar)
433
+ .sql(dialect="duckdb")
434
+ == """SELECT ID, F.VALUE ->> '$' AS V FROM TEST AS T, LATERAL UNNEST(input => STR_SPLIT(T.COL, ',')) AS F(SEQ, KEY, PATH, INDEX, VALUE, THIS)""" # noqa: E501
435
+ )
436
+
437
+
403
438
  def test_float_to_double() -> None:
404
439
  assert (
405
440
  sqlglot.parse_one("create table example (f float, f4 float4, f8 float8, d double, r real)")
@@ -610,6 +645,12 @@ def test_show_schemas() -> None:
610
645
  )
611
646
 
612
647
 
648
+ def test_split() -> None:
649
+ assert (
650
+ sqlglot.parse_one("SELECT split('a,b,c', ',')").transform(split).sql() == "SELECT TO_JSON(SPLIT('a,b,c', ','))"
651
+ )
652
+
653
+
613
654
  def test_tag() -> None:
614
655
  assert sqlglot.parse_one("ALTER TABLE table1 SET TAG foo='bar'", read="snowflake").transform(tag) == SUCCESS_NOP
615
656
  assert (
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes