sql-testing-library 0.8.0__tar.gz → 0.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/CHANGELOG.md +6 -0
  2. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/PKG-INFO +4 -3
  3. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/README.md +3 -2
  4. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/pyproject.toml +1 -1
  5. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/redshift.py +31 -1
  6. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_sql_utils.py +19 -4
  7. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/LICENSE +0 -0
  8. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/__init__.py +0 -0
  9. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/__init__.py +0 -0
  10. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/athena.py +0 -0
  11. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/base.py +0 -0
  12. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/bigquery.py +0 -0
  13. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/presto.py +0 -0
  14. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/snowflake.py +0 -0
  15. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_adapters/trino.py +0 -0
  16. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_core.py +0 -0
  17. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_exceptions.py +0 -0
  18. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_mock_table.py +0 -0
  19. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_pytest_plugin.py +0 -0
  20. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_sql_logger.py +0 -0
  21. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/_types.py +0 -0
  22. {sql_testing_library-0.8.0 → sql_testing_library-0.9.0}/src/sql_testing_library/py.typed +0 -0
@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## 0.9.0 (2025-06-10)
9
+
10
+ ### Feat
11
+
12
+ - **redshift**: add support for map datatype for redshift (#98)
13
+
8
14
  ## 0.8.0 (2025-06-09)
9
15
 
10
16
  ### Feat
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: sql-testing-library
3
- Version: 0.8.0
3
+ Version: 0.9.0
4
4
  Summary: A powerful Python framework for unit testing SQL queries across BigQuery, Snowflake, Redshift, Athena, and Trino with mock data
5
5
  License: MIT
6
6
  Keywords: sql,testing,unit-testing,mock-data,database-testing,bigquery,snowflake,redshift,athena,trino,data-engineering,etl-testing,sql-validation,query-testing
@@ -69,6 +69,7 @@ A powerful Python framework for unit testing SQL queries with mock data injectio
69
69
  [![Pepy Total Downloads](https://img.shields.io/pepy/dt/sql-testing-library?label=PyPI%20Downloads)](https://pepy.tech/projects/sql-testing-library)
70
70
  [![codecov](https://codecov.io/gh/gurmeetsaran/sqltesting/branch/master/graph/badge.svg?token=CN3G5X5ZA5)](https://codecov.io/gh/gurmeetsaran/sqltesting)
71
71
  ![python version](https://img.shields.io/badge/python-3.9%2B-yellowgreen)
72
+ [![Documentation](https://img.shields.io/badge/docs-GitHub%20Pages-blue)](https://gurmeetsaran.github.io/sqltesting/)
72
73
 
73
74
  ## 🎯 Motivation
74
75
 
@@ -135,8 +136,8 @@ The library supports different data types across database engines. All checkmark
135
136
  | **Integer Array** | `List[int]` | ✅ | ✅ | ✅ | ✅ | ✅ |
136
137
  | **Decimal Array** | `List[Decimal]` | ✅ | ✅ | ✅ | ✅ | ✅ |
137
138
  | **Optional Array** | `Optional[List[T]]` | ✅ | ✅ | ✅ | ✅ | ✅ |
139
+ | **Map/Object** | `Dict[K, V]` | ❌ | ✅ | ✅ | ✅ | ❌ |
138
140
  | **Struct/Record** | `dict`/`dataclass` | ❌ | ❌ | ❌ | ❌ | ❌ |
139
- | **Map/Object** | `Dict[K, V]` | ❌ | ✅ | ❌ | ✅ | ❌ |
140
141
  | **Nested Arrays** | `List[List[T]]` | ❌ | ❌ | ❌ | ❌ | ❌ |
141
142
  | **JSON Objects** | `JSON` | ❌ | ❌ | ❌ | ❌ | ❌ |
142
143
 
@@ -144,7 +145,7 @@ The library supports different data types across database engines. All checkmark
144
145
 
145
146
  - **BigQuery**: NULL arrays become empty arrays `[]`; uses scientific notation for large decimals
146
147
  - **Athena**: 256KB query size limit; supports arrays and maps using `ARRAY[]` and `MAP(ARRAY[], ARRAY[])` syntax
147
- - **Redshift**: Arrays implemented via JSON parsing; 16MB query size limit
148
+ - **Redshift**: Arrays and maps implemented via SUPER type (JSON parsing); 16MB query size limit
148
149
  - **Trino**: Memory catalog for testing; excellent decimal precision; supports arrays and maps
149
150
  - **Snowflake**: Column names normalized to lowercase; 1MB query size limit
150
151
 
@@ -12,6 +12,7 @@ A powerful Python framework for unit testing SQL queries with mock data injectio
12
12
  [![Pepy Total Downloads](https://img.shields.io/pepy/dt/sql-testing-library?label=PyPI%20Downloads)](https://pepy.tech/projects/sql-testing-library)
13
13
  [![codecov](https://codecov.io/gh/gurmeetsaran/sqltesting/branch/master/graph/badge.svg?token=CN3G5X5ZA5)](https://codecov.io/gh/gurmeetsaran/sqltesting)
14
14
  ![python version](https://img.shields.io/badge/python-3.9%2B-yellowgreen)
15
+ [![Documentation](https://img.shields.io/badge/docs-GitHub%20Pages-blue)](https://gurmeetsaran.github.io/sqltesting/)
15
16
 
16
17
  ## 🎯 Motivation
17
18
 
@@ -78,8 +79,8 @@ The library supports different data types across database engines. All checkmark
78
79
  | **Integer Array** | `List[int]` | ✅ | ✅ | ✅ | ✅ | ✅ |
79
80
  | **Decimal Array** | `List[Decimal]` | ✅ | ✅ | ✅ | ✅ | ✅ |
80
81
  | **Optional Array** | `Optional[List[T]]` | ✅ | ✅ | ✅ | ✅ | ✅ |
82
+ | **Map/Object** | `Dict[K, V]` | ❌ | ✅ | ✅ | ✅ | ❌ |
81
83
  | **Struct/Record** | `dict`/`dataclass` | ❌ | ❌ | ❌ | ❌ | ❌ |
82
- | **Map/Object** | `Dict[K, V]` | ❌ | ✅ | ❌ | ✅ | ❌ |
83
84
  | **Nested Arrays** | `List[List[T]]` | ❌ | ❌ | ❌ | ❌ | ❌ |
84
85
  | **JSON Objects** | `JSON` | ❌ | ❌ | ❌ | ❌ | ❌ |
85
86
 
@@ -87,7 +88,7 @@ The library supports different data types across database engines. All checkmark
87
88
 
88
89
  - **BigQuery**: NULL arrays become empty arrays `[]`; uses scientific notation for large decimals
89
90
  - **Athena**: 256KB query size limit; supports arrays and maps using `ARRAY[]` and `MAP(ARRAY[], ARRAY[])` syntax
90
- - **Redshift**: Arrays implemented via JSON parsing; 16MB query size limit
91
+ - **Redshift**: Arrays and maps implemented via SUPER type (JSON parsing); 16MB query size limit
91
92
  - **Trino**: Memory catalog for testing; excellent decimal precision; supports arrays and maps
92
93
  - **Snowflake**: Column names normalized to lowercase; 1MB query size limit
93
94
 
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
4
4
 
5
5
  [tool.poetry]
6
6
  name = "sql-testing-library"
7
- version = "0.8.0"
7
+ version = "0.9.0"
8
8
  description = "A powerful Python framework for unit testing SQL queries across BigQuery, Snowflake, Redshift, Athena, and Trino with mock data"
9
9
  authors = ["Gurmeet Saran <gurmeetx@gmail.com>", "Kushal Thakkar <kushal.thakkar@gmail.com>"]
10
10
  maintainers = ["Gurmeet Saran <gurmeetx@gmail.com>", "Kushal Thakkar <kushal.thakkar@gmail.com>"]
@@ -32,6 +32,32 @@ class RedshiftTypeConverter(BaseTypeConverter):
32
32
 
33
33
  def convert(self, value: Any, target_type: Type) -> Any:
34
34
  """Convert Redshift result value to target type."""
35
+ # Handle None/NULL values first
36
+ if value is None:
37
+ return None
38
+
39
+ # Handle Optional types
40
+ if self.is_optional_type(target_type):
41
+ if value is None:
42
+ return None
43
+ target_type = self.get_optional_inner_type(target_type)
44
+
45
+ # Handle dict/map types from Redshift SUPER columns
46
+ if hasattr(target_type, "__origin__") and target_type.__origin__ is dict:
47
+ # Redshift returns SUPER types as Python dicts already
48
+ if isinstance(value, dict):
49
+ return value
50
+ # If it's a string (shouldn't happen with psycopg2), parse it
51
+ elif isinstance(value, str):
52
+ import json
53
+
54
+ try:
55
+ return json.loads(value)
56
+ except json.JSONDecodeError:
57
+ return {}
58
+ else:
59
+ return {}
60
+
35
61
  # Redshift returns proper Python types in most cases, so use base converter
36
62
  return super().convert(value, target_type)
37
63
 
@@ -190,7 +216,11 @@ class RedshiftAdapter(DatabaseAdapter):
190
216
  if non_none_types:
191
217
  col_type = non_none_types[0]
192
218
 
193
- redshift_type = type_mapping.get(col_type, "VARCHAR")
219
+ # Handle dict/map types
220
+ if hasattr(col_type, "__origin__") and col_type.__origin__ is dict:
221
+ redshift_type = "SUPER"
222
+ else:
223
+ redshift_type = type_mapping.get(col_type, "VARCHAR")
194
224
  column_defs.append(f'"{col_name}" {redshift_type}')
195
225
 
196
226
  columns_sql = ",\n ".join(column_defs)
@@ -1,8 +1,19 @@
1
1
  """SQL utility functions for escaping and formatting values."""
2
2
 
3
+ import json
4
+ from decimal import Decimal
3
5
  from typing import Any, Type
4
6
 
5
7
 
8
+ class DecimalEncoder(json.JSONEncoder):
9
+ """JSON encoder that handles Decimal types."""
10
+
11
+ def default(self, o):
12
+ if isinstance(o, Decimal):
13
+ return float(o)
14
+ return super().default(o)
15
+
16
+
6
17
  def escape_sql_string(value: str) -> str:
7
18
  """
8
19
  Escape a string value for SQL using standard SQL escaping rules.
@@ -153,6 +164,9 @@ def format_sql_value(value: Any, column_type: Type, dialect: str = "standard") -
153
164
  sql_key_type = get_sql_type(key_type)
154
165
  sql_value_type = get_sql_type(value_type)
155
166
  return f"CAST(NULL AS MAP({sql_key_type}, {sql_value_type}))"
167
+ elif dialect == "redshift":
168
+ # Redshift SUPER type handles NULL maps
169
+ return "NULL::SUPER"
156
170
  else:
157
171
  return "NULL"
158
172
 
@@ -220,9 +234,6 @@ def format_sql_value(value: Any, column_type: Type, dialect: str = "standard") -
220
234
  elif dialect == "redshift":
221
235
  # Redshift uses JSON-like syntax for SUPER arrays
222
236
  # Format elements as JSON (double quotes for strings)
223
- import json
224
- from decimal import Decimal
225
-
226
237
  # Convert elements to JSON-serializable types
227
238
  json_elements = []
228
239
  for element in value:
@@ -274,9 +285,13 @@ def format_sql_value(value: Any, column_type: Type, dialect: str = "standard") -
274
285
  keys.append(format_sql_value(k, key_type, dialect))
275
286
  values.append(format_sql_value(v, value_type, dialect))
276
287
  return f"MAP(ARRAY[{', '.join(keys)}], ARRAY[{', '.join(values)}])"
288
+ elif dialect == "redshift":
289
+ # Redshift uses SUPER type with JSON-like syntax for maps
290
+ json_str = json.dumps(value, cls=DecimalEncoder)
291
+ return f"JSON_PARSE('{json_str}')"
277
292
  else:
278
293
  # Other databases don't have native map support yet
279
- # Could potentially use JSON for BigQuery, Redshift, Snowflake
294
+ # Could potentially use JSON for BigQuery, Snowflake
280
295
  raise NotImplementedError(f"Map type not yet supported for dialect: {dialect}")
281
296
 
282
297
  # Handle string types