sql-testing-library 0.17.0__tar.gz → 0.19.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/CHANGELOG.md +31 -0
  2. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/PKG-INFO +8 -4
  3. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/README.md +5 -1
  4. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/pyproject.toml +20 -3
  5. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/__init__.py +1 -1
  6. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/bigquery.py +10 -11
  7. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/duckdb.py +10 -11
  8. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/presto.py +9 -9
  9. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/redshift.py +5 -5
  10. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/snowflake.py +4 -5
  11. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_mock_table.py +8 -2
  12. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_types.py +39 -8
  13. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/LICENSE +0 -0
  14. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/__init__.py +0 -0
  15. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/athena.py +0 -0
  16. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/base.py +0 -0
  17. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_adapters/trino.py +0 -0
  18. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_core.py +0 -0
  19. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_exceptions.py +0 -0
  20. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_pytest_plugin.py +0 -0
  21. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_sql_logger.py +0 -0
  22. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/_sql_utils.py +0 -0
  23. {sql_testing_library-0.17.0 → sql_testing_library-0.19.0}/src/sql_testing_library/py.typed +0 -0
@@ -5,6 +5,37 @@ All notable changes to this project will be documented in this file.
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+ ## 0.19.0 (2025-12-07)
+
+ ### Feat
+
+ - reorganize documentation navigation for better learning flow
+ - comprehensive SEO optimization for better search visibility
+ - restrict CodeCov uploads to master branch only
+ - enable CodeCov carryforward for all coverage flags
+
+ ### Fix
+
+ - add support for Python 3.10+ pipe-none (X | None) union syntax (#133)
+ - sanitize CTE aliases and sync package version (#132)
+ - align robots.txt and sitemap with Google's official guidelines
+ - shorten page titles for better readability and display
+ - optimize robots.txt for better Google Search Console compatibility
+
+ ## 0.18.0 (2025-12-01)
+
+ ### Feat
+
+ - add Google Analytics tracking
+ - improve SEO across PyPI, GitHub, and documentation
+
+ ### Fix
+
+ - update codecov-action parameter from 'file' to 'files'
+ - handle pytest-xdist environment in worker ID test
+ - shorten site title to prevent display issues
+ - update Twitter handle to @saran_gurmeet and remove non-existent image references
+
  ## 0.17.0 (2025-11-20)
 
  ### Feat
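The pipe-none fix (#133) means a column may be annotated with either spelling of an optional type. A minimal sketch with a hypothetical `User` row class (not from the package; the pipe syntax assumes Python 3.10+):

```python
from dataclasses import dataclass
from datetime import date
from typing import Optional


@dataclass
class User:  # hypothetical mock-table row type
    id: int
    name: Optional[str]       # pre-3.10 spelling, already supported
    signup_date: date | None  # 3.10+ pipe spelling, supported as of 0.19.0
```

Both annotations denote the same union at runtime, so the adapters now treat them identically.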
@@ -1,10 +1,10 @@
  Metadata-Version: 2.4
  Name: sql-testing-library
- Version: 0.17.0
- Summary: A powerful Python framework for unit testing SQL queries across BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB with mock data
+ Version: 0.19.0
+ Summary: SQL Testing Framework for Python: Unit test SQL queries with mock data injection for BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB. Simplify data engineering ETL testing and analytics validation.
  License: MIT
  License-File: LICENSE
- Keywords: sql,testing,unit-testing,mock-data,database-testing,bigquery,snowflake,redshift,athena,trino,duckdb,data-engineering,etl-testing,sql-validation,query-testing
+ Keywords: sql,testing,unit-testing,mock-data,database-testing,bigquery,snowflake,redshift,athena,trino,duckdb,data-engineering,etl-testing,sql-validation,query-testing,pytest,data-quality,analytics-testing,sql-unit-test,database-mocking,sql-mock,data-warehouse-testing,cloud-database,aws-athena,google-bigquery,amazon-redshift,snowflake-testing,ci-cd-testing,test-automation
  Author: Gurmeet Saran
  Author-email: gurmeetx@gmail.com
  Maintainer: Gurmeet Saran
@@ -60,7 +60,11 @@ Description-Content-Type: text/markdown
 
  # SQL Testing Library
 
- A powerful Python framework for unit testing SQL queries with mock data injection across BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB.
+ > **A powerful Python framework for unit testing SQL queries with mock data injection**
+ >
+ > Test SQL queries across BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB with type-safe mock data, pytest integration, and automatic table resolution. Perfect for data engineering, ETL pipeline testing, and analytics validation.
+
+ **Quick Links:** [Installation](#installation) | [Quick Start](#quick-start) | [Documentation](https://gurmeetsaran.github.io/sqltesting/) | [Examples](https://gurmeetsaran.github.io/sqltesting/examples.html) | [PyPI Package](https://pypi.org/project/sql-testing-library/)
 
  [![Unit Tests](https://github.com/gurmeetsaran/sqltesting/actions/workflows/tests.yaml/badge.svg)](https://github.com/gurmeetsaran/sqltesting/actions/workflows/tests.yaml)
  [![Athena Integration](https://github.com/gurmeetsaran/sqltesting/actions/workflows/athena-integration.yml/badge.svg)](https://github.com/gurmeetsaran/sqltesting/actions/workflows/athena-integration.yml)
@@ -1,6 +1,10 @@
  # SQL Testing Library
 
- A powerful Python framework for unit testing SQL queries with mock data injection across BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB.
+ > **A powerful Python framework for unit testing SQL queries with mock data injection**
+ >
+ > Test SQL queries across BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB with type-safe mock data, pytest integration, and automatic table resolution. Perfect for data engineering, ETL pipeline testing, and analytics validation.
+
+ **Quick Links:** [Installation](#installation) | [Quick Start](#quick-start) | [Documentation](https://gurmeetsaran.github.io/sqltesting/) | [Examples](https://gurmeetsaran.github.io/sqltesting/examples.html) | [PyPI Package](https://pypi.org/project/sql-testing-library/)
 
  [![Unit Tests](https://github.com/gurmeetsaran/sqltesting/actions/workflows/tests.yaml/badge.svg)](https://github.com/gurmeetsaran/sqltesting/actions/workflows/tests.yaml)
  [![Athena Integration](https://github.com/gurmeetsaran/sqltesting/actions/workflows/athena-integration.yml/badge.svg)](https://github.com/gurmeetsaran/sqltesting/actions/workflows/athena-integration.yml)
@@ -4,8 +4,8 @@ build-backend = "poetry.core.masonry.api"
 
  [tool.poetry]
  name = "sql-testing-library"
- version = "0.17.0"
- description = "A powerful Python framework for unit testing SQL queries across BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB with mock data"
+ version = "0.19.0"
+ description = "SQL Testing Framework for Python: Unit test SQL queries with mock data injection for BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB. Simplify data engineering ETL testing and analytics validation."
  authors = ["Gurmeet Saran <gurmeetx@gmail.com>", "Kushal Thakkar <kushal.thakkar@gmail.com>"]
  maintainers = ["Gurmeet Saran <gurmeetx@gmail.com>", "Kushal Thakkar <kushal.thakkar@gmail.com>"]
  license = "MIT"
@@ -28,7 +28,21 @@ keywords = [
  "data-engineering",
  "etl-testing",
  "sql-validation",
- "query-testing"
+ "query-testing",
+ "pytest",
+ "data-quality",
+ "analytics-testing",
+ "sql-unit-test",
+ "database-mocking",
+ "sql-mock",
+ "data-warehouse-testing",
+ "cloud-database",
+ "aws-athena",
+ "google-bigquery",
+ "amazon-redshift",
+ "snowflake-testing",
+ "ci-cd-testing",
+ "test-automation"
  ]
  classifiers = [
  "Development Status :: 4 - Beta",
@@ -292,3 +306,6 @@ update_changelog_on_bump = true
  major_version_zero = true
  changelog_merge_prerelease = true
  changelog_start_rev = "0.1.0"
+ version_files = [
+ "src/sql_testing_library/__init__.py:__version__"
+ ]
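The new `version_files` entry is the "sync package version" half of fix #132: on each bump, commitizen rewrites the version string assigned to the named variable in the named file. A minimal sketch of the relevant block (the `version_files` line is from the diff above; the `version` value is illustrative):

```toml
[tool.commitizen]
version = "0.19.0"  # illustrative; rewritten automatically by `cz bump`
version_files = [
    "src/sql_testing_library/__init__.py:__version__"
]
```

This explains the `__init__.py` change below, where a stale `__version__ = "0.3.0"` finally catches up to the package version.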
@@ -27,7 +27,7 @@ try:
  except ImportError:
  __all__ = []
 
- __version__ = "0.3.0"
+ __version__ = "0.19.0"
  __all__.extend(
  [
  "SQLTestFramework",
@@ -10,7 +10,6 @@ from typing import (
  Optional,
  Tuple,
  Type,
- Union,
  get_args,
  get_type_hints,
  )
@@ -22,7 +21,7 @@ if TYPE_CHECKING:
 
  # Heavy imports moved to function level for better performance
  from .._mock_table import BaseMockTable
- from .._types import BaseTypeConverter
+ from .._types import BaseTypeConverter, is_union_type
  from .base import DatabaseAdapter
 
 
@@ -247,9 +246,9 @@ class BigQueryAdapter(DatabaseAdapter):
 
  schema = []
  for col_name, col_type in column_types.items():
- # Handle Optional types
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  col_type = non_none_types[0]
@@ -319,9 +318,9 @@ class BigQueryAdapter(DatabaseAdapter):
  type_hints = get_type_hints(struct_type)
 
  for field_name, field_type in type_hints.items():
- # Handle Optional types
- if hasattr(field_type, "__origin__") and field_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(field_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(field_type) if arg is not type(None)]
  if non_none_types:
  field_type = non_none_types[0]
@@ -388,9 +387,9 @@ class BigQueryAdapter(DatabaseAdapter):
  column_types = mock_table.get_column_types()
 
  for col_name, col_type in column_types.items():
- # Handle Optional types
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  col_type = non_none_types[0]
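The same substitution repeats in every adapter below. The reason the old guard missed the pipe syntax: `Optional[X]` has `typing.Union` as its origin, while `X | None` produces a `types.UnionType`, a different object entirely. A quick illustration using only the standard library (Python 3.10+):

```python
import types
from typing import Optional, Union, get_origin

# Optional[int] is typing.Union[int, None]: the old check matched it.
print(get_origin(Optional[int]) is Union)         # True

# int | None is a types.UnionType, not typing.Union: the old check missed it.
print(get_origin(int | None) is Union)            # False
print(get_origin(int | None) is types.UnionType)  # True
```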
@@ -10,7 +10,6 @@ from typing import (
  Optional,
  Tuple,
  Type,
- Union,
  get_args,
  get_type_hints,
  )
@@ -22,7 +21,7 @@ if TYPE_CHECKING:
 
  # Heavy imports moved to function level for better performance
  from .._mock_table import BaseMockTable
- from .._types import BaseTypeConverter
+ from .._types import BaseTypeConverter, is_union_type
  from .base import DatabaseAdapter
 
 
@@ -237,9 +236,9 @@ class DuckDBAdapter(DatabaseAdapter):
 
  column_defs = []
  for col_name, col_type in column_types.items():
- # Handle Optional types
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  col_type = non_none_types[0]
@@ -301,9 +300,9 @@ class DuckDBAdapter(DatabaseAdapter):
  field_defs = []
 
  for field_name, field_type in type_hints.items():
- # Handle Optional types
- if hasattr(field_type, "__origin__") and field_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(field_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(field_type) if arg is not type(None)]
  if non_none_types:
  field_type = non_none_types[0]
@@ -360,9 +359,9 @@ class DuckDBAdapter(DatabaseAdapter):
  column_types = mock_table.get_column_types()
 
  for col_name, col_type in column_types.items():
- # Handle Optional types
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  col_type = non_none_types[0]
@@ -2,10 +2,10 @@
 
  from datetime import date, datetime
  from decimal import Decimal
- from typing import Any, List, Tuple, Type, Union, get_args
+ from typing import Any, List, Tuple, Type, get_args
 
  from .._mock_table import BaseMockTable
- from .._types import BaseTypeConverter, is_struct_type
+ from .._types import BaseTypeConverter, is_struct_type, is_union_type
  from .base import DatabaseAdapter
 
 
@@ -77,9 +77,9 @@ class PrestoBaseAdapter(DatabaseAdapter):
  """Convert Python type to SQL type string."""
  from .._sql_utils import get_sql_type_string
 
- # Handle Optional types
- if hasattr(python_type, "__origin__") and python_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(python_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(python_type) if arg is not type(None)]
  if non_none_types:
  python_type = non_none_types[0]
@@ -164,8 +164,8 @@ class PrestoBaseAdapter(DatabaseAdapter):
 
  # Handle Optional types by extracting the non-None type for proper formatting
  actual_type = col_type
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  actual_type = non_none_types[0]
@@ -186,8 +186,8 @@ class PrestoBaseAdapter(DatabaseAdapter):
 
  # Handle Optional types by extracting the non-None type for proper formatting
  actual_type = col_type
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  actual_type = non_none_types[0]
@@ -2,7 +2,7 @@
 
  from datetime import date, datetime
  from decimal import Decimal
- from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, Union, get_args
+ from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, get_args
 
 
  if TYPE_CHECKING:
@@ -12,7 +12,7 @@ if TYPE_CHECKING:
 
  # Heavy import moved to function level for better performance
  from .._mock_table import BaseMockTable
- from .._types import BaseTypeConverter
+ from .._types import BaseTypeConverter, is_union_type
  from .base import DatabaseAdapter
 
 
@@ -202,9 +202,9 @@ class RedshiftAdapter(DatabaseAdapter):
  # Generate column definitions
  column_defs = []
  for col_name, col_type in column_types.items():
- # Handle Optional types
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  col_type = non_none_types[0]
@@ -13,7 +13,6 @@ from typing import (
  Optional,
  Tuple,
  Type,
- Union,
  get_args,
  )
 
@@ -23,7 +22,7 @@ if TYPE_CHECKING:
 
  # Heavy import moved to function level for better performance
  from .._mock_table import BaseMockTable
- from .._types import BaseTypeConverter
+ from .._types import BaseTypeConverter, is_union_type
  from .base import DatabaseAdapter
 
 
@@ -364,9 +363,9 @@ class SnowflakeAdapter(DatabaseAdapter):
  # Generate column definitions
  column_defs = []
  for col_name, col_type in column_types.items():
- # Handle Optional types
- if hasattr(col_type, "__origin__") and col_type.__origin__ is Union:
- # Extract the non-None type from Optional[T]
+ # Handle Optional types (both Optional[X] and X | None)
+ if is_union_type(col_type):
+ # Extract the non-None type from Optional[T] or T | None
  non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
  if non_none_types:
  col_type = non_none_types[0]
@@ -196,5 +196,11 @@ class BaseMockTable(ABC):
  return df
 
  def get_cte_alias(self) -> str:
- """Get the CTE alias name (database__tablename)."""
- return f"{self.get_database_name().replace('-', '_').replace('.', '_')}__{self.get_table_name()}"  # noqa: E501
+ """Get the CTE alias name (database__tablename).
+
+ Replaces '-' and '.' with '_' to ensure valid BigQuery CTE names,
+ as BigQuery CTEs cannot contain hyphens or dots.
+ """
+ db_name = self.get_database_name().replace("-", "_").replace(".", "_")
+ table_name = self.get_table_name().replace("-", "_").replace(".", "_")
+ return f"{db_name}__{table_name}"
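Worked through with illustrative names: a mock table whose database is `my-project.analytics` and whose table is `daily-events` now yields a valid alias, whereas previously only the database part was sanitized:

```python
# Same replacement logic as get_cte_alias above, with hypothetical input values.
db_name = "my-project.analytics".replace("-", "_").replace(".", "_")
table_name = "daily-events".replace("-", "_").replace(".", "_")
print(f"{db_name}__{table_name}")  # my_project_analytics__daily_events
```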
@@ -29,6 +29,35 @@ except ImportError:
  pydantic_available = False
 
 
+ def is_union_type(type_hint: Type) -> bool:
+ """Check if a type is a Union type (including Optional).
+
+ This function handles both:
+ - typing.Optional[X] / typing.Union[X, None] (Python 3.9+)
+ - X | None syntax (Python 3.10+)
+
+ Args:
+ type_hint: The type to check
+
+ Returns:
+ True if the type is a union type, False otherwise
+ """
+ origin = get_origin(type_hint)
+ if origin is None:
+ return False
+
+ # Handle typing.Union (used by Optional[X])
+ if origin is Union:
+ return True
+
+ # Handle types.UnionType (used by X | None in Python 3.10+)
+ # We check the name because types.UnionType may not be available in Python 3.9
+ if hasattr(origin, "__name__") and origin.__name__ == "UnionType":
+ return True
+
+ return False
+
+
  def is_pydantic_model_class(cls: Type) -> bool:
  """Check if a class is a Pydantic model class."""
  if not pydantic_available or BaseModel is None:
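A quick sanity check of the new helper as defined above (importing from the package's private `_types` module purely for illustration; the pipe syntax needs Python 3.10+):

```python
from typing import Optional, Union

from sql_testing_library._types import is_union_type

assert is_union_type(Optional[int])      # origin is typing.Union
assert is_union_type(Union[str, None])   # explicit typing.Union spelling
assert is_union_type(str | None)         # origin is types.UnionType
assert not is_union_type(int)            # plain type, no origin at all
assert not is_union_type(list[int])      # parameterized, but not a union
```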
@@ -42,9 +71,9 @@ def is_pydantic_model_class(cls: Type) -> bool:
42
71
 
43
72
  def is_struct_type(type_hint: Type) -> bool:
44
73
  """Check if a type is a struct type (dataclass or Pydantic model)."""
45
- # Handle Optional types
46
- if hasattr(type_hint, "__origin__") and type_hint.__origin__ is Union:
47
- # Extract the non-None type from Optional[T]
74
+ # Handle Optional types (both Optional[X] and X | None)
75
+ if is_union_type(type_hint):
76
+ # Extract the non-None type from Optional[T] or T | None
48
77
  non_none_types = [arg for arg in get_args(type_hint) if arg is not type(None)]
49
78
  if non_none_types:
50
79
  type_hint = non_none_types[0]
@@ -427,15 +456,17 @@ class BaseTypeConverter:
 
 
  def unwrap_optional_type(col_type: Type[Any]) -> Type[Any]:
- """Unwrap Optional[T] to T, leave other types unchanged.
+ """Unwrap Optional[T] or T | None to T, leave other types unchanged.
 
  This is a utility function that can be used by adapters and mock tables
- to handle Optional types consistently.
+ to handle Optional types consistently. Supports both:
+ - typing.Optional[T] / typing.Union[T, None] (Python 3.9+)
+ - T | None syntax (Python 3.10+)
  """
- # Check if this is a Union type (which Optional[T] is)
- if get_origin(col_type) is Union:
+ # Check if this is a Union type (which Optional[T] and T | None both are)
+ if is_union_type(col_type):
  args = get_args(col_type)
- # Optional[T] is Union[T, None], so filter out NoneType
+ # Optional[T] or T | None is Union[T, None], so filter out NoneType
  non_none_types = [arg for arg in args if arg is not type(None)]
  if non_none_types:
  return cast(Type[Any], non_none_types[0])  # Return the first non-None type
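And the corresponding behavior of `unwrap_optional_type` after the change (again importing from the private module for illustration only; pipe syntax needs Python 3.10+):

```python
from typing import Optional

from sql_testing_library._types import unwrap_optional_type

assert unwrap_optional_type(Optional[str]) is str  # typing.Optional spelling
assert unwrap_optional_type(str | None) is str     # 3.10+ pipe spelling
assert unwrap_optional_type(int) is int            # non-unions pass through unchanged
```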