sql-testing-library 0.18.0__tar.gz → 0.20.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/CHANGELOG.md +23 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/PKG-INFO +102 -5
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/README.md +101 -4
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/pyproject.toml +4 -1
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/__init__.py +3 -2
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/bigquery.py +10 -11
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/duckdb.py +10 -11
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/presto.py +9 -9
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/redshift.py +5 -5
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/snowflake.py +4 -5
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_mock_table.py +99 -2
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_types.py +39 -8
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/LICENSE +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/__init__.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/athena.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/base.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/trino.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_core.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_exceptions.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_pytest_plugin.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_sql_logger.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_sql_utils.py +0 -0
- {sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/py.typed +0 -0
|
@@ -5,6 +5,29 @@ All notable changes to this project will be documented in this file.
|
|
|
5
5
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
|
6
6
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
|
7
7
|
|
|
8
|
+
## 0.20.0 (2025-12-07)
|
|
9
|
+
|
|
10
|
+
### Feat
|
|
11
|
+
|
|
12
|
+
- add BigQueryMockTable class for explicit three-part naming (#134)
|
|
13
|
+
|
|
14
|
+
## 0.19.0 (2025-12-07)
|
|
15
|
+
|
|
16
|
+
### Feat
|
|
17
|
+
|
|
18
|
+
- reorganize documentation navigation for better learning flow
|
|
19
|
+
- comprehensive SEO optimization for better search visibility
|
|
20
|
+
- restrict CodeCov uploads to master branch only
|
|
21
|
+
- enable CodeCov carryforward for all coverage flags
|
|
22
|
+
|
|
23
|
+
### Fix
|
|
24
|
+
|
|
25
|
+
- add support for Python 3.10+ pipe-none (X | None) union syntax (#133)
|
|
26
|
+
- sanitize CTE aliases and sync package version (#132)
|
|
27
|
+
- align robots.txt and sitemap with Google's official guidelines
|
|
28
|
+
- shorten page titles for better readability and display
|
|
29
|
+
- optimize robots.txt for better Google Search Console compatibility
|
|
30
|
+
|
|
8
31
|
## 0.18.0 (2025-12-01)
|
|
9
32
|
|
|
10
33
|
### Feat
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: sql-testing-library
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.20.0
|
|
4
4
|
Summary: SQL Testing Framework for Python: Unit test SQL queries with mock data injection for BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB. Simplify data engineering ETL testing and analytics validation.
|
|
5
5
|
License: MIT
|
|
6
6
|
License-File: LICENSE
|
|
@@ -570,8 +570,16 @@ TestCase(
|
|
|
570
570
|
#### Example Mock Table Implementations:
|
|
571
571
|
|
|
572
572
|
```python
|
|
573
|
-
# BigQuery Mock Table
|
|
574
|
-
|
|
573
|
+
# BigQuery Mock Table (Recommended: Use BigQueryMockTable for clearer three-part naming)
|
|
574
|
+
from sql_testing_library import BigQueryMockTable
|
|
575
|
+
|
|
576
|
+
class UsersMockTable(BigQueryMockTable):
|
|
577
|
+
project_name = "test-project"
|
|
578
|
+
dataset_name = "test_dataset"
|
|
579
|
+
table_name = "users"
|
|
580
|
+
|
|
581
|
+
# BigQuery Mock Table (Alternative: Use BaseMockTable with combined project.dataset)
|
|
582
|
+
class UsersMockTableAlternative(BaseMockTable):
|
|
575
583
|
def get_database_name(self) -> str:
|
|
576
584
|
return "test-project.test_dataset" # project.dataset format
|
|
577
585
|
|
|
@@ -1047,6 +1055,7 @@ The adapter_type parameter will use the configuration from the corresponding sec
|
|
|
1047
1055
|
- Supports Google Cloud BigQuery service
|
|
1048
1056
|
- Uses UNION ALL pattern for CTE creation with complex data types
|
|
1049
1057
|
- Handles authentication via service account or application default credentials
|
|
1058
|
+
- **Special Feature**: `BigQueryMockTable` class for explicit three-part naming (project.dataset.table)
|
|
1050
1059
|
|
|
1051
1060
|
#### Athena Adapter
|
|
1052
1061
|
- Supports Amazon Athena service for querying data in S3
|
|
@@ -1085,6 +1094,94 @@ The adapter_type parameter will use the configuration from the corresponding sec
|
|
|
1085
1094
|
- No authentication required - perfect for local development and testing
|
|
1086
1095
|
- Excellent performance for analytical workloads
|
|
1087
1096
|
|
|
1097
|
+
### BigQuery-Specific: BigQueryMockTable
|
|
1098
|
+
|
|
1099
|
+
BigQuery uses a three-part naming scheme (`project.dataset.table`) which doesn't fit naturally into the two-part `database.table` model used by most databases. The `BigQueryMockTable` class provides explicit support for BigQuery's naming convention.
|
|
1100
|
+
|
|
1101
|
+
#### The Problem with BaseMockTable
|
|
1102
|
+
|
|
1103
|
+
Using `BaseMockTable` for BigQuery requires awkwardly cramming the project and dataset together:
|
|
1104
|
+
|
|
1105
|
+
```python
|
|
1106
|
+
# Awkward: Combines project.dataset into database_name
|
|
1107
|
+
class UsersMockTable(BaseMockTable):
|
|
1108
|
+
def get_database_name(self) -> str:
|
|
1109
|
+
return "test-project.test_dataset" # Confusing!
|
|
1110
|
+
|
|
1111
|
+
def get_table_name(self) -> str:
|
|
1112
|
+
return "users"
|
|
1113
|
+
```
|
|
1114
|
+
|
|
1115
|
+
#### The Solution: BigQueryMockTable
|
|
1116
|
+
|
|
1117
|
+
`BigQueryMockTable` makes BigQuery's three-part naming explicit and clear:
|
|
1118
|
+
|
|
1119
|
+
```python
|
|
1120
|
+
from sql_testing_library import BigQueryMockTable
|
|
1121
|
+
|
|
1122
|
+
class UsersMockTable(BigQueryMockTable):
|
|
1123
|
+
project_name = "test-project"
|
|
1124
|
+
dataset_name = "test_dataset"
|
|
1125
|
+
table_name = "users"
|
|
1126
|
+
```
|
|
1127
|
+
|
|
1128
|
+
#### Usage Examples
|
|
1129
|
+
|
|
1130
|
+
**Basic Usage:**
|
|
1131
|
+
```python
|
|
1132
|
+
from sql_testing_library import BigQueryMockTable
|
|
1133
|
+
|
|
1134
|
+
class UsersMockTable(BigQueryMockTable):
|
|
1135
|
+
project_name = "my-project"
|
|
1136
|
+
dataset_name = "analytics"
|
|
1137
|
+
table_name = "users"
|
|
1138
|
+
|
|
1139
|
+
class OrdersMockTable(BigQueryMockTable):
|
|
1140
|
+
project_name = "my-project"
|
|
1141
|
+
dataset_name = "analytics"
|
|
1142
|
+
table_name = "orders"
|
|
1143
|
+
```
|
|
1144
|
+
|
|
1145
|
+
**Avoid Repetition with Inheritance:**
|
|
1146
|
+
```python
|
|
1147
|
+
# Base class for all tables in the same project
|
|
1148
|
+
class MyProjectTable(BigQueryMockTable):
|
|
1149
|
+
project_name = "my-project"
|
|
1150
|
+
|
|
1151
|
+
# Subclasses only specify dataset and table
|
|
1152
|
+
class UsersTable(MyProjectTable):
|
|
1153
|
+
dataset_name = "analytics"
|
|
1154
|
+
table_name = "users"
|
|
1155
|
+
|
|
1156
|
+
class OrdersTable(MyProjectTable):
|
|
1157
|
+
dataset_name = "analytics"
|
|
1158
|
+
table_name = "orders"
|
|
1159
|
+
```
|
|
1160
|
+
|
|
1161
|
+
**Available Methods:**
|
|
1162
|
+
```python
|
|
1163
|
+
table = UsersMockTable([...])
|
|
1164
|
+
|
|
1165
|
+
# BigQuery-specific methods
|
|
1166
|
+
table.get_project_name() # "my-project"
|
|
1167
|
+
table.get_dataset_name() # "analytics"
|
|
1168
|
+
table.get_fully_qualified_name() # "my-project.analytics.users"
|
|
1169
|
+
|
|
1170
|
+
# Backwards compatible methods (from BaseMockTable)
|
|
1171
|
+
table.get_database_name() # "my-project.analytics"
|
|
1172
|
+
table.get_table_name() # "users"
|
|
1173
|
+
table.get_qualified_name() # "my-project.analytics.users"
|
|
1174
|
+
table.get_cte_alias() # "my_project_analytics__users"
|
|
1175
|
+
```
|
|
1176
|
+
|
|
1177
|
+
**Benefits:**
|
|
1178
|
+
- ✅ **Clear Semantics**: Each BigQuery component is explicit
|
|
1179
|
+
- ✅ **No Confusion**: No more cramming project.dataset together
|
|
1180
|
+
- ✅ **Type Safe**: Full type hints and IDE autocomplete
|
|
1181
|
+
- ✅ **Backwards Compatible**: Still implements all `BaseMockTable` methods
|
|
1182
|
+
- ✅ **Simple**: Just 3 class variables to set
|
|
1183
|
+
- ✅ **Flexible**: Use inheritance to share common properties
|
|
1184
|
+
|
|
1088
1185
|
**Default Behavior:**
|
|
1089
1186
|
- If adapter_type is not specified in the TestCase or decorator, the library will use the adapter specified in the `[sql_testing]` section's `adapter` setting.
|
|
1090
1187
|
- If no adapter is specified in the `[sql_testing]` section, it defaults to "bigquery".
|
|
@@ -1291,7 +1388,7 @@ The library automatically:
|
|
|
1291
1388
|
- Injects mock data via CTEs or temp tables
|
|
1292
1389
|
- Deserializes results to typed Python objects
|
|
1293
1390
|
|
|
1294
|
-
For detailed usage and configuration options, see the
|
|
1391
|
+
For detailed usage and configuration options, see the [documentation](https://gurmeetsaran.github.io/sqltesting/).
|
|
1295
1392
|
|
|
1296
1393
|
## Integration with Mocksmith
|
|
1297
1394
|
|
|
@@ -1326,7 +1423,7 @@ class Customer:
|
|
|
1326
1423
|
customers = [Customer.mock() for _ in range(100)]
|
|
1327
1424
|
```
|
|
1328
1425
|
|
|
1329
|
-
See the [Mocksmith Integration Guide](docs/mocksmith_integration.md)
|
|
1426
|
+
See the [Mocksmith Integration Guide](docs/mocksmith_integration.md) for detailed usage patterns.
|
|
1330
1427
|
|
|
1331
1428
|
## Known Limitations and TODOs
|
|
1332
1429
|
|
|
@@ -510,8 +510,16 @@ TestCase(
|
|
|
510
510
|
#### Example Mock Table Implementations:
|
|
511
511
|
|
|
512
512
|
```python
|
|
513
|
-
# BigQuery Mock Table
|
|
514
|
-
|
|
513
|
+
# BigQuery Mock Table (Recommended: Use BigQueryMockTable for clearer three-part naming)
|
|
514
|
+
from sql_testing_library import BigQueryMockTable
|
|
515
|
+
|
|
516
|
+
class UsersMockTable(BigQueryMockTable):
|
|
517
|
+
project_name = "test-project"
|
|
518
|
+
dataset_name = "test_dataset"
|
|
519
|
+
table_name = "users"
|
|
520
|
+
|
|
521
|
+
# BigQuery Mock Table (Alternative: Use BaseMockTable with combined project.dataset)
|
|
522
|
+
class UsersMockTableAlternative(BaseMockTable):
|
|
515
523
|
def get_database_name(self) -> str:
|
|
516
524
|
return "test-project.test_dataset" # project.dataset format
|
|
517
525
|
|
|
@@ -987,6 +995,7 @@ The adapter_type parameter will use the configuration from the corresponding sec
|
|
|
987
995
|
- Supports Google Cloud BigQuery service
|
|
988
996
|
- Uses UNION ALL pattern for CTE creation with complex data types
|
|
989
997
|
- Handles authentication via service account or application default credentials
|
|
998
|
+
- **Special Feature**: `BigQueryMockTable` class for explicit three-part naming (project.dataset.table)
|
|
990
999
|
|
|
991
1000
|
#### Athena Adapter
|
|
992
1001
|
- Supports Amazon Athena service for querying data in S3
|
|
@@ -1025,6 +1034,94 @@ The adapter_type parameter will use the configuration from the corresponding sec
|
|
|
1025
1034
|
- No authentication required - perfect for local development and testing
|
|
1026
1035
|
- Excellent performance for analytical workloads
|
|
1027
1036
|
|
|
1037
|
+
### BigQuery-Specific: BigQueryMockTable
|
|
1038
|
+
|
|
1039
|
+
BigQuery uses a three-part naming scheme (`project.dataset.table`) which doesn't fit naturally into the two-part `database.table` model used by most databases. The `BigQueryMockTable` class provides explicit support for BigQuery's naming convention.
|
|
1040
|
+
|
|
1041
|
+
#### The Problem with BaseMockTable
|
|
1042
|
+
|
|
1043
|
+
Using `BaseMockTable` for BigQuery requires awkwardly cramming the project and dataset together:
|
|
1044
|
+
|
|
1045
|
+
```python
|
|
1046
|
+
# Awkward: Combines project.dataset into database_name
|
|
1047
|
+
class UsersMockTable(BaseMockTable):
|
|
1048
|
+
def get_database_name(self) -> str:
|
|
1049
|
+
return "test-project.test_dataset" # Confusing!
|
|
1050
|
+
|
|
1051
|
+
def get_table_name(self) -> str:
|
|
1052
|
+
return "users"
|
|
1053
|
+
```
|
|
1054
|
+
|
|
1055
|
+
#### The Solution: BigQueryMockTable
|
|
1056
|
+
|
|
1057
|
+
`BigQueryMockTable` makes BigQuery's three-part naming explicit and clear:
|
|
1058
|
+
|
|
1059
|
+
```python
|
|
1060
|
+
from sql_testing_library import BigQueryMockTable
|
|
1061
|
+
|
|
1062
|
+
class UsersMockTable(BigQueryMockTable):
|
|
1063
|
+
project_name = "test-project"
|
|
1064
|
+
dataset_name = "test_dataset"
|
|
1065
|
+
table_name = "users"
|
|
1066
|
+
```
|
|
1067
|
+
|
|
1068
|
+
#### Usage Examples
|
|
1069
|
+
|
|
1070
|
+
**Basic Usage:**
|
|
1071
|
+
```python
|
|
1072
|
+
from sql_testing_library import BigQueryMockTable
|
|
1073
|
+
|
|
1074
|
+
class UsersMockTable(BigQueryMockTable):
|
|
1075
|
+
project_name = "my-project"
|
|
1076
|
+
dataset_name = "analytics"
|
|
1077
|
+
table_name = "users"
|
|
1078
|
+
|
|
1079
|
+
class OrdersMockTable(BigQueryMockTable):
|
|
1080
|
+
project_name = "my-project"
|
|
1081
|
+
dataset_name = "analytics"
|
|
1082
|
+
table_name = "orders"
|
|
1083
|
+
```
|
|
1084
|
+
|
|
1085
|
+
**Avoid Repetition with Inheritance:**
|
|
1086
|
+
```python
|
|
1087
|
+
# Base class for all tables in the same project
|
|
1088
|
+
class MyProjectTable(BigQueryMockTable):
|
|
1089
|
+
project_name = "my-project"
|
|
1090
|
+
|
|
1091
|
+
# Subclasses only specify dataset and table
|
|
1092
|
+
class UsersTable(MyProjectTable):
|
|
1093
|
+
dataset_name = "analytics"
|
|
1094
|
+
table_name = "users"
|
|
1095
|
+
|
|
1096
|
+
class OrdersTable(MyProjectTable):
|
|
1097
|
+
dataset_name = "analytics"
|
|
1098
|
+
table_name = "orders"
|
|
1099
|
+
```
|
|
1100
|
+
|
|
1101
|
+
**Available Methods:**
|
|
1102
|
+
```python
|
|
1103
|
+
table = UsersMockTable([...])
|
|
1104
|
+
|
|
1105
|
+
# BigQuery-specific methods
|
|
1106
|
+
table.get_project_name() # "my-project"
|
|
1107
|
+
table.get_dataset_name() # "analytics"
|
|
1108
|
+
table.get_fully_qualified_name() # "my-project.analytics.users"
|
|
1109
|
+
|
|
1110
|
+
# Backwards compatible methods (from BaseMockTable)
|
|
1111
|
+
table.get_database_name() # "my-project.analytics"
|
|
1112
|
+
table.get_table_name() # "users"
|
|
1113
|
+
table.get_qualified_name() # "my-project.analytics.users"
|
|
1114
|
+
table.get_cte_alias() # "my_project_analytics__users"
|
|
1115
|
+
```
|
|
1116
|
+
|
|
1117
|
+
**Benefits:**
|
|
1118
|
+
- ✅ **Clear Semantics**: Each BigQuery component is explicit
|
|
1119
|
+
- ✅ **No Confusion**: No more cramming project.dataset together
|
|
1120
|
+
- ✅ **Type Safe**: Full type hints and IDE autocomplete
|
|
1121
|
+
- ✅ **Backwards Compatible**: Still implements all `BaseMockTable` methods
|
|
1122
|
+
- ✅ **Simple**: Just 3 class variables to set
|
|
1123
|
+
- ✅ **Flexible**: Use inheritance to share common properties
|
|
1124
|
+
|
|
1028
1125
|
**Default Behavior:**
|
|
1029
1126
|
- If adapter_type is not specified in the TestCase or decorator, the library will use the adapter specified in the `[sql_testing]` section's `adapter` setting.
|
|
1030
1127
|
- If no adapter is specified in the `[sql_testing]` section, it defaults to "bigquery".
|
|
@@ -1231,7 +1328,7 @@ The library automatically:
|
|
|
1231
1328
|
- Injects mock data via CTEs or temp tables
|
|
1232
1329
|
- Deserializes results to typed Python objects
|
|
1233
1330
|
|
|
1234
|
-
For detailed usage and configuration options, see the
|
|
1331
|
+
For detailed usage and configuration options, see the [documentation](https://gurmeetsaran.github.io/sqltesting/).
|
|
1235
1332
|
|
|
1236
1333
|
## Integration with Mocksmith
|
|
1237
1334
|
|
|
@@ -1266,7 +1363,7 @@ class Customer:
|
|
|
1266
1363
|
customers = [Customer.mock() for _ in range(100)]
|
|
1267
1364
|
```
|
|
1268
1365
|
|
|
1269
|
-
See the [Mocksmith Integration Guide](docs/mocksmith_integration.md)
|
|
1366
|
+
See the [Mocksmith Integration Guide](docs/mocksmith_integration.md) for detailed usage patterns.
|
|
1270
1367
|
|
|
1271
1368
|
## Known Limitations and TODOs
|
|
1272
1369
|
|
|
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
|
|
|
4
4
|
|
|
5
5
|
[tool.poetry]
|
|
6
6
|
name = "sql-testing-library"
|
|
7
|
-
version = "0.
|
|
7
|
+
version = "0.20.0"
|
|
8
8
|
description = "SQL Testing Framework for Python: Unit test SQL queries with mock data injection for BigQuery, Snowflake, Redshift, Athena, Trino, and DuckDB. Simplify data engineering ETL testing and analytics validation."
|
|
9
9
|
authors = ["Gurmeet Saran <gurmeetx@gmail.com>", "Kushal Thakkar <kushal.thakkar@gmail.com>"]
|
|
10
10
|
maintainers = ["Gurmeet Saran <gurmeetx@gmail.com>", "Kushal Thakkar <kushal.thakkar@gmail.com>"]
|
|
@@ -306,3 +306,6 @@ update_changelog_on_bump = true
|
|
|
306
306
|
major_version_zero = true
|
|
307
307
|
changelog_merge_prerelease = true
|
|
308
308
|
changelog_start_rev = "0.1.0"
|
|
309
|
+
version_files = [
|
|
310
|
+
"src/sql_testing_library/__init__.py:__version__"
|
|
311
|
+
]
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/__init__.py
RENAMED
|
@@ -10,7 +10,7 @@ from ._exceptions import (
|
|
|
10
10
|
SQLTestingError, # noqa: F401
|
|
11
11
|
TypeConversionError, # noqa: F401
|
|
12
12
|
)
|
|
13
|
-
from ._mock_table import BaseMockTable # noqa: F401
|
|
13
|
+
from ._mock_table import BaseMockTable, BigQueryMockTable # noqa: F401
|
|
14
14
|
from ._pytest_plugin import sql_test # noqa: F401
|
|
15
15
|
|
|
16
16
|
|
|
@@ -27,12 +27,13 @@ try:
|
|
|
27
27
|
except ImportError:
|
|
28
28
|
__all__ = []
|
|
29
29
|
|
|
30
|
-
__version__ = "0.
|
|
30
|
+
__version__ = "0.20.0"
|
|
31
31
|
__all__.extend(
|
|
32
32
|
[
|
|
33
33
|
"SQLTestFramework",
|
|
34
34
|
"TestCase",
|
|
35
35
|
"BaseMockTable",
|
|
36
|
+
"BigQueryMockTable",
|
|
36
37
|
"DatabaseAdapter",
|
|
37
38
|
"sql_test",
|
|
38
39
|
"SQLTestingError",
|
|
@@ -10,7 +10,6 @@ from typing import (
|
|
|
10
10
|
Optional,
|
|
11
11
|
Tuple,
|
|
12
12
|
Type,
|
|
13
|
-
Union,
|
|
14
13
|
get_args,
|
|
15
14
|
get_type_hints,
|
|
16
15
|
)
|
|
@@ -22,7 +21,7 @@ if TYPE_CHECKING:
|
|
|
22
21
|
|
|
23
22
|
# Heavy imports moved to function level for better performance
|
|
24
23
|
from .._mock_table import BaseMockTable
|
|
25
|
-
from .._types import BaseTypeConverter
|
|
24
|
+
from .._types import BaseTypeConverter, is_union_type
|
|
26
25
|
from .base import DatabaseAdapter
|
|
27
26
|
|
|
28
27
|
|
|
@@ -247,9 +246,9 @@ class BigQueryAdapter(DatabaseAdapter):
|
|
|
247
246
|
|
|
248
247
|
schema = []
|
|
249
248
|
for col_name, col_type in column_types.items():
|
|
250
|
-
# Handle Optional types
|
|
251
|
-
if
|
|
252
|
-
# Extract the non-None type from Optional[T]
|
|
249
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
250
|
+
if is_union_type(col_type):
|
|
251
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
253
252
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
254
253
|
if non_none_types:
|
|
255
254
|
col_type = non_none_types[0]
|
|
@@ -319,9 +318,9 @@ class BigQueryAdapter(DatabaseAdapter):
|
|
|
319
318
|
type_hints = get_type_hints(struct_type)
|
|
320
319
|
|
|
321
320
|
for field_name, field_type in type_hints.items():
|
|
322
|
-
# Handle Optional types
|
|
323
|
-
if
|
|
324
|
-
# Extract the non-None type from Optional[T]
|
|
321
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
322
|
+
if is_union_type(field_type):
|
|
323
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
325
324
|
non_none_types = [arg for arg in get_args(field_type) if arg is not type(None)]
|
|
326
325
|
if non_none_types:
|
|
327
326
|
field_type = non_none_types[0]
|
|
@@ -388,9 +387,9 @@ class BigQueryAdapter(DatabaseAdapter):
|
|
|
388
387
|
column_types = mock_table.get_column_types()
|
|
389
388
|
|
|
390
389
|
for col_name, col_type in column_types.items():
|
|
391
|
-
# Handle Optional types
|
|
392
|
-
if
|
|
393
|
-
# Extract the non-None type from Optional[T]
|
|
390
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
391
|
+
if is_union_type(col_type):
|
|
392
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
394
393
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
395
394
|
if non_none_types:
|
|
396
395
|
col_type = non_none_types[0]
|
|
@@ -10,7 +10,6 @@ from typing import (
|
|
|
10
10
|
Optional,
|
|
11
11
|
Tuple,
|
|
12
12
|
Type,
|
|
13
|
-
Union,
|
|
14
13
|
get_args,
|
|
15
14
|
get_type_hints,
|
|
16
15
|
)
|
|
@@ -22,7 +21,7 @@ if TYPE_CHECKING:
|
|
|
22
21
|
|
|
23
22
|
# Heavy imports moved to function level for better performance
|
|
24
23
|
from .._mock_table import BaseMockTable
|
|
25
|
-
from .._types import BaseTypeConverter
|
|
24
|
+
from .._types import BaseTypeConverter, is_union_type
|
|
26
25
|
from .base import DatabaseAdapter
|
|
27
26
|
|
|
28
27
|
|
|
@@ -237,9 +236,9 @@ class DuckDBAdapter(DatabaseAdapter):
|
|
|
237
236
|
|
|
238
237
|
column_defs = []
|
|
239
238
|
for col_name, col_type in column_types.items():
|
|
240
|
-
# Handle Optional types
|
|
241
|
-
if
|
|
242
|
-
# Extract the non-None type from Optional[T]
|
|
239
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
240
|
+
if is_union_type(col_type):
|
|
241
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
243
242
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
244
243
|
if non_none_types:
|
|
245
244
|
col_type = non_none_types[0]
|
|
@@ -301,9 +300,9 @@ class DuckDBAdapter(DatabaseAdapter):
|
|
|
301
300
|
field_defs = []
|
|
302
301
|
|
|
303
302
|
for field_name, field_type in type_hints.items():
|
|
304
|
-
# Handle Optional types
|
|
305
|
-
if
|
|
306
|
-
# Extract the non-None type from Optional[T]
|
|
303
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
304
|
+
if is_union_type(field_type):
|
|
305
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
307
306
|
non_none_types = [arg for arg in get_args(field_type) if arg is not type(None)]
|
|
308
307
|
if non_none_types:
|
|
309
308
|
field_type = non_none_types[0]
|
|
@@ -360,9 +359,9 @@ class DuckDBAdapter(DatabaseAdapter):
|
|
|
360
359
|
column_types = mock_table.get_column_types()
|
|
361
360
|
|
|
362
361
|
for col_name, col_type in column_types.items():
|
|
363
|
-
# Handle Optional types
|
|
364
|
-
if
|
|
365
|
-
# Extract the non-None type from Optional[T]
|
|
362
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
363
|
+
if is_union_type(col_type):
|
|
364
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
366
365
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
367
366
|
if non_none_types:
|
|
368
367
|
col_type = non_none_types[0]
|
|
@@ -2,10 +2,10 @@
|
|
|
2
2
|
|
|
3
3
|
from datetime import date, datetime
|
|
4
4
|
from decimal import Decimal
|
|
5
|
-
from typing import Any, List, Tuple, Type,
|
|
5
|
+
from typing import Any, List, Tuple, Type, get_args
|
|
6
6
|
|
|
7
7
|
from .._mock_table import BaseMockTable
|
|
8
|
-
from .._types import BaseTypeConverter, is_struct_type
|
|
8
|
+
from .._types import BaseTypeConverter, is_struct_type, is_union_type
|
|
9
9
|
from .base import DatabaseAdapter
|
|
10
10
|
|
|
11
11
|
|
|
@@ -77,9 +77,9 @@ class PrestoBaseAdapter(DatabaseAdapter):
|
|
|
77
77
|
"""Convert Python type to SQL type string."""
|
|
78
78
|
from .._sql_utils import get_sql_type_string
|
|
79
79
|
|
|
80
|
-
# Handle Optional types
|
|
81
|
-
if
|
|
82
|
-
# Extract the non-None type from Optional[T]
|
|
80
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
81
|
+
if is_union_type(python_type):
|
|
82
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
83
83
|
non_none_types = [arg for arg in get_args(python_type) if arg is not type(None)]
|
|
84
84
|
if non_none_types:
|
|
85
85
|
python_type = non_none_types[0]
|
|
@@ -164,8 +164,8 @@ class PrestoBaseAdapter(DatabaseAdapter):
|
|
|
164
164
|
|
|
165
165
|
# Handle Optional types by extracting the non-None type for proper formatting
|
|
166
166
|
actual_type = col_type
|
|
167
|
-
if
|
|
168
|
-
# Extract the non-None type from Optional[T]
|
|
167
|
+
if is_union_type(col_type):
|
|
168
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
169
169
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
170
170
|
if non_none_types:
|
|
171
171
|
actual_type = non_none_types[0]
|
|
@@ -186,8 +186,8 @@ class PrestoBaseAdapter(DatabaseAdapter):
|
|
|
186
186
|
|
|
187
187
|
# Handle Optional types by extracting the non-None type for proper formatting
|
|
188
188
|
actual_type = col_type
|
|
189
|
-
if
|
|
190
|
-
# Extract the non-None type from Optional[T]
|
|
189
|
+
if is_union_type(col_type):
|
|
190
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
191
191
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
192
192
|
if non_none_types:
|
|
193
193
|
actual_type = non_none_types[0]
|
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
from datetime import date, datetime
|
|
4
4
|
from decimal import Decimal
|
|
5
|
-
from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type,
|
|
5
|
+
from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, get_args
|
|
6
6
|
|
|
7
7
|
|
|
8
8
|
if TYPE_CHECKING:
|
|
@@ -12,7 +12,7 @@ if TYPE_CHECKING:
|
|
|
12
12
|
|
|
13
13
|
# Heavy import moved to function level for better performance
|
|
14
14
|
from .._mock_table import BaseMockTable
|
|
15
|
-
from .._types import BaseTypeConverter
|
|
15
|
+
from .._types import BaseTypeConverter, is_union_type
|
|
16
16
|
from .base import DatabaseAdapter
|
|
17
17
|
|
|
18
18
|
|
|
@@ -202,9 +202,9 @@ class RedshiftAdapter(DatabaseAdapter):
|
|
|
202
202
|
# Generate column definitions
|
|
203
203
|
column_defs = []
|
|
204
204
|
for col_name, col_type in column_types.items():
|
|
205
|
-
# Handle Optional types
|
|
206
|
-
if
|
|
207
|
-
# Extract the non-None type from Optional[T]
|
|
205
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
206
|
+
if is_union_type(col_type):
|
|
207
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
208
208
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
209
209
|
if non_none_types:
|
|
210
210
|
col_type = non_none_types[0]
|
|
@@ -13,7 +13,6 @@ from typing import (
|
|
|
13
13
|
Optional,
|
|
14
14
|
Tuple,
|
|
15
15
|
Type,
|
|
16
|
-
Union,
|
|
17
16
|
get_args,
|
|
18
17
|
)
|
|
19
18
|
|
|
@@ -23,7 +22,7 @@ if TYPE_CHECKING:
|
|
|
23
22
|
|
|
24
23
|
# Heavy import moved to function level for better performance
|
|
25
24
|
from .._mock_table import BaseMockTable
|
|
26
|
-
from .._types import BaseTypeConverter
|
|
25
|
+
from .._types import BaseTypeConverter, is_union_type
|
|
27
26
|
from .base import DatabaseAdapter
|
|
28
27
|
|
|
29
28
|
|
|
@@ -364,9 +363,9 @@ class SnowflakeAdapter(DatabaseAdapter):
|
|
|
364
363
|
# Generate column definitions
|
|
365
364
|
column_defs = []
|
|
366
365
|
for col_name, col_type in column_types.items():
|
|
367
|
-
# Handle Optional types
|
|
368
|
-
if
|
|
369
|
-
# Extract the non-None type from Optional[T]
|
|
366
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
367
|
+
if is_union_type(col_type):
|
|
368
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
370
369
|
non_none_types = [arg for arg in get_args(col_type) if arg is not type(None)]
|
|
371
370
|
if non_none_types:
|
|
372
371
|
col_type = non_none_types[0]
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_mock_table.py
RENAMED
|
@@ -196,5 +196,102 @@ class BaseMockTable(ABC):
|
|
|
196
196
|
return df
|
|
197
197
|
|
|
198
198
|
def get_cte_alias(self) -> str:
|
|
199
|
-
"""Get the CTE alias name (database__tablename).
|
|
200
|
-
|
|
199
|
+
"""Get the CTE alias name (database__tablename).
|
|
200
|
+
|
|
201
|
+
Replaces '-' and '.' with '_' to ensure valid BigQuery CTE names,
|
|
202
|
+
as BigQuery CTEs cannot contain hyphens or dots.
|
|
203
|
+
"""
|
|
204
|
+
db_name = self.get_database_name().replace("-", "_").replace(".", "_")
|
|
205
|
+
table_name = self.get_table_name().replace("-", "_").replace(".", "_")
|
|
206
|
+
return f"{db_name}__{table_name}"
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
class BigQueryMockTable(BaseMockTable):
|
|
210
|
+
"""Mock table specifically for BigQuery with three-part naming support.
|
|
211
|
+
|
|
212
|
+
BigQuery uses a three-part naming scheme: project.dataset.table
|
|
213
|
+
This class makes it explicit and provides better semantics than cramming
|
|
214
|
+
project and dataset into the generic database_name field.
|
|
215
|
+
|
|
216
|
+
Two usage patterns are supported:
|
|
217
|
+
|
|
218
|
+
Usage - Class variables for table definition:
|
|
219
|
+
>>> class UsersMockTable(BigQueryMockTable):
|
|
220
|
+
... project_name = "my-project"
|
|
221
|
+
... dataset_name = "analytics"
|
|
222
|
+
... table_name = "users"
|
|
223
|
+
"""
|
|
224
|
+
|
|
225
|
+
# Class variables that subclasses must set (mandatory)
|
|
226
|
+
project_name: str
|
|
227
|
+
dataset_name: str
|
|
228
|
+
table_name: str
|
|
229
|
+
|
|
230
|
+
def get_bigquery_project(self) -> str:
|
|
231
|
+
"""Return the BigQuery project name from class variable.
|
|
232
|
+
|
|
233
|
+
Returns:
|
|
234
|
+
BigQuery project ID
|
|
235
|
+
|
|
236
|
+
Raises:
|
|
237
|
+
AttributeError: If project_name class variable not set
|
|
238
|
+
"""
|
|
239
|
+
return self.project_name
|
|
240
|
+
|
|
241
|
+
def get_bigquery_dataset(self) -> str:
|
|
242
|
+
"""Return the BigQuery dataset name from class variable.
|
|
243
|
+
|
|
244
|
+
Returns:
|
|
245
|
+
BigQuery dataset name
|
|
246
|
+
|
|
247
|
+
Raises:
|
|
248
|
+
AttributeError: If dataset_name class variable not set
|
|
249
|
+
"""
|
|
250
|
+
return self.dataset_name
|
|
251
|
+
|
|
252
|
+
def get_bigquery_table(self) -> str:
|
|
253
|
+
"""Return the BigQuery table name from class variable.
|
|
254
|
+
|
|
255
|
+
Returns:
|
|
256
|
+
BigQuery table name
|
|
257
|
+
|
|
258
|
+
Raises:
|
|
259
|
+
AttributeError: If table_name class variable not set
|
|
260
|
+
"""
|
|
261
|
+
return self.table_name
|
|
262
|
+
|
|
263
|
+
def get_project_name(self) -> str:
|
|
264
|
+
"""Return the BigQuery project name (alias for get_bigquery_project)."""
|
|
265
|
+
return self.get_bigquery_project()
|
|
266
|
+
|
|
267
|
+
def get_dataset_name(self) -> str:
    """Alias for :meth:`get_bigquery_dataset`; kept for naming symmetry."""
    return self.get_bigquery_dataset()
|
|
270
|
+
|
|
271
|
+
def get_database_name(self) -> str:
    """Return the base-class "database" name, which for BigQuery is ``project.dataset``.

    The generic ``BaseMockTable`` contract assumes two-part naming, so the
    project and dataset are joined here to stay backwards compatible.
    """
    project = self.get_bigquery_project()
    dataset = self.get_bigquery_dataset()
    return f"{project}.{dataset}"
|
|
279
|
+
|
|
280
|
+
def get_table_name(self) -> str:
    """Alias for :meth:`get_bigquery_table`; kept for naming symmetry."""
    return self.get_bigquery_table()
|
|
283
|
+
|
|
284
|
+
def get_fully_qualified_name(self) -> str:
    """Return the complete three-part BigQuery reference.

    Returns:
        The table reference formatted as ``project.dataset.table``,
        e.g. ``'my-project.analytics.users'``.
    """
    parts = (
        self.get_bigquery_project(),
        self.get_bigquery_dataset(),
        self.get_bigquery_table(),
    )
    return ".".join(parts)
|
|
@@ -29,6 +29,35 @@ except ImportError:
|
|
|
29
29
|
pydantic_available = False
|
|
30
30
|
|
|
31
31
|
|
|
32
|
+
def is_union_type(type_hint: Type) -> bool:
    """Report whether ``type_hint`` is a union (including ``Optional``).

    Recognizes both spellings:
    - ``typing.Optional[X]`` / ``typing.Union[X, None]`` (Python 3.9+)
    - ``X | None`` via PEP 604 (Python 3.10+)

    Args:
        type_hint: The type annotation to inspect.

    Returns:
        True when ``type_hint`` is a union type, otherwise False.
    """
    origin = get_origin(type_hint)
    if origin is Union:
        return True
    # PEP 604 unions report ``types.UnionType`` as their origin; match on the
    # class name so this also runs on Python 3.9, where that type is absent.
    # ``getattr`` returns "" for a missing attribute (and for origin=None).
    return getattr(origin, "__name__", "") == "UnionType"
|
|
59
|
+
|
|
60
|
+
|
|
32
61
|
def is_pydantic_model_class(cls: Type) -> bool:
|
|
33
62
|
"""Check if a class is a Pydantic model class."""
|
|
34
63
|
if not pydantic_available or BaseModel is None:
|
|
@@ -42,9 +71,9 @@ def is_pydantic_model_class(cls: Type) -> bool:
|
|
|
42
71
|
|
|
43
72
|
def is_struct_type(type_hint: Type) -> bool:
|
|
44
73
|
"""Check if a type is a struct type (dataclass or Pydantic model)."""
|
|
45
|
-
# Handle Optional types
|
|
46
|
-
if
|
|
47
|
-
# Extract the non-None type from Optional[T]
|
|
74
|
+
# Handle Optional types (both Optional[X] and X | None)
|
|
75
|
+
if is_union_type(type_hint):
|
|
76
|
+
# Extract the non-None type from Optional[T] or T | None
|
|
48
77
|
non_none_types = [arg for arg in get_args(type_hint) if arg is not type(None)]
|
|
49
78
|
if non_none_types:
|
|
50
79
|
type_hint = non_none_types[0]
|
|
@@ -427,15 +456,17 @@ class BaseTypeConverter:
|
|
|
427
456
|
|
|
428
457
|
|
|
429
458
|
def unwrap_optional_type(col_type: Type[Any]) -> Type[Any]:
|
|
430
|
-
"""Unwrap Optional[T] to T, leave other types unchanged.
|
|
459
|
+
"""Unwrap Optional[T] or T | None to T, leave other types unchanged.
|
|
431
460
|
|
|
432
461
|
This is a utility function that can be used by adapters and mock tables
|
|
433
|
-
to handle Optional types consistently.
|
|
462
|
+
to handle Optional types consistently. Supports both:
|
|
463
|
+
- typing.Optional[T] / typing.Union[T, None] (Python 3.9+)
|
|
464
|
+
- T | None syntax (Python 3.10+)
|
|
434
465
|
"""
|
|
435
|
-
# Check if this is a Union type (which Optional[T]
|
|
436
|
-
if
|
|
466
|
+
# Check if this is a Union type (which Optional[T] and T | None both are)
|
|
467
|
+
if is_union_type(col_type):
|
|
437
468
|
args = get_args(col_type)
|
|
438
|
-
# Optional[T] is Union[T, None], so filter out NoneType
|
|
469
|
+
# Optional[T] or T | None is Union[T, None], so filter out NoneType
|
|
439
470
|
non_none_types = [arg for arg in args if arg is not type(None)]
|
|
440
471
|
if non_none_types:
|
|
441
472
|
return cast(Type[Any], non_none_types[0]) # Return the first non-None type
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/base.py
RENAMED
|
File without changes
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_adapters/trino.py
RENAMED
|
File without changes
|
|
File without changes
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_exceptions.py
RENAMED
|
File without changes
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_pytest_plugin.py
RENAMED
|
File without changes
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_sql_logger.py
RENAMED
|
File without changes
|
{sql_testing_library-0.18.0 → sql_testing_library-0.20.0}/src/sql_testing_library/_sql_utils.py
RENAMED
|
File without changes
|
|
File without changes
|