velocity-python 0.0.105-py3-none-any.whl → 0.0.155-py3-none-any.whl
This diff compares the contents of two package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- velocity/__init__.py +3 -1
- velocity/app/orders.py +3 -4
- velocity/app/tests/__init__.py +1 -0
- velocity/app/tests/test_email_processing.py +112 -0
- velocity/app/tests/test_payment_profile_sorting.py +191 -0
- velocity/app/tests/test_spreadsheet_functions.py +124 -0
- velocity/aws/__init__.py +3 -0
- velocity/aws/amplify.py +10 -6
- velocity/aws/handlers/__init__.py +2 -0
- velocity/aws/handlers/base_handler.py +248 -0
- velocity/aws/handlers/context.py +167 -2
- velocity/aws/handlers/exceptions.py +16 -0
- velocity/aws/handlers/lambda_handler.py +24 -85
- velocity/aws/handlers/mixins/__init__.py +16 -0
- velocity/aws/handlers/mixins/activity_tracker.py +181 -0
- velocity/aws/handlers/mixins/aws_session_mixin.py +192 -0
- velocity/aws/handlers/mixins/error_handler.py +192 -0
- velocity/aws/handlers/mixins/legacy_mixin.py +53 -0
- velocity/aws/handlers/mixins/standard_mixin.py +73 -0
- velocity/aws/handlers/response.py +1 -1
- velocity/aws/handlers/sqs_handler.py +28 -143
- velocity/aws/tests/__init__.py +1 -0
- velocity/aws/tests/test_lambda_handler_json_serialization.py +120 -0
- velocity/aws/tests/test_response.py +163 -0
- velocity/db/__init__.py +16 -4
- velocity/db/core/decorators.py +20 -4
- velocity/db/core/engine.py +185 -792
- velocity/db/core/result.py +36 -22
- velocity/db/core/row.py +15 -3
- velocity/db/core/table.py +283 -44
- velocity/db/core/transaction.py +19 -11
- velocity/db/exceptions.py +42 -18
- velocity/db/servers/base/__init__.py +9 -0
- velocity/db/servers/base/initializer.py +70 -0
- velocity/db/servers/base/operators.py +98 -0
- velocity/db/servers/base/sql.py +503 -0
- velocity/db/servers/base/types.py +135 -0
- velocity/db/servers/mysql/__init__.py +73 -0
- velocity/db/servers/mysql/operators.py +54 -0
- velocity/db/servers/{mysql_reserved.py → mysql/reserved.py} +2 -14
- velocity/db/servers/mysql/sql.py +718 -0
- velocity/db/servers/mysql/types.py +107 -0
- velocity/db/servers/postgres/__init__.py +59 -11
- velocity/db/servers/postgres/operators.py +34 -0
- velocity/db/servers/postgres/sql.py +474 -120
- velocity/db/servers/postgres/types.py +88 -2
- velocity/db/servers/sqlite/__init__.py +61 -0
- velocity/db/servers/sqlite/operators.py +52 -0
- velocity/db/servers/sqlite/reserved.py +20 -0
- velocity/db/servers/sqlite/sql.py +677 -0
- velocity/db/servers/sqlite/types.py +92 -0
- velocity/db/servers/sqlserver/__init__.py +73 -0
- velocity/db/servers/sqlserver/operators.py +47 -0
- velocity/db/servers/sqlserver/reserved.py +32 -0
- velocity/db/servers/sqlserver/sql.py +805 -0
- velocity/db/servers/sqlserver/types.py +114 -0
- velocity/db/servers/tablehelper.py +117 -91
- velocity/db/tests/__init__.py +1 -0
- velocity/db/tests/common_db_test.py +0 -0
- velocity/db/tests/postgres/__init__.py +1 -0
- velocity/db/tests/postgres/common.py +49 -0
- velocity/db/tests/postgres/test_column.py +29 -0
- velocity/db/tests/postgres/test_connections.py +25 -0
- velocity/db/tests/postgres/test_database.py +21 -0
- velocity/db/tests/postgres/test_engine.py +205 -0
- velocity/db/tests/postgres/test_general_usage.py +88 -0
- velocity/db/tests/postgres/test_imports.py +8 -0
- velocity/db/tests/postgres/test_result.py +19 -0
- velocity/db/tests/postgres/test_row.py +137 -0
- velocity/db/tests/postgres/test_row_comprehensive.py +720 -0
- velocity/db/tests/postgres/test_schema_locking.py +335 -0
- velocity/db/tests/postgres/test_schema_locking_unit.py +115 -0
- velocity/db/tests/postgres/test_sequence.py +34 -0
- velocity/db/tests/postgres/test_sql_comprehensive.py +462 -0
- velocity/db/tests/postgres/test_table.py +101 -0
- velocity/db/tests/postgres/test_table_comprehensive.py +646 -0
- velocity/db/tests/postgres/test_transaction.py +106 -0
- velocity/db/tests/sql/__init__.py +1 -0
- velocity/db/tests/sql/common.py +177 -0
- velocity/db/tests/sql/test_postgres_select_advanced.py +285 -0
- velocity/db/tests/sql/test_postgres_select_variances.py +517 -0
- velocity/db/tests/test_cursor_rowcount_fix.py +150 -0
- velocity/db/tests/test_db_utils.py +221 -0
- velocity/db/tests/test_postgres.py +448 -0
- velocity/db/tests/test_postgres_unchanged.py +81 -0
- velocity/db/tests/test_process_error_robustness.py +292 -0
- velocity/db/tests/test_result_caching.py +279 -0
- velocity/db/tests/test_result_sql_aware.py +117 -0
- velocity/db/tests/test_row_get_missing_column.py +72 -0
- velocity/db/tests/test_schema_locking_initializers.py +226 -0
- velocity/db/tests/test_schema_locking_simple.py +97 -0
- velocity/db/tests/test_sql_builder.py +165 -0
- velocity/db/tests/test_tablehelper.py +486 -0
- velocity/db/utils.py +62 -47
- velocity/misc/conv/__init__.py +2 -0
- velocity/misc/conv/iconv.py +5 -4
- velocity/misc/export.py +1 -4
- velocity/misc/merge.py +1 -1
- velocity/misc/tests/__init__.py +1 -0
- velocity/misc/tests/test_db.py +90 -0
- velocity/misc/tests/test_fix.py +78 -0
- velocity/misc/tests/test_format.py +64 -0
- velocity/misc/tests/test_iconv.py +203 -0
- velocity/misc/tests/test_merge.py +82 -0
- velocity/misc/tests/test_oconv.py +144 -0
- velocity/misc/tests/test_original_error.py +52 -0
- velocity/misc/tests/test_timer.py +74 -0
- velocity/misc/tools.py +0 -1
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/METADATA +2 -2
- velocity_python-0.0.155.dist-info/RECORD +129 -0
- velocity/db/core/exceptions.py +0 -70
- velocity/db/servers/mysql.py +0 -641
- velocity/db/servers/sqlite.py +0 -968
- velocity/db/servers/sqlite_reserved.py +0 -208
- velocity/db/servers/sqlserver.py +0 -921
- velocity/db/servers/sqlserver_reserved.py +0 -314
- velocity_python-0.0.105.dist-info/RECORD +0 -56
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/WHEEL +0 -0
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/licenses/LICENSE +0 -0
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/top_level.txt +0 -0
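
Most of the new code is test coverage for the reworked `velocity.db` layer; the full diff of the largest new module follows below. As a quick orientation, here is a minimal sketch (not part of the diff) of the Table API that suite exercises. It mirrors the pattern used in the tests: `@engine.transaction` injects `tx` into each test method, while `engine` and `CommonPostgresTest` come from the suite's `common` module, which is assumed to provide a reachable Postgres connection and to call `create_test_tables` during setup, as the suite below relies on.

```python
# Minimal sketch, not shipped in the package: exercises the Table API used by the
# new test suite below. Assumes velocity.db.tests.postgres.common provides a
# configured engine and that CommonPostgresTest drives create_test_tables(tx)
# during setup, as the suite below does.
from velocity.db.tests.postgres.common import CommonPostgresTest, engine


@engine.transaction
class DemoTableUsage(CommonPostgresTest):
    @classmethod
    def create_test_tables(cls, tx):
        # Tables are created through the transaction, mirroring the real tests.
        tx.table("demo_people").create(columns={"name": str, "age": int, "active": bool})

    def test_demo(self, tx):
        people = tx.table("demo_people")
        people.insert({"name": "Alice", "age": 30, "active": True})
        self.assertEqual(people.count({"active": True}), 1)
        row = people.find({"name": "Alice"})
        self.assertEqual(row["age"], 30)
```

The same `create`/`insert`/`select`/`count`/`find` calls appear throughout the diff below.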
velocity/db/tests/postgres/test_table_comprehensive.py
@@ -0,0 +1,646 @@
+import unittest
+import threading
+import time
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from velocity.db.exceptions import (
+    DbObjectExistsError, DbTableMissingError, DbColumnMissingError,
+    DbSchemaLockedError, DbDuplicateKeyError
+)
+from velocity.db.core.row import Row
+from velocity.db.core.result import Result
+from .common import CommonPostgresTest, engine, test_db
+
+
+@engine.transaction
+class TestTableComprehensive(CommonPostgresTest):
+    """Comprehensive tests for Table class including edge cases, race conditions, and error recovery."""
+
+    @classmethod
+    def create_test_tables(cls, tx):
+        """Create comprehensive test tables for Table testing."""
+        # Basic table for general operations
+        tx.table("table_test_basic").create(
+            columns={
+                "name": str,
+                "age": int,
+                "email": str,
+                "active": bool,
+                "score": float,
+                "data": str,
+            }
+        )
+
+        # Table with constraints and indexes
+        tx.table("table_test_constraints").create(
+            columns={
+                "username": str,
+                "email": str,
+                "created_at": str,
+                "status": str,
+            }
+        )
+
+        # Create unique indexes for constraint testing
+        try:
+            tx.table("table_test_constraints").create_index("username", unique=True)
+            tx.table("table_test_constraints").create_index("email", unique=True)
+        except:
+            pass  # Index might already exist
+
+        # Parent table for foreign key testing
+        tx.table("table_test_parent").create(
+            columns={
+                "parent_name": str,
+                "category": str,
+                "priority": int,
+            }
+        )
+
+        # Child table with foreign key
+        tx.table("table_test_child").create(
+            columns={
+                "parent_id": int,
+                "child_name": str,
+                "value": float,
+                "notes": str,
+            }
+        )
+
+        # Create foreign key
+        tx.table("table_test_child").create_foreign_key("parent_id", "table_test_parent", "sys_id")
+
+        # Large table for performance testing
+        tx.table("table_test_large").create(
+            columns={
+                "batch_id": int,
+                "sequence": int,
+                "data_payload": str,
+                "processed": bool,
+                "timestamp": str,
+            }
+        )
+
+        # Table for concurrent access testing
+        tx.table("table_test_concurrent").create(
+            columns={
+                "counter": int,
+                "worker_id": str,
+                "operation": str,
+                "timestamp": str,
+            }
+        )
+
+        # Insert initial test data
+        cls.insert_test_data(tx)
+
+    @classmethod
+    def insert_test_data(cls, tx):
+        """Insert comprehensive test data."""
+        # Basic table data
+        basic_data = [
+            {"name": "Alice Johnson", "age": 30, "email": "alice@test.com", "active": True, "score": 95.5, "data": "user_data_1"},
+            {"name": "Bob Smith", "age": 25, "email": "bob@test.com", "active": False, "score": 87.2, "data": "user_data_2"},
+            {"name": "Charlie Brown", "age": 35, "email": "charlie@test.com", "active": True, "score": 92.8, "data": "user_data_3"},
+            {"name": "Diana Prince", "age": 28, "email": "diana@test.com", "active": True, "score": 88.9, "data": "user_data_4"},
+            {"name": "Eve Adams", "age": 32, "email": "eve@test.com", "active": False, "score": 91.1, "data": "user_data_5"},
+        ]
+
+        for data in basic_data:
+            tx.table("table_test_basic").insert(data)
+
+        # Constraints table data
+        constraint_data = [
+            {"username": "admin", "email": "admin@test.com", "created_at": "2023-01-01", "status": "active"},
+            {"username": "user1", "email": "user1@test.com", "created_at": "2023-01-02", "status": "pending"},
+            {"username": "user2", "email": "user2@test.com", "created_at": "2023-01-03", "status": "active"},
+        ]
+
+        for data in constraint_data:
+            tx.table("table_test_constraints").insert(data)
+
+        # Parent-child data
+        parent_data = [
+            {"parent_name": "Project Alpha", "category": "development", "priority": 1},
+            {"parent_name": "Project Beta", "category": "testing", "priority": 2},
+            {"parent_name": "Project Gamma", "category": "deployment", "priority": 3},
+        ]
+
+        parent_ids = []
+        for data in parent_data:
+            row = tx.table("table_test_parent").new(data)
+            parent_ids.append(row["sys_id"])
+
+        child_data = [
+            {"parent_id": parent_ids[0], "child_name": "Task A1", "value": 10.5, "notes": "Important task"},
+            {"parent_id": parent_ids[0], "child_name": "Task A2", "value": 20.3, "notes": "Secondary task"},
+            {"parent_id": parent_ids[1], "child_name": "Task B1", "value": 15.7, "notes": "Testing task"},
+            {"parent_id": parent_ids[2], "child_name": "Task C1", "value": 30.9, "notes": "Deployment task"},
+        ]
+
+        for data in child_data:
+            tx.table("table_test_child").insert(data)
+
+        # Large table data (batch insert)
+        large_data = []
+        for batch in range(10):
+            for seq in range(50):
+                large_data.append({
+                    "batch_id": batch,
+                    "sequence": seq,
+                    "data_payload": f"batch_{batch}_seq_{seq}_data",
+                    "processed": seq % 2 == 0,
+                    "timestamp": f"2023-01-{(seq % 28) + 1:02d}",
+                })
+
+        for data in large_data:
+            tx.table("table_test_large").insert(data)
+
+    def test_table_creation_and_existence(self, tx):
+        """Test table creation and existence checking."""
+        # Test table exists
+        table = tx.table("table_test_basic")
+        self.assertTrue(table.exists())
+
+        # Test non-existent table
+        non_existent = tx.table("non_existent_table")
+        self.assertFalse(non_existent.exists())
+
+        # Test creating new table
+        new_table = tx.table("table_test_new")
+        self.assertFalse(new_table.exists())
+        new_table.create(columns={"test_col": str, "test_num": int})
+        self.assertTrue(new_table.exists())
+
+    def test_table_string_representation(self, tx):
+        """Test table string representation."""
+        table = tx.table("table_test_basic")
+        str_repr = str(table)
+
+        self.assertIn("Table: table_test_basic", str_repr)
+        self.assertIn("(table exists) True", str_repr)
+        self.assertIn("Columns:", str_repr)
+        self.assertIn("Rows:", str_repr)
+
+    def test_table_column_operations(self, tx):
+        """Test table column operations."""
+        table = tx.table("table_test_basic")
+
+        # Test getting columns
+        columns = table.columns()
+        self.assertIn("name", columns)
+        self.assertIn("age", columns)
+        self.assertIn("email", columns)
+
+        # Test system columns
+        sys_columns = table.sys_columns()
+        self.assertIn("sys_id", sys_columns)
+        self.assertIn("sys_created_by", sys_columns)
+        self.assertIn("sys_created_on", sys_columns)
+
+        # Test column filtering (non-sys columns)
+        filtered_columns = table.columns()
+        sys_column_count = len(
+            [col for col in filtered_columns if table.is_system_column(col)]
+        )
+        self.assertEqual(sys_column_count, 0)
+
+    def test_table_row_count(self, tx):
+        """Test table row counting."""
+        table = tx.table("table_test_basic")
+
+        # Test len() method
+        count = len(table)
+        self.assertEqual(count, 5)  # We inserted 5 rows
+
+        # Test count() method
+        count_method = table.count()
+        self.assertEqual(count_method, 5)
+
+        # Test count with condition
+        count_active = table.count({"active": True})
+        self.assertEqual(count_active, 3)  # 3 active users
+
+    def test_table_iteration(self, tx):
+        """Test table iteration."""
+        table = tx.table("table_test_basic")
+
+        # Test basic iteration
+        rows = list(table)
+        self.assertEqual(len(rows), 5)
+
+        # Test that rows are Row objects
+        for row in rows:
+            self.assertIsInstance(row, Row)
+
+        # Test iteration with callable syntax
+        active_rows = list(table(where={"active": True}))
+        self.assertEqual(len(active_rows), 3)
+
+    def test_table_insert_operations(self, tx):
+        """Test table insert operations including edge cases."""
+        table = tx.table("table_test_basic")
+
+        # Test basic insert
+        data = {"name": "Test User", "age": 40, "email": "test@test.com", "active": True, "score": 85.0}
+        row = table.new(data)
+        self.assertIsInstance(row, Row)
+        self.assertEqual(row["name"], "Test User")
+
+        # Test insert with None values
+        data_with_none = {"name": "User With None", "age": None, "email": "none@test.com"}
+        row_none = table.new(data_with_none)
+        self.assertIsNone(row_none["age"])
+
+        # Test insert with missing columns (should work with auto-creation)
+        data_partial = {"name": "Partial User", "age": 25}
+        row_partial = table.insert(data_partial)
+        self.assertEqual(row_partial["name"], "Partial User")
+
+        # Test insert with extra columns (should auto-create)
+        data_extra = {"name": "Extra User", "age": 25, "new_column": "new_value"}
+        row_extra = table.insert(data_extra)
+        self.assertEqual(row_extra["new_column"], "new_value")
+
+    def test_table_update_operations(self, tx):
+        """Test table update operations."""
+        table = tx.table("table_test_basic")
+
+        # Get a row to update
+        row = table.select().one()
+        original_name = row["name"]
+        row_id = row["sys_id"]
+
+        # Test basic update
+        table.update({"name": "Updated Name"}, {"sys_id": row_id})
+        updated_row = table.find(row_id)
+        self.assertEqual(updated_row["name"], "Updated Name")
+
+        # Test bulk update
+        affected = table.update({"active": False}, {"active": True})
+        self.assertGreaterEqual(affected, 0)
+
+        # Test update with complex where clause
+        table.update({"score": 100.0}, {"age__gte": 30, "active": False})
+
+    def test_table_delete_operations(self, tx):
+        """Test table delete operations."""
+        table = tx.table("table_test_basic")
+
+        initial_count = table.count()
+
+        # Test delete with specific condition
+        deleted = table.delete({"name": "Updated Name"})
+        self.assertGreaterEqual(deleted, 0)
+
+        # Test bulk delete
+        deleted_bulk = table.delete({"active": False})
+        self.assertGreaterEqual(deleted_bulk, 0)
+
+        # Verify count changed
+        final_count = table.count()
+        self.assertLess(final_count, initial_count)
+
+    def test_table_select_operations(self, tx):
+        """Test table select operations with various conditions."""
+        table = tx.table("table_test_basic")
+
+        # Test select all
+        result = table.select()
+        self.assertIsInstance(result, Result)
+
+        # Test select with where clause
+        result_where = table.select(where={"active": True})
+        active_count = len(list(result_where))
+        self.assertGreaterEqual(active_count, 0)
+
+        # Test select with order by
+        result_ordered = table.select(orderby="name")
+        ordered_list = list(result_ordered)
+        self.assertGreaterEqual(len(ordered_list), 0)
+
+        # Test select with limit
+        result_limited = table.select(limit=2)
+        limited_list = list(result_limited)
+        self.assertLessEqual(len(limited_list), 2)
+
+        # Test select with complex conditions
+        result_complex = table.select(where={"age__gte": 25, "active": True}, orderby="-score", limit=3)
+        complex_list = list(result_complex)
+        self.assertLessEqual(len(complex_list), 3)
+
+    def test_table_upsert_operations(self, tx):
+        """Test table upsert operations."""
+        table = tx.table("table_test_basic")
+
+        # Test upsert (insert new)
+        new_data = {"name": "Upsert User", "age": 45, "email": "upsert@test.com"}
+        row = table.get(new_data)
+        self.assertEqual(row["name"], "Upsert User")
+
+        # Test upsert (update existing)
+        existing_data = {"name": "Upsert User Updated", "age": 46}
+        where_clause = {"email": "upsert@test.com"}
+        row_updated = table.upsert(existing_data, where_clause)
+        self.assertEqual(row_updated["name"], "Upsert User Updated")
+        self.assertEqual(row_updated["age"], 46)
+
+    def test_table_find_operations(self, tx):
+        """Test table find operations."""
+        table = tx.table("table_test_basic")
+
+        # Test find by sys_id
+        first_row = table.select().one()
+        found_row = table.find(first_row["sys_id"])
+        self.assertEqual(found_row["sys_id"], first_row["sys_id"])
+
+        # Test find with dictionary conditions
+        found_by_name = table.find({"name": first_row["name"]})
+        self.assertEqual(found_by_name["name"], first_row["name"])
+
+        # Test find non-existent
+        non_existent = table.find(99999999)
+        self.assertIsNone(non_existent)
+
+    def test_table_get_value_operations(self, tx):
+        """Test table get_value operations."""
+        table = tx.table("table_test_basic")
+
+        # Get a known row
+        row = table.select().one()
+        row_id = row["sys_id"]
+
+        # Test get_value
+        name_value = table.get_value("name", {"sys_id": row_id})
+        self.assertEqual(name_value, row["name"])
+
+        # Test get_value with non-existent column
+        with self.assertRaises((DbColumnMissingError, Exception)):
+            table.get_value("non_existent_column", {"sys_id": row_id})
+
+    def test_table_constraint_violations(self, tx):
+        """Test handling of constraint violations."""
+        table = tx.table("table_test_constraints")
+
+        # Test unique constraint violation
+        try:
+            # Try to insert duplicate username
+            table.insert({"username": "admin", "email": "admin2@test.com"})
+        except (DbDuplicateKeyError, Exception):
+            pass  # Expected to fail
+
+        # Test foreign key constraint
+        child_table = tx.table("table_test_child")
+        try:
+            # Try to insert with non-existent parent_id
+            child_table.insert({"parent_id": 99999, "child_name": "Orphan Task"})
+        except Exception:
+            pass  # Expected to fail
+
+    def test_table_transaction_rollback(self, tx):
+        """Test table operations with transaction rollback."""
+        table = tx.table("table_test_basic")
+
+        initial_count = table.count()
+
+        # Insert some data
+        table.insert({"name": "Rollback Test", "age": 99})
+
+        # Verify the insert
+        after_insert_count = table.count()
+        self.assertEqual(after_insert_count, initial_count + 1)
+
+        # Rollback (this will happen automatically at end of test due to transaction decorator)
+        # The rollback behavior is tested by the framework itself
+
+    def test_table_concurrent_access(self, tx):
+        """Test table operations under concurrent access."""
+        table = tx.table("table_test_concurrent")
+
+        # Clear any existing data
+        table.truncate()
+
+        def worker(worker_id, operations=10):
+            """Worker function for concurrent testing."""
+            results = []
+            for i in range(operations):
+                try:
+                    # Insert operation
+                    row = table.insert({
+                        "counter": i,
+                        "worker_id": f"worker_{worker_id}",
+                        "operation": "insert",
+                        "timestamp": f"2023-01-01T{i:02d}:00:00"
+                    })
+                    results.append(("insert", row["sys_id"]))
+
+                    # Update operation
+                    table.update(
+                        {"operation": "updated"},
+                        {"sys_id": row["sys_id"]}
+                    )
+                    results.append(("update", row["sys_id"]))
+
+                    time.sleep(0.001)  # Small delay to encourage race conditions
+
+                except Exception as e:
+                    results.append(("error", str(e)))
+
+            return results
+
+        # Run concurrent workers
+        with ThreadPoolExecutor(max_workers=3) as executor:
+            futures = [executor.submit(worker, i, 5) for i in range(3)]
+            all_results = []
+            for future in as_completed(futures):
+                all_results.extend(future.result())
+
+        # Verify operations completed
+        self.assertGreater(len(all_results), 0)
+
+        # Check final state
+        final_count = table.count()
+        self.assertGreaterEqual(final_count, 0)
+
+    def test_table_large_dataset_performance(self, tx):
+        """Test table operations with large datasets."""
+        table = tx.table("table_test_large")
+
+        # Test count on large table
+        count = table.count()
+        self.assertEqual(count, 500)  # 10 batches * 50 sequences
+
+        # Test filtered queries on large table
+        batch_0_count = table.count({"batch_id": 0})
+        self.assertEqual(batch_0_count, 50)
+
+        # Test complex query
+        complex_result = table.select(
+            where={"batch_id__gte": 5, "processed": True},
+            orderby="sequence",
+            limit=10
+        )
+        complex_list = list(complex_result)
+        self.assertLessEqual(len(complex_list), 10)
+
+        # Test bulk operations
+        affected = table.update(
+            {"processed": True},
+            {"batch_id": 1}
+        )
+        self.assertGreaterEqual(affected, 0)
+
+    def test_table_edge_cases_empty_operations(self, tx):
+        """Test edge cases with empty operations."""
+        table = tx.table("table_test_basic")
+
+        # Test select with empty where clause
+        result = table.select(where={})
+        self.assertIsInstance(result, Result)
+
+        # Test update with empty data (should be handled gracefully)
+        try:
+            table.update({}, {"sys_id": 1})
+        except Exception:
+            pass  # May legitimately fail
+
+        # Test delete with empty where (dangerous but should work)
+        # Don't actually run this as it would delete all data
+        # table.delete({})
+
+    def test_table_edge_cases_null_and_unicode(self, tx):
+        """Test edge cases with NULL and Unicode data."""
+        table = tx.table("table_test_basic")
+
+        # Test with Unicode data
+        unicode_data = {
+            "name": "José María 🚀",
+            "email": "josé@español.com",
+            "data": "Unicode test: αβγδε"
+        }
+        row = table.new(unicode_data)
+        self.assertEqual(row["name"], "José María 🚀")
+
+        # Test with NULL values
+        null_data = {
+            "name": "NULL Test User",
+            "age": None,
+            "email": None,
+            "active": None,
+            "score": None
+        }
+        row_null = table.insert(null_data)
+        self.assertEqual(row_null["name"], "NULL Test User")
+        self.assertIsNone(row_null["age"])
+
+    def test_table_data_type_edge_cases(self, tx):
+        """Test edge cases with different data types."""
+        table = tx.table("table_test_basic")
+
+        # Test with extreme values
+        extreme_data = {
+            "name": "Extreme User",
+            "age": 999999999,  # Very large integer
+            "score": 999999.999999,  # Large float
+            "active": True,
+            "data": "x" * 1000  # Long string
+        }
+        row = table.new(extreme_data)
+        self.assertEqual(row["name"], "Extreme User")
+
+        # Test with minimum values
+        min_data = {
+            "name": "",  # Empty string
+            "age": 0,
+            "score": 0.0,
+            "active": False
+        }
+        row_min = table.new(min_data)
+        self.assertEqual(row_min["age"], 0)
+
+    def test_table_error_recovery(self, tx):
+        """Test table error recovery scenarios."""
+        table = tx.table("table_test_basic")
+
+        # Test recovery from constraint violation
+        try:
+            # Insert with very long email that might exceed column limit
+            table.insert({
+                "name": "Long Email User",
+                "email": "very" + "long" * 100 + "@example.com"
+            })
+        except Exception:
+            # Should be able to continue after error
+            row = table.insert({
+                "name": "Recovery User",
+                "email": "recovery@test.com"
+            })
+            self.assertEqual(row["name"], "Recovery User")
+
+        # Test recovery from invalid column reference
+        try:
+            table.select(where={"invalid_column": "value"})
+        except Exception:
+            # Should be able to continue
+            result = table.select(where={"name": "Recovery User"})
+            self.assertIsInstance(result, Result)
+
+    def test_table_context_manager(self, tx):
+        """Test table as context manager."""
+        # Test with context manager
+        with tx.table("table_test_basic") as table:
+            count = table.count()
+            self.assertGreaterEqual(count, 0)
+
+        # Table should be properly closed after context
+
+    def test_table_drop_and_recreate(self, tx):
+        """Test dropping and recreating tables."""
+        # Create temporary table
+        temp_table = tx.table("table_test_temp")
+        temp_table.create(columns={"temp_col": str})
+        self.assertTrue(temp_table.exists())
+
+        # Drop table
+        temp_table.drop()
+        self.assertFalse(temp_table.exists())
+
+        # Recreate with different schema
+        temp_table.create(columns={"temp_col": str, "new_col": int})
+        self.assertTrue(temp_table.exists())
+
+        # Verify new schema
+        columns = temp_table.columns()
+        self.assertIn("temp_col", columns)
+        self.assertIn("new_col", columns)
+
+    def test_table_index_operations(self, tx):
+        """Test table index operations."""
+        table = tx.table("table_test_basic")
+
+        # Create index
+        try:
+            table.create_index("name")
+            table.create_index("email", unique=True)
+        except Exception:
+            pass  # Index might already exist
+
+        # Test that operations still work with indexes
+        result = table.select(where={"name": "Alice Johnson"})
+        self.assertIsInstance(result, Result)
+
+    def test_table_query_builder(self, tx):
+        """Test table query builder functionality."""
+        table = tx.table("table_test_basic")
+
+        # Test that query building doesn't execute immediately
+        query = table.select(where={"active": True})
+        self.assertIsInstance(query, Result)
+
+        # Test select with different parameters
+        result_ordered = table.select(orderby="name")
+        ordered_list = list(result_ordered)
+        self.assertGreaterEqual(len(ordered_list), 0)
+
+
+if __name__ == "__main__":
+    unittest.main()
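
To run just this new module against a database, the standard library test loader can target its dotted path; this is a generic unittest invocation rather than anything shipped in the wheel, and it assumes the Postgres instance expected by `velocity.db.tests.postgres.common` is available.

```python
# Generic unittest invocation, not part of the package: load and run only the
# comprehensive Table tests added in 0.0.155. Assumes the Postgres connection
# configured by velocity.db.tests.postgres.common is reachable.
import unittest

suite = unittest.defaultTestLoader.loadTestsFromName(
    "velocity.db.tests.postgres.test_table_comprehensive"
)
unittest.TextTestRunner(verbosity=2).run(suite)
```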