velocity-python 0.0.105__py3-none-any.whl → 0.0.155__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- velocity/__init__.py +3 -1
- velocity/app/orders.py +3 -4
- velocity/app/tests/__init__.py +1 -0
- velocity/app/tests/test_email_processing.py +112 -0
- velocity/app/tests/test_payment_profile_sorting.py +191 -0
- velocity/app/tests/test_spreadsheet_functions.py +124 -0
- velocity/aws/__init__.py +3 -0
- velocity/aws/amplify.py +10 -6
- velocity/aws/handlers/__init__.py +2 -0
- velocity/aws/handlers/base_handler.py +248 -0
- velocity/aws/handlers/context.py +167 -2
- velocity/aws/handlers/exceptions.py +16 -0
- velocity/aws/handlers/lambda_handler.py +24 -85
- velocity/aws/handlers/mixins/__init__.py +16 -0
- velocity/aws/handlers/mixins/activity_tracker.py +181 -0
- velocity/aws/handlers/mixins/aws_session_mixin.py +192 -0
- velocity/aws/handlers/mixins/error_handler.py +192 -0
- velocity/aws/handlers/mixins/legacy_mixin.py +53 -0
- velocity/aws/handlers/mixins/standard_mixin.py +73 -0
- velocity/aws/handlers/response.py +1 -1
- velocity/aws/handlers/sqs_handler.py +28 -143
- velocity/aws/tests/__init__.py +1 -0
- velocity/aws/tests/test_lambda_handler_json_serialization.py +120 -0
- velocity/aws/tests/test_response.py +163 -0
- velocity/db/__init__.py +16 -4
- velocity/db/core/decorators.py +20 -4
- velocity/db/core/engine.py +185 -792
- velocity/db/core/result.py +36 -22
- velocity/db/core/row.py +15 -3
- velocity/db/core/table.py +283 -44
- velocity/db/core/transaction.py +19 -11
- velocity/db/exceptions.py +42 -18
- velocity/db/servers/base/__init__.py +9 -0
- velocity/db/servers/base/initializer.py +70 -0
- velocity/db/servers/base/operators.py +98 -0
- velocity/db/servers/base/sql.py +503 -0
- velocity/db/servers/base/types.py +135 -0
- velocity/db/servers/mysql/__init__.py +73 -0
- velocity/db/servers/mysql/operators.py +54 -0
- velocity/db/servers/{mysql_reserved.py → mysql/reserved.py} +2 -14
- velocity/db/servers/mysql/sql.py +718 -0
- velocity/db/servers/mysql/types.py +107 -0
- velocity/db/servers/postgres/__init__.py +59 -11
- velocity/db/servers/postgres/operators.py +34 -0
- velocity/db/servers/postgres/sql.py +474 -120
- velocity/db/servers/postgres/types.py +88 -2
- velocity/db/servers/sqlite/__init__.py +61 -0
- velocity/db/servers/sqlite/operators.py +52 -0
- velocity/db/servers/sqlite/reserved.py +20 -0
- velocity/db/servers/sqlite/sql.py +677 -0
- velocity/db/servers/sqlite/types.py +92 -0
- velocity/db/servers/sqlserver/__init__.py +73 -0
- velocity/db/servers/sqlserver/operators.py +47 -0
- velocity/db/servers/sqlserver/reserved.py +32 -0
- velocity/db/servers/sqlserver/sql.py +805 -0
- velocity/db/servers/sqlserver/types.py +114 -0
- velocity/db/servers/tablehelper.py +117 -91
- velocity/db/tests/__init__.py +1 -0
- velocity/db/tests/common_db_test.py +0 -0
- velocity/db/tests/postgres/__init__.py +1 -0
- velocity/db/tests/postgres/common.py +49 -0
- velocity/db/tests/postgres/test_column.py +29 -0
- velocity/db/tests/postgres/test_connections.py +25 -0
- velocity/db/tests/postgres/test_database.py +21 -0
- velocity/db/tests/postgres/test_engine.py +205 -0
- velocity/db/tests/postgres/test_general_usage.py +88 -0
- velocity/db/tests/postgres/test_imports.py +8 -0
- velocity/db/tests/postgres/test_result.py +19 -0
- velocity/db/tests/postgres/test_row.py +137 -0
- velocity/db/tests/postgres/test_row_comprehensive.py +720 -0
- velocity/db/tests/postgres/test_schema_locking.py +335 -0
- velocity/db/tests/postgres/test_schema_locking_unit.py +115 -0
- velocity/db/tests/postgres/test_sequence.py +34 -0
- velocity/db/tests/postgres/test_sql_comprehensive.py +462 -0
- velocity/db/tests/postgres/test_table.py +101 -0
- velocity/db/tests/postgres/test_table_comprehensive.py +646 -0
- velocity/db/tests/postgres/test_transaction.py +106 -0
- velocity/db/tests/sql/__init__.py +1 -0
- velocity/db/tests/sql/common.py +177 -0
- velocity/db/tests/sql/test_postgres_select_advanced.py +285 -0
- velocity/db/tests/sql/test_postgres_select_variances.py +517 -0
- velocity/db/tests/test_cursor_rowcount_fix.py +150 -0
- velocity/db/tests/test_db_utils.py +221 -0
- velocity/db/tests/test_postgres.py +448 -0
- velocity/db/tests/test_postgres_unchanged.py +81 -0
- velocity/db/tests/test_process_error_robustness.py +292 -0
- velocity/db/tests/test_result_caching.py +279 -0
- velocity/db/tests/test_result_sql_aware.py +117 -0
- velocity/db/tests/test_row_get_missing_column.py +72 -0
- velocity/db/tests/test_schema_locking_initializers.py +226 -0
- velocity/db/tests/test_schema_locking_simple.py +97 -0
- velocity/db/tests/test_sql_builder.py +165 -0
- velocity/db/tests/test_tablehelper.py +486 -0
- velocity/db/utils.py +62 -47
- velocity/misc/conv/__init__.py +2 -0
- velocity/misc/conv/iconv.py +5 -4
- velocity/misc/export.py +1 -4
- velocity/misc/merge.py +1 -1
- velocity/misc/tests/__init__.py +1 -0
- velocity/misc/tests/test_db.py +90 -0
- velocity/misc/tests/test_fix.py +78 -0
- velocity/misc/tests/test_format.py +64 -0
- velocity/misc/tests/test_iconv.py +203 -0
- velocity/misc/tests/test_merge.py +82 -0
- velocity/misc/tests/test_oconv.py +144 -0
- velocity/misc/tests/test_original_error.py +52 -0
- velocity/misc/tests/test_timer.py +74 -0
- velocity/misc/tools.py +0 -1
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/METADATA +2 -2
- velocity_python-0.0.155.dist-info/RECORD +129 -0
- velocity/db/core/exceptions.py +0 -70
- velocity/db/servers/mysql.py +0 -641
- velocity/db/servers/sqlite.py +0 -968
- velocity/db/servers/sqlite_reserved.py +0 -208
- velocity/db/servers/sqlserver.py +0 -921
- velocity/db/servers/sqlserver_reserved.py +0 -314
- velocity_python-0.0.105.dist-info/RECORD +0 -56
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/WHEEL +0 -0
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/licenses/LICENSE +0 -0
- {velocity_python-0.0.105.dist-info → velocity_python-0.0.155.dist-info}/top_level.txt +0 -0
velocity/db/core/result.py
CHANGED
@@ -1,5 +1,3 @@
-import datetime
-import decimal
 from velocity.misc.format import to_json
 
 
@@ -7,14 +5,14 @@ class Result:
     """
     Wraps a database cursor to provide various convenience transformations
     (dict, list, tuple, etc.) and helps iterate over query results.
-
+
     Features:
     - Pre-fetches first row for immediate boolean evaluation
     - Boolean state changes as rows are consumed: bool(result) tells you if MORE rows are available
     - Supports __bool__, is_empty(), has_results() for checking remaining results
     - Efficient iteration without unnecessary fetchall() calls
     - Caches next row to maintain accurate state without redundant database calls
-
+
     Boolean Behavior:
     - Initially: bool(result) = True if query returned any rows
     - After each row: bool(result) = True if more rows are available to fetch
@@ -29,15 +27,15 @@ class Result:
             description = getattr(cursor, "description", []) or []
             self._headers = []
             for col in description:
-                if hasattr(col,
+                if hasattr(col, "__getitem__"):  # Tuple-like (col[0])
                     self._headers.append(col[0].lower())
-                elif hasattr(col,
+                elif hasattr(col, "name"):  # Object with name attribute
                     self._headers.append(col.name.lower())
                 else:
-                    self._headers.append(f
+                    self._headers.append(f"column_{len(self._headers)}")
         except (AttributeError, TypeError, IndexError):
             self._headers = []
-
+
         self.__as_strings = False
         self.__enumerate = False
         self.__count = -1
@@ -49,17 +47,27 @@ class Result:
         self._cached_first_row = None
         self._first_row_fetched = False
         self._exhausted = False
-
+
         # Pre-fetch the first row to enable immediate boolean evaluation
         self._fetch_first_row()
 
     def _fetch_first_row(self):
         """
         Pre-fetch the first row from the cursor to enable immediate boolean evaluation.
+        Only attempts to fetch for SELECT-like operations that return rows.
         """
         if self._first_row_fetched or not self._cursor:
             return
-
+
+        # Don't try to fetch from INSERT/UPDATE/DELETE operations
+        # These operations don't return rows, only rowcount
+        if self.__sql and self.__sql.strip().upper().startswith(
+            ("INSERT", "UPDATE", "DELETE", "TRUNCATE")
+        ):
+            self._exhausted = True
+            self._first_row_fetched = True
+            return
+
         try:
             raw_row = self._cursor.fetchone()
             if raw_row:
@@ -101,7 +109,9 @@ class Result:
         Return True if there are more rows available to fetch.
         This is based on whether we have a cached row or the cursor isn't exhausted.
         """
-        return self._cached_first_row is not None or (
+        return self._cached_first_row is not None or (
+            not self._exhausted and self._cursor
+        )
 
     def __next__(self):
         """
@@ -119,7 +129,7 @@ class Result:
             if not row:
                 self._exhausted = True
                 raise StopIteration
-            # Try to pre-fetch the next row to update our state
+            # Try to pre-fetch the next row to update our state
            self._try_cache_next_row()
         except Exception as e:
             # Handle cursor errors (e.g., closed cursor)
@@ -146,7 +156,7 @@ class Result:
         """
         if not self._cursor or self._cached_first_row is not None:
             return
-
+
         try:
             next_row = self._cursor.fetchone()
             if next_row:
@@ -206,18 +216,22 @@ class Result:
         """
         if not self.__columns and self._cursor and hasattr(self._cursor, "description"):
             for column in self._cursor.description:
-                data = {
-
-                }
-
+                data = {"type_name": "unknown"}  # Default value
+
                 # Try to get type information (PostgreSQL specific)
                 try:
-                    if
-
+                    if (
+                        hasattr(column, "type_code")
+                        and self.__tx
+                        and hasattr(self.__tx, "pg_types")
+                    ):
+                        data["type_name"] = self.__tx.pg_types.get(
+                            column.type_code, "unknown"
+                        )
                 except (AttributeError, KeyError):
                     # Keep default value
                     pass
-
+
                 # Get all other column attributes safely
                 for key in dir(column):
                     if not key.startswith("__"):
@@ -226,8 +240,8 @@ class Result:
                        except (AttributeError, TypeError):
                            # Skip attributes that can't be accessed
                            continue
-
-                column_name = getattr(column,
+
+                column_name = getattr(column, "name", f"column_{len(self.__columns)}")
                 self.__columns[column_name] = data
         return self.__columns
 
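The Result changes above adjust the boolean/iteration contract and skip pre-fetching for statements that return no rows. A minimal usage sketch of that behavior (assuming `tx` is an open velocity transaction and an `account` table exists; the table name, columns, and `%s` paramstyle are illustrative, not part of this diff):

    # bool(result) is True only while more rows remain to be fetched.
    rows = tx.execute("SELECT sys_id, name FROM account WHERE status = %s", ("active",))
    if rows:
        for row in rows:          # rows are consumed lazily; no fetchall()
            print(row)
    assert not rows               # exhausted once iteration finishes

    # INSERT/UPDATE/DELETE/TRUNCATE results are marked exhausted immediately
    # instead of calling fetchone() on a cursor that has no rows to return.
    res = tx.execute("UPDATE account SET status = 'archived' WHERE status = 'stale'")
    assert not res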
velocity/db/core/row.py
CHANGED
@@ -44,7 +44,12 @@ class Row:
     def __setitem__(self, key, val):
         if key in self.pk:
             raise Exception("Cannot update a primary key.")
-        self.table
+        if hasattr(self.table, "updins"):
+            self.table.updins({key: val}, pk=self.pk)
+        elif hasattr(self.table, "upsert"):
+            self.table.upsert({key: val}, pk=self.pk)
+        else:
+            self.table.update({key: val}, pk=self.pk)
 
     def __delitem__(self, key):
         if key in self.pk:
@@ -97,7 +102,9 @@ class Row:
         except Exception as e:
             # Check if the error message indicates a missing column
             error_msg = str(e).lower()
-            if
+            if "column" in error_msg and (
+                "does not exist" in error_msg or "not found" in error_msg
+            ):
                 return failobj
             # Re-raise other exceptions
             raise
@@ -119,7 +126,12 @@ class Row:
         if kwds:
             data.update(kwds)
         if data:
-            self.table
+            if hasattr(self.table, "updins"):
+                self.table.updins(data, pk=self.pk)
+            elif hasattr(self.table, "upsert"):
+                self.table.upsert(data, pk=self.pk)
+            else:
+                self.table.update(data, pk=self.pk)
         return self
 
     def __cmp__(self, other):
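The Row changes route single-column and bulk writes through the new update-or-insert helpers when the table exposes them, and make missing-column lookups return the caller's fallback. A sketch of the intended usage (assuming `tx` is an open velocity transaction; the method carrying the `failobj` handling is presumed to be Row.get()):

    row = tx.table("account").get({"email": "a@example.com"})

    # __setitem__ (and the bulk update path) now prefer table.updins(), then
    # table.upsert(), then plain table.update(), depending on what the table offers.
    row["status"] = "active"

    # A "column ... does not exist" / "not found" error now yields the fallback
    # instead of propagating.
    value = row.get("legacy_column", None)   # assumed Row.get(key, failobj) signature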
velocity/db/core/table.py
CHANGED
@@ -1,4 +1,5 @@
 import sqlparse
+from collections.abc import Iterable, Mapping
 from velocity.db import exceptions
 from velocity.db.core.row import Row
 from velocity.db.core.result import Result
@@ -23,7 +24,24 @@ class Query:
         return self.sql
 
 
+SYSTEM_COLUMN_NAMES = (
+    "sys_id",
+    "sys_created",
+    "sys_modified",
+    "sys_modified_by",
+    "sys_modified_row",
+    "sys_modified_count",
+    "sys_dirty",
+    "sys_table",
+    "description",
+)
+
+_SYSTEM_COLUMN_SET = {name.lower() for name in SYSTEM_COLUMN_NAMES}
+
+
 class Table:
+    SYSTEM_COLUMNS = SYSTEM_COLUMN_NAMES
+
     """
     Provides an interface for performing CRUD and metadata operations on a DB table.
     """
@@ -54,13 +72,13 @@ class Table:
         """
         try:
             self._cursor.close()
-        except:
+        except Exception:
             pass
 
     def cursor(self):
         try:
             return self._cursor
-        except:
+        except AttributeError:
             pass
         self._cursor = self.tx.cursor()
         return self._cursor
@@ -93,9 +111,15 @@ class Table:
 
     def columns(self):
         """
-        Returns column names
+        Returns non-system column names.
         """
-        return [col for col in self.sys_columns() if not
+        return [col for col in self.sys_columns() if not self.is_system_column(col)]
+
+    @staticmethod
+    def is_system_column(column_name):
+        if not column_name:
+            return False
+        return column_name.lower() in _SYSTEM_COLUMN_SET or column_name.lower().startswith("sys_")
 
     @return_default(None, (exceptions.DbObjectExistsError,))
     def create_index(
@@ -119,6 +143,59 @@ class Table:
             return sql, vals
         self.tx.execute(sql, vals, cursor=self.cursor())
 
+    def create_indexes(self, indexes, **kwds):
+        """
+        Convenience wrapper to create multiple indexes in order.
+
+        Accepts an iterable of definitions. Each definition may be either:
+        - Mapping with a required "columns" entry plus optional "unique",
+          "direction", "where", and "lower" keys.
+        - A simple sequence/string of columns, in which case defaults apply.
+
+        When sql_only=True, a list of (sql, params) tuples is returned.
+        """
+
+        if indexes is None:
+            return [] if kwds.get("sql_only", False) else None
+
+        if not isinstance(indexes, Iterable) or isinstance(indexes, (str, bytes)):
+            raise TypeError("indexes must be an iterable of index definitions")
+
+        sql_only = kwds.get("sql_only", False)
+        statements = []
+
+        for definition in indexes:
+            if isinstance(definition, Mapping):
+                columns = definition.get("columns")
+                if not columns:
+                    raise ValueError("Index definition requires a non-empty 'columns' entry")
+                params = {
+                    "unique": definition.get("unique", False),
+                    "direction": definition.get("direction"),
+                    "where": definition.get("where"),
+                    "lower": definition.get("lower"),
+                }
+            else:
+                columns = definition
+                params = {
+                    "unique": False,
+                    "direction": None,
+                    "where": None,
+                    "lower": None,
+                }
+
+            if isinstance(columns, str):
+                columns = columns.split(",")
+
+            if not columns:
+                raise ValueError("Index columns cannot be empty")
+
+            result = self.create_index(columns, **params, **kwds)
+            if sql_only:
+                statements.append(result)
+
+        return statements if sql_only else None
+
     @return_default(None)
     def drop_index(self, columns, **kwds):
         """
@@ -163,6 +240,34 @@ class Table:
             return self.name in [f"{x[0]}.{x[1]}" for x in result.as_tuple()]
         return self.name in [x[1] for x in result.as_tuple()]
 
+    def ensure_system_columns(self, **kwds):
+        """Ensure Velocity system columns and triggers exist for this table."""
+        force = kwds.get("force", False)
+
+        try:
+            columns = [col.lower() for col in self.sys_columns()]
+        except Exception:
+            columns = []
+
+        sql_method = getattr(self.sql, "ensure_system_columns", None)
+
+        if sql_method is None:
+            raise AttributeError(
+                f"{self.sql.__class__.__name__} does not implement ensure_system_columns"
+            )
+
+        result = sql_method(
+            self.name, existing_columns=columns, force=force
+        )
+
+        if not result:
+            return
+
+        sql, vals = result
+        if kwds.get("sql_only", False):
+            return sql, vals
+        self.tx.execute(sql, vals, cursor=self.cursor())
+
     def column(self, name):
         """
         Returns a Column object for the given column name.
@@ -241,53 +346,91 @@ class Table:
         sys_id = self.tx.execute(sql, vals).scalar()
         return self.row(sys_id, lock=lock)
 
+    def _normalize_lookup_where(self, where):
+        if where is None:
+            raise Exception("None is not allowed as a primary key.")
+        if isinstance(where, Row):
+            return dict(where.pk)
+        if isinstance(where, int):
+            return {"sys_id": where}
+        if not isinstance(where, Mapping):
+            raise TypeError(
+                "Lookup criteria must be an int, Row, or mapping of column -> value."
+            )
+        return dict(where)
+
+    def _select_sys_ids(
+        self,
+        where,
+        *,
+        lock=None,
+        orderby=None,
+        skip_locked=None,
+        limit=2,
+    ):
+        select_kwargs = {
+            "where": where,
+            "lock": lock,
+            "orderby": orderby,
+            "skip_locked": skip_locked,
+        }
+        if limit is not None:
+            select_kwargs["qty"] = limit
+        return self.select("sys_id", **select_kwargs).all()
+
+    def _clean_where_for_insert(self, where):
+        clean = {}
+        for key, val in where.items():
+            if not isinstance(key, str):
+                continue
+            if set("<>!=%").intersection(key):
+                continue
+            clean.setdefault(key, val)
+        return clean
+
     def get(self, where, lock=None, use_where=False):
         """
         Gets or creates a row matching `where`. If multiple rows match, raises DuplicateRowsFoundError.
         If none match, a new row is created with the non-operator aspects of `where`.
         """
-
-
-        if isinstance(where, int):
-            where = {"sys_id": where}
-        result = self.select("sys_id", where=where, lock=lock).all()
+        lookup = self._normalize_lookup_where(where)
+        result = self._select_sys_ids(lookup, lock=lock, limit=2)
         if len(result) > 1:
-            sql = self.select("sys_id", sql_only=True, where=
+            sql = self.select("sys_id", sql_only=True, where=lookup, lock=lock)
             raise exceptions.DuplicateRowsFoundError(
                 f"More than one entry found. {sql}"
             )
         if not result:
-            new_data =
-            for k in list(new_data.keys()):
-                if set("<>!=%").intersection(k):
-                    new_data.pop(k)
+            new_data = self._clean_where_for_insert(lookup)
             return self.new(new_data, lock=lock)
         if use_where:
-            return Row(self,
+            return Row(self, lookup, lock=lock)
         return Row(self, result[0]["sys_id"], lock=lock)
 
     @return_default(None)
-    def find(self, where, lock=None, use_where=False):
+    def find(self, where, lock=None, use_where=False, raise_if_missing=False):
         """
-        Finds a single row matching `where`, or returns None if none found
-        Raises DuplicateRowsFoundError if multiple rows match.
+        Finds a single row matching `where`, or returns None if none found unless
+        ``raise_if_missing`` is True. Raises DuplicateRowsFoundError if multiple rows match.
         """
-
-
-        if isinstance(where, int):
-            where = {"sys_id": where}
-        result = self.select("sys_id", where=where, lock=lock).all()
+        lookup = self._normalize_lookup_where(where)
+        result = self._select_sys_ids(lookup, lock=lock, limit=2)
         if not result:
+            if raise_if_missing:
+                raise LookupError(
+                    f"No rows found in `{self.name}` for criteria: {lookup!r}"
+                )
            return None
         if len(result) > 1:
-            sql = self.select("sys_id", sql_only=True, where=
+            sql = self.select("sys_id", sql_only=True, where=lookup, lock=lock)
            raise exceptions.DuplicateRowsFoundError(
                f"More than one entry found. {sql}"
            )
        if use_where:
-            return Row(self,
+            return Row(self, lookup, lock=lock)
         return Row(self, result[0]["sys_id"], lock=lock)
-
+
+    one = find
 
     @return_default(None)
     def first(
@@ -302,23 +445,21 @@ class Table:
         """
         Finds the first matching row (by `orderby`) or creates one if `create_new=True` and none found.
         """
-
-
-
-
-
-
-
+        lookup = self._normalize_lookup_where(where)
+        results = self._select_sys_ids(
+            lookup,
+            lock=lock,
+            orderby=orderby,
+            skip_locked=skip_locked,
+            limit=1,
+        )
         if not results:
             if create_new:
-                new_data =
-                for k in list(new_data.keys()):
-                    if set("<>!=%").intersection(k):
-                        new_data.pop(k)
+                new_data = self._clean_where_for_insert(lookup)
                 return self.new(new_data, lock=lock)
             return None
         if use_where:
-            return Row(self,
+            return Row(self, lookup, lock=lock)
         return Row(self, results[0]["sys_id"], lock=lock)
 
     def primary_keys(self):
@@ -431,7 +572,7 @@ class Table:
         if kwds.get("sql_only", False):
             return sql, vals
         result = self.tx.execute(sql, vals, cursor=self.cursor())
-        return result.cursor.rowcount
+        return result.cursor.rowcount if result.cursor else 0
 
     @reset_id_on_dup_key
     @create_missing
@@ -443,7 +584,7 @@ class Table:
         if kwds.get("sql_only", False):
             return sql, vals
         result = self.tx.execute(sql, vals, cursor=self.cursor())
-        return result.cursor.rowcount
+        return result.cursor.rowcount if result.cursor else 0
 
     @reset_id_on_dup_key
     @create_missing
@@ -462,7 +603,105 @@ class Table:
         if kwds.get("sql_only", False):
             return sql, vals
         result = self.tx.execute(sql, vals, cursor=self.cursor())
-        return result.cursor.rowcount
+        return result.cursor.rowcount if result.cursor else 0
+
+    @create_missing
+    def update_or_insert(self, update_data, insert_data=None, where=None, pk=None, **kwds):
+        """
+        Attempts an UPDATE first; if no rows change, performs an INSERT guarded by NOT EXISTS.
+
+        :param update_data: Mapping of columns to update.
+        :param insert_data: Optional mapping used for the INSERT. When omitted, values are
+                            derived from update_data combined with simple equality predicates
+                            from ``where`` and primary key values.
+        :param where: Criteria for the UPDATE and existence check.
+        :param pk: Optional primary key mapping for UPDATE (merged into WHERE) and INSERT.
+        :param sql_only: When True, return the SQL/parameter tuples for both phases instead of executing.
+        :return: Number of rows affected, or a dict with ``update``/``insert`` entries when sql_only=True.
+        """
+        sql_only = kwds.get("sql_only", False)
+        if not isinstance(update_data, Mapping) or not update_data:
+            raise ValueError("update_data must be a non-empty mapping of column-value pairs.")
+        if where is None and pk is None:
+            raise ValueError("Either where or pk must be provided for update_or_insert.")
+
+        update_stmt = None
+        if sql_only:
+            update_stmt = self.update(update_data, where=where, pk=pk, sql_only=True)
+        else:
+            updated = self.update(update_data, where=where, pk=pk)
+            if updated:
+                return updated
+
+        if insert_data is not None:
+            if not isinstance(insert_data, Mapping):
+                raise ValueError("insert_data must be a mapping when provided.")
+            insert_payload = dict(insert_data)
+        else:
+            insert_payload = dict(update_data)
+            if isinstance(where, Mapping):
+                for key, val in where.items():
+                    if not isinstance(key, str):
+                        continue
+                    if set("<>!=%").intersection(key):
+                        continue
+                    insert_payload.setdefault(key, val)
+            if isinstance(pk, Mapping):
+                for key, val in pk.items():
+                    insert_payload.setdefault(key, val)
+
+        if not insert_payload:
+            raise ValueError("Unable to derive insert payload for update_or_insert.")
+
+        exists_where = None
+        if where is not None and pk is not None:
+            if isinstance(where, Mapping) and isinstance(pk, Mapping):
+                combined = dict(where)
+                combined.update(pk)
+                exists_where = combined
+            else:
+                exists_where = where
+        elif where is not None:
+            exists_where = where
+        else:
+            exists_where = pk
+
+        ins_builder = getattr(self.sql, "insnx", None) or getattr(
+            self.sql, "insert_if_not_exists", None
+        )
+        if ins_builder is None:
+            raise NotImplementedError(
+                "Current SQL dialect does not support insert-if-not-exists operations."
+            )
+
+        sql, vals = ins_builder(self.tx, self.name, insert_payload, exists_where)
+        if sql_only:
+            return {"update": update_stmt, "insert": (sql, vals)}
+        result = self.tx.execute(sql, vals, cursor=self.cursor())
+        return result.cursor.rowcount if result.cursor else 0
+
+    updins = update_or_insert
+
+    @create_missing
+    def insert_if_not_exists(self, data, where=None, **kwds):
+        """
+        Inserts `data` into the table only if the existence check (`where`) does not match any rows.
+
+        Usage:
+            table.insert_if_not_exists({'key_col': 'k', 'value': 'v'}, where={'key_col': 'k'})
+
+        :param data: dict of column -> value for insert
+        :param where: mapping/list/str used for the EXISTS check; if None primary keys are used and
+                      must be present in `data`.
+        :return: rowcount (0 or 1) or (sql, params) when sql_only=True
+        """
+        sql, vals = self.sql.insert_if_not_exists(self.tx, self.name, data, where)
+        if kwds.get("sql_only", False):
+            return sql, vals
+        result = self.tx.execute(sql, vals, cursor=self.cursor())
+        return result.cursor.rowcount if result.cursor else 0
+
+    insnx = insert_if_not_exists
 
     upsert = merge
     indate = merge
@@ -662,7 +901,7 @@ class Table:
         if kwds.get("sql_only", False):
             return sql, vals
         result = self.tx.execute(sql, vals)
-        return result.cursor.rowcount
+        return result.cursor.rowcount if result.cursor else 0
 
     def truncate(self, **kwds):
         """
@@ -886,8 +1125,8 @@ class Table:
         # Return a descriptive string of differences.
         if differences:
             differences.insert(0, f"Comparing {self.name}: {pk1} vs {pk2}")
-            differences.insert(0,
-            differences.append(
+            differences.insert(0, "--------------------------------------")
+            differences.append("--------------------------------------")
             return "\n".join(differences)
         else:
            return f"{self.name} rows {pk1} and {pk2} are identical."
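The new Table helpers above (create_indexes, ensure_system_columns, update_or_insert/updins, insert_if_not_exists/insnx, the one = find alias, and find(..., raise_if_missing=...)) combine as in this sketch (assuming `tx` is an open velocity transaction; table, column, and index names are illustrative):

    table = tx.table("account")
    table.ensure_system_columns()

    # Batch index creation: mappings carry per-index options; a plain string or
    # sequence of columns falls back to the defaults. sql_only=True would return
    # the (sql, params) tuples instead of executing.
    table.create_indexes([
        {"columns": "email", "unique": True, "lower": True},
        "last_name,first_name",
    ])

    # UPDATE first; if no rows changed, INSERT guarded by a NOT EXISTS check.
    affected = table.update_or_insert(
        {"status": "active"},
        where={"email": "a@example.com"},
    )

    # Plain insert-if-not-exists, also available as the insnx alias.
    table.insert_if_not_exists(
        {"email": "b@example.com", "status": "new"},
        where={"email": "b@example.com"},
    )

    # find() can now raise instead of returning None; one() aliases find().
    row = table.find({"email": "a@example.com"}, raise_if_missing=True)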
velocity/db/core/transaction.py
CHANGED
@@ -164,17 +164,25 @@ class Transaction:
         """
         return Row(self.table(tablename), pk, lock=lock)
 
-    def get(self, tablename, where, lock=None):
-        """
-
-
-
-
-
-
-
-
+    def get(self, tablename, where, lock=None, use_where=False):
+        """Shortcut to table.get() with optional ``use_where`` passthrough."""
+        return self.table(tablename).get(where, lock=lock, use_where=use_where)
+
+    def find(
+        self,
+        tablename,
+        where,
+        lock=None,
+        use_where=False,
+        raise_if_missing=False,
+    ):
+        """Shortcut to table.find() with ``use_where``/``raise_if_missing`` passthrough."""
+        return self.table(tablename).find(
+            where,
+            lock=lock,
+            use_where=use_where,
+            raise_if_missing=raise_if_missing,
+        )
 
     def column(self, tablename, colname):
         """
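The Transaction shortcuts above delegate directly to the corresponding Table methods; a brief sketch (again assuming an open transaction `tx`):

    # get() now forwards use_where; find() forwards use_where and raise_if_missing.
    row = tx.get("account", {"email": "a@example.com"}, use_where=True)

    try:
        tx.find("account", {"email": "nobody@example.com"}, raise_if_missing=True)
    except LookupError:
        # Table.find() raises LookupError for a missing row when raise_if_missing=True
        # (assuming the @return_default(None) decorator lets it propagate).
        pass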