velocity-python 0.0.138__py3-none-any.whl → 0.0.152__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of velocity-python might be problematic.
- velocity/__init__.py +1 -1
- velocity/app/orders.py +1 -3
- velocity/aws/handlers/mixins/activity_tracker.py +53 -14
- velocity/aws/handlers/mixins/aws_session_mixin.py +192 -0
- velocity/db/core/row.py +12 -2
- velocity/db/core/table.py +178 -0
- velocity/db/core/transaction.py +6 -10
- velocity/db/servers/mysql/sql.py +132 -5
- velocity/db/servers/postgres/sql.py +254 -79
- velocity/db/servers/sqlite/sql.py +127 -4
- velocity/db/servers/sqlserver/sql.py +165 -4
- velocity/db/tests/test_postgres.py +189 -0
- {velocity_python-0.0.138.dist-info → velocity_python-0.0.152.dist-info}/METADATA +2 -2
- {velocity_python-0.0.138.dist-info → velocity_python-0.0.152.dist-info}/RECORD +17 -16
- {velocity_python-0.0.138.dist-info → velocity_python-0.0.152.dist-info}/WHEEL +0 -0
- {velocity_python-0.0.138.dist-info → velocity_python-0.0.152.dist-info}/licenses/LICENSE +0 -0
- {velocity_python-0.0.138.dist-info → velocity_python-0.0.152.dist-info}/top_level.txt +0 -0
velocity/__init__.py
CHANGED
velocity/app/orders.py
CHANGED
@@ -115,9 +115,7 @@ class Order:
         for key, default in defaults.items():
             if key not in target:
                 target[key] = default() if callable(default) else default
-
-        # Always update updated_at if present
-        target[key] = default() if callable(default) else default
+
 
     def _validate(self):
         self._apply_defaults()
velocity/aws/handlers/mixins/activity_tracker.py
CHANGED
@@ -9,33 +9,34 @@ import copy
 import json
 import os
 import time
-from abc import ABC
-from
+from abc import ABC
+from datetime import date, datetime
+from typing import Dict, Any
 
 
 class ActivityTracker(ABC):
     """
     Mixin class providing standardized activity tracking for Lambda handlers.
-
+
     Tracks API calls to the aws_api_activity table with consistent data structure
     and automatic duration calculation.
     """
-
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.activity_log_key = None
         self.start_time = None
         self.end_time = None
         self.activity_data = {}
-
+
     def track_activity_start(self, tx, context):
         """Start tracking activity for the current request"""
         self.start_time = time.time()
-
+
         # Gather common activity data
         postdata = context.postdata()
         payload = context.payload()
-
+
         self.activity_data = {
             "action": context.action(),
             "args": json.dumps(context.args()),
@@ -45,21 +46,34 @@ class ActivityTracker(ABC):
             "user_branch": os.environ.get("USER_BRANCH", "Unknown"),
             "start_timestamp": self.start_time,
         }
-
+
         # Add user information if available
         user_info = self._extract_user_info(payload)
         if user_info:
            self.activity_data.update(user_info)
-
+
         # Add session information
         session_data = context.session()
         if session_data:
-            self.activity_data.update(
-
-
+            self.activity_data.update(self._sanitize_session_data(session_data))
+
+        # Ensure all values are serializable before persisting
+        self.activity_data = {
+            key: self._normalize_activity_value(value)
+            for key, value in self.activity_data.items()
+            if value is not None
+        }
+
         # Create the activity record
-
-
+        try:
+            self.activity_log_key = tx.table("aws_api_activity").new(self.activity_data).pk
+        except Exception as exc:
+            context.log(
+                f"ActivityTracker.track_activity_start failed: {exc}; keys={list(self.activity_data.keys())}",
+                "ActivityTracker.track_activity_start",
+            )
+            raise
+
         return self.activity_log_key
 
     def track_activity_success(self, tx, context):
@@ -140,3 +154,28 @@ class ActivityTracker(ABC):
                 pass
 
         return user_info
+
+    def _sanitize_session_data(self, session: Dict[str, Any]) -> Dict[str, Any]:
+        """Remove sensitive session keys and normalize value types"""
+        sanitized = {}
+
+        for key, value in session.items():
+            if key == "cognito_user":
+                continue
+
+            sanitized[key] = self._normalize_activity_value(value)
+
+        return sanitized
+
+    def _normalize_activity_value(self, value: Any) -> Any:
+        """Convert activity data values into types acceptable by psycopg2"""
+        if isinstance(value, (dict, list, tuple, set)):
+            try:
+                return json.dumps(value)
+            except (TypeError, ValueError):
+                return str(value)
+        if isinstance(value, (datetime, date)):
+            return value.isoformat()
+        if isinstance(value, (bytes, bytearray)):
+            return value.decode("utf-8", errors="ignore")
+        return value
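
For reference, the sketch below mirrors the normalization rules added above as a standalone function, so their effect on common Python values is easy to see. It is illustrative only and not part of the package; the real logic lives on ActivityTracker._normalize_activity_value.

    # Standalone mirror of the normalization logic added in this release (illustrative only).
    import json
    from datetime import date, datetime

    def normalize_activity_value(value):
        if isinstance(value, (dict, list, tuple, set)):
            try:
                return json.dumps(value)
            except (TypeError, ValueError):
                return str(value)  # e.g. sets are not JSON-serializable
        if isinstance(value, (datetime, date)):
            return value.isoformat()
        if isinstance(value, (bytes, bytearray)):
            return value.decode("utf-8", errors="ignore")
        return value

    print(normalize_activity_value({"role": "admin"}))     # '{"role": "admin"}'
    print(normalize_activity_value({1, 2}))                # '{1, 2}' via the str() fallback
    print(normalize_activity_value(datetime(2024, 1, 1)))  # '2024-01-01T00:00:00'
    print(normalize_activity_value(b"abc"))                # 'abc'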
velocity/aws/handlers/mixins/aws_session_mixin.py
ADDED
@@ -0,0 +1,192 @@
+"""
+Error Handler Mixin for Lambda Handlers.
+
+Provides standardized error handling, logging, and notification functionality
+for Lambda handlers.
+"""
+
+import copy
+import os
+import pprint
+import time
+from abc import ABC, abstractmethod
+from typing import Dict, Any, Optional
+
+
+class AwsSessionMixin(ABC):
+    """
+    Mixin class providing standardized error handling for Lambda handlers.
+
+    Handles error logging to sys_log table, email notifications to administrators,
+    and error metrics collection.
+    """
+
+    def handle_standard_error(self, tx, context, exception: Exception, tb_string: str):
+        """Handle errors with consistent logging and notification patterns"""
+
+        # Log to sys_log for centralized logging
+        self.log_error_to_system(tx, context, exception, tb_string)
+
+        # Determine if this error requires notification
+        if self._should_notify_error(exception):
+            self.send_error_notification(tx, context, exception, tb_string)
+
+        # Log error metrics for monitoring
+        self.log_error_metrics(tx, context, exception)
+
+    def log_error_to_system(self, tx, context, exception: Exception, tb_string: str):
+        """Log error to sys_log table"""
+        error_data = {
+            "level": "ERROR",
+            "message": str(exception),
+            "function": f"{self.__class__.__name__}.{context.action()}",
+            "traceback": tb_string,
+            "exception_type": exception.__class__.__name__,
+            "handler_name": self.__class__.__name__,
+            "action": context.action(),
+            "user_branch": os.environ.get("USER_BRANCH", "Unknown"),
+            "function_name": os.environ.get("AWS_LAMBDA_FUNCTION_NAME", "Unknown"),
+            "app_name": os.environ.get("ProjectName", "Unknown"),
+            "user_agent": "AWS Lambda",
+            "device_type": "Lambda",
+            "sys_modified_by": "Lambda",
+        }
+
+        # Add user context if available
+        try:
+            if hasattr(self, 'current_user') and self.current_user:
+                error_data["user_email"] = self.current_user.get("email_address")
+        except:
+            pass
+
+        tx.table("sys_log").insert(error_data)
+
+    def send_error_notification(self, tx, context, exception: Exception, tb_string: str):
+        """Send error notification email to administrators"""
+        try:
+            # Import here to avoid circular dependency
+            from support.app import helpers
+
+            environment = os.environ.get('USER_BRANCH', 'Unknown').title()
+            function_name = os.environ.get('AWS_LAMBDA_FUNCTION_NAME', 'Unknown')
+
+            subject = f"{environment} Lambda Error - {function_name}"
+
+            body = f"""
+Error Details:
+- Handler: {self.__class__.__name__}
+- Action: {context.action()}
+- Exception: {exception.__class__.__name__}
+- Message: {str(exception)}
+- Environment: {environment}
+- Function: {function_name}
+
+Full Traceback:
+{tb_string}
+
+Request Details:
+{self._get_error_context(context)}
+"""
+
+            sender = self._get_error_notification_sender()
+            recipients = self._get_error_notification_recipients()
+
+            helpers.sendmail(
+                tx,
+                subject=subject,
+                body=body,
+                html=None,
+                sender=sender,
+                recipient=recipients[0],
+                cc=recipients[1:] if len(recipients) > 1 else None,
+                bcc=None,
+                email_settings_id=1001,
+            )
+        except Exception as email_error:
+            print(f"Failed to send error notification email: {email_error}")
+
+    def _should_notify_error(self, exception: Exception) -> bool:
+        """Determine if an error should trigger email notifications"""
+        # Don't notify for user authentication errors or validation errors
+        non_notification_types = [
+            "AuthenticationError",
+            "ValidationError",
+            "ValueError",
+            "AlertError"
+        ]
+
+        exception_name = exception.__class__.__name__
+
+        # Check for authentication-related exceptions
+        if "Authentication" in exception_name or "Auth" in exception_name:
+            return False
+
+        return exception_name not in non_notification_types
+
+    @abstractmethod
+    def _get_error_notification_recipients(self) -> list:
+        """
+        Get list of email recipients for error notifications.
+
+        Must be implemented by the handler class.
+
+        Returns:
+            List of email addresses to notify when errors occur
+
+        Example:
+            return ["admin@company.com", "devops@company.com"]
+        """
+        pass
+
+    @abstractmethod
+    def _get_error_notification_sender(self) -> str:
+        """
+        Get email sender for error notifications.
+
+        Must be implemented by the handler class.
+
+        Returns:
+            Email address to use as sender for error notifications
+
+        Example:
+            return "no-reply@company.com"
+        """
+        pass
+
+    def _get_error_context(self, context) -> str:
+        """Get sanitized request context for error reporting"""
+        try:
+            postdata = context.postdata()
+            sanitized = copy.deepcopy(postdata)
+
+            # Remove sensitive data
+            if "payload" in sanitized and isinstance(sanitized["payload"], dict):
+                sanitized["payload"].pop("cognito_user", None)
+
+            return pprint.pformat(sanitized)
+        except:
+            return "Unable to retrieve request context"
+
+    def log_error_metrics(self, tx, context, exception: Exception):
+        """Log error metrics for monitoring and alerting"""
+        try:
+            metrics_data = {
+                "metric_type": "error_count",
+                "handler_name": self.__class__.__name__,
+                "action": context.action(),
+                "exception_type": exception.__class__.__name__,
+                "environment": os.environ.get("USER_BRANCH", "Unknown"),
+                "function_name": os.environ.get("AWS_LAMBDA_FUNCTION_NAME", "Unknown"),
+                "timestamp": time.time(),
+                "sys_modified_by": "Lambda"
+            }
+
+            # Try to insert into metrics table if it exists
+            try:
+                tx.table("lambda_metrics").insert(metrics_data)
+            except:
+                # Metrics table might not exist yet, don't fail error handler
+                pass
+        except:
+            # Don't fail the error handler if metrics logging fails
+            pass
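
To show how this new mixin is meant to be consumed, here is a minimal, hypothetical subclass satisfying its two abstract hooks. The handler name and addresses are placeholders, and a real handler would presumably also inherit the project's existing handler base class.

    # Hypothetical handler wiring for AwsSessionMixin (illustrative only).
    from velocity.aws.handlers.mixins.aws_session_mixin import AwsSessionMixin

    class ContactFormHandler(AwsSessionMixin):
        def _get_error_notification_recipients(self) -> list:
            # Who gets notified when this handler raises an unexpected error.
            return ["admin@example.com", "devops@example.com"]

        def _get_error_notification_sender(self) -> str:
            return "no-reply@example.com"

    # Inside the handler's dispatch code, a failure would then be routed through:
    #     import traceback
    #     self.handle_standard_error(tx, context, exc, traceback.format_exc())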
velocity/db/core/row.py
CHANGED
@@ -44,7 +44,12 @@ class Row:
     def __setitem__(self, key, val):
         if key in self.pk:
             raise Exception("Cannot update a primary key.")
-        self.table
+        if hasattr(self.table, "updins"):
+            self.table.updins({key: val}, pk=self.pk)
+        elif hasattr(self.table, "upsert"):
+            self.table.upsert({key: val}, pk=self.pk)
+        else:
+            self.table.update({key: val}, pk=self.pk)
 
     def __delitem__(self, key):
         if key in self.pk:
@@ -121,7 +126,12 @@ class Row:
         if kwds:
             data.update(kwds)
         if data:
-            self.table
+            if hasattr(self.table, "updins"):
+                self.table.updins(data, pk=self.pk)
+            elif hasattr(self.table, "upsert"):
+                self.table.upsert(data, pk=self.pk)
+            else:
+                self.table.update(data, pk=self.pk)
         return self
 
     def __cmp__(self, other):
velocity/db/core/table.py
CHANGED
@@ -1,4 +1,5 @@
 import sqlparse
+from collections.abc import Iterable, Mapping
 from velocity.db import exceptions
 from velocity.db.core.row import Row
 from velocity.db.core.result import Result
@@ -119,6 +120,59 @@ class Table:
             return sql, vals
         self.tx.execute(sql, vals, cursor=self.cursor())
 
+    def create_indexes(self, indexes, **kwds):
+        """
+        Convenience wrapper to create multiple indexes in order.
+
+        Accepts an iterable of definitions. Each definition may be either:
+        - Mapping with a required "columns" entry plus optional "unique",
+          "direction", "where", and "lower" keys.
+        - A simple sequence/string of columns, in which case defaults apply.
+
+        When sql_only=True, a list of (sql, params) tuples is returned.
+        """
+
+        if indexes is None:
+            return [] if kwds.get("sql_only", False) else None
+
+        if not isinstance(indexes, Iterable) or isinstance(indexes, (str, bytes)):
+            raise TypeError("indexes must be an iterable of index definitions")
+
+        sql_only = kwds.get("sql_only", False)
+        statements = []
+
+        for definition in indexes:
+            if isinstance(definition, Mapping):
+                columns = definition.get("columns")
+                if not columns:
+                    raise ValueError("Index definition requires a non-empty 'columns' entry")
+                params = {
+                    "unique": definition.get("unique", False),
+                    "direction": definition.get("direction"),
+                    "where": definition.get("where"),
+                    "lower": definition.get("lower"),
+                }
+            else:
+                columns = definition
+                params = {
+                    "unique": False,
+                    "direction": None,
+                    "where": None,
+                    "lower": None,
+                }
+
+            if isinstance(columns, str):
+                columns = columns.split(",")
+
+            if not columns:
+                raise ValueError("Index columns cannot be empty")
+
+            result = self.create_index(columns, **params, **kwds)
+            if sql_only:
+                statements.append(result)
+
+        return statements if sql_only else None
+
     @return_default(None)
     def drop_index(self, columns, **kwds):
         """
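
A usage sketch for the new create_indexes wrapper, assuming an open transaction tx; the table name, column names, and option values are placeholders chosen only to exercise the accepted definition shapes.

    # Illustrative only: mapping definitions, a bare column string, and sql_only preview.
    table = tx.table("contacts")
    table.create_indexes([
        {"columns": "email_address", "unique": True, "lower": True},
        {"columns": ["last_name", "first_name"]},
        "sys_modified",  # bare string/sequence: defaults apply
    ])

    # With sql_only=True the (sql, params) tuples are returned instead of executed.
    statements = table.create_indexes(
        [{"columns": "email_address", "unique": True}], sql_only=True
    )
    for sql, params in statements:
        print(sql, params)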
@@ -163,6 +217,32 @@
             return self.name in [f"{x[0]}.{x[1]}" for x in result.as_tuple()]
         return self.name in [x[1] for x in result.as_tuple()]
 
+    def ensure_sys_modified_count(self, **kwds):
+        """
+        Ensure the sys_modified_count column and trigger exist for this table.
+
+        Returns early when the column is already present unless `force=True` is provided.
+        """
+        force = kwds.get("force", False)
+
+        try:
+            columns = [col.lower() for col in self.sys_columns()]
+        except Exception:
+            columns = []
+
+        has_column = "sys_modified_count" in columns
+        has_row_column = "sys_modified_row" in columns
+
+        if has_column and has_row_column and not force:
+            return
+
+        sql, vals = self.sql.ensure_sys_modified_count(
+            self.name, has_column=has_column, has_row_column=has_row_column
+        )
+        if kwds.get("sql_only", False):
+            return sql, vals
+        self.tx.execute(sql, vals, cursor=self.cursor())
+
     def column(self, name):
         """
         Returns a Column object for the given column name.
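
Likewise, a brief illustrative call for the new ensure_sys_modified_count helper, again assuming an open transaction tx and a placeholder table name.

    # Illustrative only: make sure the sys_modified_count bookkeeping exists,
    # or preview the DDL without executing it.
    tx.table("contacts").ensure_sys_modified_count()
    ddl = tx.table("contacts").ensure_sys_modified_count(force=True, sql_only=True)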
@@ -465,6 +545,104 @@
         result = self.tx.execute(sql, vals, cursor=self.cursor())
         return result.cursor.rowcount if result.cursor else 0
 
+    @create_missing
+    def update_or_insert(self, update_data, insert_data=None, where=None, pk=None, **kwds):
+        """
+        Attempts an UPDATE first; if no rows change, performs an INSERT guarded by NOT EXISTS.
+
+        :param update_data: Mapping of columns to update.
+        :param insert_data: Optional mapping used for the INSERT. When omitted, values are
+                            derived from update_data combined with simple equality predicates
+                            from ``where`` and primary key values.
+        :param where: Criteria for the UPDATE and existence check.
+        :param pk: Optional primary key mapping for UPDATE (merged into WHERE) and INSERT.
+        :param sql_only: When True, return the SQL/parameter tuples for both phases instead of executing.
+        :return: Number of rows affected, or a dict with ``update``/``insert`` entries when sql_only=True.
+        """
+        sql_only = kwds.get("sql_only", False)
+        if not isinstance(update_data, Mapping) or not update_data:
+            raise ValueError("update_data must be a non-empty mapping of column-value pairs.")
+        if where is None and pk is None:
+            raise ValueError("Either where or pk must be provided for update_or_insert.")
+
+        update_stmt = None
+        if sql_only:
+            update_stmt = self.update(update_data, where=where, pk=pk, sql_only=True)
+        else:
+            updated = self.update(update_data, where=where, pk=pk)
+            if updated:
+                return updated
+
+        if insert_data is not None:
+            if not isinstance(insert_data, Mapping):
+                raise ValueError("insert_data must be a mapping when provided.")
+            insert_payload = dict(insert_data)
+        else:
+            insert_payload = dict(update_data)
+            if isinstance(where, Mapping):
+                for key, val in where.items():
+                    if not isinstance(key, str):
+                        continue
+                    if set("<>!=%").intersection(key):
+                        continue
+                    insert_payload.setdefault(key, val)
+            if isinstance(pk, Mapping):
+                for key, val in pk.items():
+                    insert_payload.setdefault(key, val)
+
+        if not insert_payload:
+            raise ValueError("Unable to derive insert payload for update_or_insert.")
+
+        exists_where = None
+        if where is not None and pk is not None:
+            if isinstance(where, Mapping) and isinstance(pk, Mapping):
+                combined = dict(where)
+                combined.update(pk)
+                exists_where = combined
+            else:
+                exists_where = where
+        elif where is not None:
+            exists_where = where
+        else:
+            exists_where = pk
+
+        ins_builder = getattr(self.sql, "insnx", None) or getattr(
+            self.sql, "insert_if_not_exists", None
+        )
+        if ins_builder is None:
+            raise NotImplementedError(
+                "Current SQL dialect does not support insert-if-not-exists operations."
+            )
+
+        sql, vals = ins_builder(self.tx, self.name, insert_payload, exists_where)
+        if sql_only:
+            return {"update": update_stmt, "insert": (sql, vals)}
+        result = self.tx.execute(sql, vals, cursor=self.cursor())
+        return result.cursor.rowcount if result.cursor else 0
+
+    updins = update_or_insert
+
+    @create_missing
+    def insert_if_not_exists(self, data, where=None, **kwds):
+        """
+        Inserts `data` into the table only if the existence check (`where`) does not match any rows.
+
+        Usage:
+            table.insert_if_not_exists({'key_col': 'k', 'value': 'v'}, where={'key_col': 'k'})
+
+        :param data: dict of column -> value for insert
+        :param where: mapping/list/str used for the EXISTS check; if None primary keys are used and
+                      must be present in `data`.
+        :return: rowcount (0 or 1) or (sql, params) when sql_only=True
+        """
+        sql, vals = self.sql.insert_if_not_exists(self.tx, self.name, data, where)
+        if kwds.get("sql_only", False):
+            return sql, vals
+        result = self.tx.execute(sql, vals, cursor=self.cursor())
+        return result.cursor.rowcount if result.cursor else 0
+
+    insnx = insert_if_not_exists
+
     upsert = merge
     indate = merge
 
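
Finally for this file, a sketch of how update_or_insert (alias updins) and insert_if_not_exists might be used, assuming an open transaction tx; table and column names are placeholders.

    # Illustrative only: UPDATE first, INSERT (guarded by NOT EXISTS) if nothing matched.
    table = tx.table("contacts")
    table.update_or_insert(
        {"email_address": "user@example.com"},
        where={"external_id": "abc-123"},
    )

    # Plain guarded insert: only inserts when no row matches the existence check.
    table.insert_if_not_exists(
        {"external_id": "abc-123", "email_address": "user@example.com"},
        where={"external_id": "abc-123"},
    )

    # Preview the SQL for both phases without executing anything.
    stmts = table.update_or_insert(
        {"email_address": "user@example.com"},
        where={"external_id": "abc-123"},
        sql_only=True,
    )
    # stmts == {"update": (sql, params), "insert": (sql, params)}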
velocity/db/core/transaction.py
CHANGED
@@ -164,17 +164,13 @@ class Transaction:
         """
         return Row(self.table(tablename), pk, lock=lock)
 
-    def get(self, tablename, where, lock=None):
-        """
-
-        """
-        return self.table(tablename).get(where, lock=lock)
+    def get(self, tablename, where, lock=None, use_where=False):
+        """Shortcut to table.get() with optional ``use_where`` passthrough."""
+        return self.table(tablename).get(where, lock=lock, use_where=use_where)
 
-    def find(self, tablename, where, lock=None):
-        """
-
-        """
-        return self.table(tablename).find(where, lock=lock)
+    def find(self, tablename, where, lock=None, use_where=False):
+        """Shortcut to table.find() with optional ``use_where`` passthrough."""
+        return self.table(tablename).find(where, lock=lock, use_where=use_where)
 
     def column(self, tablename, colname):
         """
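
For completeness, the new use_where flag on these transaction shortcuts is simply forwarded to Table.get()/Table.find(), whose exact semantics live in table.py rather than in this diff. A minimal illustrative call, with placeholder names and an assumed open transaction tx:

    # Illustrative only: the flag is passed straight through to the table methods.
    contact = tx.get("contacts", {"email_address": "user@example.com"}, use_where=True)
    matches = tx.find("contacts", {"last_name": "Smith"}, use_where=True)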