velocity-python 0.0.145__tar.gz → 0.0.146__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of velocity-python might be problematic.
- {velocity_python-0.0.145 → velocity_python-0.0.146}/PKG-INFO +1 -1
- {velocity_python-0.0.145 → velocity_python-0.0.146}/pyproject.toml +1 -1
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/__init__.py +1 -1
- velocity_python-0.0.146/src/velocity/aws/handlers/mixins/aws_session_mixin.py +192 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/table.py +105 -2
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/mysql/sql.py +1 -1
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/postgres/sql.py +162 -37
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlite/sql.py +1 -1
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlserver/sql.py +3 -1
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_postgres.py +189 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity_python.egg-info/PKG-INFO +1 -1
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity_python.egg-info/SOURCES.txt +1 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/LICENSE +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/README.md +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/setup.cfg +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/invoices.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/orders.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/payments.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/purchase_orders.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/tests/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/tests/test_email_processing.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/tests/test_payment_profile_sorting.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/app/tests/test_spreadsheet_functions.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/amplify.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/base_handler.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/context.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/exceptions.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/lambda_handler.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/mixins/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/mixins/activity_tracker.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/mixins/error_handler.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/mixins/legacy_mixin.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/mixins/standard_mixin.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/response.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/handlers/sqs_handler.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/tests/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/tests/test_lambda_handler_json_serialization.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/aws/tests/test_response.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/column.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/database.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/decorators.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/engine.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/result.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/row.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/sequence.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/transaction.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/exceptions.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/base/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/base/initializer.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/base/operators.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/base/sql.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/base/types.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/mysql/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/mysql/operators.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/mysql/reserved.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/mysql/types.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/postgres/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/postgres/operators.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/postgres/reserved.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/postgres/types.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlite/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlite/operators.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlite/reserved.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlite/types.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlserver/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlserver/operators.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlserver/reserved.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlserver/types.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/tablehelper.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/common_db_test.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/common.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_column.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_connections.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_database.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_engine.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_general_usage.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_imports.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_result.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_row.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_row_comprehensive.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_schema_locking.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_schema_locking_unit.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_sequence.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_sql_comprehensive.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_table.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_table_comprehensive.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/postgres/test_transaction.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/sql/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/sql/common.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/sql/test_postgres_select_advanced.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/sql/test_postgres_select_variances.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_cursor_rowcount_fix.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_db_utils.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_postgres_unchanged.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_process_error_robustness.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_result_caching.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_result_sql_aware.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_row_get_missing_column.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_schema_locking_initializers.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_schema_locking_simple.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_sql_builder.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/tests/test_tablehelper.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/utils.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/conv/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/conv/iconv.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/conv/oconv.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/db.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/export.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/format.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/mail.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/merge.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/__init__.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_db.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_fix.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_format.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_iconv.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_merge.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_oconv.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_original_error.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tests/test_timer.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/timer.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/misc/tools.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity_python.egg-info/dependency_links.txt +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity_python.egg-info/requires.txt +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity_python.egg-info/top_level.txt +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/tests/test_sys_modified_count_postgres_demo.py +0 -0
- {velocity_python-0.0.145 → velocity_python-0.0.146}/tests/test_where_clause_validation.py +0 -0
velocity_python-0.0.146/src/velocity/aws/handlers/mixins/aws_session_mixin.py
ADDED
@@ -0,0 +1,192 @@
+"""
+Error Handler Mixin for Lambda Handlers.
+
+Provides standardized error handling, logging, and notification functionality
+for Lambda handlers.
+"""
+
+import copy
+import os
+import pprint
+import time
+from abc import ABC, abstractmethod
+from typing import Dict, Any, Optional
+
+
+class AwsSessionMixin(ABC):
+    """
+    Mixin class providing standardized error handling for Lambda handlers.
+
+    Handles error logging to sys_log table, email notifications to administrators,
+    and error metrics collection.
+    """
+
+    def handle_standard_error(self, tx, context, exception: Exception, tb_string: str):
+        """Handle errors with consistent logging and notification patterns"""
+
+        # Log to sys_log for centralized logging
+        self.log_error_to_system(tx, context, exception, tb_string)
+
+        # Determine if this error requires notification
+        if self._should_notify_error(exception):
+            self.send_error_notification(tx, context, exception, tb_string)
+
+        # Log error metrics for monitoring
+        self.log_error_metrics(tx, context, exception)
+
+    def log_error_to_system(self, tx, context, exception: Exception, tb_string: str):
+        """Log error to sys_log table"""
+        error_data = {
+            "level": "ERROR",
+            "message": str(exception),
+            "function": f"{self.__class__.__name__}.{context.action()}",
+            "traceback": tb_string,
+            "exception_type": exception.__class__.__name__,
+            "handler_name": self.__class__.__name__,
+            "action": context.action(),
+            "user_branch": os.environ.get("USER_BRANCH", "Unknown"),
+            "function_name": os.environ.get("AWS_LAMBDA_FUNCTION_NAME", "Unknown"),
+            "app_name": os.environ.get("ProjectName", "Unknown"),
+            "user_agent": "AWS Lambda",
+            "device_type": "Lambda",
+            "sys_modified_by": "Lambda",
+        }
+
+        # Add user context if available
+        try:
+            if hasattr(self, 'current_user') and self.current_user:
+                error_data["user_email"] = self.current_user.get("email_address")
+        except:
+            pass
+
+        tx.table("sys_log").insert(error_data)
+
+    def send_error_notification(self, tx, context, exception: Exception, tb_string: str):
+        """Send error notification email to administrators"""
+        try:
+            # Import here to avoid circular dependency
+            from support.app import helpers
+
+            environment = os.environ.get('USER_BRANCH', 'Unknown').title()
+            function_name = os.environ.get('AWS_LAMBDA_FUNCTION_NAME', 'Unknown')
+
+            subject = f"{environment} Lambda Error - {function_name}"
+
+            body = f"""
+Error Details:
+- Handler: {self.__class__.__name__}
+- Action: {context.action()}
+- Exception: {exception.__class__.__name__}
+- Message: {str(exception)}
+- Environment: {environment}
+- Function: {function_name}
+
+Full Traceback:
+{tb_string}
+
+Request Details:
+{self._get_error_context(context)}
+"""
+
+            sender = self._get_error_notification_sender()
+            recipients = self._get_error_notification_recipients()
+
+            helpers.sendmail(
+                tx,
+                subject=subject,
+                body=body,
+                html=None,
+                sender=sender,
+                recipient=recipients[0],
+                cc=recipients[1:] if len(recipients) > 1 else None,
+                bcc=None,
+                email_settings_id=1001,
+            )
+        except Exception as email_error:
+            print(f"Failed to send error notification email: {email_error}")
+
+    def _should_notify_error(self, exception: Exception) -> bool:
+        """Determine if an error should trigger email notifications"""
+        # Don't notify for user authentication errors or validation errors
+        non_notification_types = [
+            "AuthenticationError",
+            "ValidationError",
+            "ValueError",
+            "AlertError"
+        ]
+
+        exception_name = exception.__class__.__name__
+
+        # Check for authentication-related exceptions
+        if "Authentication" in exception_name or "Auth" in exception_name:
+            return False
+
+        return exception_name not in non_notification_types
+
+    @abstractmethod
+    def _get_error_notification_recipients(self) -> list:
+        """
+        Get list of email recipients for error notifications.
+
+        Must be implemented by the handler class.
+
+        Returns:
+            List of email addresses to notify when errors occur
+
+        Example:
+            return ["admin@company.com", "devops@company.com"]
+        """
+        pass
+
+    @abstractmethod
+    def _get_error_notification_sender(self) -> str:
+        """
+        Get email sender for error notifications.
+
+        Must be implemented by the handler class.
+
+        Returns:
+            Email address to use as sender for error notifications
+
+        Example:
+            return "no-reply@company.com"
+        """
+        pass
+
+    def _get_error_context(self, context) -> str:
+        """Get sanitized request context for error reporting"""
+        try:
+            postdata = context.postdata()
+            sanitized = copy.deepcopy(postdata)
+
+            # Remove sensitive data
+            if "payload" in sanitized and isinstance(sanitized["payload"], dict):
+                sanitized["payload"].pop("cognito_user", None)
+
+            return pprint.pformat(sanitized)
+        except:
+            return "Unable to retrieve request context"
+
+    def log_error_metrics(self, tx, context, exception: Exception):
+        """Log error metrics for monitoring and alerting"""
+        try:
+            metrics_data = {
+                "metric_type": "error_count",
+                "handler_name": self.__class__.__name__,
+                "action": context.action(),
+                "exception_type": exception.__class__.__name__,
+                "environment": os.environ.get("USER_BRANCH", "Unknown"),
+                "function_name": os.environ.get("AWS_LAMBDA_FUNCTION_NAME", "Unknown"),
+                "timestamp": time.time(),
+                "sys_modified_by": "Lambda"
+            }
+
+            # Try to insert into metrics table if it exists
+            try:
+                tx.table("lambda_metrics").insert(metrics_data)
+            except:
+                # Metrics table might not exist yet, don't fail error handler
+                pass
+        except:
+            # Don't fail the error handler if metrics logging fails
+            pass
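The new mixin leaves the sender and recipient lookups abstract. A minimal sketch of how a concrete handler might adopt it, assuming a context object that exposes action() and postdata() as in the other handler mixins; the class name, entry point, and email addresses below are illustrative only, not part of the package:

    import traceback

    from velocity.aws.handlers.mixins.aws_session_mixin import AwsSessionMixin


    class ReportHandler(AwsSessionMixin):  # hypothetical handler for illustration
        def _get_error_notification_sender(self) -> str:
            # From: address for error emails (example value)
            return "no-reply@example.com"

        def _get_error_notification_recipients(self) -> list:
            # First entry becomes the recipient; any remaining entries are CC'd
            return ["admin@example.com", "devops@example.com"]

        def on_action(self, tx, context):  # hypothetical entry point
            try:
                ...  # normal request processing
            except Exception as exc:
                # Logs to sys_log, emails administrators (unless the exception is
                # an auth/validation type), and records an error_count metric.
                self.handle_standard_error(tx, context, exc, traceback.format_exc())
                raise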
{velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/core/table.py
RENAMED
@@ -1,4 +1,5 @@
 import sqlparse
+from collections.abc import Mapping
 from velocity.db import exceptions
 from velocity.db.core.row import Row
 from velocity.db.core.result import Result
@@ -177,10 +178,14 @@ class Table:
         columns = []

         has_column = "sys_modified_count" in columns
-
+        has_row_column = "sys_modified_row" in columns
+
+        if has_column and has_row_column and not force:
             return

-        sql, vals = self.sql.ensure_sys_modified_count(
+        sql, vals = self.sql.ensure_sys_modified_count(
+            self.name, has_column=has_column, has_row_column=has_row_column
+        )
         if kwds.get("sql_only", False):
             return sql, vals
         self.tx.execute(sql, vals, cursor=self.cursor())
@@ -487,6 +492,104 @@ class Table:
         result = self.tx.execute(sql, vals, cursor=self.cursor())
         return result.cursor.rowcount if result.cursor else 0

+    @create_missing
+    def update_or_insert(self, update_data, insert_data=None, where=None, pk=None, **kwds):
+        """
+        Attempts an UPDATE first; if no rows change, performs an INSERT guarded by NOT EXISTS.
+
+        :param update_data: Mapping of columns to update.
+        :param insert_data: Optional mapping used for the INSERT. When omitted, values are
+                            derived from update_data combined with simple equality predicates
+                            from ``where`` and primary key values.
+        :param where: Criteria for the UPDATE and existence check.
+        :param pk: Optional primary key mapping for UPDATE (merged into WHERE) and INSERT.
+        :param sql_only: When True, return the SQL/parameter tuples for both phases instead of executing.
+        :return: Number of rows affected, or a dict with ``update``/``insert`` entries when sql_only=True.
+        """
+        sql_only = kwds.get("sql_only", False)
+        if not isinstance(update_data, Mapping) or not update_data:
+            raise ValueError("update_data must be a non-empty mapping of column-value pairs.")
+        if where is None and pk is None:
+            raise ValueError("Either where or pk must be provided for update_or_insert.")
+
+        update_stmt = None
+        if sql_only:
+            update_stmt = self.update(update_data, where=where, pk=pk, sql_only=True)
+        else:
+            updated = self.update(update_data, where=where, pk=pk)
+            if updated:
+                return updated
+
+        if insert_data is not None:
+            if not isinstance(insert_data, Mapping):
+                raise ValueError("insert_data must be a mapping when provided.")
+            insert_payload = dict(insert_data)
+        else:
+            insert_payload = dict(update_data)
+            if isinstance(where, Mapping):
+                for key, val in where.items():
+                    if not isinstance(key, str):
+                        continue
+                    if set("<>!=%").intersection(key):
+                        continue
+                    insert_payload.setdefault(key, val)
+            if isinstance(pk, Mapping):
+                for key, val in pk.items():
+                    insert_payload.setdefault(key, val)
+
+        if not insert_payload:
+            raise ValueError("Unable to derive insert payload for update_or_insert.")
+
+        exists_where = None
+        if where is not None and pk is not None:
+            if isinstance(where, Mapping) and isinstance(pk, Mapping):
+                combined = dict(where)
+                combined.update(pk)
+                exists_where = combined
+            else:
+                exists_where = where
+        elif where is not None:
+            exists_where = where
+        else:
+            exists_where = pk
+
+        ins_builder = getattr(self.sql, "insnx", None) or getattr(
+            self.sql, "insert_if_not_exists", None
+        )
+        if ins_builder is None:
+            raise NotImplementedError(
+                "Current SQL dialect does not support insert-if-not-exists operations."
+            )
+
+        sql, vals = ins_builder(self.tx, self.name, insert_payload, exists_where)
+        if sql_only:
+            return {"update": update_stmt, "insert": (sql, vals)}
+        result = self.tx.execute(sql, vals, cursor=self.cursor())
+        return result.cursor.rowcount if result.cursor else 0
+
+    updins = update_or_insert
+
+    @create_missing
+    def insert_if_not_exists(self, data, where=None, **kwds):
+        """
+        Inserts `data` into the table only if the existence check (`where`) does not match any rows.
+
+        Usage:
+            table.insert_if_not_exists({'key_col': 'k', 'value': 'v'}, where={'key_col': 'k'})
+
+        :param data: dict of column -> value for insert
+        :param where: mapping/list/str used for the EXISTS check; if None primary keys are used and
+                      must be present in `data`.
+        :return: rowcount (0 or 1) or (sql, params) when sql_only=True
+        """
+        sql, vals = self.sql.insert_if_not_exists(self.tx, self.name, data, where)
+        if kwds.get("sql_only", False):
+            return sql, vals
+        result = self.tx.execute(sql, vals, cursor=self.cursor())
+        return result.cursor.rowcount if result.cursor else 0
+
+    insnx = insert_if_not_exists
+
     upsert = merge
     indate = merge
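The two Table additions follow the existing insert/update calling conventions. A rough usage sketch, assuming a transaction object tx from the engine; the table and column names are illustrative, not part of the package:

    accounts = tx.table("accounts")

    # UPDATE first; when no row matches, fall back to an INSERT guarded by NOT EXISTS.
    affected = accounts.update_or_insert(
        {"status": "active"},
        where={"email": "a@example.com"},
    )

    # Insert only when the predicate matches nothing; returns 0 or 1.
    inserted = accounts.insert_if_not_exists(
        {"email": "b@example.com", "status": "new"},
        where={"email": "b@example.com"},
    )

    # sql_only=True returns the statements instead of executing them.
    stmts = accounts.update_or_insert(
        {"status": "active"},
        where={"email": "a@example.com"},
        sql_only=True,
    )
    # stmts == {"update": (sql, params), "insert": (sql, params)}

updins and insnx are shorthand aliases for update_or_insert and insert_if_not_exists respectively.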
{velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/mysql/sql.py
RENAMED
@@ -450,7 +450,7 @@ END;
         return "\n".join(statements), tuple()

     @classmethod
-    def ensure_sys_modified_count(cls, name):
+    def ensure_sys_modified_count(cls, name, has_column=False, has_row_column=False):
         """Ensure sys_modified_count column and associated triggers exist for the table."""
         table_identifier = quote(name)
         base_name = name.split(".")[-1].replace("`", "")
{velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/postgres/sql.py
RENAMED
@@ -562,51 +562,164 @@ class SQL(BaseSQLDialect):

     @classmethod
     def merge(cls, tx, table, data, pk, on_conflict_do_nothing, on_conflict_update):
+        if not isinstance(data, Mapping) or not data:
+            raise ValueError("data must be a non-empty mapping of column-value pairs.")
+
+        table_helper = TableHelper(tx, table)
+        data = dict(data)  # work with a copy to avoid mutating the caller's dict
+
         if pk is None:
             pkeys = tx.table(table).primary_keys()
             if not pkeys:
                 raise ValueError("Primary key required for merge.")
-
-            if
-
-
-
-
+            missing = [key for key in pkeys if key not in data]
+            if missing:
+                missing_cols = ", ".join(missing)
+                raise ValueError(
+                    "Primary key values missing from data for merge: "
+                    f"{missing_cols}. Provide pk=... or include the key values in data."
+                )
+            pk = {key: data[key] for key in pkeys}
+        else:
+            pk = dict(pk)
+            for key, value in pk.items():
+                if key in data and data[key] != value:
+                    raise ValueError(
+                        f"Conflicting values for primary key '{key}' between data and pk arguments."
+                    )

-
-
-        full_data.update(data)
-        full_data.update(pk)
+        insert_data = dict(data)
+        insert_data.update(pk)

-
-        sql = [sql]
-        vals = list(vals)  # Convert to a mutable list
+        update_data = {k: v for k, v in data.items() if k not in pk}

-        if
-
-
-
-
-        if on_conflict_do_nothing:
-            sql.append("NOTHING")
-        elif on_conflict_update:
-            # Call update() with excluded=True to produce the SET clause for the upsert.
-            sql_update, vals_update = cls.update(tx, table, data, pk, excluded=True)
-            sql.append(sql_update)
-            # Use list.extend to add the update values to vals.
-            vals.extend(vals_update)
-        else:
+        if not update_data and on_conflict_update:
+            # Nothing to update, fall back to a no-op on conflict resolution.
+            on_conflict_do_nothing = True
+            on_conflict_update = False
+
+        if on_conflict_do_nothing == on_conflict_update:
             raise Exception(
                 "Update on conflict must have one and only one option to complete on conflict."
             )

+        sql, vals = cls.insert(table, insert_data)
+        sql = [sql]
+        vals = list(vals)  # Convert to a mutable list
+
+        sql.append("ON CONFLICT")
+        conflict_columns = [TableHelper.quote(column) for column in pk.keys()]
+        sql.append("(")
+        sql.append(", ".join(conflict_columns))
+        sql.append(")")
+        sql.append("DO")
+        if on_conflict_do_nothing:
+            sql.append("NOTHING")
+        elif on_conflict_update:
+            sql_update, vals_update = cls.update(
+                tx, table, update_data, pk, excluded=True
+            )
+            sql.append(sql_update)
+            vals.extend(vals_update)
+
         import sqlparse

         final_sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
         return final_sql, tuple(vals)

+    @classmethod
+    def insnx(cls, tx, table, data, where=None):
+        """Insert only when the supplied predicate finds no existing row."""
+        if not table:
+            raise ValueError("Table name is required.")
+        if not isinstance(data, Mapping) or not data:
+            raise ValueError("data must be a non-empty mapping of column-value pairs.")
+
+        # Helper used for quoting and foreign key resolution
+        th = TableHelper(tx, table)
+        quote_helper = TableHelper(None, table)
+
+        columns_sql = []
+        select_parts = []
+        vals = []
+
+        for key, val in data.items():
+            columns_sql.append(quote_helper.quote(key.lower()))
+            if isinstance(val, str) and len(val) > 2 and val.startswith("@@") and val[2:]:
+                select_parts.append(val[2:])
+            else:
+                select_parts.append("%s")
+                vals.append(val)
+
+        if not select_parts:
+            raise ValueError("At least one column is required for insert.")
+
+        if where is None:
+            if tx is None:
+                raise ValueError(
+                    "A transaction context is required when deriving WHERE from primary keys."
+                )
+            pk_cols = tx.table(table).primary_keys()
+            if not pk_cols:
+                raise ValueError("Primary key required to derive WHERE clause.")
+            missing = [pk for pk in pk_cols if pk not in data]
+            if missing:
+                raise ValueError(
+                    "Missing primary key value(s) for insert condition: " + ", ".join(missing)
+                )
+            where = {pk: data[pk] for pk in pk_cols}
+
+        where_clauses = []
+        where_vals = []
+
+        if isinstance(where, Mapping):
+            compiled = []
+            for key, val in where.items():
+                compiled.append(th.make_predicate(key, val))
+            where = compiled
+
+        if isinstance(where, str):
+            where_clauses.append(where)
+        else:
+            try:
+                for predicate, value in where:
+                    where_clauses.append(predicate)
+                    if value is None:
+                        continue
+                    if isinstance(value, tuple):
+                        where_vals.extend(value)
+                    else:
+                        where_vals.append(value)
+            except (TypeError, ValueError) as exc:
+                raise ValueError(
+                    "Invalid WHERE clause format. Expected mapping, SQL string, or iterable of predicate/value pairs."
+                ) from exc
+
+        vals.extend(where_vals)
+
+        exists_sql = [
+            "SELECT 1 FROM",
+            TableHelper.quote(table),
+        ]
+        if where_clauses:
+            exists_sql.append("WHERE " + " AND ".join(where_clauses))
+
+        sql_parts = [
+            "INSERT INTO",
+            TableHelper.quote(table),
+            f"({','.join(columns_sql)})",
+            "SELECT",
+            ", ".join(select_parts),
+            "WHERE NOT EXISTS (",
+            " ".join(exists_sql),
+            ")",
+        ]
+
+        final_sql = sqlparse.format(" ".join(sql_parts), reindent=True, keyword_case="upper")
+        return final_sql, tuple(vals)
+
+    insert_if_not_exists = insnx
+
     @classmethod
     def version(cls):
         return "select version()", tuple()
@@ -704,6 +817,7 @@ class SQL(BaseSQLDialect):
     sys_created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     sys_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     sys_modified_by TEXT NOT NULL DEFAULT 'SYSTEM',
+    sys_modified_row TIMESTAMP NOT NULL DEFAULT CLOCK_TIMESTAMP(),
     sys_modified_count INTEGER NOT NULL DEFAULT 0,
     sys_dirty BOOLEAN NOT NULL DEFAULT FALSE,
     sys_table TEXT NOT NULL,
@@ -718,8 +832,9 @@ class SQL(BaseSQLDialect):
 BEGIN
     IF (TG_OP = 'INSERT') THEN
         NEW.sys_table := TG_TABLE_NAME;
-        NEW.sys_created :=
-        NEW.sys_modified :=
+        NEW.sys_created := transaction_timestamp();
+        NEW.sys_modified := transaction_timestamp();
+        NEW.sys_modified_row := clock_timestamp();
         NEW.sys_modified_count := 0;
     ELSIF (TG_OP = 'UPDATE') THEN
         NEW.sys_table := TG_TABLE_NAME;
@@ -731,7 +846,7 @@ class SQL(BaseSQLDialect):
         ELSE
             NEW.sys_dirty := TRUE;
         END IF;
-        NEW.sys_modified :=
+        NEW.sys_modified := transaction_timestamp();
         NEW.sys_modified_count := COALESCE(OLD.sys_modified_count, 0) + 1;
     END IF;
 END IF;
@@ -761,8 +876,10 @@ class SQL(BaseSQLDialect):
         return sql, tuple()

     @classmethod
-    def ensure_sys_modified_count(
-
+    def ensure_sys_modified_count(
+        cls, name, has_column=False, has_row_column=False
+    ):
+        """Return SQL to backfill sys_modified_count/sys_modified_row and refresh the on_sys_modified trigger."""
         if "." in name:
             schema_name, table_name = name.split(".", 1)
         else:
@@ -780,15 +897,21 @@ class SQL(BaseSQLDialect):
         )
         trigger_identifier = TableHelper.quote(trigger_name)
         column_name = TableHelper.quote("sys_modified_count")
+        row_column_name = TableHelper.quote("sys_modified_row")

         statements = []
         if not has_column:
             statements.append(
                 f"ALTER TABLE {fqtn} ADD COLUMN {column_name} INTEGER NOT NULL DEFAULT 0;"
             )
+        if not has_row_column:
+            statements.append(
+                f"ALTER TABLE {fqtn} ADD COLUMN {row_column_name} TIMESTAMPTZ;"
+            )

         statements.extend([
             f"UPDATE {fqtn} SET {column_name} = 0 WHERE {column_name} IS NULL;",
+            f"UPDATE {fqtn} SET {row_column_name} = COALESCE({row_column_name}, clock_timestamp());",
             f"""
             CREATE OR REPLACE FUNCTION {schema_identifier}.on_sys_modified()
             RETURNS TRIGGER AS
@@ -796,8 +919,9 @@ class SQL(BaseSQLDialect):
 BEGIN
     IF (TG_OP = 'INSERT') THEN
         NEW.sys_table := TG_TABLE_NAME;
-        NEW.sys_created :=
-        NEW.sys_modified :=
+        NEW.sys_created := transaction_timestamp();
+        NEW.sys_modified := transaction_timestamp();
+        NEW.sys_modified_row := clock_timestamp();
         NEW.sys_modified_count := 0;
     ELSIF (TG_OP = 'UPDATE') THEN
         NEW.sys_table := TG_TABLE_NAME;
@@ -809,7 +933,8 @@ class SQL(BaseSQLDialect):
         ELSE
             NEW.sys_dirty := TRUE;
         END IF;
-        NEW.sys_modified :=
+        NEW.sys_modified := transaction_timestamp();
+        NEW.sys_modified_row := clock_timestamp();
         NEW.sys_modified_count := COALESCE(OLD.sys_modified_count, 0) + 1;
     END IF;
 END IF;
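The Postgres insnx builder accepts the existence check in three shapes, all reduced to a single INSERT INTO ... SELECT ... WHERE NOT EXISTS (SELECT 1 FROM ...) statement that is then formatted by sqlparse. A short sketch of the accepted forms, assuming a transaction tx; the table and column names are illustrative:

    data = {"email": "a@example.com", "status": "new"}

    # 1. Mapping: each pair is compiled through TableHelper.make_predicate.
    sql, params = SQL.insnx(tx, "accounts", data, where={"email": "a@example.com"})

    # 2. Raw SQL string, used verbatim in the NOT EXISTS subquery.
    sql, params = SQL.insnx(tx, "accounts", data, where="email = 'a@example.com'")

    # 3. Iterable of (predicate, params) pairs; tuple params are expanded.
    sql, params = SQL.insnx(tx, "accounts", data, where=[("email = %s", "a@example.com")])

    # With where=None the primary key columns are looked up via
    # tx.table("accounts").primary_keys(), and their values must appear in data.

String values that start with "@@" are inlined into the SELECT list as raw SQL expressions rather than bound parameters, matching the convention used elsewhere in the dialect.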
{velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlite/sql.py
RENAMED
@@ -431,7 +431,7 @@ END;
         return "\n".join(statements), tuple()

     @classmethod
-    def ensure_sys_modified_count(cls, name):
+    def ensure_sys_modified_count(cls, name, has_column=False, has_row_column=False):
         """Ensure sys_modified_count exists for SQLite tables."""
         table_identifier = quote(name)
         base_name = name.split(".")[-1].replace('"', "")
{velocity_python-0.0.145 → velocity_python-0.0.146}/src/velocity/db/servers/sqlserver/sql.py
RENAMED
@@ -485,7 +485,9 @@ END;
         return "\n".join(statements), tuple()

     @classmethod
-    def ensure_sys_modified_count(
+    def ensure_sys_modified_count(
+        cls, name, has_column=False, has_row_column=False
+    ):
         """Ensure sys_modified_count exists for SQL Server tables along with maintenance triggers."""
         if "." in name:
             schema, table_name = name.split(".", 1)
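With the signature change applied uniformly across the dialects, Table.ensure_sys_modified_count can tell the builder which maintenance columns already exist. A small sketch, assuming a transaction tx; the table name is illustrative:

    # Returns early when both sys_modified_count and sys_modified_row already
    # exist; otherwise sql_only=True yields the maintenance script and its
    # parameters instead of executing it.
    result = tx.table("accounts").ensure_sys_modified_count(sql_only=True)
    if result:
        sql, params = result
        print(sql)  # ALTER TABLE ... ADD COLUMN ..., backfill UPDATEs, trigger DDL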