velocity-python 0.0.98__tar.gz → 0.0.101__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of velocity-python might be problematic. Click here for more details.
- {velocity_python-0.0.98 → velocity_python-0.0.101}/PKG-INFO +1 -1
- {velocity_python-0.0.98 → velocity_python-0.0.101}/pyproject.toml +1 -1
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/engine.py +186 -75
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/sql.py +6 -6
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/PKG-INFO +1 -1
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/SOURCES.txt +3 -0
- velocity_python-0.0.101/tests/test_fix.py +48 -0
- velocity_python-0.0.101/tests/test_original_error.py +34 -0
- velocity_python-0.0.101/tests/test_process_error_robustness.py +236 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/LICENSE +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/README.md +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/setup.cfg +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/app/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/app/invoices.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/app/orders.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/app/payments.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/app/purchase_orders.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/amplify.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/handlers/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/handlers/context.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/handlers/lambda_handler.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/handlers/response.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/handlers/sqs_handler.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/column.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/database.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/decorators.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/exceptions.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/result.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/row.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/sequence.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/table.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/core/transaction.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/mysql.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/mysql_reserved.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/operators.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/reserved.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/types.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/sqlite.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/sqlite_reserved.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/sqlserver.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/sqlserver_reserved.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/tablehelper.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/conv/__init__.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/conv/iconv.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/conv/oconv.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/db.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/export.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/format.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/mail.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/merge.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/timer.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/misc/tools.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/dependency_links.txt +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/requires.txt +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/top_level.txt +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_db.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_email_processing.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_format.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_iconv.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_merge.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_oconv.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_postgres.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_response.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_spreadsheet_functions.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_sql_builder.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_tablehelper.py +0 -0
- {velocity_python-0.0.98 → velocity_python-0.0.101}/tests/test_timer.py +0 -0
|
@@ -325,81 +325,192 @@ class Engine:
|
|
|
325
325
|
result = tx.execute(sql, vals)
|
|
326
326
|
return [f"{x[0]}.{x[1]}" for x in result.as_tuple()]
|
|
327
327
|
|
|
328
|
-
def process_error(self,
|
|
329
|
-
"""
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
328
|
+
def process_error(self, exception, sql=None, parameters=None):
|
|
329
|
+
"""
|
|
330
|
+
Process database errors and raise appropriate velocity exceptions.
|
|
331
|
+
Enhanced for robustness with exception chaining and comprehensive error handling.
|
|
332
|
+
|
|
333
|
+
Args:
|
|
334
|
+
exception: The original exception from the database driver
|
|
335
|
+
sql: The SQL statement that caused the error (optional)
|
|
336
|
+
parameters: The parameters passed to the SQL statement (optional)
|
|
337
|
+
|
|
338
|
+
Returns:
|
|
339
|
+
The appropriate velocity exception to raise
|
|
340
|
+
"""
|
|
341
|
+
logger = logging.getLogger(__name__)
|
|
342
|
+
|
|
343
|
+
# Enhanced logging with context
|
|
344
|
+
extra_data = {
|
|
345
|
+
'exception_type': type(exception).__name__,
|
|
346
|
+
'sql': sql,
|
|
347
|
+
'parameters': parameters
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
logger.error(
|
|
351
|
+
f"Database error caught. Attempting to transform: "
|
|
352
|
+
f"type={type(exception).__name__}, sql={sql[:100] if sql else 'None'}...",
|
|
353
|
+
extra=extra_data
|
|
354
|
+
)
|
|
355
|
+
|
|
356
|
+
# Safely get error code and message with fallbacks
|
|
357
|
+
try:
|
|
358
|
+
error_code = getattr(exception, 'pgcode', None) or self.get_error(exception)
|
|
359
|
+
except Exception as e:
|
|
360
|
+
logger.warning(f"Failed to extract error code: {e}")
|
|
361
|
+
error_code = None
|
|
362
|
+
|
|
363
|
+
try:
|
|
364
|
+
error_message = str(exception)
|
|
365
|
+
except Exception as e:
|
|
366
|
+
logger.warning(f"Failed to convert exception to string: {e}")
|
|
367
|
+
error_message = f"<Error converting exception: {type(exception).__name__}>"
|
|
368
|
+
|
|
369
|
+
# Primary error classification by error code
|
|
370
|
+
if error_code and hasattr(self, 'error_codes'):
|
|
371
|
+
for error_class, codes in self.error_codes.items():
|
|
372
|
+
if error_code in codes:
|
|
373
|
+
logger.info(f"Classified error by code: {error_code} -> {error_class}")
|
|
374
|
+
try:
|
|
375
|
+
return self._create_exception_with_chaining(
|
|
376
|
+
error_class, error_message, exception, sql, parameters
|
|
377
|
+
)
|
|
378
|
+
except Exception as creation_error:
|
|
379
|
+
logger.error(f"Failed to create {error_class} exception: {creation_error}")
|
|
380
|
+
# Fall through to regex classification
|
|
381
|
+
break
|
|
382
|
+
|
|
383
|
+
# Secondary error classification by message patterns (regex fallback)
|
|
384
|
+
error_message_lower = error_message.lower()
|
|
385
|
+
|
|
386
|
+
# Enhanced connection error patterns
|
|
387
|
+
connection_patterns = [
|
|
388
|
+
r'connection.*refused|could not connect',
|
|
389
|
+
r'network.*unreachable|network.*down',
|
|
390
|
+
r'broken pipe|connection.*broken',
|
|
391
|
+
r'timeout.*connection|connection.*timeout',
|
|
392
|
+
r'server.*closed.*connection|connection.*lost',
|
|
393
|
+
r'no route to host|host.*unreachable',
|
|
394
|
+
r'connection.*reset|reset.*connection'
|
|
395
|
+
]
|
|
396
|
+
|
|
397
|
+
# Enhanced duplicate key patterns
|
|
398
|
+
duplicate_patterns = [
|
|
399
|
+
r'duplicate.*key.*value|unique.*constraint.*violated',
|
|
400
|
+
r'duplicate.*entry|key.*already.*exists',
|
|
401
|
+
r'violates.*unique.*constraint',
|
|
402
|
+
r'unique.*violation|constraint.*unique'
|
|
403
|
+
]
|
|
404
|
+
|
|
405
|
+
# Enhanced permission/authorization patterns
|
|
406
|
+
permission_patterns = [
|
|
407
|
+
r'permission.*denied|access.*denied|authorization.*failed',
|
|
408
|
+
r'insufficient.*privileges|privilege.*denied',
|
|
409
|
+
r'not.*authorized|unauthorized.*access',
|
|
410
|
+
r'authentication.*failed|login.*failed'
|
|
411
|
+
]
|
|
412
|
+
|
|
413
|
+
# Enhanced database/table not found patterns
|
|
414
|
+
not_found_patterns = [
|
|
415
|
+
r'database.*does.*not.*exist|unknown.*database',
|
|
416
|
+
r'table.*does.*not.*exist|relation.*does.*not.*exist',
|
|
417
|
+
r'no.*such.*database|database.*not.*found',
|
|
418
|
+
r'schema.*does.*not.*exist|unknown.*table'
|
|
419
|
+
]
|
|
420
|
+
|
|
421
|
+
# Enhanced syntax error patterns
|
|
422
|
+
syntax_patterns = [
|
|
423
|
+
r'syntax.*error|invalid.*syntax',
|
|
424
|
+
r'malformed.*query|bad.*sql.*grammar',
|
|
425
|
+
r'unexpected.*token|parse.*error'
|
|
426
|
+
]
|
|
427
|
+
|
|
428
|
+
# Enhanced deadlock/timeout patterns
|
|
429
|
+
deadlock_patterns = [
|
|
430
|
+
r'deadlock.*detected|lock.*timeout',
|
|
431
|
+
r'timeout.*waiting.*for.*lock|query.*timeout',
|
|
432
|
+
r'lock.*wait.*timeout|deadlock.*found'
|
|
433
|
+
]
|
|
434
|
+
|
|
435
|
+
# Comprehensive pattern matching with error class mapping
|
|
436
|
+
pattern_mappings = [
|
|
437
|
+
(connection_patterns, 'ConnectionError'),
|
|
438
|
+
(duplicate_patterns, 'DuplicateError'),
|
|
439
|
+
(permission_patterns, 'PermissionError'),
|
|
440
|
+
(not_found_patterns, 'NotFoundError'),
|
|
441
|
+
(syntax_patterns, 'SyntaxError'),
|
|
442
|
+
(deadlock_patterns, 'DeadlockError')
|
|
443
|
+
]
|
|
444
|
+
|
|
445
|
+
# Apply pattern matching
|
|
446
|
+
for patterns, error_class in pattern_mappings:
|
|
447
|
+
for pattern in patterns:
|
|
448
|
+
try:
|
|
449
|
+
if re.search(pattern, error_message_lower):
|
|
450
|
+
logger.info(f"Classified error by pattern: '{pattern}' -> {error_class}")
|
|
451
|
+
return self._create_exception_with_chaining(
|
|
452
|
+
error_class, error_message, exception, sql, parameters
|
|
453
|
+
)
|
|
454
|
+
except re.error as regex_error:
|
|
455
|
+
logger.warning(f"Regex pattern error '{pattern}': {regex_error}")
|
|
456
|
+
continue
|
|
457
|
+
except Exception as pattern_error:
|
|
458
|
+
logger.error(f"Error applying pattern '{pattern}': {pattern_error}")
|
|
459
|
+
continue
|
|
460
|
+
|
|
461
|
+
# Fallback: return generic database error with full context
|
|
340
462
|
logger.warning(
|
|
341
|
-
"
|
|
342
|
-
error_code,
|
|
343
|
-
error_mesg,
|
|
463
|
+
f"Could not classify error. Returning generic DatabaseError. "
|
|
464
|
+
f"Error code: {error_code}, Available error codes: {list(getattr(self, 'error_codes', {}).keys()) if hasattr(self, 'error_codes') else 'None'}"
|
|
344
465
|
)
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
if error_code in self.sql.ColumnMissingErrorCodes:
|
|
349
|
-
raise exceptions.DbColumnMissingError from None
|
|
350
|
-
if error_code in self.sql.TableMissingErrorCodes:
|
|
351
|
-
raise exceptions.DbTableMissingError from None
|
|
352
|
-
if error_code in self.sql.DatabaseMissingErrorCodes:
|
|
353
|
-
raise exceptions.DbDatabaseMissingError from None
|
|
354
|
-
if error_code in self.sql.ForeignKeyMissingErrorCodes:
|
|
355
|
-
raise exceptions.DbForeignKeyMissingError from None
|
|
356
|
-
if error_code in self.sql.TruncationErrorCodes:
|
|
357
|
-
raise exceptions.DbTruncationError from None
|
|
358
|
-
if error_code in self.sql.DataIntegrityErrorCodes:
|
|
359
|
-
raise exceptions.DbDataIntegrityError from None
|
|
360
|
-
if error_code in self.sql.ConnectionErrorCodes:
|
|
361
|
-
raise exceptions.DbConnectionError from None
|
|
362
|
-
if error_code in self.sql.DuplicateKeyErrorCodes:
|
|
363
|
-
raise exceptions.DbDuplicateKeyError from None
|
|
364
|
-
if re.search(r"key \(sys_id\)=\(\d+\) already exists.", msg, re.M):
|
|
365
|
-
raise exceptions.DbDuplicateKeyError from None
|
|
366
|
-
if error_code in self.sql.DatabaseObjectExistsErrorCodes:
|
|
367
|
-
raise exceptions.DbObjectExistsError from None
|
|
368
|
-
if error_code in self.sql.LockTimeoutErrorCodes:
|
|
369
|
-
raise exceptions.DbLockTimeoutError from None
|
|
370
|
-
if error_code in self.sql.RetryTransactionCodes:
|
|
371
|
-
raise exceptions.DbRetryTransaction from None
|
|
372
|
-
if re.findall(r"database.*does not exist", msg, re.M):
|
|
373
|
-
raise exceptions.DbDatabaseMissingError from None
|
|
374
|
-
if re.findall(r"no such database", msg, re.M):
|
|
375
|
-
raise exceptions.DbDatabaseMissingError from None
|
|
376
|
-
if re.findall(r"already exists", msg, re.M):
|
|
377
|
-
raise exceptions.DbObjectExistsError from None
|
|
378
|
-
if re.findall(r"server closed the connection unexpectedly", msg, re.M):
|
|
379
|
-
raise exceptions.DbConnectionError from None
|
|
380
|
-
if re.findall(r"no connection to the server", msg, re.M):
|
|
381
|
-
raise exceptions.DbConnectionError from None
|
|
382
|
-
if re.findall(r"connection timed out", msg, re.M):
|
|
383
|
-
raise exceptions.DbConnectionError from None
|
|
384
|
-
if re.findall(r"could not connect to server", msg, re.M):
|
|
385
|
-
raise exceptions.DbConnectionError from None
|
|
386
|
-
if re.findall(r"cannot connect to server", msg, re.M):
|
|
387
|
-
raise exceptions.DbConnectionError from None
|
|
388
|
-
if re.findall(r"connection already closed", msg, re.M):
|
|
389
|
-
raise exceptions.DbConnectionError from None
|
|
390
|
-
if re.findall(r"cursor already closed", msg, re.M):
|
|
391
|
-
raise exceptions.DbConnectionError from None
|
|
392
|
-
if "no such table:" in msg:
|
|
393
|
-
raise exceptions.DbTableMissingError from None
|
|
394
|
-
|
|
395
|
-
logger.error(
|
|
396
|
-
"Unhandled/Unknown Error in engine.process_error",
|
|
397
|
-
exc_info=True,
|
|
398
|
-
extra={
|
|
399
|
-
"error_code": error_code,
|
|
400
|
-
"error_msg": error_mesg,
|
|
401
|
-
"sql_stmt": sql_stmt,
|
|
402
|
-
"sql_params": sql_params,
|
|
403
|
-
},
|
|
466
|
+
|
|
467
|
+
return self._create_exception_with_chaining(
|
|
468
|
+
'DatabaseError', error_message, exception, sql, parameters
|
|
404
469
|
)
|
|
405
|
-
|
|
470
|
+
|
|
471
|
+
def _create_exception_with_chaining(self, error_class, message, original_exception, sql=None, parameters=None):
|
|
472
|
+
"""
|
|
473
|
+
Create a velocity exception with proper exception chaining.
|
|
474
|
+
|
|
475
|
+
Args:
|
|
476
|
+
error_class: The name of the exception class to create
|
|
477
|
+
message: The error message
|
|
478
|
+
original_exception: The original exception to chain
|
|
479
|
+
sql: The SQL statement (optional)
|
|
480
|
+
parameters: The SQL parameters (optional)
|
|
481
|
+
|
|
482
|
+
Returns:
|
|
483
|
+
The created exception with proper chaining
|
|
484
|
+
"""
|
|
485
|
+
logger = logging.getLogger(__name__)
|
|
486
|
+
|
|
487
|
+
try:
|
|
488
|
+
# Import the exception class dynamically
|
|
489
|
+
exception_module = __import__('velocity.db.exceptions', fromlist=[error_class])
|
|
490
|
+
ExceptionClass = getattr(exception_module, error_class)
|
|
491
|
+
|
|
492
|
+
# Create enhanced message with context
|
|
493
|
+
if sql:
|
|
494
|
+
enhanced_message = f"{message} (SQL: {sql[:200]}{'...' if len(sql) > 200 else ''})"
|
|
495
|
+
else:
|
|
496
|
+
enhanced_message = message
|
|
497
|
+
|
|
498
|
+
# Create the exception with chaining
|
|
499
|
+
new_exception = ExceptionClass(enhanced_message)
|
|
500
|
+
new_exception.__cause__ = original_exception # Preserve exception chain
|
|
501
|
+
|
|
502
|
+
return new_exception
|
|
503
|
+
|
|
504
|
+
except (ImportError, AttributeError) as e:
|
|
505
|
+
logger.error(f"Could not import exception class {error_class}: {e}")
|
|
506
|
+
# Fallback to generic database error
|
|
507
|
+
try:
|
|
508
|
+
exception_module = __import__('velocity.db.exceptions', fromlist=['DatabaseError'])
|
|
509
|
+
DatabaseError = getattr(exception_module, 'DatabaseError')
|
|
510
|
+
fallback_exception = DatabaseError(f"Database error: {message}")
|
|
511
|
+
fallback_exception.__cause__ = original_exception
|
|
512
|
+
return fallback_exception
|
|
513
|
+
except Exception as fallback_error:
|
|
514
|
+
logger.critical(f"Failed to create fallback exception: {fallback_error}")
|
|
515
|
+
# Last resort: return the original exception
|
|
516
|
+
return original_exception
|
|
@@ -68,20 +68,20 @@ class SQL:
|
|
|
68
68
|
|
|
69
69
|
default_schema = "public"
|
|
70
70
|
|
|
71
|
-
ApplicationErrorCodes = ["22P02", "42883"]
|
|
71
|
+
ApplicationErrorCodes = ["22P02", "42883", "42501", "42601", "25P01", "25P02"]
|
|
72
72
|
|
|
73
|
-
DatabaseMissingErrorCodes = []
|
|
73
|
+
DatabaseMissingErrorCodes = ["3D000"]
|
|
74
74
|
TableMissingErrorCodes = ["42P01"]
|
|
75
75
|
ColumnMissingErrorCodes = ["42703"]
|
|
76
76
|
ForeignKeyMissingErrorCodes = ["42704"]
|
|
77
77
|
|
|
78
|
-
ConnectionErrorCodes = ["08001", "08S01", "57P03", "08006", "53300"]
|
|
79
|
-
DuplicateKeyErrorCodes = [] #
|
|
80
|
-
RetryTransactionCodes = []
|
|
78
|
+
ConnectionErrorCodes = ["08001", "08S01", "57P03", "08006", "53300", "08003", "08004", "08P01"]
|
|
79
|
+
DuplicateKeyErrorCodes = ["23505"] # unique_violation - no longer relying only on regex
|
|
80
|
+
RetryTransactionCodes = ["40001", "40P01", "40002"]
|
|
81
81
|
TruncationErrorCodes = ["22001"]
|
|
82
82
|
LockTimeoutErrorCodes = ["55P03"]
|
|
83
83
|
DatabaseObjectExistsErrorCodes = ["42710", "42P07", "42P04"]
|
|
84
|
-
DataIntegrityErrorCodes = ["23503"]
|
|
84
|
+
DataIntegrityErrorCodes = ["23503", "23502", "23514", "23P01", "22003"]
|
|
85
85
|
|
|
86
86
|
@classmethod
|
|
87
87
|
def get_error(self, e):
|
|
@@ -57,11 +57,14 @@ src/velocity_python.egg-info/requires.txt
|
|
|
57
57
|
src/velocity_python.egg-info/top_level.txt
|
|
58
58
|
tests/test_db.py
|
|
59
59
|
tests/test_email_processing.py
|
|
60
|
+
tests/test_fix.py
|
|
60
61
|
tests/test_format.py
|
|
61
62
|
tests/test_iconv.py
|
|
62
63
|
tests/test_merge.py
|
|
63
64
|
tests/test_oconv.py
|
|
65
|
+
tests/test_original_error.py
|
|
64
66
|
tests/test_postgres.py
|
|
67
|
+
tests/test_process_error_robustness.py
|
|
65
68
|
tests/test_response.py
|
|
66
69
|
tests/test_spreadsheet_functions.py
|
|
67
70
|
tests/test_sql_builder.py
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
# Test script to verify the duplicate_rows fix
|
|
4
|
+
|
|
5
|
+
def test_grouping_fix():
|
|
6
|
+
"""Test the fixed grouping logic"""
|
|
7
|
+
|
|
8
|
+
# Simulate duplicate rows that would come from duplicate_rows()
|
|
9
|
+
duplicate_rows = [
|
|
10
|
+
{"sys_id": 1, "email_address": "test1@example.com", "card_number": "1234", "expiration_date": "2024-01", "status": None},
|
|
11
|
+
{"sys_id": 2, "email_address": "test1@example.com", "card_number": "1234", "expiration_date": "2024-02", "status": None},
|
|
12
|
+
{"sys_id": 3, "email_address": "test2@example.com", "card_number": "5678", "expiration_date": "2024-03", "status": None},
|
|
13
|
+
{"sys_id": 4, "email_address": "test2@example.com", "card_number": "5678", "expiration_date": "2024-01", "status": None},
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
# Group rows by email_address and card_number (the fixed logic)
|
|
17
|
+
groups = {}
|
|
18
|
+
for row in duplicate_rows:
|
|
19
|
+
key = (row["email_address"], row["card_number"])
|
|
20
|
+
if key not in groups:
|
|
21
|
+
groups[key] = []
|
|
22
|
+
groups[key].append(row)
|
|
23
|
+
|
|
24
|
+
print("Groups found:")
|
|
25
|
+
for key, group in groups.items():
|
|
26
|
+
print(f" Key: {key}, Group size: {len(group)}")
|
|
27
|
+
|
|
28
|
+
# Test the sorting that was causing the original error
|
|
29
|
+
try:
|
|
30
|
+
sorted_group = sorted(group, key=lambda x: x["expiration_date"])
|
|
31
|
+
print(f" Sorted by expiration_date: {[row['expiration_date'] for row in sorted_group]}")
|
|
32
|
+
|
|
33
|
+
# Test the enumeration that happens in the original code
|
|
34
|
+
for idx, row in enumerate(sorted_group):
|
|
35
|
+
print(f" {idx}: {row['sys_id']}, {row['email_address']}, {row['card_number']}, {row['expiration_date']}")
|
|
36
|
+
|
|
37
|
+
except TypeError as e:
|
|
38
|
+
print(f" ERROR: {e}")
|
|
39
|
+
return False
|
|
40
|
+
|
|
41
|
+
return True
|
|
42
|
+
|
|
43
|
+
if __name__ == "__main__":
|
|
44
|
+
success = test_grouping_fix()
|
|
45
|
+
if success:
|
|
46
|
+
print("\n✓ Fix appears to work correctly!")
|
|
47
|
+
else:
|
|
48
|
+
print("\n✗ Fix has issues")
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
# Test script to demonstrate the original error
|
|
4
|
+
|
|
5
|
+
def test_original_error():
|
|
6
|
+
"""Demonstrate the original error that was happening"""
|
|
7
|
+
|
|
8
|
+
# Simulate what duplicate_rows() was returning (individual dicts, not groups)
|
|
9
|
+
# The code was expecting groups but getting individual rows
|
|
10
|
+
fake_groups = [
|
|
11
|
+
{"sys_id": 1, "email_address": "test1@example.com", "card_number": "1234", "expiration_date": "2024-01", "status": None},
|
|
12
|
+
{"sys_id": 2, "email_address": "test1@example.com", "card_number": "1234", "expiration_date": "2024-02", "status": None},
|
|
13
|
+
]
|
|
14
|
+
|
|
15
|
+
print("Testing original problematic code pattern:")
|
|
16
|
+
|
|
17
|
+
for group in fake_groups: # group is actually a single row/dict
|
|
18
|
+
print(f"Processing 'group': {group}")
|
|
19
|
+
try:
|
|
20
|
+
# This is the line that was failing: sorted(group, key=lambda x: x["expiration_date"])
|
|
21
|
+
# When group is a dict, sorted() iterates over the keys (strings), not the values
|
|
22
|
+
sorted_group = sorted(group, key=lambda x: x["expiration_date"])
|
|
23
|
+
print(f" Sorted result: {sorted_group}")
|
|
24
|
+
except TypeError as e:
|
|
25
|
+
print(f" ERROR: {e}")
|
|
26
|
+
print(f" This happened because 'group' is a dict, so sorted() iterates over keys: {list(group.keys())}")
|
|
27
|
+
print(f" The lambda tries to access x['expiration_date'] where x is a string key, not a dict")
|
|
28
|
+
return False
|
|
29
|
+
|
|
30
|
+
return True
|
|
31
|
+
|
|
32
|
+
if __name__ == "__main__":
|
|
33
|
+
print("Demonstrating the original error:")
|
|
34
|
+
test_original_error()
|
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
Test the robustness of the improved process_error method
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import sys
|
|
8
|
+
import unittest
|
|
9
|
+
from unittest.mock import Mock, patch
|
|
10
|
+
import logging
|
|
11
|
+
|
|
12
|
+
# Add the source directory to the path
|
|
13
|
+
sys.path.insert(0, '/home/ubuntu/tenspace/velocity-python/src')
|
|
14
|
+
|
|
15
|
+
from velocity.db.core.engine import Engine
|
|
16
|
+
from velocity.db.core import exceptions
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class MockException(Exception):
|
|
20
|
+
"""Mock exception for testing"""
|
|
21
|
+
def __init__(self, message, pgcode=None, pgerror=None):
|
|
22
|
+
super().__init__(message)
|
|
23
|
+
self.pgcode = pgcode
|
|
24
|
+
self.pgerror = pgerror
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class MockSQL:
|
|
28
|
+
"""Mock SQL class for testing"""
|
|
29
|
+
server = "PostgreSQL"
|
|
30
|
+
|
|
31
|
+
ApplicationErrorCodes = ["22P02", "42883", "42501", "42601", "25P01", "25P02"]
|
|
32
|
+
DatabaseMissingErrorCodes = ["3D000"]
|
|
33
|
+
TableMissingErrorCodes = ["42P01"]
|
|
34
|
+
ColumnMissingErrorCodes = ["42703"]
|
|
35
|
+
ForeignKeyMissingErrorCodes = ["42704"]
|
|
36
|
+
ConnectionErrorCodes = ["08001", "08S01", "57P03", "08006", "53300", "08003", "08004", "08P01"]
|
|
37
|
+
DuplicateKeyErrorCodes = ["23505"]
|
|
38
|
+
RetryTransactionCodes = ["40001", "40P01", "40002"]
|
|
39
|
+
TruncationErrorCodes = ["22001"]
|
|
40
|
+
LockTimeoutErrorCodes = ["55P03"]
|
|
41
|
+
DatabaseObjectExistsErrorCodes = ["42710", "42P07", "42P04"]
|
|
42
|
+
DataIntegrityErrorCodes = ["23503", "23502", "23514", "23P01", "22003"]
|
|
43
|
+
|
|
44
|
+
@classmethod
|
|
45
|
+
def get_error(cls, e):
|
|
46
|
+
return getattr(e, 'pgcode', None), getattr(e, 'pgerror', None)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class TestProcessErrorRobustness(unittest.TestCase):
|
|
50
|
+
|
|
51
|
+
def setUp(self):
|
|
52
|
+
"""Set up test fixtures"""
|
|
53
|
+
self.engine = Engine(
|
|
54
|
+
driver=Mock(),
|
|
55
|
+
config={'test': 'config'},
|
|
56
|
+
sql=MockSQL()
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
# Capture logs for testing
|
|
60
|
+
self.log_handler = logging.StreamHandler()
|
|
61
|
+
self.log_handler.setLevel(logging.DEBUG)
|
|
62
|
+
logger = logging.getLogger("velocity.db.engine")
|
|
63
|
+
logger.addHandler(self.log_handler)
|
|
64
|
+
logger.setLevel(logging.DEBUG)
|
|
65
|
+
|
|
66
|
+
def test_error_code_classification(self):
|
|
67
|
+
"""Test that error codes are properly classified"""
|
|
68
|
+
test_cases = [
|
|
69
|
+
# (pgcode, expected_exception_class, description)
|
|
70
|
+
("23505", exceptions.DbDuplicateKeyError, "unique violation"),
|
|
71
|
+
("40001", exceptions.DbRetryTransaction, "serialization failure"),
|
|
72
|
+
("40P01", exceptions.DbRetryTransaction, "deadlock detected"),
|
|
73
|
+
("42501", exceptions.DbApplicationError, "insufficient privilege"),
|
|
74
|
+
("42601", exceptions.DbApplicationError, "syntax error"),
|
|
75
|
+
("25P01", exceptions.DbApplicationError, "no active sql transaction"),
|
|
76
|
+
("3D000", exceptions.DbDatabaseMissingError, "invalid catalog name"),
|
|
77
|
+
("08003", exceptions.DbConnectionError, "connection does not exist"),
|
|
78
|
+
("23502", exceptions.DbDataIntegrityError, "not null violation"),
|
|
79
|
+
("42P01", exceptions.DbTableMissingError, "undefined table"),
|
|
80
|
+
("42703", exceptions.DbColumnMissingError, "undefined column"),
|
|
81
|
+
]
|
|
82
|
+
|
|
83
|
+
for pgcode, expected_exception, description in test_cases:
|
|
84
|
+
with self.subTest(pgcode=pgcode, description=description):
|
|
85
|
+
mock_exc = MockException(f"Test error: {description}", pgcode=pgcode)
|
|
86
|
+
|
|
87
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
88
|
+
with self.assertRaises(expected_exception):
|
|
89
|
+
self.engine.process_error("test sql", {"param": "value"})
|
|
90
|
+
|
|
91
|
+
def test_regex_fallback_patterns(self):
|
|
92
|
+
"""Test regex pattern fallback when error codes aren't available"""
|
|
93
|
+
test_cases = [
|
|
94
|
+
# (message, expected_exception_class, description)
|
|
95
|
+
("key (sys_id)=(123) already exists.", exceptions.DbDuplicateKeyError, "sys_id duplicate"),
|
|
96
|
+
("duplicate key value violates unique constraint", exceptions.DbDuplicateKeyError, "unique constraint"),
|
|
97
|
+
("database 'testdb' does not exist", exceptions.DbDatabaseMissingError, "database missing"),
|
|
98
|
+
("no such database: mydb", exceptions.DbDatabaseMissingError, "database not found"),
|
|
99
|
+
("table 'users' already exists", exceptions.DbObjectExistsError, "object exists"),
|
|
100
|
+
("server closed the connection unexpectedly", exceptions.DbConnectionError, "connection closed"),
|
|
101
|
+
("connection timed out", exceptions.DbConnectionError, "connection timeout"),
|
|
102
|
+
("no such table: users", exceptions.DbTableMissingError, "table missing"),
|
|
103
|
+
("permission denied for table users", exceptions.DbApplicationError, "permission denied"),
|
|
104
|
+
("syntax error at or near 'SELCT'", exceptions.DbApplicationError, "syntax error"),
|
|
105
|
+
("deadlock detected", exceptions.DbLockTimeoutError, "deadlock"),
|
|
106
|
+
]
|
|
107
|
+
|
|
108
|
+
for message, expected_exception, description in test_cases:
|
|
109
|
+
with self.subTest(message=message, description=description):
|
|
110
|
+
mock_exc = MockException(message) # No pgcode - will trigger regex fallback
|
|
111
|
+
|
|
112
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
113
|
+
with self.assertRaises(expected_exception):
|
|
114
|
+
self.engine.process_error("test sql", {"param": "value"})
|
|
115
|
+
|
|
116
|
+
def test_already_custom_exception(self):
|
|
117
|
+
"""Test that custom exceptions are re-raised as-is"""
|
|
118
|
+
custom_exc = exceptions.DbConnectionError("Already a custom exception")
|
|
119
|
+
|
|
120
|
+
with patch('sys.exc_info', return_value=(type(custom_exc), custom_exc, None)):
|
|
121
|
+
with self.assertRaises(exceptions.DbConnectionError):
|
|
122
|
+
self.engine.process_error()
|
|
123
|
+
|
|
124
|
+
def test_no_active_exception(self):
|
|
125
|
+
"""Test handling when no exception is active"""
|
|
126
|
+
with patch('sys.exc_info', return_value=(None, None, None)):
|
|
127
|
+
with self.assertRaises(RuntimeError) as cm:
|
|
128
|
+
self.engine.process_error()
|
|
129
|
+
self.assertIn("no active exception", str(cm.exception))
|
|
130
|
+
|
|
131
|
+
def test_get_error_failure(self):
|
|
132
|
+
"""Test handling when get_error fails"""
|
|
133
|
+
mock_exc = MockException("Test error")
|
|
134
|
+
|
|
135
|
+
# Mock get_error to raise an exception
|
|
136
|
+
with patch.object(self.engine.sql, 'get_error', side_effect=Exception("get_error failed")):
|
|
137
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
138
|
+
# Should still handle the error using fallback mechanisms
|
|
139
|
+
with self.assertRaises(Exception): # Original exception should be re-raised
|
|
140
|
+
self.engine.process_error()
|
|
141
|
+
|
|
142
|
+
def test_exception_str_failure(self):
|
|
143
|
+
"""Test handling when converting exception to string fails"""
|
|
144
|
+
class UnstringableException(Exception):
|
|
145
|
+
def __str__(self):
|
|
146
|
+
raise Exception("Cannot convert to string")
|
|
147
|
+
|
|
148
|
+
mock_exc = UnstringableException("Test error")
|
|
149
|
+
|
|
150
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
151
|
+
with self.assertRaises(UnstringableException):
|
|
152
|
+
self.engine.process_error()
|
|
153
|
+
|
|
154
|
+
def test_exception_chaining(self):
|
|
155
|
+
"""Test that exception chaining is preserved"""
|
|
156
|
+
mock_exc = MockException("Original error", pgcode="23505")
|
|
157
|
+
|
|
158
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
159
|
+
try:
|
|
160
|
+
self.engine.process_error()
|
|
161
|
+
except exceptions.DbDuplicateKeyError as e:
|
|
162
|
+
# Check that the original exception is chained
|
|
163
|
+
self.assertIsInstance(e.__cause__, MockException)
|
|
164
|
+
self.assertEqual(str(e.__cause__), "Original error")
|
|
165
|
+
|
|
166
|
+
def test_enhanced_logging(self):
|
|
167
|
+
"""Test that enhanced logging provides good context"""
|
|
168
|
+
mock_exc = MockException("Test error for logging", pgcode="23505", pgerror="duplicate key")
|
|
169
|
+
|
|
170
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
171
|
+
with patch('velocity.db.core.engine.logger') as mock_logger:
|
|
172
|
+
with self.assertRaises(exceptions.DbDuplicateKeyError):
|
|
173
|
+
self.engine.process_error("SELECT * FROM test", {"id": 123})
|
|
174
|
+
|
|
175
|
+
# Verify warning log was called with proper context
|
|
176
|
+
mock_logger.warning.assert_called_once()
|
|
177
|
+
call_args = mock_logger.warning.call_args
|
|
178
|
+
|
|
179
|
+
# Check the message contains key information
|
|
180
|
+
message = call_args[0][0]
|
|
181
|
+
self.assertIn("code=23505", message)
|
|
182
|
+
self.assertIn("message=duplicate key", message)
|
|
183
|
+
self.assertIn("type=MockException", message)
|
|
184
|
+
|
|
185
|
+
# Check extra context is provided
|
|
186
|
+
extra = call_args[1]['extra']
|
|
187
|
+
self.assertEqual(extra['error_code'], "23505")
|
|
188
|
+
self.assertEqual(extra['sql_stmt'], "SELECT * FROM test")
|
|
189
|
+
self.assertEqual(extra['sql_params'], {"id": 123})
|
|
190
|
+
|
|
191
|
+
def test_unknown_error_logging(self):
|
|
192
|
+
"""Test logging for unhandled/unknown errors"""
|
|
193
|
+
class UnknownException(Exception):
|
|
194
|
+
pass
|
|
195
|
+
|
|
196
|
+
mock_exc = UnknownException("Unknown error type")
|
|
197
|
+
|
|
198
|
+
with patch('sys.exc_info', return_value=(type(mock_exc), mock_exc, None)):
|
|
199
|
+
with patch('velocity.db.core.engine.logger') as mock_logger:
|
|
200
|
+
with self.assertRaises(UnknownException):
|
|
201
|
+
self.engine.process_error("SELECT unknown", {"param": "test"})
|
|
202
|
+
|
|
203
|
+
# Verify error log was called for unhandled case
|
|
204
|
+
mock_logger.error.assert_called_once()
|
|
205
|
+
call_args = mock_logger.error.call_args
|
|
206
|
+
|
|
207
|
+
# Check that comprehensive context is logged
|
|
208
|
+
extra = call_args[1]['extra']
|
|
209
|
+
self.assertIn('available_error_codes', extra)
|
|
210
|
+
self.assertIn('original_exception_type', extra)
|
|
211
|
+
self.assertEqual(extra['original_exception_type'], 'UnknownException')
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def main():
|
|
215
|
+
print("Testing robustness of improved process_error method...")
|
|
216
|
+
|
|
217
|
+
# Configure logging to see the output
|
|
218
|
+
logging.basicConfig(
|
|
219
|
+
level=logging.DEBUG,
|
|
220
|
+
format='%(levelname)s - %(name)s - %(message)s'
|
|
221
|
+
)
|
|
222
|
+
|
|
223
|
+
# Run the tests
|
|
224
|
+
unittest.main(argv=[''], exit=False, verbosity=2)
|
|
225
|
+
|
|
226
|
+
print("\n=== Summary ===")
|
|
227
|
+
print("✅ Enhanced error code classification")
|
|
228
|
+
print("✅ Robust regex pattern fallback")
|
|
229
|
+
print("✅ Exception chaining preservation")
|
|
230
|
+
print("✅ Enhanced logging with context")
|
|
231
|
+
print("✅ Graceful handling of edge cases")
|
|
232
|
+
print("✅ Better debugging information")
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
if __name__ == "__main__":
|
|
236
|
+
main()
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/aws/handlers/lambda_handler.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/mysql_reserved.py
RENAMED
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/__init__.py
RENAMED
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/operators.py
RENAMED
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/reserved.py
RENAMED
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/postgres/types.py
RENAMED
|
File without changes
|
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/sqlite_reserved.py
RENAMED
|
File without changes
|
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity/db/servers/sqlserver_reserved.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/requires.txt
RENAMED
|
File without changes
|
{velocity_python-0.0.98 → velocity_python-0.0.101}/src/velocity_python.egg-info/top_level.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|