memorisdk 1.0.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.

Potentially problematic release: this version of memorisdk might be problematic.

Files changed (46)
  1. memori/__init__.py +24 -8
  2. memori/agents/conscious_agent.py +252 -414
  3. memori/agents/memory_agent.py +487 -224
  4. memori/agents/retrieval_agent.py +416 -60
  5. memori/config/memory_manager.py +323 -0
  6. memori/core/conversation.py +393 -0
  7. memori/core/database.py +386 -371
  8. memori/core/memory.py +1638 -531
  9. memori/core/providers.py +217 -0
  10. memori/database/adapters/__init__.py +10 -0
  11. memori/database/adapters/mysql_adapter.py +331 -0
  12. memori/database/adapters/postgresql_adapter.py +291 -0
  13. memori/database/adapters/sqlite_adapter.py +229 -0
  14. memori/database/auto_creator.py +320 -0
  15. memori/database/connection_utils.py +207 -0
  16. memori/database/connectors/base_connector.py +283 -0
  17. memori/database/connectors/mysql_connector.py +240 -18
  18. memori/database/connectors/postgres_connector.py +277 -4
  19. memori/database/connectors/sqlite_connector.py +178 -3
  20. memori/database/models.py +400 -0
  21. memori/database/queries/base_queries.py +1 -1
  22. memori/database/queries/memory_queries.py +91 -2
  23. memori/database/query_translator.py +222 -0
  24. memori/database/schema_generators/__init__.py +7 -0
  25. memori/database/schema_generators/mysql_schema_generator.py +215 -0
  26. memori/database/search/__init__.py +8 -0
  27. memori/database/search/mysql_search_adapter.py +255 -0
  28. memori/database/search/sqlite_search_adapter.py +180 -0
  29. memori/database/search_service.py +548 -0
  30. memori/database/sqlalchemy_manager.py +839 -0
  31. memori/integrations/__init__.py +36 -11
  32. memori/integrations/litellm_integration.py +340 -6
  33. memori/integrations/openai_integration.py +506 -240
  34. memori/utils/input_validator.py +395 -0
  35. memori/utils/pydantic_models.py +138 -36
  36. memori/utils/query_builder.py +530 -0
  37. memori/utils/security_audit.py +594 -0
  38. memori/utils/security_integration.py +339 -0
  39. memori/utils/transaction_manager.py +547 -0
  40. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/METADATA +44 -17
  41. memorisdk-2.0.0.dist-info/RECORD +67 -0
  42. memorisdk-1.0.2.dist-info/RECORD +0 -44
  43. memorisdk-1.0.2.dist-info/entry_points.txt +0 -2
  44. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/WHEEL +0 -0
  45. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/licenses/LICENSE +0 -0
  46. {memorisdk-1.0.2.dist-info → memorisdk-2.0.0.dist-info}/top_level.txt +0 -0
memori/utils/query_builder.py
@@ -0,0 +1,530 @@
+"""
+Unified query builder with database-agnostic parameter binding
+Provides consistent parameter handling across SQLite, PostgreSQL, and MySQL
+"""
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Tuple
+
+from loguru import logger
+
+from .exceptions import DatabaseError, ValidationError
+from .input_validator import InputValidator
+
+
+class DatabaseDialect(str, Enum):
+    """Supported database dialects"""
+
+    SQLITE = "sqlite"
+    POSTGRESQL = "postgresql"
+    MYSQL = "mysql"
+
+
+class QueryBuilder:
+    """Database-agnostic query builder with proper parameter binding"""
+
+    # Parameter styles for different databases
+    PARAM_STYLES = {
+        DatabaseDialect.SQLITE: "?",
+        DatabaseDialect.POSTGRESQL: "%s",
+        DatabaseDialect.MYSQL: "%s",
+    }
+
+    # SQL keywords that need special handling per database
+    LIMIT_SYNTAX = {
+        DatabaseDialect.SQLITE: "LIMIT ?",
+        DatabaseDialect.POSTGRESQL: "LIMIT %s",
+        DatabaseDialect.MYSQL: "LIMIT %s",
+    }
+
+    def __init__(self, dialect: DatabaseDialect):
+        self.dialect = dialect
+        self.param_placeholder = self.PARAM_STYLES[dialect]
+
+    def build_search_query(
+        self,
+        tables: List[str],
+        search_columns: List[str],
+        query_text: str,
+        namespace: str,
+        category_filter: Optional[List[str]] = None,
+        limit: int = 10,
+        use_fts: bool = False,
+    ) -> Tuple[str, List[Any]]:
+        """Build a database-specific search query with proper parameter binding"""
+
+        try:
+            # Validate inputs using our validator
+            query_text = InputValidator.validate_and_sanitize_query(query_text)
+            namespace = InputValidator.validate_namespace(namespace)
+            category_filter = InputValidator.validate_category_filter(category_filter)
+            limit = InputValidator.validate_limit(limit)
+
+            # Validate table and column names
+            for table in tables:
+                InputValidator.sanitize_sql_identifier(table)
+            for column in search_columns:
+                InputValidator.sanitize_sql_identifier(column)
+
+        except ValidationError as e:
+            raise DatabaseError(f"Invalid query parameters: {e}")
+
+        params = []
+        where_conditions = []
+
+        # Build FTS-specific or LIKE-based search conditions
+        if use_fts and self.dialect == DatabaseDialect.SQLITE:
+            # SQLite FTS5 syntax
+            fts_condition = f"memory_search_fts MATCH {self.param_placeholder}"
+            params.append(f'"{query_text}"' if query_text else "*")
+            where_conditions.append(fts_condition)
+        elif use_fts and self.dialect == DatabaseDialect.POSTGRESQL:
+            # PostgreSQL full-text search
+            search_conditions = []
+            for column in search_columns:
+                search_conditions.append(
+                    f"to_tsvector('english', {column}) @@ plainto_tsquery('english', {self.param_placeholder})"
+                )
+                params.append(query_text)
+            where_conditions.append(f"({' OR '.join(search_conditions)})")
+        elif use_fts and self.dialect == DatabaseDialect.MYSQL:
+            # MySQL FULLTEXT search
+            search_conditions = []
+            for column in search_columns:
+                search_conditions.append(
+                    f"MATCH({column}) AGAINST ({self.param_placeholder} IN BOOLEAN MODE)"
+                )
+                params.append(query_text)
+            where_conditions.append(f"({' OR '.join(search_conditions)})")
+        else:
+            # Fallback LIKE search for all databases
+            like_conditions = []
+            for column in search_columns:
+                like_conditions.append(f"{column} LIKE {self.param_placeholder}")
+                params.append(f"%{query_text}%")
+            where_conditions.append(f"({' OR '.join(like_conditions)})")
+
+        # Add namespace condition
+        where_conditions.append(f"namespace = {self.param_placeholder}")
+        params.append(namespace)
+
+        # Add category filter if provided
+        if category_filter:
+            placeholders = ",".join([self.param_placeholder] * len(category_filter))
+            where_conditions.append(f"category_primary IN ({placeholders})")
+            params.extend(category_filter)
+
+        # Build the complete query
+        if len(tables) == 1:
+            # Single table query
+            query = f"""
+                SELECT *, '{tables[0]}' as memory_type
+                FROM {tables[0]}
+                WHERE {' AND '.join(where_conditions)}
+                ORDER BY importance_score DESC, created_at DESC
+                {self.LIMIT_SYNTAX[self.dialect]}
+            """
+        else:
+            # Multi-table UNION query
+            union_parts = []
+            for table in tables:
+                table_query = f"""
+                    SELECT *, '{table}' as memory_type
+                    FROM {table}
+                    WHERE {' AND '.join(where_conditions)}
+                """
+                union_parts.append(table_query)
+
+            query = f"""
+                SELECT * FROM (
+                    {' UNION ALL '.join(union_parts)}
+                ) combined
+                ORDER BY importance_score DESC, created_at DESC
+                {self.LIMIT_SYNTAX[self.dialect]}
+            """
+
+        params.append(limit)
+
+        return query, params
+
+    def build_insert_query(
+        self, table: str, data: Dict[str, Any], on_conflict: str = "REPLACE"
+    ) -> Tuple[str, List[Any]]:
+        """Build database-specific insert query with proper parameter binding"""
+
+        try:
+            # Validate table name
+            table = InputValidator.sanitize_sql_identifier(table)
+
+            # Validate column names and data
+            validated_data = {}
+            for key, value in data.items():
+                validated_key = InputValidator.sanitize_sql_identifier(key)
+                validated_data[validated_key] = value
+
+        except ValidationError as e:
+            raise DatabaseError(f"Invalid insert parameters: {e}")
+
+        columns = list(validated_data.keys())
+        values = list(validated_data.values())
+
+        # Build column list and placeholders
+        columns_str = ", ".join(columns)
+        placeholders = ", ".join([self.param_placeholder] * len(values))
+
+        # Handle different conflict resolution strategies per database
+        if on_conflict == "REPLACE":
+            if self.dialect == DatabaseDialect.SQLITE:
+                query = f"INSERT OR REPLACE INTO {table} ({columns_str}) VALUES ({placeholders})"
+            elif self.dialect == DatabaseDialect.POSTGRESQL:
+                # PostgreSQL uses ON CONFLICT clause
+                primary_key = self._get_primary_key_column(columns)
+                if primary_key:
+                    update_clause = ", ".join(
+                        [
+                            f"{col} = EXCLUDED.{col}"
+                            for col in columns
+                            if col != primary_key
+                        ]
+                    )
+                    query = f"""
+                        INSERT INTO {table} ({columns_str}) VALUES ({placeholders})
+                        ON CONFLICT ({primary_key}) DO UPDATE SET {update_clause}
+                    """
+                else:
+                    # Fallback to simple insert if no primary key detected
+                    query = (
+                        f"INSERT INTO {table} ({columns_str}) VALUES ({placeholders})"
+                    )
+            elif self.dialect == DatabaseDialect.MYSQL:
+                # MySQL uses ON DUPLICATE KEY UPDATE
+                update_clause = ", ".join([f"{col} = VALUES({col})" for col in columns])
+                query = f"""
+                    INSERT INTO {table} ({columns_str}) VALUES ({placeholders})
+                    ON DUPLICATE KEY UPDATE {update_clause}
+                """
+        else:
+            # Simple insert for all databases
+            query = f"INSERT INTO {table} ({columns_str}) VALUES ({placeholders})"
+
+        return query, values
+
+    def build_update_query(
+        self, table: str, data: Dict[str, Any], where_conditions: Dict[str, Any]
+    ) -> Tuple[str, List[Any]]:
+        """Build database-specific update query"""
+
+        try:
+            # Validate table name
+            table = InputValidator.sanitize_sql_identifier(table)
+
+            # Validate all column names
+            for key in list(data.keys()) + list(where_conditions.keys()):
+                InputValidator.sanitize_sql_identifier(key)
+
+        except ValidationError as e:
+            raise DatabaseError(f"Invalid update parameters: {e}")
+
+        # Build SET clause
+        set_conditions = []
+        params = []
+
+        for column, value in data.items():
+            set_conditions.append(f"{column} = {self.param_placeholder}")
+            params.append(value)
+
+        # Build WHERE clause
+        where_parts = []
+        for column, value in where_conditions.items():
+            where_parts.append(f"{column} = {self.param_placeholder}")
+            params.append(value)
+
+        query = f"""
+            UPDATE {table}
+            SET {', '.join(set_conditions)}
+            WHERE {' AND '.join(where_parts)}
+        """
+
+        return query, params
+
+    def build_delete_query(
+        self, table: str, where_conditions: Dict[str, Any]
+    ) -> Tuple[str, List[Any]]:
+        """Build database-specific delete query"""
+
+        try:
+            # Validate table name and columns
+            table = InputValidator.sanitize_sql_identifier(table)
+            for key in where_conditions.keys():
+                InputValidator.sanitize_sql_identifier(key)
+
+        except ValidationError as e:
+            raise DatabaseError(f"Invalid delete parameters: {e}")
+
+        where_parts = []
+        params = []
+
+        for column, value in where_conditions.items():
+            where_parts.append(f"{column} = {self.param_placeholder}")
+            params.append(value)
+
+        query = f"DELETE FROM {table} WHERE {' AND '.join(where_parts)}"
+
+        return query, params
+
+    def build_fts_query(
+        self,
+        query_text: str,
+        namespace: str,
+        category_filter: Optional[List[str]] = None,
+        limit: int = 10,
+    ) -> Tuple[str, List[Any]]:
+        """Build database-specific full-text search query"""
+
+        try:
+            query_text = InputValidator.validate_and_sanitize_query(query_text)
+            namespace = InputValidator.validate_namespace(namespace)
+            category_filter = InputValidator.validate_category_filter(category_filter)
+            limit = InputValidator.validate_limit(limit)
+        except ValidationError as e:
+            raise DatabaseError(f"Invalid FTS parameters: {e}")
+
+        params = []
+        where_conditions = []
+
+        if self.dialect == DatabaseDialect.SQLITE:
+            # SQLite FTS5
+            where_conditions.append(f"memory_search_fts MATCH {self.param_placeholder}")
+            params.append(f'"{query_text}"' if query_text else "*")
+
+            where_conditions.append(f"fts.namespace = {self.param_placeholder}")
+            params.append(namespace)
+
+            if category_filter:
+                placeholders = ",".join([self.param_placeholder] * len(category_filter))
+                where_conditions.append(f"fts.category_primary IN ({placeholders})")
+                params.extend(category_filter)
+
+            query = f"""
+                SELECT
+                    fts.memory_id, fts.memory_type, fts.category_primary,
+                    CASE
+                        WHEN fts.memory_type = 'short_term' THEN st.processed_data
+                        WHEN fts.memory_type = 'long_term' THEN lt.processed_data
+                    END as processed_data,
+                    CASE
+                        WHEN fts.memory_type = 'short_term' THEN st.importance_score
+                        WHEN fts.memory_type = 'long_term' THEN lt.importance_score
+                        ELSE 0.5
+                    END as importance_score,
+                    CASE
+                        WHEN fts.memory_type = 'short_term' THEN st.created_at
+                        WHEN fts.memory_type = 'long_term' THEN lt.created_at
+                    END as created_at,
+                    fts.summary,
+                    rank
+                FROM memory_search_fts fts
+                LEFT JOIN short_term_memory st ON fts.memory_id = st.memory_id AND fts.memory_type = 'short_term'
+                LEFT JOIN long_term_memory lt ON fts.memory_id = lt.memory_id AND fts.memory_type = 'long_term'
+                WHERE {' AND '.join(where_conditions)}
+                ORDER BY rank, importance_score DESC
+                {self.LIMIT_SYNTAX[self.dialect]}
+            """
+
+        elif self.dialect == DatabaseDialect.POSTGRESQL:
+            # PostgreSQL full-text search using tsvector
+            where_conditions.append(
+                "(to_tsvector('english', st.searchable_content) @@ plainto_tsquery('english', %s) OR to_tsvector('english', lt.searchable_content) @@ plainto_tsquery('english', %s))"
+            )
+            params.extend([query_text, query_text])
+
+            where_conditions.append("(st.namespace = %s OR lt.namespace = %s)")
+            params.extend([namespace, namespace])
+
+            if category_filter:
+                placeholders = ",".join(["%s"] * len(category_filter))
+                where_conditions.append(
+                    f"(st.category_primary IN ({placeholders}) OR lt.category_primary IN ({placeholders}))"
+                )
+                params.extend(category_filter * 2)  # For both tables
+
+            query = f"""
+                SELECT DISTINCT
+                    COALESCE(st.memory_id, lt.memory_id) as memory_id,
+                    CASE WHEN st.memory_id IS NOT NULL THEN 'short_term' ELSE 'long_term' END as memory_type,
+                    COALESCE(st.category_primary, lt.category_primary) as category_primary,
+                    COALESCE(st.processed_data, lt.processed_data) as processed_data,
+                    COALESCE(st.importance_score, lt.importance_score) as importance_score,
+                    COALESCE(st.created_at, lt.created_at) as created_at,
+                    COALESCE(st.summary, lt.summary) as summary,
+                    ts_rank(COALESCE(to_tsvector('english', st.searchable_content), to_tsvector('english', lt.searchable_content)), plainto_tsquery('english', %s)) as rank
+                FROM short_term_memory st
+                FULL OUTER JOIN long_term_memory lt ON FALSE -- Force separate processing
+                WHERE {' AND '.join(where_conditions)}
+                ORDER BY rank DESC, importance_score DESC
+                {self.LIMIT_SYNTAX[self.dialect]}
+            """
+            params.append(query_text)  # For ts_rank
+
+        elif self.dialect == DatabaseDialect.MYSQL:
+            # MySQL FULLTEXT search
+            where_conditions.append(
+                "(MATCH(st.searchable_content) AGAINST(%s IN BOOLEAN MODE) OR MATCH(lt.searchable_content) AGAINST(%s IN BOOLEAN MODE))"
+            )
+            params.extend([query_text, query_text])
+
+            where_conditions.append("(st.namespace = %s OR lt.namespace = %s)")
+            params.extend([namespace, namespace])
+
+            if category_filter:
+                placeholders = ",".join(["%s"] * len(category_filter))
+                where_conditions.append(
+                    f"(st.category_primary IN ({placeholders}) OR lt.category_primary IN ({placeholders}))"
+                )
+                params.extend(category_filter * 2)
+
+            query = f"""
+                SELECT
+                    COALESCE(st.memory_id, lt.memory_id) as memory_id,
+                    CASE WHEN st.memory_id IS NOT NULL THEN 'short_term' ELSE 'long_term' END as memory_type,
+                    COALESCE(st.category_primary, lt.category_primary) as category_primary,
+                    COALESCE(st.processed_data, lt.processed_data) as processed_data,
+                    COALESCE(st.importance_score, lt.importance_score) as importance_score,
+                    COALESCE(st.created_at, lt.created_at) as created_at,
+                    COALESCE(st.summary, lt.summary) as summary,
+                    GREATEST(
+                        COALESCE(MATCH(st.searchable_content) AGAINST(%s IN BOOLEAN MODE), 0),
+                        COALESCE(MATCH(lt.searchable_content) AGAINST(%s IN BOOLEAN MODE), 0)
+                    ) as rank
+                FROM short_term_memory st
+                LEFT JOIN long_term_memory lt ON FALSE -- Force UNION behavior
+                UNION ALL
+                SELECT
+                    lt.memory_id,
+                    'long_term' as memory_type,
+                    lt.category_primary,
+                    lt.processed_data,
+                    lt.importance_score,
+                    lt.created_at,
+                    lt.summary,
+                    MATCH(lt.searchable_content) AGAINST(%s IN BOOLEAN MODE) as rank
+                FROM long_term_memory lt
+                WHERE {' AND '.join(where_conditions)}
+                ORDER BY rank DESC, importance_score DESC
+                {self.LIMIT_SYNTAX[self.dialect]}
+            """
+            params.extend(
+                [query_text, query_text, query_text]
+            )  # For MATCH calculations
+
+        params.append(limit)
+        return query, params
+
+    def _get_primary_key_column(self, columns: List[str]) -> Optional[str]:
+        """Detect likely primary key column from column names"""
+        pk_candidates = [
+            "id",
+            "memory_id",
+            "chat_id",
+            "entity_id",
+            "relationship_id",
+            "rule_id",
+        ]
+
+        for candidate in pk_candidates:
+            if candidate in columns:
+                return candidate
+
+        # If no standard primary key found, use the first column ending with '_id'
+        for column in columns:
+            if column.endswith("_id"):
+                return column
+
+        return None
+
+
+class DatabaseQueryExecutor:
+    """Execute queries with proper error handling and transaction management"""
+
+    def __init__(self, connector, dialect: DatabaseDialect):
+        self.connector = connector
+        self.query_builder = QueryBuilder(dialect)
+
+    def execute_search(
+        self,
+        query_text: str,
+        namespace: str = "default",
+        category_filter: Optional[List[str]] = None,
+        limit: int = 10,
+        use_fts: bool = True,
+    ) -> List[Dict[str, Any]]:
+        """Execute search with proper error handling"""
+        try:
+            if use_fts:
+                # Try FTS first
+                try:
+                    sql_query, params = self.query_builder.build_fts_query(
+                        query_text, namespace, category_filter, limit
+                    )
+                    results = self.connector.execute_query(sql_query, params)
+                    if results:
+                        return results
+                except Exception as e:
+                    logger.debug(f"FTS search failed, falling back to LIKE: {e}")
+
+            # Fallback to LIKE search
+            tables = ["short_term_memory", "long_term_memory"]
+            columns = ["searchable_content", "summary"]
+
+            sql_query, params = self.query_builder.build_search_query(
+                tables,
+                columns,
+                query_text,
+                namespace,
+                category_filter,
+                limit,
+                use_fts=False,
+            )
+
+            return self.connector.execute_query(sql_query, params)
+
+        except Exception as e:
+            logger.error(f"Search execution failed: {e}")
+            return []
+
+    def execute_safe_insert(
+        self, table: str, data: Dict[str, Any], on_conflict: str = "REPLACE"
+    ) -> Optional[str]:
+        """Execute insert with proper error handling"""
+        try:
+            sql_query, params = self.query_builder.build_insert_query(
+                table, data, on_conflict
+            )
+            return self.connector.execute_insert(sql_query, params)
+        except Exception as e:
+            logger.error(f"Insert execution failed: {e}")
+            raise DatabaseError(f"Failed to insert into {table}: {e}")
+
+    def execute_safe_update(
+        self, table: str, data: Dict[str, Any], where_conditions: Dict[str, Any]
+    ) -> int:
+        """Execute update with proper error handling"""
+        try:
+            sql_query, params = self.query_builder.build_update_query(
+                table, data, where_conditions
+            )
+            return self.connector.execute_update(sql_query, params)
+        except Exception as e:
+            logger.error(f"Update execution failed: {e}")
+            raise DatabaseError(f"Failed to update {table}: {e}")
+
+    def execute_safe_delete(self, table: str, where_conditions: Dict[str, Any]) -> int:
+        """Execute delete with proper error handling"""
+        try:
+            sql_query, params = self.query_builder.build_delete_query(
+                table, where_conditions
+            )
+            return self.connector.execute_delete(sql_query, params)
+        except Exception as e:
+            logger.error(f"Delete execution failed: {e}")
+            raise DatabaseError(f"Failed to delete from {table}: {e}")
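
For orientation only, the short sketch below shows how the query-builder API introduced in this file might be exercised. It is not part of the diff: the import path (memori.utils.query_builder) is inferred from the file list above, and the sample query text and row values are illustrative; the table and column names are the defaults that appear in DatabaseQueryExecutor.execute_search.

from memori.utils.query_builder import DatabaseDialect, QueryBuilder

# Build a LIKE-based search over both memory tables; SQLite binds with "?" placeholders.
builder = QueryBuilder(DatabaseDialect.SQLITE)
sql, params = builder.build_search_query(
    tables=["short_term_memory", "long_term_memory"],
    search_columns=["searchable_content", "summary"],
    query_text="project deadline",
    namespace="default",
    limit=5,
)

# Build an upsert; on SQLite the "REPLACE" strategy renders as INSERT OR REPLACE.
sql, params = builder.build_insert_query(
    "long_term_memory",
    {"memory_id": "mem-001", "namespace": "default", "summary": "Example row"},
)

The generated SQL and the parameter list are returned separately, so the caller (for example DatabaseQueryExecutor) passes them to the connector for bound execution rather than interpolating values into the statement.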