qase-python-commons 3.1.9__py3-none-any.whl → 4.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qase/__init__.py +3 -0
- qase/commons/client/api_v1_client.py +269 -175
- qase/commons/client/api_v2_client.py +163 -26
- qase/commons/client/base_api_client.py +23 -6
- qase/commons/config.py +162 -23
- qase/commons/logger.py +82 -13
- qase/commons/models/__init__.py +0 -2
- qase/commons/models/attachment.py +11 -8
- qase/commons/models/basemodel.py +12 -3
- qase/commons/models/config/framework.py +17 -0
- qase/commons/models/config/qaseconfig.py +34 -0
- qase/commons/models/config/run.py +19 -0
- qase/commons/models/config/testops.py +45 -3
- qase/commons/models/external_link.py +41 -0
- qase/commons/models/relation.py +16 -6
- qase/commons/models/result.py +16 -31
- qase/commons/models/run.py +17 -2
- qase/commons/models/runtime.py +9 -0
- qase/commons/models/step.py +45 -12
- qase/commons/profilers/__init__.py +4 -3
- qase/commons/profilers/db.py +965 -5
- qase/commons/reporters/core.py +60 -10
- qase/commons/reporters/report.py +11 -6
- qase/commons/reporters/testops.py +56 -27
- qase/commons/status_mapping/__init__.py +12 -0
- qase/commons/status_mapping/status_mapping.py +237 -0
- qase/commons/util/__init__.py +9 -0
- qase/commons/util/host_data.py +147 -0
- qase/commons/utils.py +95 -0
- {qase_python_commons-3.1.9.dist-info → qase_python_commons-4.1.9.dist-info}/METADATA +16 -11
- qase_python_commons-4.1.9.dist-info/RECORD +45 -0
- {qase_python_commons-3.1.9.dist-info → qase_python_commons-4.1.9.dist-info}/WHEEL +1 -1
- qase/commons/models/suite.py +0 -13
- qase_python_commons-3.1.9.dist-info/RECORD +0 -40
- {qase_python_commons-3.1.9.dist-info → qase_python_commons-4.1.9.dist-info}/top_level.txt +0 -0
qase/commons/profilers/db.py
CHANGED
|
@@ -1,8 +1,17 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import time
|
|
3
|
+
import uuid
|
|
4
|
+
import threading
|
|
5
|
+
from functools import wraps
|
|
6
|
+
from typing import Optional, Any, Dict
|
|
7
|
+
|
|
1
8
|
from ..models.runtime import Runtime
|
|
9
|
+
from ..models.step import Step, StepDbQueryData, StepType
|
|
2
10
|
|
|
3
11
|
|
|
4
|
-
class
|
|
12
|
+
class DatabaseProfiler:
|
|
5
13
|
_instance = None
|
|
14
|
+
_lock = threading.Lock()
|
|
6
15
|
|
|
7
16
|
def __init__(self, runtime: Runtime, track_on_fail: bool = True):
|
|
8
17
|
self._original_functions = {}
|
|
@@ -11,9 +20,960 @@ class DbProfiler:
|
|
|
11
20
|
self.step = None
|
|
12
21
|
|
|
13
22
|
def enable(self):
|
|
14
|
-
|
|
15
|
-
|
|
23
|
+
"""Enable database profiling for all supported database libraries."""
|
|
24
|
+
# SQLAlchemy - try to enable (will skip if not available)
|
|
25
|
+
self._enable_sqlalchemy()
|
|
26
|
+
|
|
27
|
+
# psycopg2 (PostgreSQL) - try to enable (will skip if not available)
|
|
28
|
+
self._enable_psycopg2()
|
|
29
|
+
|
|
30
|
+
# pymysql (MySQL) - try to enable (will skip if not available)
|
|
31
|
+
self._enable_pymysql()
|
|
32
|
+
|
|
33
|
+
# sqlite3 (built-in) - try to enable (will skip if not available)
|
|
34
|
+
self._enable_sqlite3()
|
|
35
|
+
|
|
36
|
+
# pymongo (MongoDB) - try to enable (will skip if not available)
|
|
37
|
+
self._enable_pymongo()
|
|
38
|
+
|
|
39
|
+
# redis-py - try to enable (will skip if not available)
|
|
40
|
+
self._enable_redis()
|
|
16
41
|
|
|
17
42
|
def disable(self):
|
|
18
|
-
|
|
19
|
-
|
|
43
|
+
"""Disable database profiling and restore original functions."""
|
|
44
|
+
for module_name, original_func in self._original_functions.items():
|
|
45
|
+
if module_name == 'sqlalchemy':
|
|
46
|
+
# SQLAlchemy 2.0+ uses event listeners
|
|
47
|
+
try:
|
|
48
|
+
from sqlalchemy import event
|
|
49
|
+
from sqlalchemy.engine import Engine
|
|
50
|
+
|
|
51
|
+
if isinstance(original_func, dict):
|
|
52
|
+
# Remove event listeners
|
|
53
|
+
if 'before' in original_func:
|
|
54
|
+
event.remove(Engine, "before_cursor_execute", original_func['before'])
|
|
55
|
+
if 'after' in original_func:
|
|
56
|
+
event.remove(Engine, "after_cursor_execute", original_func['after'])
|
|
57
|
+
except (ImportError, AttributeError):
|
|
58
|
+
pass
|
|
59
|
+
elif module_name == 'psycopg2':
|
|
60
|
+
import psycopg2
|
|
61
|
+
psycopg2.connect = original_func
|
|
62
|
+
elif module_name == 'pymysql':
|
|
63
|
+
import pymysql.cursors
|
|
64
|
+
pymysql.cursors.Cursor.execute = original_func
|
|
65
|
+
elif module_name == 'sqlite3':
|
|
66
|
+
import sqlite3
|
|
67
|
+
sqlite3.connect = original_func
|
|
68
|
+
elif module_name == 'pymongo':
|
|
69
|
+
import pymongo.collection
|
|
70
|
+
pymongo.collection.Collection.find = original_func.get('find')
|
|
71
|
+
pymongo.collection.Collection.find_one = original_func.get('find_one')
|
|
72
|
+
pymongo.collection.Collection.insert_one = original_func.get('insert_one')
|
|
73
|
+
pymongo.collection.Collection.update_one = original_func.get('update_one')
|
|
74
|
+
pymongo.collection.Collection.delete_one = original_func.get('delete_one')
|
|
75
|
+
elif module_name == 'redis':
|
|
76
|
+
import redis
|
|
77
|
+
redis.Redis.execute_command = original_func
|
|
78
|
+
|
|
79
|
+
self._original_functions.clear()
|
|
80
|
+
|
|
81
|
+
def _enable_sqlalchemy(self):
|
|
82
|
+
"""Enable profiling for SQLAlchemy."""
|
|
83
|
+
try:
|
|
84
|
+
import sqlalchemy
|
|
85
|
+
from sqlalchemy import event
|
|
86
|
+
from sqlalchemy.engine import Engine
|
|
87
|
+
|
|
88
|
+
if 'sqlalchemy' not in self._original_functions:
|
|
89
|
+
# SQLAlchemy 2.0+ uses event listeners instead of monkey patching
|
|
90
|
+
# We'll use the before_cursor_execute and after_cursor_execute events
|
|
91
|
+
|
|
92
|
+
def receive_before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
|
|
93
|
+
conn.info.setdefault('query_start_time', []).append(time.time())
|
|
94
|
+
return statement, parameters
|
|
95
|
+
|
|
96
|
+
def receive_after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
|
|
97
|
+
try:
|
|
98
|
+
if conn.info.get('query_start_time'):
|
|
99
|
+
start_time = conn.info['query_start_time'].pop()
|
|
100
|
+
execution_time = time.time() - start_time
|
|
101
|
+
|
|
102
|
+
query = str(statement)
|
|
103
|
+
if parameters:
|
|
104
|
+
try:
|
|
105
|
+
query += f" | params: {parameters}"
|
|
106
|
+
except:
|
|
107
|
+
pass
|
|
108
|
+
|
|
109
|
+
DatabaseProfilerSingleton.get_instance()._log_db_query(
|
|
110
|
+
query=query,
|
|
111
|
+
database_type="SQLAlchemy",
|
|
112
|
+
execution_time=execution_time,
|
|
113
|
+
rows_affected=getattr(cursor, 'rowcount', None),
|
|
114
|
+
connection_info=f"SQLAlchemy Engine: {conn.engine.url}"
|
|
115
|
+
)
|
|
116
|
+
except Exception:
|
|
117
|
+
pass # Don't break SQLAlchemy execution
|
|
118
|
+
|
|
119
|
+
# Register event listeners
|
|
120
|
+
event.listen(Engine, "before_cursor_execute", receive_before_cursor_execute)
|
|
121
|
+
event.listen(Engine, "after_cursor_execute", receive_after_cursor_execute)
|
|
122
|
+
|
|
123
|
+
# Store listeners for later removal
|
|
124
|
+
self._original_functions['sqlalchemy'] = {
|
|
125
|
+
'before': receive_before_cursor_execute,
|
|
126
|
+
'after': receive_after_cursor_execute
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
except (ImportError, AttributeError):
|
|
130
|
+
pass
|
|
131
|
+
|
|
132
|
+
def _enable_psycopg2(self):
|
|
133
|
+
"""Enable profiling for psycopg2 (PostgreSQL)."""
|
|
134
|
+
try:
|
|
135
|
+
import psycopg2
|
|
136
|
+
if 'psycopg2' not in self._original_functions:
|
|
137
|
+
# psycopg2.extensions.cursor is a C extension and cannot be monkey-patched directly
|
|
138
|
+
# Use monkey patching through connect function instead
|
|
139
|
+
self._original_functions['psycopg2'] = psycopg2.connect
|
|
140
|
+
psycopg2.connect = self._psycopg2_connect_wrapper(psycopg2.connect)
|
|
141
|
+
except ImportError:
|
|
142
|
+
pass
|
|
143
|
+
|
|
144
|
+
def _enable_pymysql(self):
|
|
145
|
+
"""Enable profiling for pymysql (MySQL)."""
|
|
146
|
+
try:
|
|
147
|
+
import pymysql.cursors
|
|
148
|
+
if 'pymysql' not in self._original_functions:
|
|
149
|
+
self._original_functions['pymysql'] = pymysql.cursors.Cursor.execute
|
|
150
|
+
pymysql.cursors.Cursor.execute = self._pymysql_execute_wrapper(
|
|
151
|
+
pymysql.cursors.Cursor.execute
|
|
152
|
+
)
|
|
153
|
+
except ImportError:
|
|
154
|
+
pass
|
|
155
|
+
|
|
156
|
+
def _enable_sqlite3(self):
|
|
157
|
+
"""Enable profiling for sqlite3."""
|
|
158
|
+
try:
|
|
159
|
+
import sqlite3
|
|
160
|
+
if 'sqlite3' not in self._original_functions:
|
|
161
|
+
# SQLite3 методы нельзя переопределить напрямую
|
|
162
|
+
# Используем monkey patching через connect функцию
|
|
163
|
+
self._original_functions['sqlite3'] = sqlite3.connect
|
|
164
|
+
sqlite3.connect = self._sqlite3_connect_wrapper(sqlite3.connect)
|
|
165
|
+
except ImportError:
|
|
166
|
+
pass
|
|
167
|
+
|
|
168
|
+
def _enable_pymongo(self):
|
|
169
|
+
"""Enable profiling for pymongo (MongoDB)."""
|
|
170
|
+
try:
|
|
171
|
+
import pymongo.collection
|
|
172
|
+
if 'pymongo' not in self._original_functions:
|
|
173
|
+
self._original_functions['pymongo'] = {
|
|
174
|
+
'find': pymongo.collection.Collection.find,
|
|
175
|
+
'find_one': pymongo.collection.Collection.find_one,
|
|
176
|
+
'insert_one': pymongo.collection.Collection.insert_one,
|
|
177
|
+
'update_one': pymongo.collection.Collection.update_one,
|
|
178
|
+
'delete_one': pymongo.collection.Collection.delete_one,
|
|
179
|
+
}
|
|
180
|
+
pymongo.collection.Collection.find = self._pymongo_find_wrapper(
|
|
181
|
+
pymongo.collection.Collection.find
|
|
182
|
+
)
|
|
183
|
+
pymongo.collection.Collection.find_one = self._pymongo_find_one_wrapper(
|
|
184
|
+
pymongo.collection.Collection.find_one
|
|
185
|
+
)
|
|
186
|
+
pymongo.collection.Collection.insert_one = self._pymongo_insert_wrapper(
|
|
187
|
+
pymongo.collection.Collection.insert_one
|
|
188
|
+
)
|
|
189
|
+
pymongo.collection.Collection.update_one = self._pymongo_update_wrapper(
|
|
190
|
+
pymongo.collection.Collection.update_one
|
|
191
|
+
)
|
|
192
|
+
pymongo.collection.Collection.delete_one = self._pymongo_delete_wrapper(
|
|
193
|
+
pymongo.collection.Collection.delete_one
|
|
194
|
+
)
|
|
195
|
+
except ImportError:
|
|
196
|
+
pass
|
|
197
|
+
|
|
198
|
+
def _enable_redis(self):
|
|
199
|
+
"""Enable profiling for redis-py."""
|
|
200
|
+
try:
|
|
201
|
+
import redis
|
|
202
|
+
if 'redis' not in self._original_functions:
|
|
203
|
+
self._original_functions['redis'] = redis.Redis.execute_command
|
|
204
|
+
redis.Redis.execute_command = self._redis_execute_wrapper(
|
|
205
|
+
redis.Redis.execute_command
|
|
206
|
+
)
|
|
207
|
+
except ImportError:
|
|
208
|
+
pass
|
|
209
|
+
|
|
210
|
+
def _sqlalchemy_execute_wrapper(self, func):
|
|
211
|
+
@wraps(func)
|
|
212
|
+
def wrapper(self, statement, *args, **kwargs):
|
|
213
|
+
start_time = time.time()
|
|
214
|
+
query = str(statement) if hasattr(statement, '__str__') else str(statement)
|
|
215
|
+
|
|
216
|
+
try:
|
|
217
|
+
result = func(self, statement, *args, **kwargs)
|
|
218
|
+
execution_time = time.time() - start_time
|
|
219
|
+
|
|
220
|
+
self._log_db_query(
|
|
221
|
+
query=query,
|
|
222
|
+
database_type="SQLAlchemy",
|
|
223
|
+
execution_time=execution_time,
|
|
224
|
+
rows_affected=getattr(result, 'rowcount', None),
|
|
225
|
+
connection_info=f"SQLAlchemy Engine: {self.url}"
|
|
226
|
+
)
|
|
227
|
+
|
|
228
|
+
return result
|
|
229
|
+
except Exception as e:
|
|
230
|
+
execution_time = time.time() - start_time
|
|
231
|
+
if self.track_on_fail:
|
|
232
|
+
self._log_db_query(
|
|
233
|
+
query=query,
|
|
234
|
+
database_type="SQLAlchemy",
|
|
235
|
+
execution_time=execution_time,
|
|
236
|
+
connection_info=f"SQLAlchemy Engine: {self.url}",
|
|
237
|
+
error=str(e)
|
|
238
|
+
)
|
|
239
|
+
raise
|
|
240
|
+
|
|
241
|
+
return wrapper
|
|
242
|
+
|
|
243
|
+
def _psycopg2_connect_wrapper(self, func):
|
|
244
|
+
track_on_fail = self.track_on_fail
|
|
245
|
+
profiler_instance = self # Capture profiler instance
|
|
246
|
+
|
|
247
|
+
class CursorProxy:
|
|
248
|
+
"""Proxy class for psycopg2 cursor to intercept execute method."""
|
|
249
|
+
def __init__(self, cursor, conn):
|
|
250
|
+
self._cursor = cursor
|
|
251
|
+
self._conn = conn
|
|
252
|
+
|
|
253
|
+
def execute(self, query, *args, **kwargs):
|
|
254
|
+
"""Execute query and log it."""
|
|
255
|
+
start_time = time.time()
|
|
256
|
+
error_msg = None
|
|
257
|
+
|
|
258
|
+
try:
|
|
259
|
+
result = self._cursor.execute(query, *args, **kwargs)
|
|
260
|
+
execution_time = time.time() - start_time
|
|
261
|
+
|
|
262
|
+
# Get connection info
|
|
263
|
+
try:
|
|
264
|
+
dsn_params = self._conn.get_dsn_parameters()
|
|
265
|
+
host = dsn_params.get('host', 'localhost')
|
|
266
|
+
except Exception:
|
|
267
|
+
host = 'localhost'
|
|
268
|
+
|
|
269
|
+
# Get rowcount safely
|
|
270
|
+
try:
|
|
271
|
+
rows_affected = self._cursor.rowcount
|
|
272
|
+
except Exception:
|
|
273
|
+
rows_affected = None
|
|
274
|
+
|
|
275
|
+
# Log query - don't let logging break the execution
|
|
276
|
+
try:
|
|
277
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
278
|
+
profiler._log_db_query(
|
|
279
|
+
query=query,
|
|
280
|
+
database_type="PostgreSQL (psycopg2)",
|
|
281
|
+
execution_time=execution_time,
|
|
282
|
+
rows_affected=rows_affected,
|
|
283
|
+
connection_info=f"PostgreSQL: {host}"
|
|
284
|
+
)
|
|
285
|
+
except Exception:
|
|
286
|
+
# Silently ignore logging errors
|
|
287
|
+
pass
|
|
288
|
+
|
|
289
|
+
return result
|
|
290
|
+
except Exception as e:
|
|
291
|
+
error_msg = str(e)
|
|
292
|
+
execution_time = time.time() - start_time
|
|
293
|
+
|
|
294
|
+
if track_on_fail:
|
|
295
|
+
try:
|
|
296
|
+
dsn_params = self._conn.get_dsn_parameters()
|
|
297
|
+
host = dsn_params.get('host', 'localhost')
|
|
298
|
+
except Exception:
|
|
299
|
+
host = 'localhost'
|
|
300
|
+
|
|
301
|
+
# Log error - don't let logging break the exception propagation
|
|
302
|
+
try:
|
|
303
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
304
|
+
profiler._log_db_query(
|
|
305
|
+
query=query,
|
|
306
|
+
database_type="PostgreSQL (psycopg2)",
|
|
307
|
+
execution_time=execution_time,
|
|
308
|
+
connection_info=f"PostgreSQL: {host}",
|
|
309
|
+
error=error_msg
|
|
310
|
+
)
|
|
311
|
+
except Exception:
|
|
312
|
+
# Silently ignore logging errors
|
|
313
|
+
pass
|
|
314
|
+
|
|
315
|
+
# Re-raise the original exception
|
|
316
|
+
raise
|
|
317
|
+
|
|
318
|
+
def __getattr__(self, name):
|
|
319
|
+
"""Delegate all other attributes to the original cursor."""
|
|
320
|
+
return getattr(self._cursor, name)
|
|
321
|
+
|
|
322
|
+
class ConnectionProxy:
|
|
323
|
+
"""Proxy class for psycopg2 connection to intercept cursor creation."""
|
|
324
|
+
def __init__(self, conn):
|
|
325
|
+
self._conn = conn
|
|
326
|
+
|
|
327
|
+
def cursor(self, *args, **kwargs):
|
|
328
|
+
"""Create cursor and return proxy."""
|
|
329
|
+
cursor = self._conn.cursor(*args, **kwargs)
|
|
330
|
+
return CursorProxy(cursor, self._conn)
|
|
331
|
+
|
|
332
|
+
def __getattr__(self, name):
|
|
333
|
+
"""Delegate all other attributes to the original connection."""
|
|
334
|
+
return getattr(self._conn, name)
|
|
335
|
+
|
|
336
|
+
@wraps(func)
|
|
337
|
+
def wrapper(*args, **kwargs):
|
|
338
|
+
# Get the original connection
|
|
339
|
+
conn = func(*args, **kwargs)
|
|
340
|
+
|
|
341
|
+
# Return proxy instead of original connection
|
|
342
|
+
return ConnectionProxy(conn)
|
|
343
|
+
|
|
344
|
+
return wrapper
|
|
345
|
+
|
|
346
|
+
def _psycopg2_execute_wrapper(self, func):
|
|
347
|
+
@wraps(func)
|
|
348
|
+
def wrapper(self, query, *args, **kwargs):
|
|
349
|
+
start_time = time.time()
|
|
350
|
+
|
|
351
|
+
try:
|
|
352
|
+
result = func(self, query, *args, **kwargs)
|
|
353
|
+
execution_time = time.time() - start_time
|
|
354
|
+
|
|
355
|
+
self._log_db_query(
|
|
356
|
+
query=query,
|
|
357
|
+
database_type="PostgreSQL (psycopg2)",
|
|
358
|
+
execution_time=execution_time,
|
|
359
|
+
rows_affected=self.rowcount,
|
|
360
|
+
connection_info=f"PostgreSQL: {self.connection.get_dsn_parameters().get('host', 'localhost')}"
|
|
361
|
+
)
|
|
362
|
+
|
|
363
|
+
return result
|
|
364
|
+
except Exception as e:
|
|
365
|
+
execution_time = time.time() - start_time
|
|
366
|
+
if self.track_on_fail:
|
|
367
|
+
self._log_db_query(
|
|
368
|
+
query=query,
|
|
369
|
+
database_type="PostgreSQL (psycopg2)",
|
|
370
|
+
execution_time=execution_time,
|
|
371
|
+
connection_info=f"PostgreSQL: {self.connection.get_dsn_parameters().get('host', 'localhost')}",
|
|
372
|
+
error=str(e)
|
|
373
|
+
)
|
|
374
|
+
raise
|
|
375
|
+
|
|
376
|
+
return wrapper
|
|
377
|
+
|
|
378
|
+
def _pymysql_execute_wrapper(self, func):
|
|
379
|
+
track_on_fail = self.track_on_fail
|
|
380
|
+
|
|
381
|
+
@wraps(func)
|
|
382
|
+
def wrapper(self, query, *args, **kwargs):
|
|
383
|
+
start_time = time.time()
|
|
384
|
+
error_msg = None
|
|
385
|
+
|
|
386
|
+
try:
|
|
387
|
+
result = func(self, query, *args, **kwargs)
|
|
388
|
+
execution_time = time.time() - start_time
|
|
389
|
+
|
|
390
|
+
# Get connection info safely
|
|
391
|
+
try:
|
|
392
|
+
connection_info = f"MySQL: {self.connection.get_host_info()}"
|
|
393
|
+
except Exception:
|
|
394
|
+
connection_info = "MySQL"
|
|
395
|
+
|
|
396
|
+
# Get rowcount safely
|
|
397
|
+
try:
|
|
398
|
+
rows_affected = self.rowcount
|
|
399
|
+
except Exception:
|
|
400
|
+
rows_affected = None
|
|
401
|
+
|
|
402
|
+
# Log query - don't let logging break the execution
|
|
403
|
+
try:
|
|
404
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
405
|
+
profiler._log_db_query(
|
|
406
|
+
query=query,
|
|
407
|
+
database_type="MySQL (pymysql)",
|
|
408
|
+
execution_time=execution_time,
|
|
409
|
+
rows_affected=rows_affected,
|
|
410
|
+
connection_info=connection_info
|
|
411
|
+
)
|
|
412
|
+
except Exception:
|
|
413
|
+
# Silently ignore logging errors
|
|
414
|
+
pass
|
|
415
|
+
|
|
416
|
+
return result
|
|
417
|
+
except Exception as e:
|
|
418
|
+
error_msg = str(e)
|
|
419
|
+
execution_time = time.time() - start_time
|
|
420
|
+
|
|
421
|
+
if track_on_fail:
|
|
422
|
+
try:
|
|
423
|
+
connection_info = f"MySQL: {self.connection.get_host_info()}"
|
|
424
|
+
except Exception:
|
|
425
|
+
connection_info = "MySQL"
|
|
426
|
+
|
|
427
|
+
# Log error - don't let logging break the exception propagation
|
|
428
|
+
try:
|
|
429
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
430
|
+
profiler._log_db_query(
|
|
431
|
+
query=query,
|
|
432
|
+
database_type="MySQL (pymysql)",
|
|
433
|
+
execution_time=execution_time,
|
|
434
|
+
connection_info=connection_info,
|
|
435
|
+
error=error_msg
|
|
436
|
+
)
|
|
437
|
+
except Exception:
|
|
438
|
+
# Silently ignore logging errors
|
|
439
|
+
pass
|
|
440
|
+
|
|
441
|
+
# Re-raise the original exception
|
|
442
|
+
raise
|
|
443
|
+
|
|
444
|
+
return wrapper
|
|
445
|
+
|
|
446
|
+
def _sqlite3_connect_wrapper(self, func):
|
|
447
|
+
track_on_fail = self.track_on_fail
|
|
448
|
+
|
|
449
|
+
class CursorProxy:
|
|
450
|
+
"""Proxy class for sqlite3 cursor to intercept execute method."""
|
|
451
|
+
def __init__(self, cursor, conn):
|
|
452
|
+
self._cursor = cursor
|
|
453
|
+
self._conn = conn
|
|
454
|
+
|
|
455
|
+
def execute(self, sql, *args, **kwargs):
|
|
456
|
+
"""Execute query and log it."""
|
|
457
|
+
start_time = time.time()
|
|
458
|
+
error_msg = None
|
|
459
|
+
|
|
460
|
+
try:
|
|
461
|
+
result = self._cursor.execute(sql, *args, **kwargs)
|
|
462
|
+
execution_time = time.time() - start_time
|
|
463
|
+
|
|
464
|
+
# Get rowcount safely
|
|
465
|
+
try:
|
|
466
|
+
rows_affected = self._cursor.rowcount
|
|
467
|
+
except Exception:
|
|
468
|
+
rows_affected = None
|
|
469
|
+
|
|
470
|
+
# Log query - don't let logging break the execution
|
|
471
|
+
try:
|
|
472
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
473
|
+
profiler._log_db_query(
|
|
474
|
+
query=sql,
|
|
475
|
+
database_type="SQLite",
|
|
476
|
+
execution_time=execution_time,
|
|
477
|
+
rows_affected=rows_affected,
|
|
478
|
+
connection_info="SQLite"
|
|
479
|
+
)
|
|
480
|
+
except Exception:
|
|
481
|
+
# Silently ignore logging errors
|
|
482
|
+
pass
|
|
483
|
+
|
|
484
|
+
return result
|
|
485
|
+
except Exception as e:
|
|
486
|
+
error_msg = str(e)
|
|
487
|
+
execution_time = time.time() - start_time
|
|
488
|
+
|
|
489
|
+
if track_on_fail:
|
|
490
|
+
# Log error - don't let logging break the exception propagation
|
|
491
|
+
try:
|
|
492
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
493
|
+
profiler._log_db_query(
|
|
494
|
+
query=sql,
|
|
495
|
+
database_type="SQLite",
|
|
496
|
+
execution_time=execution_time,
|
|
497
|
+
connection_info="SQLite",
|
|
498
|
+
error=error_msg
|
|
499
|
+
)
|
|
500
|
+
except Exception:
|
|
501
|
+
# Silently ignore logging errors
|
|
502
|
+
pass
|
|
503
|
+
|
|
504
|
+
# Re-raise the original exception
|
|
505
|
+
raise
|
|
506
|
+
|
|
507
|
+
def __getattr__(self, name):
|
|
508
|
+
"""Delegate all other attributes to the original cursor."""
|
|
509
|
+
return getattr(self._cursor, name)
|
|
510
|
+
|
|
511
|
+
class ConnectionProxy:
|
|
512
|
+
"""Proxy class for sqlite3 connection to intercept cursor creation."""
|
|
513
|
+
def __init__(self, conn):
|
|
514
|
+
self._conn = conn
|
|
515
|
+
|
|
516
|
+
def cursor(self, *args, **kwargs):
|
|
517
|
+
"""Create cursor and return proxy."""
|
|
518
|
+
cursor = self._conn.cursor(*args, **kwargs)
|
|
519
|
+
return CursorProxy(cursor, self._conn)
|
|
520
|
+
|
|
521
|
+
def __getattr__(self, name):
|
|
522
|
+
"""Delegate all other attributes to the original connection."""
|
|
523
|
+
return getattr(self._conn, name)
|
|
524
|
+
|
|
525
|
+
@wraps(func)
|
|
526
|
+
def wrapper(*args, **kwargs):
|
|
527
|
+
# Get the original connection
|
|
528
|
+
conn = func(*args, **kwargs)
|
|
529
|
+
|
|
530
|
+
# Return proxy instead of original connection
|
|
531
|
+
return ConnectionProxy(conn)
|
|
532
|
+
|
|
533
|
+
return wrapper
|
|
534
|
+
|
|
535
|
+
def _sqlite3_execute_wrapper(self, func):
|
|
536
|
+
@wraps(func)
|
|
537
|
+
def wrapper(self, sql, *args, **kwargs):
|
|
538
|
+
start_time = time.time()
|
|
539
|
+
|
|
540
|
+
try:
|
|
541
|
+
result = func(self, sql, *args, **kwargs)
|
|
542
|
+
execution_time = time.time() - start_time
|
|
543
|
+
|
|
544
|
+
self._log_db_query(
|
|
545
|
+
query=sql,
|
|
546
|
+
database_type="SQLite",
|
|
547
|
+
execution_time=execution_time,
|
|
548
|
+
rows_affected=self.rowcount,
|
|
549
|
+
connection_info=f"SQLite: {self.connection.execute('PRAGMA database_list').fetchone()}"
|
|
550
|
+
)
|
|
551
|
+
|
|
552
|
+
return result
|
|
553
|
+
except Exception as e:
|
|
554
|
+
execution_time = time.time() - start_time
|
|
555
|
+
if self.track_on_fail:
|
|
556
|
+
self._log_db_query(
|
|
557
|
+
query=sql,
|
|
558
|
+
database_type="SQLite",
|
|
559
|
+
execution_time=execution_time,
|
|
560
|
+
connection_info="SQLite",
|
|
561
|
+
error=str(e)
|
|
562
|
+
)
|
|
563
|
+
raise
|
|
564
|
+
|
|
565
|
+
return wrapper
|
|
566
|
+
|
|
567
|
+
def _pymongo_find_wrapper(self, func):
|
|
568
|
+
track_on_fail = self.track_on_fail
|
|
569
|
+
|
|
570
|
+
@wraps(func)
|
|
571
|
+
def wrapper(self, filter=None, *args, **kwargs):
|
|
572
|
+
start_time = time.time()
|
|
573
|
+
query = f"find({filter})"
|
|
574
|
+
|
|
575
|
+
try:
|
|
576
|
+
result = func(self, filter, *args, **kwargs)
|
|
577
|
+
execution_time = time.time() - start_time
|
|
578
|
+
|
|
579
|
+
# Get connection info safely
|
|
580
|
+
try:
|
|
581
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
582
|
+
except Exception:
|
|
583
|
+
connection_info = "MongoDB"
|
|
584
|
+
|
|
585
|
+
# Log query - don't let logging break the execution
|
|
586
|
+
try:
|
|
587
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
588
|
+
profiler._log_db_query(
|
|
589
|
+
query=query,
|
|
590
|
+
database_type="MongoDB (pymongo)",
|
|
591
|
+
execution_time=execution_time,
|
|
592
|
+
connection_info=connection_info
|
|
593
|
+
)
|
|
594
|
+
except Exception:
|
|
595
|
+
# Silently ignore logging errors
|
|
596
|
+
pass
|
|
597
|
+
|
|
598
|
+
return result
|
|
599
|
+
except Exception as e:
|
|
600
|
+
execution_time = time.time() - start_time
|
|
601
|
+
if track_on_fail:
|
|
602
|
+
try:
|
|
603
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
604
|
+
except Exception:
|
|
605
|
+
connection_info = "MongoDB"
|
|
606
|
+
|
|
607
|
+
# Log error - don't let logging break the exception propagation
|
|
608
|
+
try:
|
|
609
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
610
|
+
profiler._log_db_query(
|
|
611
|
+
query=query,
|
|
612
|
+
database_type="MongoDB (pymongo)",
|
|
613
|
+
execution_time=execution_time,
|
|
614
|
+
connection_info=connection_info,
|
|
615
|
+
error=str(e)
|
|
616
|
+
)
|
|
617
|
+
except Exception:
|
|
618
|
+
# Silently ignore logging errors
|
|
619
|
+
pass
|
|
620
|
+
raise
|
|
621
|
+
|
|
622
|
+
return wrapper
|
|
623
|
+
|
|
624
|
+
def _pymongo_find_one_wrapper(self, func):
|
|
625
|
+
track_on_fail = self.track_on_fail
|
|
626
|
+
|
|
627
|
+
@wraps(func)
|
|
628
|
+
def wrapper(self, filter=None, *args, **kwargs):
|
|
629
|
+
start_time = time.time()
|
|
630
|
+
query = f"find_one({filter})"
|
|
631
|
+
|
|
632
|
+
try:
|
|
633
|
+
result = func(self, filter, *args, **kwargs)
|
|
634
|
+
execution_time = time.time() - start_time
|
|
635
|
+
|
|
636
|
+
# Get connection info safely
|
|
637
|
+
try:
|
|
638
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
639
|
+
except Exception:
|
|
640
|
+
connection_info = "MongoDB"
|
|
641
|
+
|
|
642
|
+
# Log query - don't let logging break the execution
|
|
643
|
+
try:
|
|
644
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
645
|
+
profiler._log_db_query(
|
|
646
|
+
query=query,
|
|
647
|
+
database_type="MongoDB (pymongo)",
|
|
648
|
+
execution_time=execution_time,
|
|
649
|
+
rows_affected=1 if result is not None else 0,
|
|
650
|
+
connection_info=connection_info
|
|
651
|
+
)
|
|
652
|
+
except Exception:
|
|
653
|
+
# Silently ignore logging errors
|
|
654
|
+
pass
|
|
655
|
+
|
|
656
|
+
return result
|
|
657
|
+
except Exception as e:
|
|
658
|
+
execution_time = time.time() - start_time
|
|
659
|
+
if track_on_fail:
|
|
660
|
+
try:
|
|
661
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
662
|
+
except Exception:
|
|
663
|
+
connection_info = "MongoDB"
|
|
664
|
+
|
|
665
|
+
# Log error - don't let logging break the exception propagation
|
|
666
|
+
try:
|
|
667
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
668
|
+
profiler._log_db_query(
|
|
669
|
+
query=query,
|
|
670
|
+
database_type="MongoDB (pymongo)",
|
|
671
|
+
execution_time=execution_time,
|
|
672
|
+
connection_info=connection_info,
|
|
673
|
+
error=str(e)
|
|
674
|
+
)
|
|
675
|
+
except Exception:
|
|
676
|
+
# Silently ignore logging errors
|
|
677
|
+
pass
|
|
678
|
+
raise
|
|
679
|
+
|
|
680
|
+
return wrapper
|
|
681
|
+
|
|
682
|
+
def _pymongo_insert_wrapper(self, func):
|
|
683
|
+
track_on_fail = self.track_on_fail
|
|
684
|
+
|
|
685
|
+
@wraps(func)
|
|
686
|
+
def wrapper(self, document, *args, **kwargs):
|
|
687
|
+
start_time = time.time()
|
|
688
|
+
query = f"insert_one({document})"
|
|
689
|
+
|
|
690
|
+
try:
|
|
691
|
+
result = func(self, document, *args, **kwargs)
|
|
692
|
+
execution_time = time.time() - start_time
|
|
693
|
+
|
|
694
|
+
# Get connection info safely
|
|
695
|
+
try:
|
|
696
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
697
|
+
except Exception:
|
|
698
|
+
connection_info = "MongoDB"
|
|
699
|
+
|
|
700
|
+
# Log query - don't let logging break the execution
|
|
701
|
+
try:
|
|
702
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
703
|
+
profiler._log_db_query(
|
|
704
|
+
query=query,
|
|
705
|
+
database_type="MongoDB (pymongo)",
|
|
706
|
+
execution_time=execution_time,
|
|
707
|
+
rows_affected=1,
|
|
708
|
+
connection_info=connection_info
|
|
709
|
+
)
|
|
710
|
+
except Exception:
|
|
711
|
+
# Silently ignore logging errors
|
|
712
|
+
pass
|
|
713
|
+
|
|
714
|
+
return result
|
|
715
|
+
except Exception as e:
|
|
716
|
+
execution_time = time.time() - start_time
|
|
717
|
+
if track_on_fail:
|
|
718
|
+
try:
|
|
719
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
720
|
+
except Exception:
|
|
721
|
+
connection_info = "MongoDB"
|
|
722
|
+
|
|
723
|
+
# Log error - don't let logging break the exception propagation
|
|
724
|
+
try:
|
|
725
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
726
|
+
profiler._log_db_query(
|
|
727
|
+
query=query,
|
|
728
|
+
database_type="MongoDB (pymongo)",
|
|
729
|
+
execution_time=execution_time,
|
|
730
|
+
connection_info=connection_info,
|
|
731
|
+
error=str(e)
|
|
732
|
+
)
|
|
733
|
+
except Exception:
|
|
734
|
+
# Silently ignore logging errors
|
|
735
|
+
pass
|
|
736
|
+
raise
|
|
737
|
+
|
|
738
|
+
return wrapper
|
|
739
|
+
|
|
740
|
+
def _pymongo_update_wrapper(self, func):
|
|
741
|
+
track_on_fail = self.track_on_fail
|
|
742
|
+
|
|
743
|
+
@wraps(func)
|
|
744
|
+
def wrapper(self, filter, update, *args, **kwargs):
|
|
745
|
+
start_time = time.time()
|
|
746
|
+
query = f"update_one({filter}, {update})"
|
|
747
|
+
|
|
748
|
+
try:
|
|
749
|
+
result = func(self, filter, update, *args, **kwargs)
|
|
750
|
+
execution_time = time.time() - start_time
|
|
751
|
+
|
|
752
|
+
# Get connection info safely
|
|
753
|
+
try:
|
|
754
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
755
|
+
except Exception:
|
|
756
|
+
connection_info = "MongoDB"
|
|
757
|
+
|
|
758
|
+
# Get modified count safely
|
|
759
|
+
try:
|
|
760
|
+
rows_affected = result.modified_count
|
|
761
|
+
except Exception:
|
|
762
|
+
rows_affected = None
|
|
763
|
+
|
|
764
|
+
# Log query - don't let logging break the execution
|
|
765
|
+
try:
|
|
766
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
767
|
+
profiler._log_db_query(
|
|
768
|
+
query=query,
|
|
769
|
+
database_type="MongoDB (pymongo)",
|
|
770
|
+
execution_time=execution_time,
|
|
771
|
+
rows_affected=rows_affected,
|
|
772
|
+
connection_info=connection_info
|
|
773
|
+
)
|
|
774
|
+
except Exception:
|
|
775
|
+
# Silently ignore logging errors
|
|
776
|
+
pass
|
|
777
|
+
|
|
778
|
+
return result
|
|
779
|
+
except Exception as e:
|
|
780
|
+
execution_time = time.time() - start_time
|
|
781
|
+
if track_on_fail:
|
|
782
|
+
try:
|
|
783
|
+
connection_info = f"MongoDB: {self.database.name}.{self.name}"
|
|
784
|
+
except Exception:
|
|
785
|
+
connection_info = "MongoDB"
|
|
786
|
+
|
|
787
|
+
# Log error - don't let logging break the exception propagation
|
|
788
|
+
try:
|
|
789
|
+
profiler = DatabaseProfilerSingleton.get_instance()
|
|
790
|
+
profiler._log_db_query(
|
|
791
|
+
query=query,
|
|
792
|
+
database_type="MongoDB (pymongo)",
|
|
793
|
+
execution_time=execution_time,
|
|
794
|
+
connection_info=connection_info,
|
|
795
|
+
error=str(e)
|
|
796
|
+
)
|
|
797
|
+
except Exception:
|
|
798
|
+
# Silently ignore logging errors
|
|
799
|
+
pass
|
|
800
|
+
raise
|
|
801
|
+
|
|
802
|
+
return wrapper
|
|
803
|
+
|
|
804
|
+
def _pymongo_delete_wrapper(self, func):
    """Wrap a pymongo delete method so each call is logged as a DB-query step.

    The returned wrapper preserves the wrapped method's calling convention
    (collection instance, filter, *args, **kwargs) and re-raises any
    exception unchanged. Failures inside the profiling/logging path are
    swallowed so they can never affect the caller.
    """
    # Capture the profiler's flag now; inside wrapper() `self` is rebound
    # to the pymongo Collection instance.
    track_on_fail = self.track_on_fail

    @wraps(func)
    def wrapper(self, filter, *args, **kwargs):
        start_time = time.time()
        # Fix: label the query with the actual wrapped method name instead of
        # hard-coding "delete_one" - this wrapper may wrap delete_many too.
        query = f"{getattr(func, '__name__', 'delete_one')}({filter})"

        def connection_info():
            # "MongoDB: <db>.<collection>" when the attributes are reachable,
            # plain "MongoDB" otherwise (e.g. mocks or unusual subclasses).
            try:
                return f"MongoDB: {self.database.name}.{self.name}"
            except Exception:
                return "MongoDB"

        try:
            result = func(self, filter, *args, **kwargs)
            execution_time = time.time() - start_time

            # Deleted count may be unavailable (unacknowledged writes raise).
            try:
                rows_affected = result.deleted_count
            except Exception:
                rows_affected = None

            # Log query - don't let logging break the execution
            try:
                profiler = DatabaseProfilerSingleton.get_instance()
                profiler._log_db_query(
                    query=query,
                    database_type="MongoDB (pymongo)",
                    execution_time=execution_time,
                    rows_affected=rows_affected,
                    connection_info=connection_info()
                )
            except Exception:
                # Silently ignore logging errors
                pass

            return result
        except Exception as e:
            execution_time = time.time() - start_time
            if track_on_fail:
                # Log error - don't let logging break the exception propagation
                try:
                    profiler = DatabaseProfilerSingleton.get_instance()
                    profiler._log_db_query(
                        query=query,
                        database_type="MongoDB (pymongo)",
                        execution_time=execution_time,
                        connection_info=connection_info(),
                        error=str(e)
                    )
                except Exception:
                    # Silently ignore logging errors
                    pass
            raise

    return wrapper
|
|
867
|
+
|
|
868
|
+
def _redis_execute_wrapper(self, func):
    """Wrap redis-py's execute_command so every command becomes a DB-query step.

    The wrapper re-raises any exception from the wrapped call untouched;
    any failure inside the profiling/logging path is silently ignored so
    profiling can never break the caller.
    """
    # Bind the flag now; inside wrapper() `self` is the redis client,
    # not the profiler.
    track_on_fail = self.track_on_fail

    @wraps(func)
    def wrapper(self, command, *args, **kwargs):
        started = time.time()
        rendered = f"{command} {' '.join(map(str, args))}"

        def _connection_label():
            # "Redis: host:port" when the pool exposes its kwargs,
            # plain "Redis" otherwise.
            try:
                kw = self.connection_pool.connection_kwargs
                return f"Redis: {kw.get('host', 'localhost')}:{kw.get('port', 6379)}"
            except Exception:
                return "Redis"

        def _record(elapsed, failure=None):
            # Best effort only - never let logging break the execution
            # or the exception propagation.
            try:
                profiler = DatabaseProfilerSingleton.get_instance()
                if failure is None:
                    profiler._log_db_query(
                        query=rendered,
                        database_type="Redis",
                        execution_time=elapsed,
                        connection_info=_connection_label()
                    )
                else:
                    profiler._log_db_query(
                        query=rendered,
                        database_type="Redis",
                        execution_time=elapsed,
                        connection_info=_connection_label(),
                        error=str(failure)
                    )
            except Exception:
                # Silently ignore logging errors
                pass

        try:
            outcome = func(self, command, *args, **kwargs)
        except Exception as exc:
            elapsed = time.time() - started
            if track_on_fail:
                _record(elapsed, failure=exc)
            # Re-raise the original exception
            raise
        _record(time.time() - started)
        return outcome

    return wrapper
|
|
930
|
+
|
|
931
|
+
def _log_db_query(self, query: str, database_type: str, execution_time: float,
                  rows_affected: Optional[int] = None, connection_info: Optional[str] = None,
                  error: Optional[str] = None):
    """Record a database query as a runtime step.

    Builds a DB_QUERY step carrying the query text and its metadata,
    registers it with the runtime, then immediately finishes it:
    'failed' when an error string was supplied, 'passed' otherwise.
    """
    payload = StepDbQueryData(
        query=query,
        database_type=database_type,
        execution_time=execution_time,
        rows_affected=rows_affected,
        connection_info=connection_info
    )

    db_step = Step(
        id=str(uuid.uuid4()),
        step_type=StepType.DB_QUERY,
        data=payload
    )

    self.runtime.add_step(db_step)

    # A non-empty error marks the step as failed.
    outcome = 'failed' if error else 'passed'
    self.runtime.finish_step(
        id=db_step.id,
        status=outcome
    )
|
|
957
|
+
|
|
958
|
+
|
|
959
|
+
class DatabaseProfilerSingleton:
    """Process-wide singleton holder for the DatabaseProfiler.

    Call init() once (idempotent, thread-safe via double-checked locking),
    then obtain the shared profiler through get_instance(). Direct
    instantiation is forbidden.
    """

    _instance = None
    _lock = threading.Lock()

    @staticmethod
    def init(**kwargs):
        """Create the shared DatabaseProfiler on first call; no-op afterwards."""
        if DatabaseProfilerSingleton._instance is None:
            with DatabaseProfilerSingleton._lock:
                # Double-checked locking: re-test under the lock so
                # concurrent initializers create only one profiler.
                if DatabaseProfilerSingleton._instance is None:
                    DatabaseProfilerSingleton._instance = DatabaseProfiler(**kwargs)

    @staticmethod
    def get_instance() -> "DatabaseProfiler":
        """Return the shared profiler; raises if init() was never called.

        The return annotation is a string (forward reference) so this class
        no longer depends on DatabaseProfiler being defined before it.
        """
        if DatabaseProfilerSingleton._instance is None:
            raise Exception("Init plugin first")
        return DatabaseProfilerSingleton._instance

    def __init__(self):
        """Virtually private constructor - always raises; use get_instance()."""
        raise Exception("Use get_instance()")
|