pyquerytracker 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. examples/async_example.py +63 -0
  2. examples/core/async_usage.py +53 -0
  3. examples/core/basic_usage.py +17 -0
  4. examples/core/error_handling.py +24 -0
  5. examples/core/quick_test.py +39 -0
  6. examples/core/with_arguments.py +27 -0
  7. examples/core/with_config.py +30 -0
  8. examples/exporter/csv_exporter_1.py +36 -0
  9. examples/exporter/json_exporter.py +36 -0
  10. examples/fastapi_app.py +119 -0
  11. examples/test_endpoints.py +73 -0
  12. pyquerytracker/__init__.py +3 -2
  13. pyquerytracker/api.py +72 -0
  14. pyquerytracker/config.py +26 -10
  15. pyquerytracker/core.py +122 -58
  16. pyquerytracker/db/models.py +20 -0
  17. pyquerytracker/db/session.py +8 -0
  18. pyquerytracker/db/writer.py +64 -0
  19. pyquerytracker/exporter/__init__.py +0 -0
  20. pyquerytracker/exporter/base.py +25 -0
  21. pyquerytracker/exporter/csv_exporter.py +52 -0
  22. pyquerytracker/exporter/json_exporter.py +47 -0
  23. pyquerytracker/exporter/manager.py +32 -0
  24. pyquerytracker/main.py +6 -0
  25. pyquerytracker/tracker.py +17 -0
  26. pyquerytracker/utils/logger.py +18 -0
  27. pyquerytracker/websocket.py +33 -0
  28. {pyquerytracker-0.1.0.dist-info → pyquerytracker-0.1.1.dist-info}/METADATA +93 -12
  29. pyquerytracker-0.1.1.dist-info/RECORD +39 -0
  30. {pyquerytracker-0.1.0.dist-info → pyquerytracker-0.1.1.dist-info}/WHEEL +1 -1
  31. {pyquerytracker-0.1.0.dist-info → pyquerytracker-0.1.1.dist-info}/top_level.txt +2 -0
  32. tests/exporter/test_json_exporter.py +182 -0
  33. tests/test_async_core.py +93 -0
  34. tests/test_config.py +40 -0
  35. tests/test_core.py +72 -0
  36. tests/test_dashboard.py +31 -0
  37. tests/test_persist.py +9 -0
  38. tests/test_websocket.py +58 -0
  39. pyquerytracker-0.1.0.dist-info/RECORD +0 -8
  40. {pyquerytracker-0.1.0.dist-info → pyquerytracker-0.1.1.dist-info}/licenses/LICENSE +0 -0
pyquerytracker/core.py CHANGED
import asyncio
import time
from functools import update_wrapper
from typing import Any, Callable, Generic, Optional, TypeVar

from pyquerytracker.config import get_config
from pyquerytracker.db.writer import DBWriter
from pyquerytracker.exporter.base import NullExporter
from pyquerytracker.exporter.manager import ExporterManager
from pyquerytracker.tracker import store_tracked_query
from pyquerytracker.utils.logger import QueryLogger

logger = QueryLogger.get_logger()


T = TypeVar("T")


class TrackQuery(Generic[T]):
    """Class-based decorator that tracks execution time of sync/async callables.

    For every call it records the function name, owning class (if any),
    duration (ms) and arguments; the record is logged, handed to the
    configured exporter, optionally persisted to the DB, and kept in the
    in-memory tracker.

    Usage:
        @TrackQuery()
        def my_function():
            ...
    """

    def __init__(self) -> None:
        self.config = get_config()
        # A concrete exporter needs both a type and a target path;
        # otherwise fall back to a no-op sink so the call sites never branch.
        if self.config.export_type and self.config.export_path:
            exporter = ExporterManager.create_exporter(self.config)
            ExporterManager.set(exporter)
            self.exporter = exporter
        else:
            self.exporter = NullExporter()

    def _extract_class_name(self, args: Any) -> Optional[str]:
        """Return the owning class name for instance/class methods, else None."""
        if args:
            obj = args[0]
            if hasattr(obj, "__class__"):
                return obj.__name__ if isinstance(obj, type) else obj.__class__.__name__
        return None

    # pylint: disable=too-many-positional-arguments
    def _build_log_data(self, func, class_name, duration, args, kwargs, error=None):
        """Assemble the structured payload shared by logger, exporter and DB."""
        data = {
            "event": (
                "error"
                if error
                else (
                    "slow_execution"
                    if duration > self.config.slow_log_threshold_ms
                    else "normal_execution"
                )
            ),
            "function_name": func.__name__,
            "class_name": class_name,
            "duration_ms": duration,
            "func_args": repr(args),
            "func_kwargs": repr(kwargs),
        }
        if error:
            data["error"] = str(error)
        return data

    def _handle_export(self, log_data):
        """Fan the record out to the exporter, the DB (if enabled) and memory."""
        self.exporter.append(log_data)
        if self.config.persist_to_db:
            DBWriter.save(log_data)
        store_tracked_query(log_data)

    def _log_success(self, func, class_name, duration, log_data):
        """Log a completed call, escalating the level past the slow threshold."""
        prefix = f"{class_name}." if class_name else ""
        if duration > self.config.slow_log_threshold_ms:
            logger.log(
                self.config.slow_log_level,
                "%s%s -> Slow execution: took %.2fms",
                prefix,
                func.__name__,
                duration,
                extra=log_data,
            )
        else:
            logger.info(
                "Function %s%s executed successfully in %.2fms",
                prefix,
                func.__name__,
                duration,
                extra=log_data,
            )

    def _log_failure(self, func, class_name, duration, error, log_data):
        """Log a failed call with its traceback attached."""
        logger.error(
            "Function %s%s failed after %.2fms: %s",
            f"{class_name}." if class_name else "",
            func.__name__,
            duration,
            str(error),
            exc_info=True,
            extra=log_data,
        )

    def __call__(self, func: Callable[..., T]) -> Callable[..., T]:
        if asyncio.iscoroutinefunction(func):

            async def async_wrapped(*args: Any, **kwargs: Any) -> T:
                start = time.perf_counter()
                class_name = self._extract_class_name(args)
                try:
                    result = await func(*args, **kwargs)
                except Exception as e:
                    duration = (time.perf_counter() - start) * 1000
                    log_data = self._build_log_data(
                        func, class_name, duration, args, kwargs, error=e
                    )
                    self._log_failure(func, class_name, duration, e, log_data)
                    self._handle_export(log_data)
                    # BUGFIX: re-raise instead of returning None — a tracking
                    # decorator must stay transparent to callers (the wrapper is
                    # annotated `-> T`, and silently swallowing errors hides
                    # failures from the application).
                    raise
                duration = (time.perf_counter() - start) * 1000
                log_data = self._build_log_data(func, class_name, duration, args, kwargs)
                self._log_success(func, class_name, duration, log_data)
                self._handle_export(log_data)
                return result

            return update_wrapper(async_wrapped, func)

        def wrapped(*args: Any, **kwargs: Any) -> T:
            start = time.perf_counter()
            class_name = self._extract_class_name(args)
            try:
                result = func(*args, **kwargs)
            except Exception as e:
                duration = (time.perf_counter() - start) * 1000
                log_data = self._build_log_data(
                    func, class_name, duration, args, kwargs, error=e
                )
                self._log_failure(func, class_name, duration, e, log_data)
                self._handle_export(log_data)
                # BUGFIX: re-raise for the same reason as the async wrapper.
                raise
            duration = (time.perf_counter() - start) * 1000
            log_data = self._build_log_data(func, class_name, duration, args, kwargs)
            self._log_success(func, class_name, duration, log_data)
            self._handle_export(log_data)
            return result

        return update_wrapper(wrapped, func)
from datetime import datetime

from sqlalchemy import Column, DateTime, Float, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class TrackedQuery(Base):
    """ORM row recording one tracked function call."""

    __tablename__ = "tracked_queries"

    id = Column(Integer, primary_key=True, index=True)
    function_name = Column(String)
    class_name = Column(String, nullable=True)
    duration_ms = Column(Float)
    # BUGFIX: pass the callable, not its result. `default=datetime.utcnow()`
    # was evaluated once at import time, stamping every row with the same
    # fixed timestamp.
    timestamp = Column(DateTime, default=datetime.utcnow)
    event = Column(String)  # "slow_execution", "normal_execution", "error"
    func_args = Column(String)
    func_kwargs = Column(String)
    error = Column(String, nullable=True)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# NOTE(review): the SQLite file path is hardcoded inside the installed package
# directory — this fails for read-only installs; consider making it
# configurable. check_same_thread=False lets sessions be used from threads
# other than the creating one; presumably concurrent access is serialized by
# the callers — TODO confirm.
engine = create_engine(
    "sqlite:///pyquerytracker/db/querytracker.db",
    connect_args={"check_same_thread": False},
)
# Session factory; autocommit/autoflush are off, so commits are explicit
# at the call sites (see DBWriter).
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
from datetime import datetime, timedelta, timezone

from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError

from pyquerytracker.db.models import TrackedQuery
from pyquerytracker.db.session import SessionLocal
from pyquerytracker.utils.logger import QueryLogger

logger = QueryLogger.get_logger()


def _utc_now_naive() -> datetime:
    """Current UTC time as a naive datetime.

    The SQLite-backed DateTime column stores naive values; previously `save`
    used an aware `datetime.now(timezone.utc)` while `fetch_all` compared
    against a naive `datetime.utcnow()`, making range queries unreliable.
    Both paths now share this helper.
    """
    return datetime.now(timezone.utc).replace(tzinfo=None)


class DBWriter:
    """Thin persistence facade over the tracked_queries table."""

    @staticmethod
    def save(log_data: dict):
        """Insert one tracked-call record; roll back and log on DB failure."""
        session = SessionLocal()
        try:
            entry = TrackedQuery(
                function_name=log_data.get("function_name"),
                class_name=log_data.get("class_name"),
                duration_ms=log_data.get("duration_ms"),
                event=log_data.get("event"),
                func_args=log_data.get("func_args"),
                func_kwargs=log_data.get("func_kwargs"),
                error=log_data.get("error"),
                # Ensure a timestamp is always set, consistent (naive UTC)
                # with the cutoff used by fetch_all().
                timestamp=log_data.get("timestamp") or _utc_now_naive(),
            )
            session.add(entry)
            session.commit()
        except SQLAlchemyError:
            session.rollback()
            # BUGFIX: report through the package logger (with traceback)
            # instead of print().
            logger.exception("DBWriter error while saving tracked query")
        finally:
            session.close()

    @staticmethod
    def fetch_all(minutes: int = 5):
        """Return records from the last `minutes` minutes, newest first.

        Returns a list of plain dicts; an empty list on DB error.
        """
        session = SessionLocal()
        try:
            cutoff = _utc_now_naive() - timedelta(minutes=minutes)
            stmt = (
                select(TrackedQuery)
                .where(TrackedQuery.timestamp >= cutoff)
                .order_by(TrackedQuery.timestamp.desc())
            )
            results = session.execute(stmt).scalars().all()

            return [
                {
                    "function_name": row.function_name,
                    "class_name": row.class_name,
                    "duration_ms": row.duration_ms,
                    "timestamp": row.timestamp,
                    "event": row.event,
                    "error": row.error,
                    "func_args": row.func_args,
                    "func_kwargs": row.func_kwargs,
                }
                for row in results
            ]

        except SQLAlchemyError:
            # BUGFIX: logger instead of print(); keep the best-effort
            # empty-list contract for callers (e.g. the websocket loop).
            logger.exception("DB fetch error")
            return []
        finally:
            session.close()
File without changes
from abc import ABC, abstractmethod

from pyquerytracker.config import Config


class Exporter(ABC):
    """Interface for sinks that buffer tracked-call records and write them out."""

    def __init__(self, config: Config):
        self.config = config

    @abstractmethod
    def append(self, data: dict) -> None:
        """Queue a single record for a later flush."""

    @abstractmethod
    def flush(self) -> None:
        """Write every queued record to the destination."""


class NullExporter:
    """Do-nothing sink used when no export target is configured."""

    def append(self, log_data):
        pass

    def flush(self):
        pass
import atexit
import csv
import os
from threading import Lock

from pyquerytracker.exporter.base import Exporter
from pyquerytracker.utils.logger import QueryLogger

logger = QueryLogger.get_logger()


class CsvExporter(Exporter):
    """Buffers tracked-call records and appends them to a CSV file on flush."""

    def __init__(self, config):
        super().__init__(config)
        self._lock = Lock()
        self._buffer = []
        # If the file already exists, assume its header was written earlier.
        self._header_written = os.path.exists(self.config.export_path)
        # Ensure logs are flushed when program exits.
        atexit.register(self.flush)

    def append(self, data: dict):
        """Thread-safely queue one record for the next flush."""
        with self._lock:
            self._buffer.append(data)

    def flush(self):
        """Write all buffered records to the CSV file and clear the buffer."""
        with self._lock:
            if not self._buffer:
                return

            # BUGFIX: only create directories when the path actually has a
            # directory part — os.makedirs("") raises FileNotFoundError for
            # bare filenames like "out.csv".
            directory = os.path.dirname(self.config.export_path)
            if directory:
                os.makedirs(directory, exist_ok=True)

            # Gather all possible fieldnames (union of keys) so rows with
            # differing keys (e.g. with/without "error") share one header.
            all_keys = set()
            for entry in self._buffer:
                all_keys.update(entry.keys())
            fieldnames = sorted(all_keys)  # consistent ordering

            with open(self.config.export_path, "a", newline="", encoding="utf-8") as f:
                writer = csv.DictWriter(f, fieldnames=fieldnames)

                if not self._header_written:
                    writer.writeheader()
                    self._header_written = True

                for row in self._buffer:
                    # Fill missing keys with None so each row matches the header.
                    writer.writerow({key: row.get(key) for key in fieldnames})

            logger.info("Flushed %d logs to CSV", len(self._buffer))
            self._buffer.clear()
import atexit
import json
import os
from threading import Lock

from pyquerytracker.exporter.base import Exporter
from pyquerytracker.utils.logger import QueryLogger

logger = QueryLogger.get_logger()


class JsonExporter(Exporter):
    """Buffers tracked-call records and merges them into a JSON array file."""

    def __init__(self, config):
        # Base class already stores config; the previous duplicate
        # `self.config = config` was redundant and has been removed.
        super().__init__(config)
        self._lock = Lock()
        self._buffer = []
        # Ensure logs are flushed when the program exits.
        atexit.register(self.flush)

    def append(self, data: dict):
        """Thread-safely queue one record for the next flush."""
        with self._lock:
            self._buffer.append(data)

    def flush(self):
        """Merge buffered records into the JSON file and clear the buffer."""
        with self._lock:
            if not self._buffer:
                return

            # BUGFIX: only create directories when the path has a directory
            # component — os.makedirs("") raises FileNotFoundError for bare
            # filenames like "out.json".
            directory = os.path.dirname(self.config.export_path)
            if directory:
                os.makedirs(directory, exist_ok=True)

            # Load whatever is already on disk; tolerate a missing, corrupt
            # or non-list file by starting fresh (best-effort export).
            existing_data = []
            if os.path.exists(self.config.export_path):
                try:
                    with open(self.config.export_path, "r", encoding="utf-8") as f:
                        existing_data = json.load(f)
                        if not isinstance(existing_data, list):
                            existing_data = []
                except json.JSONDecodeError:
                    pass

            existing_data.extend(self._buffer)

            # default=str handles datetime values injected by the tracker.
            with open(self.config.export_path, "w", encoding="utf-8") as f:
                json.dump(existing_data, f, indent=2, default=str)

            # Lazy %-args instead of an f-string: no formatting cost when
            # INFO is disabled, and consistent with CsvExporter.
            logger.info("Flushed %d logs to JSON", len(self._buffer))
            self._buffer.clear()
from typing import Optional

from pyquerytracker.config import Config, ExportType
from pyquerytracker.exporter.base import Exporter
from pyquerytracker.exporter.csv_exporter import CsvExporter
from pyquerytracker.exporter.json_exporter import JsonExporter


class ExporterManager:
    """Registry holding the single process-wide exporter instance."""

    # BUGFIX: the attribute starts as None, so the annotation must be
    # Optional[Exporter] (previously annotated as plain `Exporter`).
    _exporter: Optional[Exporter] = None

    # Maps each configured export type to its concrete exporter class.
    _exporter_classes = {
        ExportType.CSV: CsvExporter,
        ExportType.JSON: JsonExporter,
    }

    @classmethod
    def create_exporter(cls, config: Config) -> Exporter:
        """Instantiate, register and return the exporter for `config.export_type`.

        Raises:
            ValueError: when the export type has no registered exporter class.
        """
        exporter_cls = cls._exporter_classes.get(config.export_type)
        if not exporter_cls:
            raise ValueError(f"Unsupported export type: {config.export_type}")
        exporter = exporter_cls(config)
        cls.set(exporter)
        return exporter

    @staticmethod
    def set(exporter: Exporter):
        """Register `exporter` as the process-wide instance."""
        ExporterManager._exporter = exporter

    @staticmethod
    def get() -> Exporter:
        """Return the registered exporter; raise if none has been set."""
        if ExporterManager._exporter is None:
            raise RuntimeError("Exporter not set")
        return ExporterManager._exporter
pyquerytracker/main.py ADDED
# Entry point: run the bundled FastAPI app with `python -m pyquerytracker.main`.
if __name__ == "__main__":
    import uvicorn

    from pyquerytracker.api import app

    # Local-only bind; adjust host/port here to expose the dashboard externally.
    uvicorn.run(app, host="127.0.0.1", port=8000)
from datetime import datetime, timedelta
from typing import Any, Dict, List

# Module-level in-memory buffer of tracked query records.
# NOTE(review): grows without bound in long-lived processes.
query_data_store: List[Dict[str, Any]] = []


def store_tracked_query(log: Dict[str, Any]):
    """Stamp a tracked-query record with the current UTC time and store it."""
    log["timestamp"] = datetime.utcnow()
    query_data_store.append(log)


def get_tracked_queries(minutes: int) -> List[Dict[str, Any]]:
    """Return every stored record whose timestamp is within the last `minutes`."""
    oldest_allowed = datetime.utcnow() - timedelta(minutes=minutes)
    return [
        record
        for record in query_data_store
        if record["timestamp"] >= oldest_allowed
    ]
import logging


class QueryLogger:
    """Factory for the package's shared, pre-configured logger."""

    @staticmethod
    def get_logger(name: str = "pyquerytracker") -> logging.Logger:
        """Return the named logger, attaching a stream handler on first use.

        Configuration runs only once per logger name, so repeated calls
        never stack duplicate handlers.
        """
        log = logging.getLogger(name)

        if not log.handlers:
            stream_handler = logging.StreamHandler()
            stream_handler.setFormatter(
                logging.Formatter(
                    "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
                )
            )
            log.addHandler(stream_handler)
            log.setLevel(logging.INFO)

        return log
import asyncio
from typing import List

from fastapi import WebSocket, WebSocketDisconnect

from pyquerytracker.db.writer import DBWriter

# Currently-open dashboard sockets; mutated by the endpoint and broadcast().
connected_clients: List[WebSocket] = []


async def websocket_endpoint(websocket: WebSocket):
    """Accept a client and push the last 5 minutes of DB logs every 2 seconds.

    The client is deregistered in the finally block whether it disconnects
    cleanly or the loop fails.
    """
    await websocket.accept()
    connected_clients.append(websocket)
    try:
        while True:
            await asyncio.sleep(2)  # every 2 seconds
            # NOTE(review): fetches from the DB once per client per tick;
            # a shared fetch + broadcast would scale better — confirm load.
            recent_logs = DBWriter.fetch_all(minutes=5)  # or a custom method
            await websocket.send_json(recent_logs)
    except WebSocketDisconnect:
        pass
    finally:
        connected_clients.remove(websocket)


async def broadcast(message: str):
    """Send `message` to every connected client, dropping any that fail.

    Failed clients are collected first and removed after the send loop so
    the list is not mutated while being iterated.
    """
    disconnected = []
    for client in connected_clients:
        try:
            await client.send_text(message)
        except Exception:
            disconnected.append(client)
    for client in disconnected:
        connected_clients.remove(client)