brakit 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brakit-0.1.0/.gitignore +6 -0
- brakit-0.1.0/PKG-INFO +37 -0
- brakit-0.1.0/brakit/__init__.py +5 -0
- brakit-0.1.0/brakit/_setup.py +162 -0
- brakit-0.1.0/brakit/adapters/__init__.py +46 -0
- brakit-0.1.0/brakit/adapters/_normalize.py +82 -0
- brakit-0.1.0/brakit/adapters/_protocol.py +16 -0
- brakit-0.1.0/brakit/adapters/asyncpg.py +131 -0
- brakit-0.1.0/brakit/adapters/sqlalchemy.py +121 -0
- brakit-0.1.0/brakit/constants/__init__.py +102 -0
- brakit-0.1.0/brakit/constants/encoding.py +5 -0
- brakit-0.1.0/brakit/constants/events.py +14 -0
- brakit-0.1.0/brakit/constants/headers.py +24 -0
- brakit-0.1.0/brakit/constants/limits.py +16 -0
- brakit-0.1.0/brakit/constants/logger.py +4 -0
- brakit-0.1.0/brakit/constants/network.py +35 -0
- brakit-0.1.0/brakit/constants/patterns.py +7 -0
- brakit-0.1.0/brakit/constants/routes.py +5 -0
- brakit-0.1.0/brakit/constants/transport.py +19 -0
- brakit-0.1.0/brakit/core/__init__.py +14 -0
- brakit-0.1.0/brakit/core/circuit_breaker.py +49 -0
- brakit-0.1.0/brakit/core/context.py +80 -0
- brakit-0.1.0/brakit/core/decompress.py +34 -0
- brakit-0.1.0/brakit/core/event_bus.py +47 -0
- brakit-0.1.0/brakit/core/guards.py +32 -0
- brakit-0.1.0/brakit/core/registry.py +25 -0
- brakit-0.1.0/brakit/core/safe_wrap.py +69 -0
- brakit-0.1.0/brakit/core/sanitize.py +59 -0
- brakit-0.1.0/brakit/frameworks/__init__.py +24 -0
- brakit-0.1.0/brakit/frameworks/_protocol.py +18 -0
- brakit-0.1.0/brakit/frameworks/_shared.py +12 -0
- brakit-0.1.0/brakit/frameworks/fastapi.py +213 -0
- brakit-0.1.0/brakit/frameworks/flask.py +170 -0
- brakit-0.1.0/brakit/hooks/__init__.py +53 -0
- brakit-0.1.0/brakit/hooks/_shared.py +39 -0
- brakit-0.1.0/brakit/hooks/aiohttp_hook.py +73 -0
- brakit-0.1.0/brakit/hooks/exceptions.py +78 -0
- brakit-0.1.0/brakit/hooks/http_client.py +75 -0
- brakit-0.1.0/brakit/hooks/httpx_hook.py +93 -0
- brakit-0.1.0/brakit/hooks/logging.py +66 -0
- brakit-0.1.0/brakit/py.typed +0 -0
- brakit-0.1.0/brakit/store/__init__.py +14 -0
- brakit-0.1.0/brakit/store/base.py +68 -0
- brakit-0.1.0/brakit/store/error_store.py +7 -0
- brakit-0.1.0/brakit/store/fetch_store.py +7 -0
- brakit-0.1.0/brakit/store/log_store.py +7 -0
- brakit-0.1.0/brakit/store/query_store.py +7 -0
- brakit-0.1.0/brakit/store/request_store.py +20 -0
- brakit-0.1.0/brakit/transport/__init__.py +8 -0
- brakit-0.1.0/brakit/transport/discovery.py +59 -0
- brakit-0.1.0/brakit/transport/forwarder.py +118 -0
- brakit-0.1.0/brakit/transport/port_file.py +71 -0
- brakit-0.1.0/brakit/types/__init__.py +26 -0
- brakit-0.1.0/brakit/types/events.py +15 -0
- brakit-0.1.0/brakit/types/http.py +23 -0
- brakit-0.1.0/brakit/types/telemetry.py +63 -0
- brakit-0.1.0/pyproject.toml +62 -0
- brakit-0.1.0/tests/adapters/__init__.py +0 -0
- brakit-0.1.0/tests/conftest.py +61 -0
- brakit-0.1.0/tests/frameworks/__init__.py +0 -0
- brakit-0.1.0/tests/frameworks/test_fastapi.py +120 -0
- brakit-0.1.0/tests/frameworks/test_flask.py +118 -0
- brakit-0.1.0/tests/test_circuit_breaker.py +41 -0
- brakit-0.1.0/tests/test_context.py +40 -0
- brakit-0.1.0/tests/test_decompress.py +39 -0
- brakit-0.1.0/tests/test_event_bus.py +81 -0
- brakit-0.1.0/tests/test_guards.py +44 -0
- brakit-0.1.0/tests/test_normalize.py +106 -0
- brakit-0.1.0/tests/test_safe_wrap.py +75 -0
- brakit-0.1.0/tests/test_sanitize.py +76 -0
- brakit-0.1.0/tests/test_stores.py +113 -0
- brakit-0.1.0/tests/transport/__init__.py +0 -0
brakit-0.1.0/.gitignore
ADDED
brakit-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: brakit
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Zero-config observability for Python web frameworks
|
|
5
|
+
License-Expression: MIT
|
|
6
|
+
Keywords: debugging,fastapi,flask,observability,telemetry
|
|
7
|
+
Classifier: Development Status :: 3 - Alpha
|
|
8
|
+
Classifier: Framework :: FastAPI
|
|
9
|
+
Classifier: Framework :: Flask
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
18
|
+
Classifier: Topic :: Software Development :: Debuggers
|
|
19
|
+
Classifier: Topic :: System :: Monitoring
|
|
20
|
+
Classifier: Typing :: Typed
|
|
21
|
+
Requires-Python: >=3.9
|
|
22
|
+
Provides-Extra: dev
|
|
23
|
+
Requires-Dist: fastapi>=0.100; extra == 'dev'
|
|
24
|
+
Requires-Dist: flask>=2.0; extra == 'dev'
|
|
25
|
+
Requires-Dist: httpx>=0.24; extra == 'dev'
|
|
26
|
+
Requires-Dist: pytest-asyncio>=0.21; extra == 'dev'
|
|
27
|
+
Requires-Dist: pytest-cov>=4.0; extra == 'dev'
|
|
28
|
+
Requires-Dist: pytest>=7.0; extra == 'dev'
|
|
29
|
+
Requires-Dist: sqlalchemy>=2.0; extra == 'dev'
|
|
30
|
+
Provides-Extra: fastapi
|
|
31
|
+
Requires-Dist: fastapi>=0.100; extra == 'fastapi'
|
|
32
|
+
Requires-Dist: uvicorn[standard]>=0.20; extra == 'fastapi'
|
|
33
|
+
Provides-Extra: flask
|
|
34
|
+
Requires-Dist: flask>=2.0; extra == 'flask'
|
|
35
|
+
Description-Content-Type: text/plain
|
|
36
|
+
|
|
37
|
+
Zero-config observability for Python web frameworks.
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
"""Brakit SDK initialization. Wires stores, hooks, adapters, transport, and frameworks."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import dataclasses
|
|
5
|
+
import logging
|
|
6
|
+
import threading
|
|
7
|
+
import time
|
|
8
|
+
|
|
9
|
+
from brakit.constants.events import (
|
|
10
|
+
CHANNEL_REQUEST_COMPLETED,
|
|
11
|
+
CHANNEL_TELEMETRY_ERROR,
|
|
12
|
+
CHANNEL_TELEMETRY_FETCH,
|
|
13
|
+
CHANNEL_TELEMETRY_LOG,
|
|
14
|
+
CHANNEL_TELEMETRY_QUERY,
|
|
15
|
+
EVENT_TYPE_ERROR,
|
|
16
|
+
EVENT_TYPE_FETCH,
|
|
17
|
+
EVENT_TYPE_LOG,
|
|
18
|
+
EVENT_TYPE_QUERY,
|
|
19
|
+
EVENT_TYPE_REQUEST,
|
|
20
|
+
)
|
|
21
|
+
from brakit.constants.logger import LOGGER_NAME
|
|
22
|
+
from brakit.constants.transport import PORT_RETRY_COUNT, PORT_RETRY_INTERVAL_S
|
|
23
|
+
from brakit.core.guards import should_activate
|
|
24
|
+
from brakit.types.events import EventType, SDKEvent
|
|
25
|
+
from brakit.types.http import TracedRequest
|
|
26
|
+
from brakit.types.telemetry import TelemetryEntry
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(LOGGER_NAME)
|
|
29
|
+
|
|
30
|
+
_init_lock = threading.Lock()
|
|
31
|
+
_initialized = False
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _auto_setup() -> None:
    """Idempotently initialize the SDK: stores, hooks, adapters, transport, frameworks.

    Safe to call from multiple threads; only the first caller performs the work.
    Does nothing when should_activate() rules out this environment.
    """
    global _initialized
    # Claim the one-shot initialization slot under the lock; later callers bail.
    with _init_lock:
        already_claimed = _initialized
        _initialized = True
    if already_claimed:
        return

    if not should_activate():
        logger.debug("skipped (production/CI/cloud/disabled)")
        return

    registry = _create_registry()
    _install_hooks(registry)
    installed = _install_adapters(registry)
    logger.debug("adapters: %s", installed)
    _start_transport(registry)
    _install_frameworks(registry)

    logger.debug("initialized")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _create_registry() -> "ServiceRegistry":
    """Build the shared service registry with a fresh event bus and one store per telemetry kind."""
    from brakit.core.event_bus import EventBus
    from brakit.core.registry import ServiceRegistry
    from brakit.store.error_store import ErrorStore
    from brakit.store.fetch_store import FetchStore
    from brakit.store.log_store import LogStore
    from brakit.store.query_store import QueryStore
    from brakit.store.request_store import RequestStore

    stores = {
        "request_store": RequestStore(),
        "query_store": QueryStore(),
        "fetch_store": FetchStore(),
        "log_store": LogStore(),
        "error_store": ErrorStore(),
    }
    return ServiceRegistry(bus=EventBus(), **stores)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _install_hooks(registry: "ServiceRegistry") -> None:
    """Install logging / HTTP-client / exception hooks, wired to the registry's stores and bus."""
    from brakit.hooks import patch_all

    patch_all(registry.log_store, registry.fetch_store, registry.error_store, registry.bus)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _install_adapters(registry: "ServiceRegistry") -> list[str]:
    """Detect and patch database adapters; return the names of the ones activated."""
    from brakit.adapters import detect_and_patch as detect_db_adapters

    return detect_db_adapters(registry.query_store, registry.bus)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def _start_transport(registry: "ServiceRegistry") -> None:
    """Connect to the companion server if its port is known, else poll in the background.

    Falls back to standalone mode (writing our own port file) when discovery
    never succeeds.
    """
    from brakit.transport.discovery import discover_port
    from brakit.transport.port_file import cleanup_stale_port_file, enable_port_writing

    cleanup_stale_port_file()

    known_port = discover_port()
    if known_port is not None:
        _setup_forwarder(registry, known_port)
        return

    # Port file not found — the Node.js server may not have received its first
    # request yet (the port file is written on first request, not on startup).
    # Retry discovery in a background thread so we don't block import.
    def _poll() -> None:
        attempts = 0
        while attempts < PORT_RETRY_COUNT:
            attempts += 1
            time.sleep(PORT_RETRY_INTERVAL_S)
            port = discover_port()
            if port is not None:
                _setup_forwarder(registry, port)
                return
        logger.debug("no port found after retries, standalone mode")
        enable_port_writing()

    worker = threading.Thread(target=_poll, daemon=True, name="brakit-port-discovery")
    worker.start()
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _setup_forwarder(registry: "ServiceRegistry", port: int) -> None:
    """Start a Forwarder on *port* and bridge bus channels onto it."""
    from brakit.transport.forwarder import Forwarder

    fwd = Forwarder(port=port)
    fwd.start()

    registry.bus.on(CHANNEL_REQUEST_COMPLETED, lambda r: _forward_request(fwd, r))

    # Telemetry channels all share one forwarding shape; wire them from a table.
    channel_to_type = (
        (CHANNEL_TELEMETRY_FETCH, EVENT_TYPE_FETCH),
        (CHANNEL_TELEMETRY_LOG, EVENT_TYPE_LOG),
        (CHANNEL_TELEMETRY_ERROR, EVENT_TYPE_ERROR),
        (CHANNEL_TELEMETRY_QUERY, EVENT_TYPE_QUERY),
    )
    for channel, event_type in channel_to_type:
        # Bind event_type as a default argument to avoid late-binding closures.
        registry.bus.on(channel, lambda e, _t=event_type: _forward_telemetry(fwd, _t, e))

    logger.debug("transport ready on port %d", port)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _install_frameworks(registry: "ServiceRegistry") -> None:
    """Detect installed web frameworks (FastAPI/Flask) and patch them with the registry."""
    from brakit.frameworks import detect_and_patch as detect_frameworks

    detect_frameworks(registry)
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _forward_request(forwarder: "Forwarder", request: TracedRequest) -> None:
    """Serialize a completed request as a camelCase SDKEvent and send it."""
    fields = dataclasses.asdict(request)
    forwarder.send(
        SDKEvent(
            type=EVENT_TYPE_REQUEST,
            request_id=fields.get("id"),
            timestamp=time.time() * 1_000,  # epoch millis
            data={_to_camel(key): value for key, value in fields.items()},
        )
    )
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def _to_camel(name: str) -> str:
|
|
149
|
+
parts = name.split("_")
|
|
150
|
+
return parts[0] + "".join(p.capitalize() for p in parts[1:])
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def _forward_telemetry(forwarder: "Forwarder", event_type: EventType, entry: TelemetryEntry) -> None:
    """Serialize one telemetry entry as a camelCase SDKEvent and send it."""
    fields = dataclasses.asdict(entry)
    forwarder.send(
        SDKEvent(
            type=event_type,
            request_id=fields.get("parent_request_id"),
            timestamp=time.time() * 1_000,  # epoch millis
            data={_to_camel(key): value for key, value in fields.items()},
        )
    )
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"""Detect and patch database adapters (SQLAlchemy, etc.)."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import logging
|
|
5
|
+
from typing import TYPE_CHECKING
|
|
6
|
+
|
|
7
|
+
from brakit.constants.logger import LOGGER_NAME
|
|
8
|
+
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from brakit.adapters._protocol import DBAdapter
|
|
11
|
+
from brakit.core.event_bus import EventBus
|
|
12
|
+
from brakit.store import QueryStore
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(LOGGER_NAME)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def detect_and_patch(
    query_store: QueryStore,
    bus: EventBus,
) -> list[str]:
    """Patch whichever DB library is importable; return names of active adapters.

    SQLAlchemy hooks into engine events and captures all queries including
    those routed through asyncpg. If both are present, only use SQLAlchemy
    to avoid double-capturing.
    """
    from brakit.adapters.asyncpg import AsyncpgAdapter
    from brakit.adapters.sqlalchemy import SQLAlchemyAdapter

    active: list[str] = []

    def _try_patch(adapter: DBAdapter) -> bool:
        """Patch *adapter* if its library is present; True when it was detected."""
        if not adapter.detect():
            return False
        try:
            adapter.patch(query_store, bus)
            active.append(adapter.name)
        except Exception:
            logger.debug("failed to patch %s adapter", adapter.name, exc_info=True)
        return True

    # SQLAlchemy takes precedence; asyncpg is only tried when SQLAlchemy is absent.
    if not _try_patch(SQLAlchemyAdapter()):
        _try_patch(AsyncpgAdapter())

    return active
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"""SQL normalization: operation detection, table extraction, noise filtering."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import re
|
|
5
|
+
|
|
6
|
+
from brakit.types.telemetry import NormalizedOp
|
|
7
|
+
|
|
8
|
+
# Leading SQL verb; statements that start with anything else (DDL, WITH/CTE,
# COPY, ...) normalize to the "OTHER" operation.
_OP_PATTERN = re.compile(
    r"^\s*(SELECT|INSERT|UPDATE|DELETE)\b",
    re.IGNORECASE,
)

# First table-like identifier after FROM/INTO/UPDATE/JOIN, skipping an optional
# quote character and schema prefix. Best-effort only, not a full SQL parser.
_TABLE_PATTERN = re.compile(
    r"(?:FROM|INTO|UPDATE|JOIN)\s+[\"'`]?(?:\w+\.)?[\"'`]?(\w+)",
    re.IGNORECASE,
)

# Maps the matched verb to its NormalizedOp literal; misses fall back to "OTHER".
_VALID_OPS: dict[str, NormalizedOp] = {
    "SELECT": "SELECT",
    "INSERT": "INSERT",
    "UPDATE": "UPDATE",
    "DELETE": "DELETE",
}

# Transaction-management statements emitted by ORMs / drivers, not user queries.
_TRANSACTION_PREFIXES: tuple[str, ...] = (
    "BEGIN", "COMMIT", "ROLLBACK", "SAVEPOINT", "RELEASE SAVEPOINT",
)

# Substrings that identify asyncpg/psycopg driver-internal type introspection.
_INTERNAL_QUERY_MARKERS: tuple[str, ...] = ("pg_catalog",)

# Exact prefixes for driver probe/setup queries (e.g. asyncpg search_path probe).
_INTERNAL_EXACT_PREFIXES: tuple[str, ...] = ("set ",)

# Exact strings for driver probe queries (e.g. asyncpg "select public" search_path check).
_INTERNAL_EXACT_QUERIES: tuple[str, ...] = ("select public",)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def normalize_sql(sql: str) -> tuple[NormalizedOp, str]:
    """Classify *sql* into (operation, table) on a best-effort basis.

    The operation is one of SELECT/INSERT/UPDATE/DELETE or "OTHER"; the table
    is the first identifier after FROM/INTO/UPDATE/JOIN, or "" when none is
    found.
    """
    verb_match = _OP_PATTERN.match(sql)
    op: NormalizedOp = (
        _VALID_OPS.get(verb_match.group(1).upper(), "OTHER") if verb_match else "OTHER"
    )

    table_match = _TABLE_PATTERN.search(sql)
    table = table_match.group(1) if table_match else ""

    return op, table
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def is_noise_query(sql: str) -> bool:
    """Return True for transaction management, driver-internal, and setup queries.

    Centralises noise filtering so every database adapter shares one definition
    of what counts as a non-application query.
    """
    stripped = sql.strip()
    upper = stripped.rstrip(";").upper()

    # Transaction bookkeeping: the bare keyword or keyword followed by arguments.
    if any(upper == p or upper.startswith(p + " ") for p in _TRANSACTION_PREFIXES):
        return True

    low = stripped.lower()

    # Driver type-introspection SELECTs (e.g. asyncpg/psycopg reading pg_catalog).
    if low.startswith("select") and any(marker in low for marker in _INTERNAL_QUERY_MARKERS):
        return True

    # Session-setup statements such as "set search_path ...".
    if low.startswith(_INTERNAL_EXACT_PREFIXES):
        return True

    # Exact probe queries; strip trailing semicolons before comparing.
    return low.rstrip(";").rstrip() in _INTERNAL_EXACT_QUERIES
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"""Protocol definition for database adapters."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
from typing import Protocol, runtime_checkable
|
|
5
|
+
|
|
6
|
+
from brakit.core.event_bus import EventBus
|
|
7
|
+
from brakit.store.query_store import QueryStore
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@runtime_checkable
class DBAdapter(Protocol):
    """Structural interface every database adapter implements."""

    # Short identifier reported in the active-adapter list (e.g. "sqlalchemy").
    name: str

    def detect(self) -> bool:
        """Return True when the adapter's target library is importable."""
        ...

    def patch(self, query_store: QueryStore, bus: EventBus) -> None:
        """Install query instrumentation that feeds *query_store* and *bus*."""
        ...

    def unpatch(self) -> None:
        """Undo patch(), restoring the library's original behavior."""
        ...
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"""asyncpg adapter: monkey-patches Pool methods to capture query telemetry."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import logging
|
|
5
|
+
import time
|
|
6
|
+
import uuid
|
|
7
|
+
from collections.abc import Callable, Coroutine
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
from brakit.adapters._normalize import is_noise_query, normalize_sql
|
|
11
|
+
from brakit.constants.events import CHANNEL_TELEMETRY_QUERY
|
|
12
|
+
from brakit.constants.limits import MAX_SQL_LENGTH
|
|
13
|
+
from brakit.constants.logger import LOGGER_NAME
|
|
14
|
+
from brakit.core.context import get_fetch_id, get_request_id
|
|
15
|
+
from brakit.core.event_bus import EventBus
|
|
16
|
+
from brakit.store.query_store import QueryStore
|
|
17
|
+
from brakit.types.telemetry import TracedQuery
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(LOGGER_NAME)
|
|
20
|
+
|
|
21
|
+
# All asyncpg methods that accept a SQL string as their first positional
# argument. Connection.prepare() is excluded because it does not execute
# immediately. Pool and Connection expose the same four entry points, but
# they are patched on their own classes, so both lists are kept.
_POOL_METHODS = ("fetch", "fetchrow", "fetchval", "execute")
_CONN_METHODS = ("fetch", "fetchrow", "fetchval", "execute")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class AsyncpgAdapter:
    """Monkey-patches asyncpg Pool/Connection query methods to capture telemetry.

    All mutable state is class-level: the patch is process-global, so the
    originals and the patched flag are shared across adapter instances.
    """

    name = "asyncpg"

    # Process-global patch state (class attributes, shared by all instances).
    _patched = False
    _originals_pool: dict[str, Any] = {}
    _originals_conn: dict[str, Any] = {}

    def detect(self) -> bool:
        """Return True when asyncpg is importable."""
        try:
            import asyncpg  # noqa: F401
            return True
        except ImportError:
            return False

    def patch(self, query_store: QueryStore, bus: EventBus) -> None:
        """Replace Pool/Connection query methods with timing/telemetry wrappers.

        Idempotent: a second call is a no-op while the patch is active.
        """
        if AsyncpgAdapter._patched:
            return

        try:
            import asyncpg.pool
            import asyncpg.connection
        except ImportError:
            return

        pool_cls = asyncpg.pool.Pool
        conn_cls = asyncpg.connection.Connection

        # Save each original before replacing it so unpatch() can restore it.
        for method_name in _POOL_METHODS:
            original = getattr(pool_cls, method_name, None)
            if original is None:
                continue
            AsyncpgAdapter._originals_pool[method_name] = original
            wrapped = _make_wrapper(original, query_store, bus)
            setattr(pool_cls, method_name, wrapped)

        for method_name in _CONN_METHODS:
            original = getattr(conn_cls, method_name, None)
            if original is None:
                continue
            AsyncpgAdapter._originals_conn[method_name] = original
            wrapped = _make_wrapper(original, query_store, bus)
            setattr(conn_cls, method_name, wrapped)

        AsyncpgAdapter._patched = True

    def unpatch(self) -> None:
        """Restore the saved original methods and clear the patch state.

        Best-effort: failures are logged at debug level, and the patched flag
        is cleared regardless so a later patch() can run again.
        """
        if not AsyncpgAdapter._patched:
            return

        try:
            import asyncpg.pool
            import asyncpg.connection

            for method_name, original in AsyncpgAdapter._originals_pool.items():
                setattr(asyncpg.pool.Pool, method_name, original)
            for method_name, original in AsyncpgAdapter._originals_conn.items():
                setattr(asyncpg.connection.Connection, method_name, original)
            AsyncpgAdapter._originals_pool.clear()
            AsyncpgAdapter._originals_conn.clear()
        except Exception:
            logger.debug("asyncpg unpatch failed", exc_info=True)

        AsyncpgAdapter._patched = False
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _make_wrapper(
    original: Callable[..., Coroutine[Any, Any, Any]],
    query_store: QueryStore,
    bus: EventBus,
) -> Callable[..., Coroutine[Any, Any, Any]]:
    """Wrap an asyncpg coroutine method so each executed query is recorded.

    *original* must take the SQL string as its first positional argument after
    ``self`` (true for fetch/fetchrow/fetchval/execute). The wrapper times the
    call and records a TracedQuery to the store and the event bus.
    """
    async def wrapper(self: Any, query: str, *args: Any, **kwargs: Any) -> Any:
        # Skip telemetry for queries outside a request context (connection setup)
        # or known asyncpg internal queries.
        request_id = get_request_id()
        should_capture = request_id is not None and not is_noise_query(query)

        if not should_capture:
            return await original(self, query, *args, **kwargs)

        start = time.perf_counter()
        try:
            result = await original(self, query, *args, **kwargs)
            return result
        finally:
            # The finally block runs for successes and failures alike, so
            # failed queries are captured too. Telemetry errors are swallowed:
            # instrumentation must never break the application's DB call.
            duration_ms = (time.perf_counter() - start) * 1_000
            try:
                operation, table = normalize_sql(query)
                entry = TracedQuery(
                    id=uuid.uuid4().hex,
                    parent_request_id=request_id,
                    timestamp=time.time() * 1_000,  # wall-clock epoch millis
                    driver="asyncpg",
                    sql=query[:MAX_SQL_LENGTH] if query else None,
                    operation=operation,
                    table=table,
                    duration_ms=round(duration_ms, 2),
                    parent_fetch_id=get_fetch_id(),
                )
                query_store.add(entry)
                bus.emit(CHANNEL_TELEMETRY_QUERY, entry)
            except Exception:
                logger.debug("asyncpg telemetry capture failed", exc_info=True)

    return wrapper
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""SQLAlchemy adapter: hooks into engine events to capture queries."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import logging
|
|
5
|
+
import time
|
|
6
|
+
import uuid
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from brakit.adapters._normalize import is_noise_query, normalize_sql
|
|
10
|
+
from brakit.constants.events import CHANNEL_TELEMETRY_QUERY
|
|
11
|
+
from brakit.constants.limits import MAX_SQL_LENGTH
|
|
12
|
+
from brakit.constants.logger import LOGGER_NAME
|
|
13
|
+
from brakit.core.context import get_fetch_id, get_request_id
|
|
14
|
+
from brakit.core.event_bus import EventBus
|
|
15
|
+
from brakit.store.query_store import QueryStore
|
|
16
|
+
from brakit.types.telemetry import TracedQuery
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(LOGGER_NAME)
|
|
19
|
+
|
|
20
|
+
# Keys used to stash per-execution state on SQLAlchemy's Connection.info dict
# between the before- and after-cursor-execute events.
_SA_INFO_START_TIME = "_brakit_start"
_SA_INFO_SQL = "_brakit_sql"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class SQLAlchemyAdapter:
    """Hooks SQLAlchemy engine cursor events to capture query telemetry.

    State is class-level because the event listeners are registered on the
    Engine class itself (process-global), not on a particular engine instance.
    """

    name = "sqlalchemy"

    # Process-global patch state shared by all instances.
    _patched = False
    _on_before_listener: Any = None
    _on_after_listener: Any = None

    def detect(self) -> bool:
        """Return True when sqlalchemy is importable."""
        try:
            import sqlalchemy  # noqa: F401
            return True
        except ImportError:
            return False

    def patch(self, query_store: QueryStore, bus: EventBus) -> None:
        """Register before/after cursor-execute listeners on the Engine class.

        Idempotent: a second call is a no-op while the patch is active. The
        listener closures capture *query_store* and *bus*.
        """
        if SQLAlchemyAdapter._patched:
            return

        try:
            from sqlalchemy import event
            from sqlalchemy.engine import Engine
        except ImportError:
            return

        @event.listens_for(Engine, "before_cursor_execute")
        def _on_before_execute(
            conn: Any,
            cursor: Any,
            statement: str,
            parameters: Any,
            context: Any,
            executemany: bool,
        ) -> None:
            # Stash start time and SQL on the connection; read back after execute.
            conn.info[_SA_INFO_START_TIME] = time.perf_counter()
            conn.info[_SA_INFO_SQL] = statement

        @event.listens_for(Engine, "after_cursor_execute")
        def _on_after_execute(
            conn: Any,
            cursor: Any,
            statement: str,
            parameters: Any,
            context: Any,
            executemany: bool,
        ) -> None:
            # pop() so stale state never leaks into the next execution.
            start: float | None = conn.info.pop(_SA_INFO_START_TIME, None)
            sql: str = conn.info.pop(_SA_INFO_SQL, "")

            # No matching before-event (e.g. patched mid-execution): skip.
            if start is None:
                return

            # Only record queries issued inside a traced request context.
            request_id = get_request_id()
            if request_id is None:
                return

            if is_noise_query(sql):
                return

            duration_ms = (time.perf_counter() - start) * 1_000
            operation, table = normalize_sql(sql)

            entry = TracedQuery(
                id=uuid.uuid4().hex,
                parent_request_id=request_id,
                timestamp=time.time() * 1_000,  # wall-clock epoch millis
                driver="sqlalchemy",
                sql=sql[:MAX_SQL_LENGTH] if sql else None,
                operation=operation,
                table=table,
                duration_ms=round(duration_ms, 2),
                parent_fetch_id=get_fetch_id(),
            )

            query_store.add(entry)
            bus.emit(CHANNEL_TELEMETRY_QUERY, entry)

        # Keep references so unpatch() can event.remove() these exact callables.
        SQLAlchemyAdapter._on_before_listener = _on_before_execute
        SQLAlchemyAdapter._on_after_listener = _on_after_execute
        SQLAlchemyAdapter._patched = True

    def unpatch(self) -> None:
        """Remove the registered listeners and clear the patch state.

        Best-effort: failures are logged at debug level, and the patched flag
        is cleared regardless so a later patch() can run again.
        """
        if not SQLAlchemyAdapter._patched:
            return

        try:
            from sqlalchemy import event
            from sqlalchemy.engine import Engine

            if SQLAlchemyAdapter._on_before_listener is not None:
                event.remove(Engine, "before_cursor_execute", SQLAlchemyAdapter._on_before_listener)
            if SQLAlchemyAdapter._on_after_listener is not None:
                event.remove(Engine, "after_cursor_execute", SQLAlchemyAdapter._on_after_listener)
            SQLAlchemyAdapter._on_before_listener = None
            SQLAlchemyAdapter._on_after_listener = None
        except Exception:
            logger.debug("sqlalchemy unpatch failed", exc_info=True)

        SQLAlchemyAdapter._patched = False
|