sf-veritas 0.10.3__cp39-cp39-manylinux_2_28_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sf-veritas might be problematic. Click here for more details.
- sf_veritas/__init__.py +20 -0
- sf_veritas/_sffastlog.c +889 -0
- sf_veritas/_sffastlog.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffastnet.c +924 -0
- sf_veritas/_sffastnet.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffastnetworkrequest.c +730 -0
- sf_veritas/_sffastnetworkrequest.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffuncspan.c +2155 -0
- sf_veritas/_sffuncspan.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffuncspan_config.c +617 -0
- sf_veritas/_sffuncspan_config.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfheadercheck.c +341 -0
- sf_veritas/_sfheadercheck.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfnetworkhop.c +1451 -0
- sf_veritas/_sfnetworkhop.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfservice.c +1175 -0
- sf_veritas/_sfservice.cpython-39-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfteepreload.c +5167 -0
- sf_veritas/app_config.py +49 -0
- sf_veritas/cli.py +336 -0
- sf_veritas/constants.py +10 -0
- sf_veritas/custom_excepthook.py +304 -0
- sf_veritas/custom_log_handler.py +129 -0
- sf_veritas/custom_output_wrapper.py +144 -0
- sf_veritas/custom_print.py +146 -0
- sf_veritas/django_app.py +5 -0
- sf_veritas/env_vars.py +186 -0
- sf_veritas/exception_handling_middleware.py +18 -0
- sf_veritas/exception_metaclass.py +69 -0
- sf_veritas/fast_frame_info.py +116 -0
- sf_veritas/fast_network_hop.py +293 -0
- sf_veritas/frame_tools.py +112 -0
- sf_veritas/funcspan_config_loader.py +556 -0
- sf_veritas/function_span_profiler.py +1174 -0
- sf_veritas/import_hook.py +62 -0
- sf_veritas/infra_details/__init__.py +3 -0
- sf_veritas/infra_details/get_infra_details.py +24 -0
- sf_veritas/infra_details/kubernetes/__init__.py +3 -0
- sf_veritas/infra_details/kubernetes/get_cluster_name.py +147 -0
- sf_veritas/infra_details/kubernetes/get_details.py +7 -0
- sf_veritas/infra_details/running_on/__init__.py +17 -0
- sf_veritas/infra_details/running_on/kubernetes.py +11 -0
- sf_veritas/interceptors.py +497 -0
- sf_veritas/libsfnettee.so +0 -0
- sf_veritas/local_env_detect.py +118 -0
- sf_veritas/package_metadata.py +6 -0
- sf_veritas/patches/__init__.py +0 -0
- sf_veritas/patches/concurrent_futures.py +19 -0
- sf_veritas/patches/constants.py +1 -0
- sf_veritas/patches/exceptions.py +82 -0
- sf_veritas/patches/multiprocessing.py +32 -0
- sf_veritas/patches/network_libraries/__init__.py +76 -0
- sf_veritas/patches/network_libraries/aiohttp.py +281 -0
- sf_veritas/patches/network_libraries/curl_cffi.py +363 -0
- sf_veritas/patches/network_libraries/http_client.py +419 -0
- sf_veritas/patches/network_libraries/httpcore.py +515 -0
- sf_veritas/patches/network_libraries/httplib2.py +204 -0
- sf_veritas/patches/network_libraries/httpx.py +515 -0
- sf_veritas/patches/network_libraries/niquests.py +211 -0
- sf_veritas/patches/network_libraries/pycurl.py +385 -0
- sf_veritas/patches/network_libraries/requests.py +633 -0
- sf_veritas/patches/network_libraries/tornado.py +341 -0
- sf_veritas/patches/network_libraries/treq.py +270 -0
- sf_veritas/patches/network_libraries/urllib_request.py +468 -0
- sf_veritas/patches/network_libraries/utils.py +398 -0
- sf_veritas/patches/os.py +17 -0
- sf_veritas/patches/threading.py +218 -0
- sf_veritas/patches/web_frameworks/__init__.py +54 -0
- sf_veritas/patches/web_frameworks/aiohttp.py +793 -0
- sf_veritas/patches/web_frameworks/async_websocket_consumer.py +317 -0
- sf_veritas/patches/web_frameworks/blacksheep.py +527 -0
- sf_veritas/patches/web_frameworks/bottle.py +502 -0
- sf_veritas/patches/web_frameworks/cherrypy.py +678 -0
- sf_veritas/patches/web_frameworks/cors_utils.py +122 -0
- sf_veritas/patches/web_frameworks/django.py +944 -0
- sf_veritas/patches/web_frameworks/eve.py +395 -0
- sf_veritas/patches/web_frameworks/falcon.py +926 -0
- sf_veritas/patches/web_frameworks/fastapi.py +724 -0
- sf_veritas/patches/web_frameworks/flask.py +520 -0
- sf_veritas/patches/web_frameworks/klein.py +501 -0
- sf_veritas/patches/web_frameworks/litestar.py +551 -0
- sf_veritas/patches/web_frameworks/pyramid.py +428 -0
- sf_veritas/patches/web_frameworks/quart.py +824 -0
- sf_veritas/patches/web_frameworks/robyn.py +697 -0
- sf_veritas/patches/web_frameworks/sanic.py +857 -0
- sf_veritas/patches/web_frameworks/starlette.py +723 -0
- sf_veritas/patches/web_frameworks/strawberry.py +813 -0
- sf_veritas/patches/web_frameworks/tornado.py +481 -0
- sf_veritas/patches/web_frameworks/utils.py +91 -0
- sf_veritas/print_override.py +13 -0
- sf_veritas/regular_data_transmitter.py +409 -0
- sf_veritas/request_interceptor.py +401 -0
- sf_veritas/request_utils.py +550 -0
- sf_veritas/server_status.py +1 -0
- sf_veritas/shutdown_flag.py +11 -0
- sf_veritas/subprocess_startup.py +3 -0
- sf_veritas/test_cli.py +145 -0
- sf_veritas/thread_local.py +970 -0
- sf_veritas/timeutil.py +114 -0
- sf_veritas/transmit_exception_to_sailfish.py +28 -0
- sf_veritas/transmitter.py +132 -0
- sf_veritas/types.py +47 -0
- sf_veritas/unified_interceptor.py +1580 -0
- sf_veritas/utils.py +39 -0
- sf_veritas-0.10.3.dist-info/METADATA +97 -0
- sf_veritas-0.10.3.dist-info/RECORD +132 -0
- sf_veritas-0.10.3.dist-info/WHEEL +5 -0
- sf_veritas-0.10.3.dist-info/entry_points.txt +2 -0
- sf_veritas-0.10.3.dist-info/top_level.txt +1 -0
- sf_veritas.libs/libbrotlicommon-6ce2a53c.so.1.0.6 +0 -0
- sf_veritas.libs/libbrotlidec-811d1be3.so.1.0.6 +0 -0
- sf_veritas.libs/libcom_err-730ca923.so.2.1 +0 -0
- sf_veritas.libs/libcrypt-52aca757.so.1.1.0 +0 -0
- sf_veritas.libs/libcrypto-bdaed0ea.so.1.1.1k +0 -0
- sf_veritas.libs/libcurl-eaa3cf66.so.4.5.0 +0 -0
- sf_veritas.libs/libgssapi_krb5-323bbd21.so.2.2 +0 -0
- sf_veritas.libs/libidn2-2f4a5893.so.0.3.6 +0 -0
- sf_veritas.libs/libk5crypto-9a74ff38.so.3.1 +0 -0
- sf_veritas.libs/libkeyutils-2777d33d.so.1.6 +0 -0
- sf_veritas.libs/libkrb5-a55300e8.so.3.3 +0 -0
- sf_veritas.libs/libkrb5support-e6594cfc.so.0.1 +0 -0
- sf_veritas.libs/liblber-2-d20824ef.4.so.2.10.9 +0 -0
- sf_veritas.libs/libldap-2-cea2a960.4.so.2.10.9 +0 -0
- sf_veritas.libs/libnghttp2-39367a22.so.14.17.0 +0 -0
- sf_veritas.libs/libpcre2-8-516f4c9d.so.0.7.1 +0 -0
- sf_veritas.libs/libpsl-99becdd3.so.5.3.1 +0 -0
- sf_veritas.libs/libsasl2-7de4d792.so.3.0.0 +0 -0
- sf_veritas.libs/libselinux-d0805dcb.so.1 +0 -0
- sf_veritas.libs/libssh-c11d285b.so.4.8.7 +0 -0
- sf_veritas.libs/libssl-60250281.so.1.1.1k +0 -0
- sf_veritas.libs/libunistring-05abdd40.so.2.1.0 +0 -0
- sf_veritas.libs/libuuid-95b83d40.so.1.3.0 +0 -0
|
@@ -0,0 +1,515 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Patch httpx to inject tracing headers and capture network requests using event hooks.
|
|
3
|
+
|
|
4
|
+
• For every outbound request, propagate the SAILFISH_TRACING_HEADER + FUNCSPAN_OVERRIDE_HEADER
|
|
5
|
+
unless the destination host is in `domains_to_not_propagate_headers_to`.
|
|
6
|
+
• Fire NetworkRequestTransmitter via utils.record_network_request
|
|
7
|
+
so we always capture (url, status, timings, success, error).
|
|
8
|
+
• When LD_PRELOAD is active, ONLY inject headers (skip capture - socket layer handles it).
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import os
|
|
14
|
+
import threading
|
|
15
|
+
import time
|
|
16
|
+
from typing import Dict, List, Optional, Tuple
|
|
17
|
+
|
|
18
|
+
try:
|
|
19
|
+
import wrapt
|
|
20
|
+
|
|
21
|
+
HAS_WRAPT = True
|
|
22
|
+
except ImportError:
|
|
23
|
+
HAS_WRAPT = False
|
|
24
|
+
|
|
25
|
+
from ...constants import FUNCSPAN_OVERRIDE_HEADER, SAILFISH_TRACING_HEADER
|
|
26
|
+
from ...thread_local import (
|
|
27
|
+
activate_reentrancy_guards_exception,
|
|
28
|
+
activate_reentrancy_guards_logging,
|
|
29
|
+
activate_reentrancy_guards_print,
|
|
30
|
+
get_funcspan_override,
|
|
31
|
+
trace_id_ctx,
|
|
32
|
+
)
|
|
33
|
+
from .utils import (
|
|
34
|
+
get_trace_and_should_propagate,
|
|
35
|
+
get_trace_and_should_propagate_fast,
|
|
36
|
+
init_fast_header_check,
|
|
37
|
+
inject_headers_ultrafast,
|
|
38
|
+
record_network_request,
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
# JSON serialization - try fast orjson first, fallback to stdlib json
|
|
42
|
+
try:
|
|
43
|
+
import orjson
|
|
44
|
+
|
|
45
|
+
HAS_ORJSON = True
|
|
46
|
+
except ImportError:
|
|
47
|
+
import json
|
|
48
|
+
|
|
49
|
+
HAS_ORJSON = False
|
|
50
|
+
|
|
51
|
+
###############################################################################
|
|
52
|
+
# Internal helpers
|
|
53
|
+
###############################################################################
|
|
54
|
+
|
|
55
|
+
# Header names used for re-entrancy guards.
# These flags are propagated on outbound requests so a downstream
# Sailfish-instrumented service can pre-activate its logging/print/exception
# guards before its own hooks fire (consumed by _check_rg, set by _activate_rg).
REENTRANCY_GUARD_LOGGING_PREACTIVE = "reentrancy_guard_logging_preactive"
REENTRANCY_GUARD_PRINT_PREACTIVE = "reentrancy_guard_print_preactive"
REENTRANCY_GUARD_EXCEPTIONS_PREACTIVE = "reentrancy_guard_exception_preactive"
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _tee_preload_active() -> bool:
|
|
62
|
+
"""Detect if LD_PRELOAD tee is active (same logic as requests.py)."""
|
|
63
|
+
if os.getenv("SF_TEE_PRELOAD_ONLY", "0") == "1":
|
|
64
|
+
return True
|
|
65
|
+
ld = os.getenv("LD_PRELOAD", "")
|
|
66
|
+
return "libsfnettee.so" in ld or "_sfteepreload" in ld
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _activate_rg(headers: Dict[str, str]) -> None:
    """Set all three 'preactive' guard flags so downstream hops inherit them."""
    for guard_header in (
        REENTRANCY_GUARD_LOGGING_PREACTIVE,
        REENTRANCY_GUARD_PRINT_PREACTIVE,
        REENTRANCY_GUARD_EXCEPTIONS_PREACTIVE,
    ):
        headers[guard_header] = "true"
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _check_rg(headers: Dict[str, str]) -> None:
    """Activate each local re-entrancy guard whose 'preactive' flag is present."""
    # Table-driven dispatch: header flag -> activation callable.
    guard_activations = (
        (REENTRANCY_GUARD_LOGGING_PREACTIVE, activate_reentrancy_guards_logging),
        (REENTRANCY_GUARD_PRINT_PREACTIVE, activate_reentrancy_guards_print),
        (REENTRANCY_GUARD_EXCEPTIONS_PREACTIVE, activate_reentrancy_guards_exception),
    )
    for header_name, activate in guard_activations:
        if headers.get(header_name, "false").lower() == "true":
            activate()
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def _prepare(
    url: str,
    domains_to_skip: List[str],
    headers: Dict[str, str],
) -> Tuple[str, Dict[str, str], int]:
    """
    Merge tracing headers into a copy of *headers* and return
    (trace_id, merged_headers, timestamp_ms).

    The SAILFISH_TRACING_HEADER (and, when set in the current context, the
    FUNCSPAN_OVERRIDE_HEADER) is added only when the destination host is not
    in *domains_to_skip*. ULTRA-FAST: <20ns overhead for header injection.
    """
    trace_id, propagate = get_trace_and_should_propagate(url, domains_to_skip)
    merged: Dict[str, str] = dict(headers or {})
    _check_rg(merged)
    if propagate:
        merged[SAILFISH_TRACING_HEADER] = trace_id

        # Funcspan override travels alongside the trace header
        # (ContextVar lookup ~8ns); best-effort only.
        try:
            override = get_funcspan_override()
            if override is not None:
                merged[FUNCSPAN_OVERRIDE_HEADER] = override
        except Exception:
            pass

    _activate_rg(merged)
    return trace_id, merged, int(time.time() * 1_000)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _capture_request_data(request) -> bytes:
|
|
116
|
+
"""Capture request body data as bytes."""
|
|
117
|
+
req_data = b""
|
|
118
|
+
try:
|
|
119
|
+
# Check if content is available
|
|
120
|
+
if hasattr(request, "content"):
|
|
121
|
+
content = request.content
|
|
122
|
+
if isinstance(content, bytes):
|
|
123
|
+
req_data = content
|
|
124
|
+
elif isinstance(content, str):
|
|
125
|
+
req_data = content.encode("utf-8")
|
|
126
|
+
except Exception: # noqa: BLE001
|
|
127
|
+
pass
|
|
128
|
+
return req_data
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _capture_and_record(
    trace_id: str,
    url: str,
    method: str,
    status: int,
    success: bool,
    err: str | None,
    t0: int,
    t1: int,
    req_data: bytes,
    req_headers: bytes,
    resp_data: bytes,
    resp_headers: bytes,
) -> None:
    """Record the network request on a daemon thread AFTER the response has
    been handed back to the user, so the caller is never blocked."""

    def _emit() -> None:
        # Runs off the request path; NetworkRequestTransmitter work happens here.
        record_network_request(
            trace_id,
            url,
            method,
            status,
            success,
            err,
            timestamp_start=t0,
            timestamp_end=t1,
            request_data=req_data,
            response_data=resp_data,
            request_headers=req_headers,
            response_headers=resp_headers,
        )

    worker = threading.Thread(target=_emit, daemon=True)
    worker.start()
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
###############################################################################
|
|
167
|
+
# Event hook factories
|
|
168
|
+
###############################################################################
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def _make_request_hook(domains_to_skip: List[str], preload_active: bool):
    """Build the sync httpx request hook (header injection, optional capture)."""

    if preload_active:
        # ---- ULTRA-FAST PATH: LD_PRELOAD captures at the socket layer ----
        def request_hook(request):
            """Inject tracing headers into outbound request (ultra-fast C extension)."""
            try:
                target_url = str(request.url)
                # CRITICAL: Skip if already injected (prevents double injection)
                if SAILFISH_TRACING_HEADER not in request.headers:
                    # ULTRA-FAST: Thread-local cache + direct ContextVar.get() (<100ns!)
                    inject_headers_ultrafast(
                        request.headers, target_url, domains_to_skip
                    )
            except Exception:  # noqa: BLE001
                pass  # Fail silently to not break requests

    else:
        # ---- FULL CAPTURE PATH: we record request/response ourselves ----
        def request_hook(request):
            """Inject tracing headers and stash capture metadata on the request."""
            try:
                target_url = str(request.url)
                trace_id, merged_headers, started_ms = _prepare(
                    target_url, domains_to_skip, dict(request.headers)
                )
                request.headers.update(merged_headers)

                # httpx Request objects always carry an extensions dict;
                # the response hook reads these keys back out.
                ext = request.extensions
                ext["sf_trace_id"] = trace_id
                ext["sf_timestamp_start"] = started_ms
                ext["sf_request_data"] = _capture_request_data(request)

                # Snapshot the (post-injection) request headers as JSON bytes.
                if HAS_ORJSON:
                    ext["sf_request_headers"] = orjson.dumps(dict(request.headers))
                else:
                    ext["sf_request_headers"] = json.dumps(
                        dict(request.headers)
                    ).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass  # Fail silently to not break requests

    return request_hook
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def _make_async_request_hook(domains_to_skip: List[str], preload_active: bool):
    """Create an async request hook that injects headers before the request is sent.

    Mirrors _make_request_hook: the preload path only injects headers (the
    LD_PRELOAD socket tee records traffic), while the full-capture path also
    stashes metadata on request.extensions for the response hook to read.

    Fix: both async variants are now wrapped in try/except like their sync
    counterparts, so an instrumentation failure can never break the
    application's outbound request. The full-capture path also reuses
    _check_rg/_activate_rg instead of duplicating the guard logic inline.
    """

    if preload_active:
        # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
        async def async_request_hook(request):
            """Inject tracing headers into outbound request (ultra-fast C extension)."""
            try:
                url = str(request.url)
                # CRITICAL: Skip if already injected (prevents double injection)
                if SAILFISH_TRACING_HEADER not in request.headers:
                    # ULTRA-FAST: Thread-local cache + direct ContextVar.get() (<100ns!)
                    inject_headers_ultrafast(request.headers, url, domains_to_skip)
            except Exception:  # noqa: BLE001
                pass  # Fail silently to not break requests

    else:
        # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
        async def async_request_hook(request):
            """Inject tracing headers and stash capture metadata for the response hook."""
            try:
                url = str(request.url)
                trace_id = trace_id_ctx.get(None) or ""
                t0 = int(time.time() * 1_000)

                # Activate local re-entrancy guards flagged on the incoming
                # headers (operates on request.headers directly; no dict copy).
                req_headers = request.headers
                _check_rg(req_headers)

                # CRITICAL: Skip if already injected (prevents double injection)
                if SAILFISH_TRACING_HEADER not in req_headers:
                    # ULTRA-FAST: Thread-local cache + direct ContextVar.get() (<100ns!)
                    inject_headers_ultrafast(req_headers, url, domains_to_skip)

                # Activate re-entrancy guards for downstream services.
                _activate_rg(req_headers)

                # Store metadata on the request for the response hook to use.
                request.extensions["sf_trace_id"] = trace_id
                request.extensions["sf_timestamp_start"] = t0
                request.extensions["sf_request_data"] = _capture_request_data(request)

                # Capture request headers (AFTER injection) as JSON bytes.
                if HAS_ORJSON:
                    request.extensions["sf_request_headers"] = orjson.dumps(
                        dict(req_headers)
                    )
                else:
                    request.extensions["sf_request_headers"] = json.dumps(
                        dict(req_headers)
                    ).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass  # Fail silently to not break requests

    return async_request_hook
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def _make_response_hook(preload_active: bool):
    """Create a sync response hook that captures and records response data.

    Fix: the whole hook body is now wrapped in try/except, matching the async
    response hook, so an unexpected failure while extracting metadata or
    scheduling the background record can never propagate into the user's
    request path.
    """

    def response_hook(response):
        """Capture response data and record the network request."""
        # Skip recording if LD_PRELOAD is active (socket layer already captured it)
        if preload_active:
            return

        try:
            # Extract metadata stashed by the request hook.
            request = response.request
            trace_id = request.extensions.get("sf_trace_id", "")
            t0 = request.extensions.get("sf_timestamp_start", 0)
            req_data = request.extensions.get("sf_request_data", b"")
            req_headers = request.extensions.get("sf_request_headers", b"")

            # Capture response metadata.
            url = str(request.url)
            method = str(request.method).upper()
            status = response.status_code
            success = status < 400
            t1 = int(time.time() * 1_000)

            resp_data = b""
            resp_headers = b""

            try:
                # Response body is already loaded for sync httpx responses.
                resp_data = response.content

                # Capture response headers as JSON bytes.
                if HAS_ORJSON:
                    resp_headers = orjson.dumps(
                        {str(k): str(v) for k, v in response.headers.items()}
                    )
                else:
                    resp_headers = json.dumps(
                        {str(k): str(v) for k, v in response.headers.items()}
                    ).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass

            # Record in a background thread so the caller is not blocked.
            _capture_and_record(
                trace_id,
                url,
                method,
                status,
                success,
                None,
                t0,
                t1,
                req_data,
                req_headers,
                resp_data,
                resp_headers,
            )
        except Exception:  # noqa: BLE001
            pass  # Never break the user's request path

    return response_hook
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def _make_async_response_hook(preload_active: bool):
    """Create an async response hook that captures and records response data.

    The returned coroutine is registered as an httpx AsyncClient 'response'
    event hook. It reads the metadata stashed on request.extensions by the
    async request hook, captures the response body/headers, and hands
    everything to _capture_and_record (which emits on a daemon thread).
    """

    async def async_response_hook(response):
        """Capture response data and record the network request (optimized - no debug logging)."""
        # Skip recording if LD_PRELOAD is active (socket layer already captured it)
        if preload_active:
            return

        try:
            # Extract metadata from request (defaults cover the case where the
            # request hook failed and never populated these keys).
            request = response.request
            trace_id = request.extensions.get("sf_trace_id", "")
            t0 = request.extensions.get("sf_timestamp_start", 0)
            req_data = request.extensions.get("sf_request_data", b"")
            req_headers = request.extensions.get("sf_request_headers", b"")

            # Capture response data
            url = str(request.url)
            method = str(request.method).upper()
            status = response.status_code
            success = status < 400
            t1 = int(time.time() * 1_000)

            resp_data = b""
            resp_headers = b""

            try:
                # For async responses, ensure body is read
                # Check if already read (has _content), otherwise read it
                # NOTE(review): relies on httpx's private `_content` attribute
                # as the "already read" sentinel — verify against the pinned
                # httpx version when upgrading.
                if not hasattr(response, "_content") or response._content is None:
                    await response.aread()

                # Now capture response body
                resp_data = response.content

                # Capture response headers
                if HAS_ORJSON:
                    resp_headers = orjson.dumps({str(k): str(v) for k, v in response.headers.items()})
                else:
                    resp_headers = json.dumps({str(k): str(v) for k, v in response.headers.items()}).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass

            # Record in background thread
            _capture_and_record(
                trace_id,
                url,
                method,
                status,
                success,
                None,
                t0,
                t1,
                req_data,
                req_headers,
                resp_data,
                resp_headers,
            )
        except Exception:  # noqa: BLE001
            pass  # Silently fail to not break requests

    return async_response_hook
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
###############################################################################
|
|
417
|
+
# Top-level patch function
|
|
418
|
+
###############################################################################
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def patch_httpx(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
    """
    Patch httpx to inject SAILFISH_TRACING_HEADER into all outbound requests
    using event hooks. Safe to call even if httpx is not installed.

    When LD_PRELOAD is active:
      - ALWAYS inject headers (trace_id + funcspan_override)
      - SKIP capture/emission (LD_PRELOAD handles at socket layer)
      - Uses ultra-fast C extension for <10ns overhead

    Fix: a caller-supplied ``event_hooks`` mapping is no longer mutated in
    place. Previously ``kwargs.get("event_hooks", {})`` returned the caller's
    own dict and ``setdefault(...).append(...)`` modified it (and its lists),
    so reusing one mapping across several clients accumulated duplicate
    hooks, and passing ``event_hooks=None`` raised AttributeError. The merge
    now works on a copy, via one shared helper instead of four duplicated
    wrapper bodies.
    """
    try:
        import httpx
    except ImportError:
        return  # No httpx installed—nothing to patch

    domains = domains_to_not_propagate_headers_to or []
    preload_active = _tee_preload_active()

    # Initialize C extension for ultra-fast header checking (if available)
    if preload_active:
        init_fast_header_check(domains)

    # Create hooks (sync for Client, async for AsyncClient)
    sync_request_hook = _make_request_hook(domains, preload_active)
    async_request_hook = _make_async_request_hook(domains, preload_active)
    sync_response_hook = _make_response_hook(preload_active)
    async_response_hook = _make_async_response_hook(preload_active)

    def _merged_event_hooks(kwargs, request_hook, response_hook):
        """Return a copy of kwargs['event_hooks'] with our hooks appended.

        Copies both the dict and its hook lists so the caller's mapping is
        never mutated; also tolerates event_hooks=None.
        """
        merged = {
            name: list(hooks)
            for name, hooks in (kwargs.get("event_hooks") or {}).items()
        }
        merged.setdefault("request", []).append(request_hook)
        merged.setdefault("response", []).append(response_hook)
        return merged

    # Patch Client.__init__ to attach sync hooks
    if HAS_WRAPT:

        def instrumented_client_init(wrapped, instance, args, kwargs):
            """Ultra-fast hook injection using wrapt."""
            kwargs["event_hooks"] = _merged_event_hooks(
                kwargs, sync_request_hook, sync_response_hook
            )
            return wrapped(*args, **kwargs)

        wrapt.wrap_function_wrapper(
            "httpx", "Client.__init__", instrumented_client_init
        )
    else:
        original_client_init = httpx.Client.__init__

        def patched_client_init(self, *args, **kwargs):
            kwargs["event_hooks"] = _merged_event_hooks(
                kwargs, sync_request_hook, sync_response_hook
            )
            original_client_init(self, *args, **kwargs)

        httpx.Client.__init__ = patched_client_init

    # Patch AsyncClient.__init__ to attach async hooks
    if HAS_WRAPT:

        def instrumented_async_client_init(wrapped, instance, args, kwargs):
            """Ultra-fast hook injection using wrapt."""
            kwargs["event_hooks"] = _merged_event_hooks(
                kwargs, async_request_hook, async_response_hook
            )
            return wrapped(*args, **kwargs)

        wrapt.wrap_function_wrapper(
            "httpx", "AsyncClient.__init__", instrumented_async_client_init
        )
    else:
        original_async_client_init = httpx.AsyncClient.__init__

        def patched_async_client_init(self, *args, **kwargs):
            kwargs["event_hooks"] = _merged_event_hooks(
                kwargs, async_request_hook, async_response_hook
            )
            original_async_client_init(self, *args, **kwargs)

        httpx.AsyncClient.__init__ = patched_async_client_init
|