sf-veritas 0.10.3__cp314-cp314-manylinux_2_28_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sf-veritas has been flagged as potentially problematic; see the registry's advisory page for details.
- sf_veritas/__init__.py +20 -0
- sf_veritas/_sffastlog.c +889 -0
- sf_veritas/_sffastlog.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffastnet.c +924 -0
- sf_veritas/_sffastnet.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffastnetworkrequest.c +730 -0
- sf_veritas/_sffastnetworkrequest.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffuncspan.c +2155 -0
- sf_veritas/_sffuncspan.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sffuncspan_config.c +617 -0
- sf_veritas/_sffuncspan_config.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfheadercheck.c +341 -0
- sf_veritas/_sfheadercheck.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfnetworkhop.c +1451 -0
- sf_veritas/_sfnetworkhop.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfservice.c +1175 -0
- sf_veritas/_sfservice.cpython-314-x86_64-linux-gnu.so +0 -0
- sf_veritas/_sfteepreload.c +5167 -0
- sf_veritas/app_config.py +49 -0
- sf_veritas/cli.py +336 -0
- sf_veritas/constants.py +10 -0
- sf_veritas/custom_excepthook.py +304 -0
- sf_veritas/custom_log_handler.py +129 -0
- sf_veritas/custom_output_wrapper.py +144 -0
- sf_veritas/custom_print.py +146 -0
- sf_veritas/django_app.py +5 -0
- sf_veritas/env_vars.py +186 -0
- sf_veritas/exception_handling_middleware.py +18 -0
- sf_veritas/exception_metaclass.py +69 -0
- sf_veritas/fast_frame_info.py +116 -0
- sf_veritas/fast_network_hop.py +293 -0
- sf_veritas/frame_tools.py +112 -0
- sf_veritas/funcspan_config_loader.py +556 -0
- sf_veritas/function_span_profiler.py +1174 -0
- sf_veritas/import_hook.py +62 -0
- sf_veritas/infra_details/__init__.py +3 -0
- sf_veritas/infra_details/get_infra_details.py +24 -0
- sf_veritas/infra_details/kubernetes/__init__.py +3 -0
- sf_veritas/infra_details/kubernetes/get_cluster_name.py +147 -0
- sf_veritas/infra_details/kubernetes/get_details.py +7 -0
- sf_veritas/infra_details/running_on/__init__.py +17 -0
- sf_veritas/infra_details/running_on/kubernetes.py +11 -0
- sf_veritas/interceptors.py +497 -0
- sf_veritas/libsfnettee.so +0 -0
- sf_veritas/local_env_detect.py +118 -0
- sf_veritas/package_metadata.py +6 -0
- sf_veritas/patches/__init__.py +0 -0
- sf_veritas/patches/concurrent_futures.py +19 -0
- sf_veritas/patches/constants.py +1 -0
- sf_veritas/patches/exceptions.py +82 -0
- sf_veritas/patches/multiprocessing.py +32 -0
- sf_veritas/patches/network_libraries/__init__.py +76 -0
- sf_veritas/patches/network_libraries/aiohttp.py +281 -0
- sf_veritas/patches/network_libraries/curl_cffi.py +363 -0
- sf_veritas/patches/network_libraries/http_client.py +419 -0
- sf_veritas/patches/network_libraries/httpcore.py +515 -0
- sf_veritas/patches/network_libraries/httplib2.py +204 -0
- sf_veritas/patches/network_libraries/httpx.py +515 -0
- sf_veritas/patches/network_libraries/niquests.py +211 -0
- sf_veritas/patches/network_libraries/pycurl.py +385 -0
- sf_veritas/patches/network_libraries/requests.py +633 -0
- sf_veritas/patches/network_libraries/tornado.py +341 -0
- sf_veritas/patches/network_libraries/treq.py +270 -0
- sf_veritas/patches/network_libraries/urllib_request.py +468 -0
- sf_veritas/patches/network_libraries/utils.py +398 -0
- sf_veritas/patches/os.py +17 -0
- sf_veritas/patches/threading.py +218 -0
- sf_veritas/patches/web_frameworks/__init__.py +54 -0
- sf_veritas/patches/web_frameworks/aiohttp.py +793 -0
- sf_veritas/patches/web_frameworks/async_websocket_consumer.py +317 -0
- sf_veritas/patches/web_frameworks/blacksheep.py +527 -0
- sf_veritas/patches/web_frameworks/bottle.py +502 -0
- sf_veritas/patches/web_frameworks/cherrypy.py +678 -0
- sf_veritas/patches/web_frameworks/cors_utils.py +122 -0
- sf_veritas/patches/web_frameworks/django.py +944 -0
- sf_veritas/patches/web_frameworks/eve.py +395 -0
- sf_veritas/patches/web_frameworks/falcon.py +926 -0
- sf_veritas/patches/web_frameworks/fastapi.py +724 -0
- sf_veritas/patches/web_frameworks/flask.py +520 -0
- sf_veritas/patches/web_frameworks/klein.py +501 -0
- sf_veritas/patches/web_frameworks/litestar.py +551 -0
- sf_veritas/patches/web_frameworks/pyramid.py +428 -0
- sf_veritas/patches/web_frameworks/quart.py +824 -0
- sf_veritas/patches/web_frameworks/robyn.py +697 -0
- sf_veritas/patches/web_frameworks/sanic.py +857 -0
- sf_veritas/patches/web_frameworks/starlette.py +723 -0
- sf_veritas/patches/web_frameworks/strawberry.py +813 -0
- sf_veritas/patches/web_frameworks/tornado.py +481 -0
- sf_veritas/patches/web_frameworks/utils.py +91 -0
- sf_veritas/print_override.py +13 -0
- sf_veritas/regular_data_transmitter.py +409 -0
- sf_veritas/request_interceptor.py +401 -0
- sf_veritas/request_utils.py +550 -0
- sf_veritas/server_status.py +1 -0
- sf_veritas/shutdown_flag.py +11 -0
- sf_veritas/subprocess_startup.py +3 -0
- sf_veritas/test_cli.py +145 -0
- sf_veritas/thread_local.py +970 -0
- sf_veritas/timeutil.py +114 -0
- sf_veritas/transmit_exception_to_sailfish.py +28 -0
- sf_veritas/transmitter.py +132 -0
- sf_veritas/types.py +47 -0
- sf_veritas/unified_interceptor.py +1580 -0
- sf_veritas/utils.py +39 -0
- sf_veritas-0.10.3.dist-info/METADATA +97 -0
- sf_veritas-0.10.3.dist-info/RECORD +132 -0
- sf_veritas-0.10.3.dist-info/WHEEL +5 -0
- sf_veritas-0.10.3.dist-info/entry_points.txt +2 -0
- sf_veritas-0.10.3.dist-info/top_level.txt +1 -0
- sf_veritas.libs/libbrotlicommon-6ce2a53c.so.1.0.6 +0 -0
- sf_veritas.libs/libbrotlidec-811d1be3.so.1.0.6 +0 -0
- sf_veritas.libs/libcom_err-730ca923.so.2.1 +0 -0
- sf_veritas.libs/libcrypt-52aca757.so.1.1.0 +0 -0
- sf_veritas.libs/libcrypto-bdaed0ea.so.1.1.1k +0 -0
- sf_veritas.libs/libcurl-eaa3cf66.so.4.5.0 +0 -0
- sf_veritas.libs/libgssapi_krb5-323bbd21.so.2.2 +0 -0
- sf_veritas.libs/libidn2-2f4a5893.so.0.3.6 +0 -0
- sf_veritas.libs/libk5crypto-9a74ff38.so.3.1 +0 -0
- sf_veritas.libs/libkeyutils-2777d33d.so.1.6 +0 -0
- sf_veritas.libs/libkrb5-a55300e8.so.3.3 +0 -0
- sf_veritas.libs/libkrb5support-e6594cfc.so.0.1 +0 -0
- sf_veritas.libs/liblber-2-d20824ef.4.so.2.10.9 +0 -0
- sf_veritas.libs/libldap-2-cea2a960.4.so.2.10.9 +0 -0
- sf_veritas.libs/libnghttp2-39367a22.so.14.17.0 +0 -0
- sf_veritas.libs/libpcre2-8-516f4c9d.so.0.7.1 +0 -0
- sf_veritas.libs/libpsl-99becdd3.so.5.3.1 +0 -0
- sf_veritas.libs/libsasl2-7de4d792.so.3.0.0 +0 -0
- sf_veritas.libs/libselinux-d0805dcb.so.1 +0 -0
- sf_veritas.libs/libssh-c11d285b.so.4.8.7 +0 -0
- sf_veritas.libs/libssl-60250281.so.1.1.1k +0 -0
- sf_veritas.libs/libunistring-05abdd40.so.2.1.0 +0 -0
- sf_veritas.libs/libuuid-95b83d40.so.1.3.0 +0 -0
|
@@ -0,0 +1,515 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import time
|
|
3
|
+
from typing import List, Optional
|
|
4
|
+
|
|
5
|
+
try:
|
|
6
|
+
import wrapt
|
|
7
|
+
|
|
8
|
+
HAS_WRAPT = True
|
|
9
|
+
except ImportError:
|
|
10
|
+
HAS_WRAPT = False
|
|
11
|
+
|
|
12
|
+
from ...constants import SAILFISH_TRACING_HEADER
|
|
13
|
+
from ...thread_local import trace_id_ctx
|
|
14
|
+
from .utils import (
|
|
15
|
+
init_fast_header_check,
|
|
16
|
+
inject_headers_ultrafast,
|
|
17
|
+
record_network_request,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
# JSON serialization - try fast orjson first, fallback to stdlib json
|
|
21
|
+
try:
|
|
22
|
+
import orjson
|
|
23
|
+
|
|
24
|
+
HAS_ORJSON = True
|
|
25
|
+
except ImportError:
|
|
26
|
+
import json
|
|
27
|
+
|
|
28
|
+
HAS_ORJSON = False
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _tee_preload_active() -> bool:
|
|
32
|
+
"""Detect if LD_PRELOAD tee is active (same logic as http_client.py)."""
|
|
33
|
+
if os.getenv("SF_TEE_PRELOAD_ONLY", "0") == "1":
|
|
34
|
+
return True
|
|
35
|
+
ld = os.getenv("LD_PRELOAD", "")
|
|
36
|
+
return "libsfnettee.so" in ld or "_sfteepreload" in ld
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def patch_httpcore(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
    """
    Monkey-patch httpcore.ConnectionPool and AsyncConnectionPool
    to inject SAILFISH_TRACING_HEADER + FUNCSPAN_OVERRIDE_HEADER (when allowed)
    and to record every outbound request.

    When LD_PRELOAD is active: ULTRA-FAST path with <10ns overhead (header injection only).
    When LD_PRELOAD is NOT active: Full capture path with body/header recording.

    Args:
        domains_to_not_propagate_headers_to: hostnames that must NOT receive the
            tracing headers; forwarded to the header-injection helpers.
    """
    try:
        import httpcore
    except ImportError:
        return  # HTTP Core not present—skip patch

    # Keep original methods so the patched wrappers can delegate to them.
    orig_sync_req = httpcore.ConnectionPool.request
    orig_sync_stream = httpcore.ConnectionPool.stream
    orig_async_req = httpcore.AsyncConnectionPool.request
    orig_async_stream = httpcore.AsyncConnectionPool.stream

    # Normalize exclude list
    exclude = domains_to_not_propagate_headers_to or []

    # Check if LD_PRELOAD is active
    preload_active = _tee_preload_active()

    # Initialize C extension for ultra-fast header checking (if available)
    if preload_active:
        init_fast_header_check(exclude)

    # Unified _prepare_headers function for both fast and slow paths
    def _prepare_headers(url, existing_headers):
        """
        Returns (new_headers, trace_id, funcspan_override).
        Uses inject_headers_ultrafast() for ultra-fast header injection (~100ns).

        OPTIMIZED: Works with tuples directly, avoids dict conversion roundtrip.
        """
        # CRITICAL: Early exit if header already exists (prevents double injection when httpx->httpcore)
        trace_header_bytes = SAILFISH_TRACING_HEADER.encode()
        if existing_headers:
            for name, _ in existing_headers:
                # BUGFIX: header names may arrive as str (httpx) or bytes
                # (httpcore). Normalize to bytes before comparing — otherwise
                # str == bytes is always False and the dedup guard never fires.
                name_bytes = name.encode("utf-8") if isinstance(name, str) else name
                if name_bytes.lower() == trace_header_bytes.lower():
                    # Header already injected by httpx - just return as-is
                    return list(existing_headers), "", None

        # OPTIMIZED: Use inject_headers_ultrafast with temporary dict, then append as tuples
        # This avoids the expensive dict→tuple→dict→tuple conversion cycle
        headers_dict = {}
        inject_headers_ultrafast(headers_dict, str(url), exclude)

        # OPTIMIZED: Build new header list (existing + new) in single pass
        hdrs = list(existing_headers) if existing_headers else []
        for key, value in headers_dict.items():
            key_bytes = key.encode("utf-8") if isinstance(key, str) else key
            value_bytes = value.encode("utf-8") if isinstance(value, str) else value
            hdrs.append((key_bytes, value_bytes))

        # Get trace_id for capture (only needed in slow path).
        # BUGFIX: parenthesized — the bare conditional expression binds looser
        # than `or`, so the original evaluated as
        # `trace_id_ctx.get(None) or ("" if ... else "")` and leaked a real
        # trace id even when the preload fast path was active.
        trace_id = (trace_id_ctx.get(None) or "") if not preload_active else ""

        return hdrs, trace_id, None

    # 1. Sync .request(...)
    if preload_active:
        # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
        if HAS_WRAPT:

            def instrumented_sync_request(wrapped, instance, args, kwargs):
                """Ultra-fast header injection using C extension via wrapt."""
                # args = (method, url, ...), kwargs = {...}
                url = args[1] if len(args) > 1 else kwargs.get("url", "")
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers
                return wrapped(*args, **kwargs)

            wrapt.wrap_function_wrapper(
                "httpcore", "ConnectionPool.request", instrumented_sync_request
            )
        else:

            def _patched_sync_request(self, method, url, **kwargs):
                # prepare headers & trace (ultra-fast C extension)
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers

                # Immediately call original and return - NO timing, NO capture!
                return orig_sync_req(self, method, url, **kwargs)

            httpcore.ConnectionPool.request = _patched_sync_request
    else:
        # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
        def _patched_sync_request(self, method, url, **kwargs):
            """Timed, fully-captured sync request; records via record_network_request."""
            ts0 = int(time.time() * 1_000)
            # prepare headers & trace
            headers, trace_id, funcspan_override = _prepare_headers(
                url, kwargs.get("headers")
            )
            kwargs["headers"] = headers

            # Capture request data (body bytes + serialized header list).
            req_data = b""
            req_headers = b""
            try:
                if "content" in kwargs:
                    content = kwargs["content"]
                    if isinstance(content, bytes):
                        req_data = content
                    elif isinstance(content, str):
                        req_data = content.encode("utf-8")

                # Capture request headers
                if HAS_ORJSON:
                    req_headers = orjson.dumps([list(h) for h in headers])
                else:
                    req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass

            error = None
            resp_data = b""
            resp_headers = b""
            try:
                resp = orig_sync_req(self, method, url, **kwargs)
                success = True
                status = getattr(resp, "status_code", 0)

                # Capture response data and headers (best-effort only).
                try:
                    resp_data = getattr(resp, "content", b"")
                    if HAS_ORJSON:
                        resp_headers = orjson.dumps([list(h) for h in resp.headers])
                    else:
                        resp_headers = json.dumps(
                            [list(h) for h in resp.headers]
                        ).encode("utf-8")
                except Exception:  # noqa: BLE001
                    pass

                return resp
            except Exception as e:
                success = False
                status = 0
                error = str(e)[:255]
                raise
            finally:
                # Record the request regardless of success/failure.
                ts1 = int(time.time() * 1_000)
                record_network_request(
                    trace_id,
                    str(url),
                    method,
                    status,
                    success,
                    error,
                    ts0,
                    ts1,
                    request_data=req_data,
                    response_data=resp_data,
                    request_headers=req_headers,
                    response_headers=resp_headers,
                )

    # 2. Sync .stream(...)
    if preload_active:
        # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
        if HAS_WRAPT:

            def instrumented_sync_stream(wrapped, instance, args, kwargs):
                """Ultra-fast header injection using C extension via wrapt."""
                url = args[1] if len(args) > 1 else kwargs.get("url", "")
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers
                return wrapped(*args, **kwargs)

            wrapt.wrap_function_wrapper(
                "httpcore", "ConnectionPool.stream", instrumented_sync_stream
            )
        else:

            def _patched_sync_stream(self, method, url, **kwargs):
                # prepare headers & trace (ultra-fast C extension)
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers

                # Immediately call original and return - NO timing, NO capture!
                return orig_sync_stream(self, method, url, **kwargs)

            httpcore.ConnectionPool.stream = _patched_sync_stream
    else:
        # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
        def _patched_sync_stream(self, method, url, **kwargs):
            """Timed sync stream; records request metadata (status unknown here)."""
            ts0 = int(time.time() * 1_000)
            headers, trace_id, funcspan_override = _prepare_headers(
                url, kwargs.get("headers")
            )
            kwargs["headers"] = headers

            # Capture request data (body bytes + serialized header list).
            req_data = b""
            req_headers = b""
            try:
                if "content" in kwargs:
                    content = kwargs["content"]
                    if isinstance(content, bytes):
                        req_data = content
                    elif isinstance(content, str):
                        req_data = content.encode("utf-8")

                # Capture request headers
                if HAS_ORJSON:
                    req_headers = orjson.dumps([list(h) for h in headers])
                else:
                    req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass

            error = None
            resp_headers = b""
            try:
                stream = orig_sync_stream(self, method, url, **kwargs)
                success = True
                # stream itself yields the body; status often on returned object
                status = 0

                # Capture response headers if available (best-effort).
                try:
                    if HAS_ORJSON:
                        resp_headers = orjson.dumps([list(h) for h in stream.headers])
                    else:
                        resp_headers = json.dumps(
                            [list(h) for h in stream.headers]
                        ).encode("utf-8")
                except Exception:  # noqa: BLE001
                    pass

                return stream
            except Exception as e:
                success = False
                status = 0
                error = str(e)[:255]
                raise
            finally:
                ts1 = int(time.time() * 1_000)
                record_network_request(
                    trace_id,
                    str(url),
                    method,
                    status,
                    success,
                    error,
                    ts0,
                    ts1,
                    request_data=req_data,
                    request_headers=req_headers,
                    response_headers=resp_headers,
                )

    # 3. Async .request(...)
    if preload_active:
        # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
        if HAS_WRAPT:

            async def instrumented_async_request(wrapped, instance, args, kwargs):
                """Ultra-fast header injection using C extension via wrapt."""
                url = args[1] if len(args) > 1 else kwargs.get("url", "")
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers
                return await wrapped(*args, **kwargs)

            wrapt.wrap_function_wrapper(
                "httpcore", "AsyncConnectionPool.request", instrumented_async_request
            )
        else:

            async def _patched_async_request(self, method, url, **kwargs):
                # prepare headers & trace (ultra-fast C extension)
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers

                # Immediately call original and return - NO timing, NO capture!
                return await orig_async_req(self, method, url, **kwargs)

    else:
        # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
        async def _patched_async_request(self, method, url, **kwargs):
            """Timed, fully-captured async request; records via record_network_request."""
            ts0 = int(time.time() * 1_000)
            headers, trace_id, funcspan_override = _prepare_headers(
                url, kwargs.get("headers")
            )
            kwargs["headers"] = headers

            # Capture request data (body bytes + serialized header list).
            req_data = b""
            req_headers = b""
            try:
                if "content" in kwargs:
                    content = kwargs["content"]
                    if isinstance(content, bytes):
                        req_data = content
                    elif isinstance(content, str):
                        req_data = content.encode("utf-8")

                # Capture request headers
                if HAS_ORJSON:
                    req_headers = orjson.dumps([list(h) for h in headers])
                else:
                    req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass

            error = None
            resp_data = b""
            resp_headers = b""
            try:
                resp = await orig_async_req(self, method, url, **kwargs)
                success = True
                status = getattr(resp, "status_code", 0)

                # Capture response data and headers (best-effort only).
                try:
                    resp_data = getattr(resp, "content", b"")
                    if HAS_ORJSON:
                        resp_headers = orjson.dumps([list(h) for h in resp.headers])
                    else:
                        resp_headers = json.dumps(
                            [list(h) for h in resp.headers]
                        ).encode("utf-8")
                except Exception:  # noqa: BLE001
                    pass

                return resp
            except Exception as e:
                success = False
                status = 0
                error = str(e)[:255]
                raise
            finally:
                ts1 = int(time.time() * 1_000)
                record_network_request(
                    trace_id,
                    str(url),
                    method,
                    status,
                    success,
                    error,
                    ts0,
                    ts1,
                    request_data=req_data,
                    response_data=resp_data,
                    request_headers=req_headers,
                    response_headers=resp_headers,
                )

    # 4. Async .stream(...)
    if preload_active:
        # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
        if HAS_WRAPT:

            def instrumented_async_stream(wrapped, instance, args, kwargs):
                """Ultra-fast header injection using C extension via wrapt."""
                url = args[1] if len(args) > 1 else kwargs.get("url", "")
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers
                return wrapped(*args, **kwargs)

            wrapt.wrap_function_wrapper(
                "httpcore", "AsyncConnectionPool.stream", instrumented_async_stream
            )
        else:

            def _patched_async_stream(self, method, url, **kwargs):
                # prepare headers & trace (ultra-fast C extension)
                headers, trace_id, funcspan_override = _prepare_headers(
                    url, kwargs.get("headers")
                )
                kwargs["headers"] = headers

                # Immediately call original and return - NO timing, NO capture!
                # NOTE: not async — .stream returns an async context manager,
                # which we pass through untouched.
                return orig_async_stream(self, method, url, **kwargs)

    else:
        # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
        async def _patched_async_stream(self, method, url, **kwargs):
            """Wrap the async stream CM so the request is recorded on __aexit__."""
            ts0 = int(time.time() * 1_000)
            headers, trace_id, funcspan_override = _prepare_headers(
                url, kwargs.get("headers")
            )
            kwargs["headers"] = headers

            # Capture request data (body bytes + serialized header list).
            req_data = b""
            req_headers = b""
            try:
                if "content" in kwargs:
                    content = kwargs["content"]
                    if isinstance(content, bytes):
                        req_data = content
                    elif isinstance(content, str):
                        req_data = content.encode("utf-8")

                # Capture request headers
                if HAS_ORJSON:
                    req_headers = orjson.dumps([list(h) for h in headers])
                else:
                    req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
            except Exception:  # noqa: BLE001
                pass

            original_cm = orig_async_stream(self, method, url, **kwargs)

            class _StreamCM:
                """Proxy async CM that records the request when the stream closes."""

                def __init__(self, cm, req_d, req_h):
                    self._cm = cm
                    self._status = 0
                    self._req_data = req_d
                    self._req_headers = req_h
                    self._resp_headers = b""

                async def __aenter__(self):
                    response = await self._cm.__aenter__()  # now a single Response
                    # capture status (httpcore.Response.status or status_code)
                    self._status = getattr(
                        response, "status_code", getattr(response, "status", 0)
                    )

                    # Capture response headers.
                    # ROBUSTNESS FIX: best-effort like every other path — a
                    # serialization failure must not break the caller's stream.
                    try:
                        if HAS_ORJSON:
                            self._resp_headers = orjson.dumps(
                                [list(h) for h in response.headers]
                            )
                        else:
                            self._resp_headers = json.dumps(
                                [list(h) for h in response.headers]
                            ).encode("utf-8")
                    except Exception:  # noqa: BLE001
                        pass

                    return response

                async def __aexit__(self, exc_type, exc, tb):
                    success = exc_type is None
                    ts1 = int(time.time() * 1_000)
                    record_network_request(
                        trace_id,
                        str(url),
                        method,
                        self._status,
                        success,
                        None if success else str(exc)[:255],
                        ts0,
                        ts1,
                        request_data=self._req_data,
                        request_headers=self._req_headers,
                        response_headers=self._resp_headers,
                    )
                    return await self._cm.__aexit__(exc_type, exc, tb)

            return _StreamCM(original_cm, req_data, req_headers)

    # Apply patches (only if NOT using wrapt - wrapt already applied them)
    if not (HAS_WRAPT and preload_active):
        httpcore.ConnectionPool.request = _patched_sync_request
        httpcore.ConnectionPool.stream = _patched_sync_stream
        httpcore.AsyncConnectionPool.request = _patched_async_request
        httpcore.AsyncConnectionPool.stream = _patched_async_stream
|