sf_veritas-0.11.10-cp314-cp314-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. sf_veritas/__init__.py +46 -0
  2. sf_veritas/_auto_preload.py +73 -0
  3. sf_veritas/_sfconfig.c +162 -0
  4. sf_veritas/_sfconfig.cpython-314-x86_64-linux-gnu.so +0 -0
  5. sf_veritas/_sfcrashhandler.c +267 -0
  6. sf_veritas/_sfcrashhandler.cpython-314-x86_64-linux-gnu.so +0 -0
  7. sf_veritas/_sffastlog.c +953 -0
  8. sf_veritas/_sffastlog.cpython-314-x86_64-linux-gnu.so +0 -0
  9. sf_veritas/_sffastnet.c +994 -0
  10. sf_veritas/_sffastnet.cpython-314-x86_64-linux-gnu.so +0 -0
  11. sf_veritas/_sffastnetworkrequest.c +727 -0
  12. sf_veritas/_sffastnetworkrequest.cpython-314-x86_64-linux-gnu.so +0 -0
  13. sf_veritas/_sffuncspan.c +2791 -0
  14. sf_veritas/_sffuncspan.cpython-314-x86_64-linux-gnu.so +0 -0
  15. sf_veritas/_sffuncspan_config.c +730 -0
  16. sf_veritas/_sffuncspan_config.cpython-314-x86_64-linux-gnu.so +0 -0
  17. sf_veritas/_sfheadercheck.c +341 -0
  18. sf_veritas/_sfheadercheck.cpython-314-x86_64-linux-gnu.so +0 -0
  19. sf_veritas/_sfnetworkhop.c +1454 -0
  20. sf_veritas/_sfnetworkhop.cpython-314-x86_64-linux-gnu.so +0 -0
  21. sf_veritas/_sfservice.c +1223 -0
  22. sf_veritas/_sfservice.cpython-314-x86_64-linux-gnu.so +0 -0
  23. sf_veritas/_sfteepreload.c +6227 -0
  24. sf_veritas/app_config.py +57 -0
  25. sf_veritas/cli.py +336 -0
  26. sf_veritas/constants.py +10 -0
  27. sf_veritas/custom_excepthook.py +304 -0
  28. sf_veritas/custom_log_handler.py +146 -0
  29. sf_veritas/custom_output_wrapper.py +153 -0
  30. sf_veritas/custom_print.py +153 -0
  31. sf_veritas/django_app.py +5 -0
  32. sf_veritas/env_vars.py +186 -0
  33. sf_veritas/exception_handling_middleware.py +18 -0
  34. sf_veritas/exception_metaclass.py +69 -0
  35. sf_veritas/fast_frame_info.py +116 -0
  36. sf_veritas/fast_network_hop.py +293 -0
  37. sf_veritas/frame_tools.py +112 -0
  38. sf_veritas/funcspan_config_loader.py +693 -0
  39. sf_veritas/function_span_profiler.py +1313 -0
  40. sf_veritas/get_preload_path.py +34 -0
  41. sf_veritas/import_hook.py +62 -0
  42. sf_veritas/infra_details/__init__.py +3 -0
  43. sf_veritas/infra_details/get_infra_details.py +24 -0
  44. sf_veritas/infra_details/kubernetes/__init__.py +3 -0
  45. sf_veritas/infra_details/kubernetes/get_cluster_name.py +147 -0
  46. sf_veritas/infra_details/kubernetes/get_details.py +7 -0
  47. sf_veritas/infra_details/running_on/__init__.py +17 -0
  48. sf_veritas/infra_details/running_on/kubernetes.py +11 -0
  49. sf_veritas/interceptors.py +543 -0
  50. sf_veritas/libsfnettee.so +0 -0
  51. sf_veritas/local_env_detect.py +118 -0
  52. sf_veritas/package_metadata.py +6 -0
  53. sf_veritas/patches/__init__.py +0 -0
  54. sf_veritas/patches/_patch_tracker.py +74 -0
  55. sf_veritas/patches/concurrent_futures.py +19 -0
  56. sf_veritas/patches/constants.py +1 -0
  57. sf_veritas/patches/exceptions.py +82 -0
  58. sf_veritas/patches/multiprocessing.py +32 -0
  59. sf_veritas/patches/network_libraries/__init__.py +99 -0
  60. sf_veritas/patches/network_libraries/aiohttp.py +294 -0
  61. sf_veritas/patches/network_libraries/curl_cffi.py +363 -0
  62. sf_veritas/patches/network_libraries/http_client.py +670 -0
  63. sf_veritas/patches/network_libraries/httpcore.py +580 -0
  64. sf_veritas/patches/network_libraries/httplib2.py +315 -0
  65. sf_veritas/patches/network_libraries/httpx.py +557 -0
  66. sf_veritas/patches/network_libraries/niquests.py +218 -0
  67. sf_veritas/patches/network_libraries/pycurl.py +399 -0
  68. sf_veritas/patches/network_libraries/requests.py +595 -0
  69. sf_veritas/patches/network_libraries/ssl_socket.py +822 -0
  70. sf_veritas/patches/network_libraries/tornado.py +360 -0
  71. sf_veritas/patches/network_libraries/treq.py +270 -0
  72. sf_veritas/patches/network_libraries/urllib_request.py +483 -0
  73. sf_veritas/patches/network_libraries/utils.py +598 -0
  74. sf_veritas/patches/os.py +17 -0
  75. sf_veritas/patches/threading.py +231 -0
  76. sf_veritas/patches/web_frameworks/__init__.py +54 -0
  77. sf_veritas/patches/web_frameworks/aiohttp.py +798 -0
  78. sf_veritas/patches/web_frameworks/async_websocket_consumer.py +337 -0
  79. sf_veritas/patches/web_frameworks/blacksheep.py +532 -0
  80. sf_veritas/patches/web_frameworks/bottle.py +513 -0
  81. sf_veritas/patches/web_frameworks/cherrypy.py +683 -0
  82. sf_veritas/patches/web_frameworks/cors_utils.py +122 -0
  83. sf_veritas/patches/web_frameworks/django.py +963 -0
  84. sf_veritas/patches/web_frameworks/eve.py +401 -0
  85. sf_veritas/patches/web_frameworks/falcon.py +931 -0
  86. sf_veritas/patches/web_frameworks/fastapi.py +738 -0
  87. sf_veritas/patches/web_frameworks/flask.py +526 -0
  88. sf_veritas/patches/web_frameworks/klein.py +501 -0
  89. sf_veritas/patches/web_frameworks/litestar.py +616 -0
  90. sf_veritas/patches/web_frameworks/pyramid.py +440 -0
  91. sf_veritas/patches/web_frameworks/quart.py +841 -0
  92. sf_veritas/patches/web_frameworks/robyn.py +708 -0
  93. sf_veritas/patches/web_frameworks/sanic.py +874 -0
  94. sf_veritas/patches/web_frameworks/starlette.py +742 -0
  95. sf_veritas/patches/web_frameworks/strawberry.py +1446 -0
  96. sf_veritas/patches/web_frameworks/tornado.py +485 -0
  97. sf_veritas/patches/web_frameworks/utils.py +170 -0
  98. sf_veritas/print_override.py +13 -0
  99. sf_veritas/regular_data_transmitter.py +444 -0
  100. sf_veritas/request_interceptor.py +401 -0
  101. sf_veritas/request_utils.py +550 -0
  102. sf_veritas/segfault_handler.py +116 -0
  103. sf_veritas/server_status.py +1 -0
  104. sf_veritas/shutdown_flag.py +11 -0
  105. sf_veritas/subprocess_startup.py +3 -0
  106. sf_veritas/test_cli.py +145 -0
  107. sf_veritas/thread_local.py +1319 -0
  108. sf_veritas/timeutil.py +114 -0
  109. sf_veritas/transmit_exception_to_sailfish.py +28 -0
  110. sf_veritas/transmitter.py +132 -0
  111. sf_veritas/types.py +47 -0
  112. sf_veritas/unified_interceptor.py +1678 -0
  113. sf_veritas/utils.py +39 -0
  114. sf_veritas-0.11.10.dist-info/METADATA +97 -0
  115. sf_veritas-0.11.10.dist-info/RECORD +141 -0
  116. sf_veritas-0.11.10.dist-info/WHEEL +5 -0
  117. sf_veritas-0.11.10.dist-info/entry_points.txt +2 -0
  118. sf_veritas-0.11.10.dist-info/top_level.txt +1 -0
  119. sf_veritas.libs/libbrotlicommon-6ce2a53c.so.1.0.6 +0 -0
  120. sf_veritas.libs/libbrotlidec-811d1be3.so.1.0.6 +0 -0
  121. sf_veritas.libs/libcom_err-730ca923.so.2.1 +0 -0
  122. sf_veritas.libs/libcrypt-52aca757.so.1.1.0 +0 -0
  123. sf_veritas.libs/libcrypto-bdaed0ea.so.1.1.1k +0 -0
  124. sf_veritas.libs/libcurl-eaa3cf66.so.4.5.0 +0 -0
  125. sf_veritas.libs/libgssapi_krb5-323bbd21.so.2.2 +0 -0
  126. sf_veritas.libs/libidn2-2f4a5893.so.0.3.6 +0 -0
  127. sf_veritas.libs/libk5crypto-9a74ff38.so.3.1 +0 -0
  128. sf_veritas.libs/libkeyutils-2777d33d.so.1.6 +0 -0
  129. sf_veritas.libs/libkrb5-a55300e8.so.3.3 +0 -0
  130. sf_veritas.libs/libkrb5support-e6594cfc.so.0.1 +0 -0
  131. sf_veritas.libs/liblber-2-d20824ef.4.so.2.10.9 +0 -0
  132. sf_veritas.libs/libldap-2-cea2a960.4.so.2.10.9 +0 -0
  133. sf_veritas.libs/libnghttp2-39367a22.so.14.17.0 +0 -0
  134. sf_veritas.libs/libpcre2-8-516f4c9d.so.0.7.1 +0 -0
  135. sf_veritas.libs/libpsl-99becdd3.so.5.3.1 +0 -0
  136. sf_veritas.libs/libsasl2-7de4d792.so.3.0.0 +0 -0
  137. sf_veritas.libs/libselinux-d0805dcb.so.1 +0 -0
  138. sf_veritas.libs/libssh-c11d285b.so.4.8.7 +0 -0
  139. sf_veritas.libs/libssl-60250281.so.1.1.1k +0 -0
  140. sf_veritas.libs/libunistring-05abdd40.so.2.1.0 +0 -0
  141. sf_veritas.libs/libuuid-95b83d40.so.1.3.0 +0 -0
sf_veritas/patches/network_libraries/httpcore.py
@@ -0,0 +1,580 @@
+ import os
+ import time
+ from typing import List, Optional
+
+ try:
+     import wrapt
+
+     HAS_WRAPT = True
+ except ImportError:
+     HAS_WRAPT = False
+
+ from ...constants import SAILFISH_TRACING_HEADER
+ from ...thread_local import trace_id_ctx
+ from .utils import (
+     init_fast_header_check,
+     inject_headers_ultrafast,
+     is_ssl_socket_active,  # Used by sync methods to avoid double-capture with ssl_socket.py
+     record_network_request,
+ )
+
+ # JSON serialization - try fast orjson first, fallback to stdlib json
+ try:
+     import orjson
+
+     HAS_ORJSON = True
+ except ImportError:
+     import json
+
+     HAS_ORJSON = False
+
+
+ def _tee_preload_active() -> bool:
+     """Detect if LD_PRELOAD tee is active (same logic as http_client.py)."""
+     if os.getenv("SF_TEE_PRELOAD_ONLY", "0") == "1":
+         return True
+     ld = os.getenv("LD_PRELOAD", "")
+     return "libsfnettee.so" in ld or "_sfteepreload" in ld
+
+
+ def _to_str(value):
+     """Convert bytes or any value to str. Properly handles bytes URLs/methods."""
+     if isinstance(value, bytes):
+         return value.decode('utf-8', errors='replace')
+     return str(value)
+
+
+ def patch_httpcore(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
+     """
+     Monkey-patch httpcore.ConnectionPool and AsyncConnectionPool
+     to inject SAILFISH_TRACING_HEADER + FUNCSPAN_OVERRIDE_HEADER (when allowed)
+     and to record every outbound request.
+
+     When LD_PRELOAD is active: ULTRA-FAST path with <10ns overhead (header injection only).
+     When LD_PRELOAD is NOT active: Full capture path with body/header recording.
+     """
+     try:
+         import httpcore
+     except ImportError:
+         return  # HTTP Core not present—skip patch
+
+     # Keep original methods
+     orig_sync_req = httpcore.ConnectionPool.request
+     orig_sync_stream = httpcore.ConnectionPool.stream
+     orig_async_req = httpcore.AsyncConnectionPool.request
+     orig_async_stream = httpcore.AsyncConnectionPool.stream
+
+     # Normalize exclude list
+     exclude = domains_to_not_propagate_headers_to or []
+
+     # Check if LD_PRELOAD is active
+     preload_active = _tee_preload_active()
+
+     # Initialize C extension for ultra-fast header checking (if available)
+     if preload_active:
+         init_fast_header_check(exclude)
+
+     # Unified _prepare_headers function for both fast and slow paths
+     def _prepare_headers(url, existing_headers):
+         """
+         Returns (new_headers, trace_id, funcspan_override).
+         Uses inject_headers_ultrafast() for ultra-fast header injection (~100ns).
+
+         OPTIMIZED: Works with tuples directly, avoids dict conversion roundtrip.
+         """
+         # CRITICAL: Early exit if header already exists (prevents double injection when httpx->httpcore)
+         trace_header_bytes = SAILFISH_TRACING_HEADER.encode()
+         if existing_headers:
+             for name, _ in existing_headers:
+                 if name.lower() == trace_header_bytes.lower():
+                     # Header already injected by httpx - just return as-is
+                     return list(existing_headers), "", None
+
+         # OPTIMIZED: Use inject_headers_ultrafast with temporary dict, then append as tuples
+         # This avoids the expensive dict→tuple→dict→tuple conversion cycle
+         headers_dict = {}
+         inject_headers_ultrafast(headers_dict, str(url), exclude)
+
+         # OPTIMIZED: Build new header list (existing + new) in single pass
+         hdrs = list(existing_headers) if existing_headers else []
+         for key, value in headers_dict.items():
+             key_bytes = key.encode("utf-8") if isinstance(key, str) else key
+             value_bytes = value.encode("utf-8") if isinstance(value, str) else value
+             hdrs.append((key_bytes, value_bytes))
+
+         # Get trace_id for capture (only needed in slow path)
+         trace_id = trace_id_ctx.get(None) or "" if not preload_active else ""
+
+         return hdrs, trace_id, None
+
+     # 1. Sync .request(...)
+     if preload_active:
+         # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
+         if HAS_WRAPT:
+
+             def instrumented_sync_request(wrapped, instance, args, kwargs):
+                 """Ultra-fast header injection using C extension via wrapt."""
+                 # args = (method, url, ...), kwargs = {...}
+                 url = args[1] if len(args) > 1 else kwargs.get("url", "")
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+                 return wrapped(*args, **kwargs)
+
+             wrapt.wrap_function_wrapper(
+                 "httpcore", "ConnectionPool.request", instrumented_sync_request
+             )
+         else:
+
+             def _patched_sync_request(self, method, url, **kwargs):
+                 # prepare headers & trace (ultra-fast C extension)
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+
+                 # Immediately call original and return - NO timing, NO capture!
+                 return orig_sync_req(self, method, url, **kwargs)
+
+             httpcore.ConnectionPool.request = _patched_sync_request
+     else:
+         # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
+         def _patched_sync_request(self, method, url, **kwargs):
+             # SYNC httpcore DOES use ssl.SSLSocket underneath, so skip if ssl_socket is active
+             url_str = _to_str(url)
+             is_https = url_str.startswith("https://")
+             if is_https and is_ssl_socket_active():
+                 # ssl_socket.py will handle capture, just make the request
+                 headers, _, _ = _prepare_headers(url, kwargs.get("headers"))
+                 kwargs["headers"] = headers
+                 return orig_sync_req(self, method, url, **kwargs)
+
+             ts0 = int(time.time() * 1_000)
+             # prepare headers & trace
+             headers, trace_id, funcspan_override = _prepare_headers(
+                 url, kwargs.get("headers")
+             )
+             kwargs["headers"] = headers
+
+             # Capture request data
+             req_data = b""
+             req_headers = b""
+             try:
+                 if "content" in kwargs:
+                     content = kwargs["content"]
+                     if isinstance(content, bytes):
+                         req_data = content
+                     elif isinstance(content, str):
+                         req_data = content.encode("utf-8")
+
+                 # Capture request headers
+                 if HAS_ORJSON:
+                     req_headers = orjson.dumps([list(h) for h in headers])
+                 else:
+                     req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
+             except Exception:  # noqa: BLE001
+                 pass
+
+             error = None
+             resp_data = b""
+             resp_headers = b""
+             try:
+                 resp = orig_sync_req(self, method, url, **kwargs)
+                 success = True
+                 status = getattr(resp, "status_code", 0)
+
+                 # Capture response data and headers
+                 try:
+                     resp_data = getattr(resp, "content", b"")
+                     if HAS_ORJSON:
+                         resp_headers = orjson.dumps([list(h) for h in resp.headers])
+                     else:
+                         resp_headers = json.dumps(
+                             [list(h) for h in resp.headers]
+                         ).encode("utf-8")
+                 except Exception:  # noqa: BLE001
+                     pass
+
+                 return resp
+             except Exception as e:
+                 success = False
+                 status = 0
+                 error = str(e)[:255]
+                 raise
+             finally:
+                 ts1 = int(time.time() * 1_000)
+                 record_network_request(
+                     trace_id,
+                     _to_str(url),
+                     _to_str(method),
+                     status,
+                     success,
+                     error,
+                     ts0,
+                     ts1,
+                     request_data=req_data,
+                     response_data=resp_data,
+                     request_headers=req_headers,
+                     response_headers=resp_headers,
+                 )
+
+     # 2. Sync .stream(...)
+     if preload_active:
+         # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
+         if HAS_WRAPT:
+
+             def instrumented_sync_stream(wrapped, instance, args, kwargs):
+                 """Ultra-fast header injection using C extension via wrapt."""
+                 url = args[1] if len(args) > 1 else kwargs.get("url", "")
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+                 return wrapped(*args, **kwargs)
+
+             wrapt.wrap_function_wrapper(
+                 "httpcore", "ConnectionPool.stream", instrumented_sync_stream
+             )
+         else:
+
+             def _patched_sync_stream(self, method, url, **kwargs):
+                 # prepare headers & trace (ultra-fast C extension)
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+
+                 # Immediately call original and return - NO timing, NO capture!
+                 return orig_sync_stream(self, method, url, **kwargs)
+
+             httpcore.ConnectionPool.stream = _patched_sync_stream
+     else:
+         # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
+         def _patched_sync_stream(self, method, url, **kwargs):
+             # SYNC httpcore DOES use ssl.SSLSocket underneath, so skip if ssl_socket is active
+             url_str = _to_str(url)
+             is_https = url_str.startswith("https://")
+             if is_https and is_ssl_socket_active():
+                 # ssl_socket.py will handle capture, just make the request
+                 headers, _, _ = _prepare_headers(url, kwargs.get("headers"))
+                 kwargs["headers"] = headers
+                 return orig_sync_stream(self, method, url, **kwargs)
+
+             ts0 = int(time.time() * 1_000)
+             headers, trace_id, funcspan_override = _prepare_headers(
+                 url, kwargs.get("headers")
+             )
+             kwargs["headers"] = headers
+
+             # Capture request data
+             req_data = b""
+             req_headers = b""
+             try:
+                 if "content" in kwargs:
+                     content = kwargs["content"]
+                     if isinstance(content, bytes):
+                         req_data = content
+                     elif isinstance(content, str):
+                         req_data = content.encode("utf-8")
+
+                 # Capture request headers
+                 if HAS_ORJSON:
+                     req_headers = orjson.dumps([list(h) for h in headers])
+                 else:
+                     req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
+             except Exception:  # noqa: BLE001
+                 pass
+
+             error = None
+             resp_headers = b""
+             try:
+                 stream = orig_sync_stream(self, method, url, **kwargs)
+                 success = True
+                 # stream itself yields the body; status often on returned object
+                 status = 0
+
+                 # Capture response headers if available
+                 try:
+                     if HAS_ORJSON:
+                         resp_headers = orjson.dumps([list(h) for h in stream.headers])
+                     else:
+                         resp_headers = json.dumps(
+                             [list(h) for h in stream.headers]
+                         ).encode("utf-8")
+                 except Exception:  # noqa: BLE001
+                     pass
+
+                 return stream
+             except Exception as e:
+                 success = False
+                 status = 0
+                 error = str(e)[:255]
+                 raise
+             finally:
+                 ts1 = int(time.time() * 1_000)
+                 record_network_request(
+                     trace_id,
+                     _to_str(url),
+                     _to_str(method),
+                     status,
+                     success,
+                     error,
+                     ts0,
+                     ts1,
+                     request_data=req_data,
+                     request_headers=req_headers,
+                     response_headers=resp_headers,
+                 )
+
+     # 3. Async .request(...)
+     if preload_active:
+         # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
+         if HAS_WRAPT:
+
+             async def instrumented_async_request(wrapped, instance, args, kwargs):
+                 """Ultra-fast header injection using C extension via wrapt."""
+                 url = args[1] if len(args) > 1 else kwargs.get("url", "")
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+                 return await wrapped(*args, **kwargs)
+
+             wrapt.wrap_function_wrapper(
+                 "httpcore", "AsyncConnectionPool.request", instrumented_async_request
+             )
+         else:
+
+             async def _patched_async_request(self, method, url, **kwargs):
+                 # prepare headers & trace (ultra-fast C extension)
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+
+                 # Immediately call original and return - NO timing, NO capture!
+                 return await orig_async_req(self, method, url, **kwargs)
+
+     else:
+         # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
+         async def _patched_async_request(self, method, url, **kwargs):
+             # ASYNC httpcore has its own SSL implementation that bypasses ssl.SSLSocket,
+             # so we must ALWAYS capture here regardless of is_ssl_socket_active().
+             # ssl_socket.py cannot capture httpcore async traffic.
+
+             ts0 = int(time.time() * 1_000)
+             headers, trace_id, funcspan_override = _prepare_headers(
+                 url, kwargs.get("headers")
+             )
+             kwargs["headers"] = headers
+
+             # Capture request data
+             req_data = b""
+             req_headers = b""
+             try:
+                 if "content" in kwargs:
+                     content = kwargs["content"]
+                     if isinstance(content, bytes):
+                         req_data = content
+                     elif isinstance(content, str):
+                         req_data = content.encode("utf-8")
+
+                 # Capture request headers
+                 if HAS_ORJSON:
+                     req_headers = orjson.dumps([list(h) for h in headers])
+                 else:
+                     req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
+             except Exception:  # noqa: BLE001
+                 pass
+
+             error = None
+             resp_data = b""
+             resp_headers = b""
+             try:
+                 resp = await orig_async_req(self, method, url, **kwargs)
+                 success = True
+                 status = getattr(resp, "status_code", 0)
+
+                 # Capture response data and headers
+                 try:
+                     resp_data = getattr(resp, "content", b"")
+                     if HAS_ORJSON:
+                         resp_headers = orjson.dumps([list(h) for h in resp.headers])
+                     else:
+                         resp_headers = json.dumps(
+                             [list(h) for h in resp.headers]
+                         ).encode("utf-8")
+                 except Exception:  # noqa: BLE001
+                     pass
+
+                 return resp
+             except Exception as e:
+                 success = False
+                 status = 0
+                 error = str(e)[:255]
+                 raise
+             finally:
+                 ts1 = int(time.time() * 1_000)
+                 record_network_request(
+                     trace_id,
+                     _to_str(url),
+                     _to_str(method),
+                     status,
+                     success,
+                     error,
+                     ts0,
+                     ts1,
+                     request_data=req_data,
+                     response_data=resp_data,
+                     request_headers=req_headers,
+                     response_headers=resp_headers,
+                 )
+
+     # 4. Async .stream(...)
+     if preload_active:
+         # ========== ULTRA-FAST PATH: When LD_PRELOAD is active ==========
+         if HAS_WRAPT:
+
+             def instrumented_async_stream(wrapped, instance, args, kwargs):
+                 """Ultra-fast header injection using C extension via wrapt."""
+                 url = args[1] if len(args) > 1 else kwargs.get("url", "")
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+                 return wrapped(*args, **kwargs)
+
+             wrapt.wrap_function_wrapper(
+                 "httpcore", "AsyncConnectionPool.stream", instrumented_async_stream
+             )
+         else:
+
+             def _patched_async_stream(self, method, url, **kwargs):
+                 # prepare headers & trace (ultra-fast C extension)
+                 headers, trace_id, funcspan_override = _prepare_headers(
+                     url, kwargs.get("headers")
+                 )
+                 kwargs["headers"] = headers
+
+                 # Return the async context manager directly (do NOT await!)
+                 return orig_async_stream(self, method, url, **kwargs)
+
+     else:
+         # ========== FULL CAPTURE PATH: When LD_PRELOAD is NOT active ==========
+         def _patched_async_stream(self, method, url, **kwargs):
+             # ASYNC httpcore has its own SSL implementation that bypasses ssl.SSLSocket,
+             # so we must ALWAYS capture here regardless of is_ssl_socket_active().
+             # ssl_socket.py cannot capture httpcore async traffic.
+
+             # Debug: Log decision
+             import os as _os
+             if _os.getenv('SF_DEBUG', 'false').lower() == 'true':
+                 url_str = _to_str(url)
+                 print(f"[httpcore.py] _patched_async_stream: CAPTURING request url={url_str} (httpcore async has own SSL stack, bypasses ssl.SSLSocket)", log=False)
+
+             ts0 = int(time.time() * 1_000)
+             headers, trace_id, funcspan_override = _prepare_headers(
+                 url, kwargs.get("headers")
+             )
+             kwargs["headers"] = headers
+
+             # Capture request data
+             req_data = b""
+             req_headers = b""
+             try:
+                 if "content" in kwargs:
+                     content = kwargs["content"]
+                     if isinstance(content, bytes):
+                         req_data = content
+                     elif isinstance(content, str):
+                         req_data = content.encode("utf-8")
+
+                 # Capture request headers
+                 if HAS_ORJSON:
+                     req_headers = orjson.dumps([list(h) for h in headers])
+                 else:
+                     req_headers = json.dumps([list(h) for h in headers]).encode("utf-8")
+             except Exception:  # noqa: BLE001
+                 pass
+
+             original_cm = orig_async_stream(self, method, url, **kwargs)
+
+             class _StreamCM:
+                 def __init__(self, cm, req_d, req_h):
+                     self._cm = cm
+                     self._status = 0
+                     self._req_data = req_d
+                     self._req_headers = req_h
+                     self._resp_headers = b""
+
+                 async def __aenter__(self):
+                     result = await self._cm.__aenter__()
+
+                     headers_iter = None
+                     if isinstance(result, tuple) and len(result) == 4:
+                         # Legacy httpcore stream signature -> (status, headers, stream, ext)
+                         self._status = result[0]
+                         headers_iter = result[1]
+                     else:
+                         # Newer httpcore returns Response
+                         self._status = getattr(
+                             result, "status_code", getattr(result, "status", 0)
+                         )
+                         headers_iter = getattr(result, "headers", None)
+
+                     def _normalize_headers(iterable):
+                         rows = []
+                         if not iterable:
+                             return rows
+                         try:
+                             iterator = iterable.items() if hasattr(iterable, "items") else iterable
+                         except Exception:  # noqa: BLE001
+                             return rows
+                         for item in iterator or []:
+                             try:
+                                 name, value = item
+                             except Exception:  # noqa: BLE001
+                                 continue
+                             if isinstance(name, (bytes, bytearray)):
+                                 name = name.decode("latin-1", "ignore")
+                             if isinstance(value, (bytes, bytearray)):
+                                 value = value.decode("latin-1", "ignore")
+                             rows.append([name, value])
+                         return rows
+
+                     header_list = _normalize_headers(headers_iter)
+
+                     if HAS_ORJSON:
+                         self._resp_headers = orjson.dumps(header_list)
+                     else:
+                         self._resp_headers = json.dumps(header_list).encode("utf-8")
+
+                     return result
+
+                 async def __aexit__(self, exc_type, exc, tb):
+                     success = exc_type is None
+                     ts1 = int(time.time() * 1_000)
+                     record_network_request(
+                         trace_id,
+                         _to_str(url),
+                         _to_str(method),
+                         self._status,
+                         success,
+                         None if success else str(exc)[:255],
+                         ts0,
+                         ts1,
+                         request_data=self._req_data,
+                         request_headers=self._req_headers,
+                         response_headers=self._resp_headers,
+                     )
+                     return await self._cm.__aexit__(exc_type, exc, tb)
+
+             return _StreamCM(original_cm, req_data, req_headers)
+
+     # Apply patches (only if NOT using wrapt - wrapt already applied them)
+     if not (HAS_WRAPT and preload_active):
+         httpcore.ConnectionPool.request = _patched_sync_request
+         httpcore.ConnectionPool.stream = _patched_sync_stream
+         httpcore.AsyncConnectionPool.request = _patched_async_request
+         httpcore.AsyncConnectionPool.stream = _patched_async_stream
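Illustrative usage (editor's sketch, not part of the published wheel): `patch_httpcore()` is the entry point this module defines, and once it runs, any httpcore-backed client such as httpx flows through the patched pool methods. In the shipped package the patch is presumably applied by sf_veritas's own bootstrap rather than by hand; the import path below simply mirrors the file location in the listing above, and the domain names and URL are placeholders.

from sf_veritas.patches.network_libraries.httpcore import patch_httpcore
import httpx

# Rewire ConnectionPool / AsyncConnectionPool before any pools are created.
# Hosts in this list are excluded from tracing-header propagation.
patch_httpcore(domains_to_not_propagate_headers_to=["auth.example.com"])

# With LD_PRELOAD active only the tracing header is injected (ultra-fast path);
# otherwise the full-capture path also records bodies, headers, status, and timing.
resp = httpx.get("https://api.example.com/health")
print(resp.status_code)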