sf-veritas 0.9.7 (sf_veritas-0.9.7-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of sf-veritas might be problematic.

Files changed (86)
  1. sf_veritas/.gitignore +2 -0
  2. sf_veritas/__init__.py +4 -0
  3. sf_veritas/app_config.py +49 -0
  4. sf_veritas/cli.py +336 -0
  5. sf_veritas/constants.py +3 -0
  6. sf_veritas/custom_excepthook.py +285 -0
  7. sf_veritas/custom_log_handler.py +53 -0
  8. sf_veritas/custom_output_wrapper.py +107 -0
  9. sf_veritas/custom_print.py +34 -0
  10. sf_veritas/django_app.py +5 -0
  11. sf_veritas/env_vars.py +83 -0
  12. sf_veritas/exception_handling_middleware.py +18 -0
  13. sf_veritas/exception_metaclass.py +69 -0
  14. sf_veritas/frame_tools.py +112 -0
  15. sf_veritas/import_hook.py +62 -0
  16. sf_veritas/infra_details/__init__.py +3 -0
  17. sf_veritas/infra_details/get_infra_details.py +24 -0
  18. sf_veritas/infra_details/kubernetes/__init__.py +3 -0
  19. sf_veritas/infra_details/kubernetes/get_cluster_name.py +147 -0
  20. sf_veritas/infra_details/kubernetes/get_details.py +7 -0
  21. sf_veritas/infra_details/running_on/__init__.py +17 -0
  22. sf_veritas/infra_details/running_on/kubernetes.py +11 -0
  23. sf_veritas/interceptors.py +252 -0
  24. sf_veritas/local_env_detect.py +118 -0
  25. sf_veritas/package_metadata.py +6 -0
  26. sf_veritas/patches/__init__.py +0 -0
  27. sf_veritas/patches/concurrent_futures.py +19 -0
  28. sf_veritas/patches/constants.py +1 -0
  29. sf_veritas/patches/exceptions.py +82 -0
  30. sf_veritas/patches/multiprocessing.py +32 -0
  31. sf_veritas/patches/network_libraries/__init__.py +51 -0
  32. sf_veritas/patches/network_libraries/aiohttp.py +100 -0
  33. sf_veritas/patches/network_libraries/curl_cffi.py +93 -0
  34. sf_veritas/patches/network_libraries/http_client.py +64 -0
  35. sf_veritas/patches/network_libraries/httpcore.py +152 -0
  36. sf_veritas/patches/network_libraries/httplib2.py +76 -0
  37. sf_veritas/patches/network_libraries/httpx.py +123 -0
  38. sf_veritas/patches/network_libraries/niquests.py +192 -0
  39. sf_veritas/patches/network_libraries/pycurl.py +71 -0
  40. sf_veritas/patches/network_libraries/requests.py +187 -0
  41. sf_veritas/patches/network_libraries/tornado.py +139 -0
  42. sf_veritas/patches/network_libraries/treq.py +122 -0
  43. sf_veritas/patches/network_libraries/urllib_request.py +129 -0
  44. sf_veritas/patches/network_libraries/utils.py +101 -0
  45. sf_veritas/patches/os.py +17 -0
  46. sf_veritas/patches/threading.py +32 -0
  47. sf_veritas/patches/web_frameworks/__init__.py +45 -0
  48. sf_veritas/patches/web_frameworks/aiohttp.py +133 -0
  49. sf_veritas/patches/web_frameworks/async_websocket_consumer.py +132 -0
  50. sf_veritas/patches/web_frameworks/blacksheep.py +107 -0
  51. sf_veritas/patches/web_frameworks/bottle.py +142 -0
  52. sf_veritas/patches/web_frameworks/cherrypy.py +246 -0
  53. sf_veritas/patches/web_frameworks/django.py +307 -0
  54. sf_veritas/patches/web_frameworks/eve.py +138 -0
  55. sf_veritas/patches/web_frameworks/falcon.py +229 -0
  56. sf_veritas/patches/web_frameworks/fastapi.py +145 -0
  57. sf_veritas/patches/web_frameworks/flask.py +186 -0
  58. sf_veritas/patches/web_frameworks/klein.py +40 -0
  59. sf_veritas/patches/web_frameworks/litestar.py +217 -0
  60. sf_veritas/patches/web_frameworks/pyramid.py +89 -0
  61. sf_veritas/patches/web_frameworks/quart.py +155 -0
  62. sf_veritas/patches/web_frameworks/robyn.py +114 -0
  63. sf_veritas/patches/web_frameworks/sanic.py +120 -0
  64. sf_veritas/patches/web_frameworks/starlette.py +144 -0
  65. sf_veritas/patches/web_frameworks/strawberry.py +269 -0
  66. sf_veritas/patches/web_frameworks/tornado.py +129 -0
  67. sf_veritas/patches/web_frameworks/utils.py +55 -0
  68. sf_veritas/print_override.py +13 -0
  69. sf_veritas/regular_data_transmitter.py +358 -0
  70. sf_veritas/request_interceptor.py +399 -0
  71. sf_veritas/request_utils.py +104 -0
  72. sf_veritas/server_status.py +1 -0
  73. sf_veritas/shutdown_flag.py +11 -0
  74. sf_veritas/subprocess_startup.py +3 -0
  75. sf_veritas/test_cli.py +145 -0
  76. sf_veritas/thread_local.py +436 -0
  77. sf_veritas/timeutil.py +114 -0
  78. sf_veritas/transmit_exception_to_sailfish.py +28 -0
  79. sf_veritas/transmitter.py +58 -0
  80. sf_veritas/types.py +44 -0
  81. sf_veritas/unified_interceptor.py +323 -0
  82. sf_veritas/utils.py +39 -0
  83. sf_veritas-0.9.7.dist-info/METADATA +83 -0
  84. sf_veritas-0.9.7.dist-info/RECORD +86 -0
  85. sf_veritas-0.9.7.dist-info/WHEEL +4 -0
  86. sf_veritas-0.9.7.dist-info/entry_points.txt +3 -0

sf_veritas/patches/network_libraries/niquests.py
@@ -0,0 +1,192 @@
+ """
+ Monkey-patch Niquests so that every flavour of request
+ (sync / async, Session / AsyncSession, streaming or not)
+ propagates the SAILFISH_TRACING_HEADER *and* records the request.
+ """
+
+ from __future__ import annotations
+
+ import time
+ from typing import List, Optional, Tuple
+
+ from ...constants import SAILFISH_TRACING_HEADER
+ from ..constants import supported_network_verbs as verbs
+ from .utils import get_trace_and_should_propagate, record_network_request
+
+
+ def patch_niquests(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
+     try:
+         import niquests  # type: ignore
+     except ImportError:
+         return
+
+     skip = domains_to_not_propagate_headers_to or []
+
+     # --------------------------------------------------------------- helpers
+     SessionCls = niquests.Session
+     # Create the fallback sentinel once: type(...) builds a *new* class on
+     # every call, so comparing a second type(...) result with `is` would
+     # never match.
+     _Dummy = type("_Dummy", (), {})
+     AsyncSessionCls = getattr(niquests, "AsyncSession", _Dummy)
+
+     def _is_session_self(args: Tuple) -> bool:
+         """True when args[0] is a Session/AsyncSession instance."""
+         return bool(args) and isinstance(args[0], (SessionCls, AsyncSessionCls))
+
+     def _resolve_method_url(
+         verb_tag: str, args: Tuple, kwargs: dict
+     ) -> Tuple[str, str]:
+         """
+         Robust extraction of (method, url) for every call style.
+         Never raises IndexError; falls back to kwargs when positional
+         args are missing.
+         """
+         # ----- 1. REQUEST / AREQUEST (generic entry points) ----------------
+         if verb_tag in ("REQUEST", "AREQUEST"):
+             if _is_session_self(args):
+                 method = (
+                     str(args[1]).upper()
+                     if len(args) > 1
+                     else str(kwargs.get("method", "GET")).upper()
+                 )
+                 url = args[2] if len(args) > 2 else kwargs.get("url", "")
+             else:
+                 method = (
+                     str(args[0]).upper()
+                     if len(args) > 0
+                     else str(kwargs.get("method", "GET")).upper()
+                 )
+                 url = args[1] if len(args) > 1 else kwargs.get("url", "")
+
+         # ----- 2. Convenience verbs get/post/… and their async variants ----
+         else:
+             method = verb_tag.lstrip("A").upper()
+             if _is_session_self(args):
+                 url = args[1] if len(args) > 1 else kwargs.get("url", "")
+             else:
+                 url = args[0] if len(args) > 0 else kwargs.get("url", "")
+
+         return method, url
+
+     def _prepare(url: str):
+         """Return (trace_id, allow_propagation, start_ms)."""
+         trace_id, allow = get_trace_and_should_propagate(url, skip)
+         return trace_id, allow, int(time.time() * 1_000)
+
+     def _record(trace_id, url, method, status, success, err, start_ms):
+         record_network_request(
+             trace_id,
+             url,
+             method,
+             status or 0,
+             success,
+             err,
+             timestamp_start=start_ms,
+             timestamp_end=int(time.time() * 1_000),
+         )
+
+     # ---------------------------------------------------------- header merge
+     def _headers_pos_index(args: Tuple) -> Optional[int]:
+         """Return the positional index that already holds a headers dict, or None."""
+         if not args:
+             return None
+         if isinstance(args[0], (SessionCls, AsyncSessionCls)):
+             return 3 if len(args) > 3 else None
+         return 2 if len(args) > 2 else None
+
+     def _inject_header(args: Tuple, kwargs: dict, key: str, val: str) -> Tuple:
+         idx = _headers_pos_index(args)
+         if idx is not None:
+             merged = dict(args[idx] or {})
+             merged[key] = val
+             args = (*args[:idx], merged, *args[idx + 1 :])
+         else:
+             hdrs = dict(kwargs.get("headers") or {})
+             hdrs[key] = val
+             kwargs["headers"] = hdrs
+         return args, kwargs
+
+     # ------------------------------------------------------------- wrappers
+     def _wrap_sync(fn, verb_tag: str):
+         def wrapper(*args, **kwargs):
+             method, url = _resolve_method_url(verb_tag, args, kwargs)
+             trace_id, allow, t0 = _prepare(url)
+             if allow:  # honour the skip list before injecting the header
+                 args, kwargs = _inject_header(
+                     args, kwargs, SAILFISH_TRACING_HEADER, trace_id
+                 )
+
+             status = 0
+             success = False
+             err = None
+             try:
+                 resp = fn(*args, **kwargs)
+                 status = getattr(resp, "status_code", 0)
+                 success = True
+                 return resp
+             except Exception as exc:  # noqa: BLE001
+                 err = str(exc)[:255]
+                 raise
+             finally:
+                 _record(trace_id, url, method, status, success, err, t0)
+
+         return wrapper
+
+     def _wrap_async(fn, verb_tag: str):
+         async def wrapper(*args, **kwargs):
+             method, url = _resolve_method_url(verb_tag, args, kwargs)
+             trace_id, allow, t0 = _prepare(url)
+             if allow:  # honour the skip list before injecting the header
+                 args, kwargs = _inject_header(
+                     args, kwargs, SAILFISH_TRACING_HEADER, trace_id
+                 )
+
+             status = 0
+             success = False
+             err = None
+             try:
+                 resp = await fn(*args, **kwargs)
+                 status = getattr(resp, "status_code", 0)
+                 success = True
+                 return resp
+             except Exception as exc:  # noqa: BLE001
+                 err = str(exc)[:255]
+                 raise
+             finally:
+                 _record(trace_id, url, method, status, success, err, t0)
+
+         return wrapper
+
+     # -------------------------------------------------------- apply patches
+     niquests.request = _wrap_sync(niquests.request, "REQUEST")
+     for v in verbs:
+         setattr(niquests, v, _wrap_sync(getattr(niquests, v), v.upper()))
+
+     SessionCls.request = _wrap_sync(SessionCls.request, "REQUEST")
+     for v in verbs:
+         setattr(SessionCls, v, _wrap_sync(getattr(SessionCls, v), v.upper()))
+
+     if hasattr(niquests, "arequest"):
+         niquests.arequest = _wrap_async(niquests.arequest, "AREQUEST")
+         for av in verbs:
+             async_name = f"a{av}"
+             setattr(
+                 niquests,
+                 async_name,
+                 _wrap_async(getattr(niquests, async_name), async_name.upper()),
+             )
+
+     if hasattr(SessionCls, "arequest"):
+         SessionCls.arequest = _wrap_async(SessionCls.arequest, "AREQUEST")
+         for av in verbs:
+             async_name = f"a{av}"
+             setattr(
+                 SessionCls,
+                 async_name,
+                 _wrap_async(getattr(SessionCls, async_name), async_name.upper()),
+             )
+
+     if AsyncSessionCls is not _Dummy:
+         AsyncSessionCls.request = _wrap_async(AsyncSessionCls.request, "REQUEST")
+         for v in verbs:
+             setattr(
+                 AsyncSessionCls, v, _wrap_async(getattr(AsyncSessionCls, v), v.upper())
+             )
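
Taken together, the wrappers mean activation is a single call, after which every niquests call style carries the header and is recorded. A minimal usage sketch, assuming the module path shown in the file list above; the URLs and the skip-list entry are placeholders:

    from sf_veritas.patches.network_libraries.niquests import patch_niquests

    # Wraps module-level verbs, Session verbs, and (when present) the
    # async variants in one call.
    patch_niquests(domains_to_not_propagate_headers_to=["internal.example.com"])

    import niquests

    niquests.get("https://api.example.com/items")      # header injected + recorded
    with niquests.Session() as s:
        s.post("https://internal.example.com/ingest")  # recorded, header withheld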

sf_veritas/patches/network_libraries/pycurl.py
@@ -0,0 +1,71 @@
+ from __future__ import annotations
+
+ import time
+ from typing import List, Optional
+
+ from ...constants import SAILFISH_TRACING_HEADER
+ from .utils import get_trace_and_should_propagate, record_network_request
+
+
+ def patch_pycurl(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
+     try:
+         import pycurl
+     except ImportError:
+         return
+
+     _OrigCurl = pycurl.Curl
+
+     class WrappedCurl(_OrigCurl):  # subclass the real libcurl handle
+         def __init__(self, *args, **kwargs):
+             super().__init__(*args, **kwargs)
+             self._sf_url: str | None = None
+             self._sf_method: str | None = None
+             self._sf_headers: list[str] = []
+
+         # --- intercept option setting -----------------------------------
+         def setopt(self, opt, val):
+             if opt == pycurl.URL:
+                 self._sf_url = val
+             elif opt == pycurl.CUSTOMREQUEST:
+                 self._sf_method = val.upper()
+             elif opt == pycurl.HTTPHEADER:
+                 self._sf_headers = list(val)
+             return super().setopt(opt, val)
+
+         # --- wrapped perform() -------------------------------------------
+         def perform(self):
+             url = self._sf_url or ""
+             method = (self._sf_method or "GET").upper()
+
+             trace_id, allow = get_trace_and_should_propagate(
+                 url, domains_to_not_propagate_headers_to or []
+             )
+
+             # Build the merged header list
+             merged = list(self._sf_headers)
+             if allow:
+                 merged.append(f"{SAILFISH_TRACING_HEADER}: {trace_id}")
+
+             # Let libcurl negotiate & decode encodings for us
+             super().setopt(pycurl.ACCEPT_ENCODING, "")
+
+             # Push the merged headers down. super().setopt() bypasses the
+             # override above, so _sf_headers stays untouched and the trace
+             # header is not appended twice on repeated perform() calls.
+             # NOTE: HTTPHEADER expects List[str] (or List[bytes]); keep it consistent.
+             super().setopt(pycurl.HTTPHEADER, merged)
+
+             # timing / status / error capture
+             ts0 = int(time.time() * 1_000)
+             status = 0
+             err: str | None = None
+             try:
+                 rv = super().perform()
+                 status = int(self.getinfo(pycurl.RESPONSE_CODE) or 0)
+                 return rv
+             except Exception as e:
+                 err = str(e)[:255]
+                 raise
+             finally:
+                 ts1 = int(time.time() * 1_000)
+                 record_network_request(
+                     trace_id, url, method, status, err is None, err, ts0, ts1
+                 )
+
+     pycurl.Curl = WrappedCurl
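
Because the patch replaces the class object itself, any `pycurl.Curl()` created after `patch_pycurl()` runs is transparently the wrapped subclass. A short sketch of the resulting flow; the URL is a placeholder:

    from io import BytesIO

    from sf_veritas.patches.network_libraries.pycurl import patch_pycurl

    patch_pycurl()        # pycurl.Curl now points at WrappedCurl

    import pycurl

    buf = BytesIO()
    c = pycurl.Curl()     # instantiates the wrapped handle
    c.setopt(pycurl.URL, "https://api.example.com/items")  # captured by setopt()
    c.setopt(pycurl.WRITEDATA, buf)
    c.perform()           # merges the trace header, times and records the request
    c.close()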

sf_veritas/patches/network_libraries/requests.py
@@ -0,0 +1,187 @@
+ """
+ Monkey-patch the `requests` stack (requests → urllib3 → http.client):
+
+ • For every outbound request, propagate the SAILFISH_TRACING_HEADER
+   unless the destination host is in `domains_to_not_propagate_headers_to`.
+ • Fire NetworkRequestTransmitter via utils.record_network_request
+   so we always capture (url, status, timings, success, error).
+ """
+
+ from __future__ import annotations
+
+ import http.client
+ import time
+ from typing import Dict, List, Optional, Tuple
+
+ import requests
+ import urllib3
+ from requests.sessions import Session
+
+ from ...constants import SAILFISH_TRACING_HEADER
+ from ...thread_local import (
+     activate_reentrancy_guards_exception,
+     activate_reentrancy_guards_logging,
+     activate_reentrancy_guards_print,
+ )
+ from .utils import get_trace_and_should_propagate, record_network_request
+
+ ###############################################################################
+ # Internal helpers
+ ###############################################################################
+
+ # Header names used for the re-entrancy guards
+ REENTRANCY_GUARD_LOGGING_PREACTIVE = "reentrancy_guard_logging_preactive"
+ REENTRANCY_GUARD_PRINT_PREACTIVE = "reentrancy_guard_print_preactive"
+ REENTRANCY_GUARD_EXCEPTIONS_PREACTIVE = "reentrancy_guard_exception_preactive"
+
+
+ def _activate_rg(headers: Dict[str, str]) -> None:
+     """Turn the three 'preactive' guard flags ON for downstream hops."""
+     headers[REENTRANCY_GUARD_LOGGING_PREACTIVE] = "true"
+     headers[REENTRANCY_GUARD_PRINT_PREACTIVE] = "true"
+     headers[REENTRANCY_GUARD_EXCEPTIONS_PREACTIVE] = "true"
+
+
+ def _check_rg(headers: Dict[str, str]) -> None:
+     """If any pre-active guard flag is present, switch the corresponding guard on."""
+     if headers.get(REENTRANCY_GUARD_LOGGING_PREACTIVE, "false").lower() == "true":
+         activate_reentrancy_guards_logging()
+     if headers.get(REENTRANCY_GUARD_PRINT_PREACTIVE, "false").lower() == "true":
+         activate_reentrancy_guards_print()
+     if headers.get(REENTRANCY_GUARD_EXCEPTIONS_PREACTIVE, "false").lower() == "true":
+         activate_reentrancy_guards_exception()
+
+
+ def _prepare(
+     url: str,
+     domains_to_skip: List[str],
+     headers: Optional[Dict[str, str]],
+ ) -> Tuple[str, Dict[str, str], int]:
+     """
+     Inject the trace header (unless excluded) and return:
+         trace_id, merged_headers, timestamp_ms
+     """
+     trace_id, propagate = get_trace_and_should_propagate(url, domains_to_skip)
+     hdrs: Dict[str, str] = dict(headers or {})
+     _check_rg(hdrs)
+     if propagate:
+         hdrs[SAILFISH_TRACING_HEADER] = trace_id
+         _activate_rg(hdrs)
+     return trace_id, hdrs, int(time.time() * 1_000)
+
+
+ ###############################################################################
+ # Top-level patch function
+ ###############################################################################
+ def patch_requests(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
+     """Apply all monkey-patches. Safe to call multiple times."""
+     # Idempotency guard: without it a second call would wrap the wrappers
+     # and record every request more than once.
+     if getattr(Session.request, "_sf_patched", False):
+         return
+
+     exclude = domains_to_not_propagate_headers_to or []
+
+     # --------------------------------------------------------------------- #
+     # 1. Patch `requests.Session.request`
+     # --------------------------------------------------------------------- #
+     original_request = Session.request
+
+     def patched_request(self: Session, method, url, **kwargs):  # type: ignore[override]
+         # --- header handling / injection --------------------------------- #
+         trace_id, hdrs, t0 = _prepare(url, exclude, kwargs.pop("headers", {}))
+         kwargs["headers"] = hdrs
+
+         status: int = 0
+         success: bool = False
+         err: str | None = None
+         try:
+             resp = original_request(self, method, url, **kwargs)
+             status = resp.status_code
+             success = resp.ok
+             return resp
+         except Exception as exc:  # noqa: BLE001
+             err = str(exc)[:255]
+             raise
+         finally:
+             record_network_request(
+                 trace_id,
+                 url,
+                 str(method).upper(),
+                 status,
+                 success,
+                 err,
+                 timestamp_start=t0,
+                 timestamp_end=int(time.time() * 1_000),
+             )
+
+     patched_request._sf_patched = True  # type: ignore[attr-defined]
+     # `requests.Session` *is* `requests.sessions.Session`, so one assignment
+     # also covers direct `requests.Session(...)` usage.
+     Session.request = patched_request
+
+     # --------------------------------------------------------------------- #
+     # 2. Patch urllib3's low-level ConnectionPool.urlopen (used by requests)
+     # --------------------------------------------------------------------- #
+     original_urlopen = urllib3.connectionpool.HTTPConnectionPool.urlopen
+
+     def patched_urlopen(self, method, url, body=None, headers=None, **kw):  # type: ignore[override]
+         trace_id, hdrs, t0 = _prepare(url, exclude, headers)
+         status: int = 0
+         success: bool = False
+         err: str | None = None
+         try:
+             resp = original_urlopen(self, method, url, body=body, headers=hdrs, **kw)
+             status = getattr(resp, "status", 0)
+             success = bool(status) and status < 400
+             return resp
+         except Exception as exc:  # noqa: BLE001
+             err = str(exc)[:255]
+             raise
+         finally:
+             record_network_request(
+                 trace_id,
+                 url,
+                 str(method).upper(),
+                 status,
+                 success,
+                 err,
+                 timestamp_start=t0,
+                 timestamp_end=int(time.time() * 1_000),
+             )
+
+     urllib3.connectionpool.HTTPConnectionPool.urlopen = patched_urlopen
+
+     # --------------------------------------------------------------------- #
+     # 3. Patch http.client for "raw" stdlib usage (rare but easy to support)
+     # --------------------------------------------------------------------- #
+     original_http_client_request = http.client.HTTPConnection.request
+
+     def patched_http_request(self, method, url, body=None, headers=None, *, encode_chunked=False):  # type: ignore[override]
+         trace_id, hdrs, t0 = _prepare(url, exclude, headers)
+         status: int = 0
+         success: bool = False
+         err: str | None = None
+         try:
+             resp = original_http_client_request(
+                 self,
+                 method,
+                 url,
+                 body=body,
+                 headers=hdrs,
+                 encode_chunked=encode_chunked,
+             )
+             # request() returns None and the status code is unknown until
+             # getresponse() runs, so this stays best-effort.
+             status = getattr(getattr(self, "response", None), "status", 0) or 0
+             success = bool(status) and status < 400
+             return resp
+         except Exception as exc:  # noqa: BLE001
+             err = str(exc)[:255]
+             raise
+         finally:
+             record_network_request(
+                 trace_id,
+                 url,
+                 str(method).upper(),
+                 status,
+                 success,
+                 err,
+                 timestamp_start=t0,
+                 timestamp_end=int(time.time() * 1_000),
+             )
+
+     http.client.HTTPConnection.request = patched_http_request
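
Note that the three layers stack: a plain `requests.get()` travels through `Session.request`, urllib3's `urlopen`, and `http.client.HTTPConnection.request`, and each patched hop calls `_prepare` on its own. A minimal sketch of intended use; the host names are placeholders:

    from sf_veritas.patches.network_libraries.requests import patch_requests

    patch_requests(domains_to_not_propagate_headers_to=["third-party.example.com"])

    import requests

    # Propagating hop: trace header plus the three *_preactive guard flags go out.
    requests.get("https://service.example.com/health")

    # Excluded hop: headers are left alone, but the request is still recorded.
    requests.get("https://third-party.example.com/metrics")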

sf_veritas/patches/network_libraries/tornado.py
@@ -0,0 +1,139 @@
+ """
+ Monkey-patch Tornado's HTTP clients so that
+
+ • Every outbound request carries SAILFISH_TRACING_HEADER
+   (unless the destination host is excluded).
+ • Every request – success or failure – triggers record_network_request(…).
+
+ Covers
+ • tornado.httpclient.AsyncHTTPClient.fetch (await-able)
+ • tornado.httpclient.HTTPClient.fetch (blocking/sync)
+ Safe to call repeatedly; patches only once per process.
+ """
+
+ from __future__ import annotations
+
+ import time
+ from typing import List, Optional, Tuple
+
+ from ...constants import SAILFISH_TRACING_HEADER
+ from .utils import get_trace_and_should_propagate, record_network_request
+
+
+ def patch_tornado(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
+     try:
+         # Tornado is optional; exit silently if missing
+         from tornado.httpclient import AsyncHTTPClient, HTTPClient, HTTPRequest
+     except ImportError:
+         return
+
+     # Idempotency guard, so the "patches only once per process" promise in
+     # the docstring holds even when this function is called repeatedly.
+     if getattr(AsyncHTTPClient.fetch, "_sf_patched", False):
+         return
+
+     exclude: List[str] = domains_to_not_propagate_headers_to or []
+
+     # ------------------------------------------------------------------ #
+     # Helpers shared by sync & async wrappers
+     # ------------------------------------------------------------------ #
+     def _resolve(
+         req_or_url, kwargs
+     ) -> Tuple[str, str, dict]:  # → (url, METHOD, headers_dict)
+         """
+         Handle both call styles:
+
+             client.fetch("https://foo", method="POST", headers={...})
+             client.fetch(HTTPRequest(...))
+
+         Always returns a mutable *headers* dict.
+         """
+         if isinstance(req_or_url, HTTPRequest):
+             url = req_or_url.url
+             method = (req_or_url.method or "GET").upper()
+             hdrs = dict(req_or_url.headers or {})
+         else:
+             url = str(req_or_url)
+             method = kwargs.get("method", "GET").upper()
+             hdrs = dict(kwargs.get("headers", {}) or {})
+         return url, method, hdrs
+
+     def _inject(
+         req_or_url, kwargs, hdrs: dict
+     ):  # mutate the request object *or* kwargs to carry hdrs
+         if isinstance(req_or_url, HTTPRequest):
+             req_or_url.headers = hdrs
+         else:
+             kwargs["headers"] = hdrs
+         return req_or_url, kwargs
+
+     def _prepare(url: str, hdrs: dict):
+         """Return (trace_id, merged_headers, start_ms)."""
+         trace_id, allow = get_trace_and_should_propagate(url, exclude)
+         out = dict(hdrs)
+         if allow:
+             out[SAILFISH_TRACING_HEADER] = trace_id
+         return trace_id, out, int(time.time() * 1_000)
+
+     # ------------------------------------------------------------------ #
+     # AsyncHTTPClient.fetch wrapper
+     # ------------------------------------------------------------------ #
+     original_async_fetch = AsyncHTTPClient.fetch
+
+     async def patched_async_fetch(self, req_or_url, *args, **kwargs):
+         url, method, hdrs_cur = _resolve(req_or_url, kwargs)
+         trace_id, hdrs_new, t0 = _prepare(url, hdrs_cur)
+         req_or_url, kwargs = _inject(req_or_url, kwargs, hdrs_new)
+
+         status, success, err = 0, False, None
+         try:
+             resp = await original_async_fetch(self, req_or_url, *args, **kwargs)
+             status = getattr(resp, "code", 0)
+             success = status < 400
+             return resp
+         except Exception as exc:  # noqa: BLE001
+             err = str(exc)[:255]
+             raise
+         finally:
+             record_network_request(
+                 trace_id,
+                 url,
+                 method,
+                 status,
+                 success,
+                 err,
+                 timestamp_start=t0,
+                 timestamp_end=int(time.time() * 1_000),
+             )
+
+     patched_async_fetch._sf_patched = True  # type: ignore[attr-defined]
+     AsyncHTTPClient.fetch = patched_async_fetch  # type: ignore[assignment]
+
+     # ------------------------------------------------------------------ #
+     # HTTPClient.fetch wrapper (blocking)
+     # ------------------------------------------------------------------ #
+     original_sync_fetch = HTTPClient.fetch
+
+     def patched_sync_fetch(self, req_or_url, *args, **kwargs):
+         url, method, hdrs_cur = _resolve(req_or_url, kwargs)
+         trace_id, hdrs_new, t0 = _prepare(url, hdrs_cur)
+         req_or_url, kwargs = _inject(req_or_url, kwargs, hdrs_new)
+
+         status, success, err = 0, False, None
+         try:
+             resp = original_sync_fetch(self, req_or_url, *args, **kwargs)
+             status = getattr(resp, "code", 0)
+             success = status < 400
+             return resp
+         except Exception as exc:  # noqa: BLE001
+             err = str(exc)[:255]
+             raise
+         finally:
+             record_network_request(
+                 trace_id,
+                 url,
+                 method,
+                 status,
+                 success,
+                 err,
+                 timestamp_start=t0,
+                 timestamp_end=int(time.time() * 1_000),
+             )
+
+     patched_sync_fetch._sf_patched = True  # type: ignore[attr-defined]
+     HTTPClient.fetch = patched_sync_fetch  # type: ignore[assignment]
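
Both call styles funnel into the same helpers, so passing a URL string or a prebuilt `HTTPRequest` behaves identically. A brief sketch; the URL is a placeholder:

    import asyncio

    from tornado.httpclient import AsyncHTTPClient, HTTPRequest

    from sf_veritas.patches.network_libraries.tornado import patch_tornado

    patch_tornado()

    async def main():
        client = AsyncHTTPClient()
        # String style: the merged headers travel via kwargs.
        await client.fetch("https://api.example.com/items", method="GET")
        # HTTPRequest style: _inject() replaces the request object's headers.
        await client.fetch(HTTPRequest("https://api.example.com/items", method="GET"))

    asyncio.run(main())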