sf-veritas 0.9.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sf-veritas might be problematic; details are noted below.

Files changed (86)
  1. sf_veritas/.gitignore +2 -0
  2. sf_veritas/__init__.py +4 -0
  3. sf_veritas/app_config.py +49 -0
  4. sf_veritas/cli.py +336 -0
  5. sf_veritas/constants.py +3 -0
  6. sf_veritas/custom_excepthook.py +285 -0
  7. sf_veritas/custom_log_handler.py +53 -0
  8. sf_veritas/custom_output_wrapper.py +107 -0
  9. sf_veritas/custom_print.py +34 -0
  10. sf_veritas/django_app.py +5 -0
  11. sf_veritas/env_vars.py +83 -0
  12. sf_veritas/exception_handling_middleware.py +18 -0
  13. sf_veritas/exception_metaclass.py +69 -0
  14. sf_veritas/frame_tools.py +112 -0
  15. sf_veritas/import_hook.py +62 -0
  16. sf_veritas/infra_details/__init__.py +3 -0
  17. sf_veritas/infra_details/get_infra_details.py +24 -0
  18. sf_veritas/infra_details/kubernetes/__init__.py +3 -0
  19. sf_veritas/infra_details/kubernetes/get_cluster_name.py +147 -0
  20. sf_veritas/infra_details/kubernetes/get_details.py +7 -0
  21. sf_veritas/infra_details/running_on/__init__.py +17 -0
  22. sf_veritas/infra_details/running_on/kubernetes.py +11 -0
  23. sf_veritas/interceptors.py +252 -0
  24. sf_veritas/local_env_detect.py +118 -0
  25. sf_veritas/package_metadata.py +6 -0
  26. sf_veritas/patches/__init__.py +0 -0
  27. sf_veritas/patches/concurrent_futures.py +19 -0
  28. sf_veritas/patches/constants.py +1 -0
  29. sf_veritas/patches/exceptions.py +82 -0
  30. sf_veritas/patches/multiprocessing.py +32 -0
  31. sf_veritas/patches/network_libraries/__init__.py +51 -0
  32. sf_veritas/patches/network_libraries/aiohttp.py +100 -0
  33. sf_veritas/patches/network_libraries/curl_cffi.py +93 -0
  34. sf_veritas/patches/network_libraries/http_client.py +64 -0
  35. sf_veritas/patches/network_libraries/httpcore.py +152 -0
  36. sf_veritas/patches/network_libraries/httplib2.py +76 -0
  37. sf_veritas/patches/network_libraries/httpx.py +123 -0
  38. sf_veritas/patches/network_libraries/niquests.py +192 -0
  39. sf_veritas/patches/network_libraries/pycurl.py +71 -0
  40. sf_veritas/patches/network_libraries/requests.py +187 -0
  41. sf_veritas/patches/network_libraries/tornado.py +139 -0
  42. sf_veritas/patches/network_libraries/treq.py +122 -0
  43. sf_veritas/patches/network_libraries/urllib_request.py +129 -0
  44. sf_veritas/patches/network_libraries/utils.py +101 -0
  45. sf_veritas/patches/os.py +17 -0
  46. sf_veritas/patches/threading.py +32 -0
  47. sf_veritas/patches/web_frameworks/__init__.py +45 -0
  48. sf_veritas/patches/web_frameworks/aiohttp.py +133 -0
  49. sf_veritas/patches/web_frameworks/async_websocket_consumer.py +132 -0
  50. sf_veritas/patches/web_frameworks/blacksheep.py +107 -0
  51. sf_veritas/patches/web_frameworks/bottle.py +142 -0
  52. sf_veritas/patches/web_frameworks/cherrypy.py +246 -0
  53. sf_veritas/patches/web_frameworks/django.py +307 -0
  54. sf_veritas/patches/web_frameworks/eve.py +138 -0
  55. sf_veritas/patches/web_frameworks/falcon.py +229 -0
  56. sf_veritas/patches/web_frameworks/fastapi.py +145 -0
  57. sf_veritas/patches/web_frameworks/flask.py +186 -0
  58. sf_veritas/patches/web_frameworks/klein.py +40 -0
  59. sf_veritas/patches/web_frameworks/litestar.py +217 -0
  60. sf_veritas/patches/web_frameworks/pyramid.py +89 -0
  61. sf_veritas/patches/web_frameworks/quart.py +155 -0
  62. sf_veritas/patches/web_frameworks/robyn.py +114 -0
  63. sf_veritas/patches/web_frameworks/sanic.py +120 -0
  64. sf_veritas/patches/web_frameworks/starlette.py +144 -0
  65. sf_veritas/patches/web_frameworks/strawberry.py +269 -0
  66. sf_veritas/patches/web_frameworks/tornado.py +129 -0
  67. sf_veritas/patches/web_frameworks/utils.py +55 -0
  68. sf_veritas/print_override.py +13 -0
  69. sf_veritas/regular_data_transmitter.py +358 -0
  70. sf_veritas/request_interceptor.py +399 -0
  71. sf_veritas/request_utils.py +104 -0
  72. sf_veritas/server_status.py +1 -0
  73. sf_veritas/shutdown_flag.py +11 -0
  74. sf_veritas/subprocess_startup.py +3 -0
  75. sf_veritas/test_cli.py +145 -0
  76. sf_veritas/thread_local.py +436 -0
  77. sf_veritas/timeutil.py +114 -0
  78. sf_veritas/transmit_exception_to_sailfish.py +28 -0
  79. sf_veritas/transmitter.py +58 -0
  80. sf_veritas/types.py +44 -0
  81. sf_veritas/unified_interceptor.py +323 -0
  82. sf_veritas/utils.py +39 -0
  83. sf_veritas-0.9.7.dist-info/METADATA +83 -0
  84. sf_veritas-0.9.7.dist-info/RECORD +86 -0
  85. sf_veritas-0.9.7.dist-info/WHEEL +4 -0
  86. sf_veritas-0.9.7.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,122 @@
1
+ """
2
+ Header propagation + network-recording patch for **Treq**.
3
+
4
+ • Propagates SAILFISH_TRACING_HEADER (unless excluded destination).
5
+ • Records every outbound request via record_network_request(…).
6
+
7
+ It also guarantees that Twisted's reactor is *running*:
8
+
9
+ 1. Prefer installing the asyncio reactor early.
10
+ 2. If a different reactor is already installed, start it in a background thread
11
+ (if it isn't running yet), so Deferreds produced by treq will fire.
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import asyncio
17
+ import threading
18
+ import time
19
+ from typing import List, Optional
20
+
21
+ from ...constants import SAILFISH_TRACING_HEADER
22
+ from ..constants import supported_network_verbs as verbs
23
+ from .utils import get_trace_and_should_propagate, record_network_request
24
+
25
+
26
def _ensure_reactor_running() -> None:
    """
    Make sure a Twisted reactor is usable before treq is imported.

    • Try to replace Twisted's default reactor with the asyncio one.
    • If that fails because a reactor is already installed, make sure the
      existing reactor is *running* (start it in a daemon thread if needed).

    Returns nothing; any failure to upgrade the reactor is handled by
    falling back to the currently-installed reactor.
    """
    # Twisted import must be inside this function to avoid premature reactor load
    from twisted.internet import reactor

    try:
        from twisted.internet import asyncioreactor

        # Already an asyncio reactor? -> nothing to do
        if reactor.__class__.__module__ == "twisted.internet.asyncioreactor":
            return

        # asyncio.get_event_loop() without a running loop is deprecated on
        # 3.10+ and raises on newer interpreters when called off the main
        # thread — create (and register) a fresh loop in that case.
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)

        # Try upgrade to asyncio-reactor (will raise if another reactor in use)
        asyncioreactor.install(loop)  # type: ignore[arg-type]
        return
    except Exception:
        # Could not swap reactors (already installed). Make sure current one runs.
        if not reactor.running:
            # installSignalHandlers=False: signal handlers may only be
            # installed from the main thread.
            threading.Thread(
                target=reactor.run,
                kwargs={"installSignalHandlers": False},
                daemon=True,
            ).start()
53
+
54
+
55
def patch_treq(domains_to_not_propagate_headers_to: Optional[List[str]] = None):
    """
    Monkey-patch ``treq.request`` (and the convenience verb helpers) so that:

    • the SAILFISH_TRACING_HEADER is injected on every outbound request,
      unless the destination domain is in *domains_to_not_propagate_headers_to*;
    • every request is recorded via ``record_network_request`` on both the
      success and the failure path of the returned Deferred.

    No-op if treq is not installed.
    """
    try:
        # Ensure a live reactor *before* importing treq
        _ensure_reactor_running()

        import treq
    except ImportError:
        return  # treq is not installed; nothing to patch

    exclude = domains_to_not_propagate_headers_to or []
    orig_request = treq.request

    # ------------------------------------------------------------------ #
    def patched_request(method: str, url: str, **kwargs):
        # -------- header propagation
        # NOTE(review): treq also accepts a twisted Headers object here;
        # dict(...) over one may not yield name→value pairs — confirm callers
        # only pass plain mappings.
        hdrs = dict(kwargs.pop("headers", {}) or {})
        trace_id, allow = get_trace_and_should_propagate(url, exclude)
        if allow:
            hdrs[SAILFISH_TRACING_HEADER] = trace_id
        kwargs["headers"] = hdrs

        # Wall-clock start in milliseconds; end is sampled in the callbacks.
        t0 = int(time.time() * 1_000)
        d = orig_request(method, url, **kwargs)  # Deferred

        # -------- record on success
        def _ok(resp):
            # treq responses expose the HTTP status as `.code`.
            status = getattr(resp, "code", 0)
            record_network_request(
                trace_id,
                url,
                method.upper(),
                status,
                status < 400,
                None,
                timestamp_start=t0,
                timestamp_end=int(time.time() * 1_000),
            )
            return resp

        # -------- record on failure
        def _err(f):
            # `f` is a twisted Failure; record and pass it through unchanged
            # so downstream errbacks still fire.
            record_network_request(
                trace_id,
                url,
                method.upper(),
                0,
                False,
                str(f.value)[:255],
                timestamp_start=t0,
                timestamp_end=int(time.time() * 1_000),
            )
            return f

        d.addCallbacks(_ok, _err)
        return d

    treq.request = patched_request  # type: ignore[assignment]

    # Convenience verbs → reuse patched_request
    # NOTE(review): the replacement verbs accept only keyword args, but e.g.
    # treq.post(url, data) passes `data` positionally in the real API — such
    # calls would raise TypeError after patching. Verify against treq's
    # signatures.
    def _verb_factory(v: str):
        def _verb(url, **k):
            return treq.request(v.upper(), url, **k)

        _verb.__name__ = v
        return _verb

    for verb in verbs:
        setattr(treq, verb, _verb_factory(verb))
@@ -0,0 +1,129 @@
1
+ """
2
+ Instrument urllib.request so that
3
+
4
+ • Every call to urlopen() or OpenerDirector.open() propagates
5
+ SAILFISH_TRACING_HEADER (unless destination host is excluded).
6
+ • Every call triggers record_network_request(…).
7
+
8
+ The patch is safe to import multiple times.
9
+ """
10
+
11
+ from __future__ import annotations
12
+
13
+ import time
14
+ from typing import List, Optional
15
+
16
+ from ...constants import SAILFISH_TRACING_HEADER
17
+ from .utils import get_trace_and_should_propagate, record_network_request
18
+
19
+
20
def patch_urllib_request(
    domains_to_not_propagate_headers_to: Optional[List[str]] = None,
) -> None:
    """
    Instrument ``urllib.request`` so that

    • every call to ``urlopen()`` or ``OpenerDirector.open()`` propagates
      SAILFISH_TRACING_HEADER (unless the destination host is excluded);
    • every call triggers ``record_network_request(...)``.

    The patch is safe to import multiple times.
    """
    try:
        import urllib.error
        import urllib.request as _ur
    except ImportError:  # extremely unlikely
        return

    exclude: List[str] = domains_to_not_propagate_headers_to or []
    _orig_urlopen = _ur.urlopen
    _orig_opener_open = _ur.OpenerDirector.open  # type: ignore[attr-defined]

    # ------------------------------------------------------------------ #
    # Helper shared by urlopen / OpenerDirector.open
    # ------------------------------------------------------------------ #
    def _inject_and_record(
        opener_call,  # either _orig_urlopen or _orig_opener_open(self, ...)
        req_or_url,
        data,
        timeout,
        *args,
        **kwargs,
    ):
        # 1. Build a Request object (strings become a fresh Request carrying
        #    `data`; an existing Request is used as-is).
        if isinstance(req_or_url, _ur.Request):
            req = req_or_url  # already a Request
        else:
            req = _ur.Request(req_or_url, data=data)

        # Method (GET/POST/…) is resolved only *after* data & method props
        method = req.get_method()

        # 2. Header propagation decision
        trace_id, allow = get_trace_and_should_propagate(req.full_url, exclude)
        if allow:
            req.add_header(SAILFISH_TRACING_HEADER, trace_id)

        # 3. Perform the real I/O (timestamps are wall-clock milliseconds)
        t0 = int(time.time() * 1_000)
        try:
            resp = opener_call(req, *args, timeout=timeout, **kwargs)
            # http.client responses expose `.status`; older file-like results
            # may only have `.getcode()`.
            status = (
                getattr(resp, "status", None) or getattr(resp, "getcode", lambda: 0)()
            )
            success = status < 400
            record_network_request(
                trace_id,
                req.full_url,
                method,
                status,
                success,
                None,
                timestamp_start=t0,
                timestamp_end=int(time.time() * 1_000),
            )
            return resp

        except urllib.error.HTTPError as e:
            # HTTP-level failures carry a real status code.
            record_network_request(
                trace_id,
                req.full_url,
                method,
                e.code,
                False,
                str(e),
                timestamp_start=t0,
                timestamp_end=int(time.time() * 1_000),
            )
            raise

        except Exception as e:  # noqa: BLE001
            # Transport-level failures (DNS, refused connection, …): no code.
            record_network_request(
                trace_id,
                req.full_url,
                method,
                0,
                False,
                str(e)[:255],
                timestamp_start=t0,
                timestamp_end=int(time.time() * 1_000),
            )
            raise

    # ------------------------------------------------------------------ #
    # Module-level urlopen patch
    # ------------------------------------------------------------------ #
    def patched_urlopen(url, data=None, timeout=_ur.socket._GLOBAL_DEFAULT_TIMEOUT, *a, **kw):  # type: ignore
        return _inject_and_record(_orig_urlopen, url, data, timeout, *a, **kw)

    _ur.urlopen = patched_urlopen  # type: ignore[assignment]

    # ------------------------------------------------------------------ #
    # OpenerDirector.open patch (covers build_opener, install_opener, etc.)
    # ------------------------------------------------------------------ #
    # BUGFIX: the default must mirror the stdlib's
    # socket._GLOBAL_DEFAULT_TIMEOUT, not None — None means "block forever"
    # and would silently change the timeout semantics of every opener.
    def patched_opener_open(
        self, fullurl, data=None, timeout=_ur.socket._GLOBAL_DEFAULT_TIMEOUT, *a, **kw
    ):  # type: ignore[override]
        # self is the OpenerDirector instance; bind it (plus `data`, which the
        # stdlib applies when a ready-made Request is passed in) via a closure.
        return _inject_and_record(
            lambda req, timeout=None, *aa, **kk: _orig_opener_open(
                self, req, data=data, timeout=timeout, *aa, **kk
            ),
            fullurl,
            data,
            timeout,
            *a,
            **kw,
        )

    _ur.OpenerDirector.open = patched_opener_open  # type: ignore[assignment]
@@ -0,0 +1,101 @@
1
+ """
2
+ Shared helpers used by all network-patch modules.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ import time
8
+ from typing import List, Tuple
9
+ from urllib.parse import urlparse
10
+
11
+ from ...regular_data_transmitter import NetworkRequestTransmitter
12
+ from ...thread_local import get_or_set_sf_trace_id, is_network_recording_suppressed
13
+
14
+
15
+ ###############################################################################
16
+ # Domain-parsing utility (no external network / no tldextract needed)
17
+ ###############################################################################
18
def extract_domain(url: str) -> str:
    """
    Canonicalise *url* into a host name suitable for header-propagation
    checks.

    Uses only the standard library (no network access, no tldextract):
    sub-domains are preserved; a leading ``www.`` and any port number are
    removed; the result is lower-cased.

    Examples
    --------
    >>> extract_domain("https://www.example.com:443/path")
    'example.com'
    >>> extract_domain("https://api.foo.bar.example.co.uk/v1")
    'api.foo.bar.example.co.uk'
    """
    try:
        parsed_host = urlparse(url).hostname
    except Exception:
        parsed_host = None  # malformed URL → fall back to the raw string
    host = parsed_host or url
    if host.startswith("www."):
        host = host[len("www."):]
    return host.lower()
39
+
40
+
41
+ ###############################################################################
42
+ # Header-propagation + network-recording helpers
43
+ ###############################################################################
44
def get_trace_and_should_propagate(
    url: str,
    domains_to_not_propagate: List[str],
) -> Tuple[str, bool]:
    """
    Resolve the current Sailfish trace id and decide whether it may be
    forwarded to the destination *url*.

    Returns ``(trace_id, should_propagate)`` — propagation is allowed
    whenever the destination's canonical domain is absent from
    *domains_to_not_propagate*.
    """
    _, trace_id = get_or_set_sf_trace_id()
    should_send = extract_domain(url) not in domains_to_not_propagate
    return trace_id, should_send
55
+
56
+
57
def record_network_request(
    trace_id: str,
    url: str,
    method: str,
    status_code: int,
    success: bool,
    error: str | None = None,
    timestamp_start: int | None = None,
    timestamp_end: int | None = None,
) -> None:
    """
    Fire off a GraphQL NetworkRequest mutation via NetworkRequestTransmitter.
    Handles tripartite trace-ID splitting and default timestamps.

    Parameters
    ----------
    trace_id : "session/page_visit/request" style id; missing segments are
        sent as None.
    url / method : destination and HTTP verb (verb is upper-cased on send).
    status_code / success : response code and whether it counts as a success.
    error : recorded (truncated to 255 chars) only when success is False.
    timestamp_start / timestamp_end : epoch milliseconds; default to "now".
    """
    # Recording can be suppressed per-context (e.g. for internal traffic).
    if is_network_recording_suppressed():
        return

    session_id, page_visit_id, request_id = None, None, None
    # str.split always returns at least [""], so `if parts` is always true;
    # an empty trace_id therefore yields session_id == "".
    parts = trace_id.split("/")
    if parts:
        session_id = parts[0]
    if len(parts) > 1:
        page_visit_id = parts[1]
    if len(parts) > 2:
        request_id = parts[2]

    now_ms = lambda: int(time.time() * 1_000)  # noqa: E731
    # NOTE: `or` also replaces an explicit timestamp of 0 with "now".
    ts0 = timestamp_start or now_ms()
    ts1 = timestamp_end or now_ms()

    # Positional tuple — field order must match what NetworkRequestTransmitter
    # expects; do not reorder.
    NetworkRequestTransmitter().do_send(
        (
            request_id,
            page_visit_id,
            session_id,
            None,  # service_uuid (set by transmitter middleware)
            ts0,
            ts1,
            status_code,
            success,
            None if success else (error or "")[:255],
            url,
            method.upper(),
        )
    )
@@ -0,0 +1,17 @@
1
+ import os
2
+
3
+ from ..thread_local import get_context, set_context
4
+
5
+ _original_fork = os.fork
6
+
7
+
8
+ def patched_fork():
9
+ current_context = get_context()
10
+ pid = _original_fork()
11
+ if pid == 0: # Child process
12
+ set_context(current_context)
13
+ return pid
14
+
15
+
16
+ def patch_os():
17
+ os.fork = patched_fork
@@ -0,0 +1,32 @@
1
+ import threading
2
+
3
+ from ..thread_local import get_context, set_context
4
+
5
+ _original_thread_init = threading.Thread.__init__
6
+
7
+
8
def patched_thread_init(self, *args, **kwargs):
    """
    Replacement for ``threading.Thread.__init__`` that wraps the thread's
    target so the Sailfish context captured at construction time is restored
    inside the new thread before the real target runs.

    The external signature (``*args, **kwargs`` pass-through) is unchanged.
    """
    current_context = get_context()

    def _wrap(target):
        # One shared wrapper builder so keyword- and positional-target
        # branches behave identically.
        def wrapped_target(*targs, **tkwargs):
            set_context(current_context)
            return target(*targs, **tkwargs)

        return wrapped_target

    original_target = kwargs.get("target")
    if original_target:
        kwargs["target"] = _wrap(original_target)
    elif len(args) >= 2 and callable(args[1]):
        # BUGFIX: Thread.__init__(self, group, target, ...) — the target
        # passed positionally is args[1]; args[0] is `group` (always None per
        # the stdlib contract). The previous code only looked at args[0] and
        # therefore missed Thread(None, my_func).
        args = (args[0], _wrap(args[1])) + args[2:]
    elif args and callable(args[0]):
        # Defensive fallback preserved from the original patch for callers
        # that (incorrectly) pass the target as the first positional arg.
        args = (_wrap(args[0]),) + args[1:]

    _original_thread_init(self, *args, **kwargs)
29
+
30
+
31
def patch_threading():
    """Install the context-propagating ``Thread.__init__`` replacement."""
    setattr(threading.Thread, "__init__", patched_thread_init)
@@ -0,0 +1,45 @@
1
+ from .aiohttp import patch_aiohttp
2
+ from .async_websocket_consumer import patch_async_consumer_call
3
+ from .blacksheep import patch_blacksheep
4
+ from .bottle import patch_bottle
5
+ from .cherrypy import patch_cherrypy
6
+ from .django import find_and_modify_output_wrapper, patch_django_middleware
7
+ from .eve import patch_eve
8
+ from .falcon import patch_falcon
9
+ from .fastapi import patch_fastapi
10
+ from .flask import patch_flask
11
+ from .klein import patch_klein
12
+ from .litestar import patch_litestar
13
+ from .pyramid import patch_pyramid
14
+ from .quart import patch_quart
15
+ from .robyn import patch_robyn
16
+ from .sanic import patch_sanic
17
+ from .starlette import patch_starlette
18
+ from .strawberry import patch_strawberry_schema
19
+ from .tornado import patch_tornado
20
+
21
+
22
+ def patch_web_frameworks():
23
+ patch_strawberry_schema()
24
+ patch_async_consumer_call()
25
+ find_and_modify_output_wrapper()
26
+ patch_django_middleware()
27
+ patch_fastapi()
28
+ patch_flask()
29
+ patch_falcon()
30
+ patch_bottle()
31
+ patch_quart()
32
+ patch_tornado()
33
+ patch_aiohttp()
34
+ patch_blacksheep()
35
+ patch_cherrypy()
36
+ patch_pyramid()
37
+ patch_litestar()
38
+ patch_klein()
39
+ patch_eve()
40
+ patch_sanic()
41
+ patch_starlette()
42
+ patch_robyn()
43
+
44
+
45
+ __all__ = ["patch_web_frameworks"]
@@ -0,0 +1,133 @@
1
+ """
2
+ Context-propagation + user-code NetworkHop emission for every aiohttp
3
+ request, while skipping Strawberry GraphQL views.
4
+ """
5
+
6
+ from ...constants import SAILFISH_TRACING_HEADER
7
+ from ...custom_excepthook import custom_excepthook
8
+ from ...env_vars import SF_DEBUG
9
+ from ...regular_data_transmitter import NetworkHopsTransmitter
10
+ from ...thread_local import get_or_set_sf_trace_id
11
+
12
+ # ------------------------------------------------------------------ #
13
+ # shared helpers
14
+ # ------------------------------------------------------------------ #
15
+ from .utils import _is_user_code, _unwrap_user_func # cached
16
+
17
+
18
+ # ------------------------------------------------------------------ #
19
+ # monkey-patch
20
+ # ------------------------------------------------------------------ #
21
def patch_aiohttp():
    """
    • prepend a middleware that propagates SAILFISH_TRACING_HEADER _and_
      emits a single NetworkHop for user handlers;
    • patch Application.add_route(s) so every future handler
      goes through the wrapper (works for RouteTableDef too).
    Safe no-op if aiohttp isn't installed.
    """
    try:
        from aiohttp import web
    except ImportError:  # aiohttp missing
        return

    # ===========================================================
    # 1 | Middleware (1 ContextVar; 2 Hop emission)
    # ===========================================================
    @web.middleware
    async def _sf_tracing_middleware(request: web.Request, handler):
        """
        1 - Seed ContextVar from the inbound SAILFISH_TRACING_HEADER header.
        2 - Emit exactly one NetworkHop per user handler.
        3 - Capture *all* exceptions—including aiohttp.web.HTTPException—and
            route them through `custom_excepthook` before letting aiohttp
            continue its normal error handling.
        """
        # 1. Trace-id propagation
        incoming = request.headers.get(SAILFISH_TRACING_HEADER)
        if incoming:
            get_or_set_sf_trace_id(incoming, is_associated_with_inbound_request=True)

        # 2. Hop emission (same logic as before)
        # NOTE(review): real_fn.__module__ can be None for some callables,
        # which would make .startswith raise — confirm _unwrap_user_func
        # never returns such objects.
        real_fn = _unwrap_user_func(handler)
        if callable(real_fn) and not real_fn.__module__.startswith("strawberry"):
            code = getattr(real_fn, "__code__", None)
            if code and _is_user_code(code.co_filename):
                key = (code.co_filename, code.co_firstlineno)
                # aiohttp's Request is a MutableMapping, so per-request
                # storage via setdefault is supported.
                sent = request.setdefault("sf_hops_sent", set())
                if key not in sent:
                    _, session_id = get_or_set_sf_trace_id()
                    if SF_DEBUG:
                        # `log=False` relies on this package's custom print
                        # override; the builtin print has no such kwarg.
                        print(
                            f"[[AiohttpHop]] → {real_fn.__name__} "
                            f"({code.co_filename}:{code.co_firstlineno}) "
                            f"session={session_id}",
                            log=False,
                        )
                    NetworkHopsTransmitter().send(
                        session_id=session_id,
                        line=str(code.co_firstlineno),
                        column="0",
                        name=real_fn.__name__,
                        entrypoint=code.co_filename,
                    )
                    sent.add(key)

        # 3. Exception capture
        try:
            return await handler(request)
        except Exception as exc:  # ← captures *all* errors
            custom_excepthook(type(exc), exc, exc.__traceback__)
            raise  # re-raise for aiohttp

    # ===========================================================
    # 2 | Patch Application.__init__ to insert middleware
    # ===========================================================
    original_init = web.Application.__init__

    def patched_init(self, *args, middlewares=None, **kwargs):
        # Copy the caller's middleware list so we never mutate their object.
        mlist = list(middlewares or [])
        mlist.insert(0, _sf_tracing_middleware)  # prepend → runs first
        original_init(self, *args, middlewares=mlist, **kwargs)
        _patch_router(self.router)  # apply once per app

    web.Application.__init__ = patched_init

    # ===========================================================
    # 3 | Patch router.add_route / add_routes for future calls
    # ===========================================================
    def _patch_router(router):
        # Idempotence guard: a router is only instrumented once.
        if getattr(router, "_sf_tracing_patched", False):
            return  # already done

        orig_add_route = router.add_route
        orig_add_routes = router.add_routes

        def _wrap_and_add(method, path, handler, *a, **kw):  # noqa: ANN001
            return orig_add_route(method, path, _wrap_handler(handler), *a, **kw)

        def _wrap_handler(h):
            # strawberry skip & user-code check happen in middleware,
            # but wrapping here avoids duplicate stack frames
            return _unwrap_user_func(h) or h

        def _new_add_routes(routes):
            # NOTE(review): the inner `for (m, p, h, *rest) in (r,)` unpacks
            # r BEFORE the `len(r) >= 3` guard is evaluated, so a route entry
            # with fewer than 3 elements — or a non-tuple RouteDef object, as
            # produced by RouteTableDef — would raise here. Confirm callers
            # only pass (method, path, handler, ...) tuples.
            wrapped = [
                (
                    (m, p, _wrap_handler(h), *rest)  # route is (method,path,handler,…)
                    if len(r) >= 3
                    else r
                )
                for r in routes
                for (m, p, h, *rest) in (r,)  # unpack safely
            ]
            return orig_add_routes(wrapped)

        router.add_route = _wrap_and_add
        router.add_routes = _new_add_routes
        router._sf_tracing_patched = True
        if SF_DEBUG:
            print("[[patch_aiohttp]] router hooks installed", log=False)

    if SF_DEBUG:
        print("[[patch_aiohttp]] middleware + init patch applied", log=False)