sf-veritas 0.10.3__cp311-cp311-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sf-veritas might be problematic. Click here for more details.

Files changed (132) hide show
  1. sf_veritas/__init__.py +20 -0
  2. sf_veritas/_sffastlog.c +889 -0
  3. sf_veritas/_sffastlog.cpython-311-x86_64-linux-gnu.so +0 -0
  4. sf_veritas/_sffastnet.c +924 -0
  5. sf_veritas/_sffastnet.cpython-311-x86_64-linux-gnu.so +0 -0
  6. sf_veritas/_sffastnetworkrequest.c +730 -0
  7. sf_veritas/_sffastnetworkrequest.cpython-311-x86_64-linux-gnu.so +0 -0
  8. sf_veritas/_sffuncspan.c +2155 -0
  9. sf_veritas/_sffuncspan.cpython-311-x86_64-linux-gnu.so +0 -0
  10. sf_veritas/_sffuncspan_config.c +617 -0
  11. sf_veritas/_sffuncspan_config.cpython-311-x86_64-linux-gnu.so +0 -0
  12. sf_veritas/_sfheadercheck.c +341 -0
  13. sf_veritas/_sfheadercheck.cpython-311-x86_64-linux-gnu.so +0 -0
  14. sf_veritas/_sfnetworkhop.c +1451 -0
  15. sf_veritas/_sfnetworkhop.cpython-311-x86_64-linux-gnu.so +0 -0
  16. sf_veritas/_sfservice.c +1175 -0
  17. sf_veritas/_sfservice.cpython-311-x86_64-linux-gnu.so +0 -0
  18. sf_veritas/_sfteepreload.c +5167 -0
  19. sf_veritas/app_config.py +49 -0
  20. sf_veritas/cli.py +336 -0
  21. sf_veritas/constants.py +10 -0
  22. sf_veritas/custom_excepthook.py +304 -0
  23. sf_veritas/custom_log_handler.py +129 -0
  24. sf_veritas/custom_output_wrapper.py +144 -0
  25. sf_veritas/custom_print.py +146 -0
  26. sf_veritas/django_app.py +5 -0
  27. sf_veritas/env_vars.py +186 -0
  28. sf_veritas/exception_handling_middleware.py +18 -0
  29. sf_veritas/exception_metaclass.py +69 -0
  30. sf_veritas/fast_frame_info.py +116 -0
  31. sf_veritas/fast_network_hop.py +293 -0
  32. sf_veritas/frame_tools.py +112 -0
  33. sf_veritas/funcspan_config_loader.py +556 -0
  34. sf_veritas/function_span_profiler.py +1174 -0
  35. sf_veritas/import_hook.py +62 -0
  36. sf_veritas/infra_details/__init__.py +3 -0
  37. sf_veritas/infra_details/get_infra_details.py +24 -0
  38. sf_veritas/infra_details/kubernetes/__init__.py +3 -0
  39. sf_veritas/infra_details/kubernetes/get_cluster_name.py +147 -0
  40. sf_veritas/infra_details/kubernetes/get_details.py +7 -0
  41. sf_veritas/infra_details/running_on/__init__.py +17 -0
  42. sf_veritas/infra_details/running_on/kubernetes.py +11 -0
  43. sf_veritas/interceptors.py +497 -0
  44. sf_veritas/libsfnettee.so +0 -0
  45. sf_veritas/local_env_detect.py +118 -0
  46. sf_veritas/package_metadata.py +6 -0
  47. sf_veritas/patches/__init__.py +0 -0
  48. sf_veritas/patches/concurrent_futures.py +19 -0
  49. sf_veritas/patches/constants.py +1 -0
  50. sf_veritas/patches/exceptions.py +82 -0
  51. sf_veritas/patches/multiprocessing.py +32 -0
  52. sf_veritas/patches/network_libraries/__init__.py +76 -0
  53. sf_veritas/patches/network_libraries/aiohttp.py +281 -0
  54. sf_veritas/patches/network_libraries/curl_cffi.py +363 -0
  55. sf_veritas/patches/network_libraries/http_client.py +419 -0
  56. sf_veritas/patches/network_libraries/httpcore.py +515 -0
  57. sf_veritas/patches/network_libraries/httplib2.py +204 -0
  58. sf_veritas/patches/network_libraries/httpx.py +515 -0
  59. sf_veritas/patches/network_libraries/niquests.py +211 -0
  60. sf_veritas/patches/network_libraries/pycurl.py +385 -0
  61. sf_veritas/patches/network_libraries/requests.py +633 -0
  62. sf_veritas/patches/network_libraries/tornado.py +341 -0
  63. sf_veritas/patches/network_libraries/treq.py +270 -0
  64. sf_veritas/patches/network_libraries/urllib_request.py +468 -0
  65. sf_veritas/patches/network_libraries/utils.py +398 -0
  66. sf_veritas/patches/os.py +17 -0
  67. sf_veritas/patches/threading.py +218 -0
  68. sf_veritas/patches/web_frameworks/__init__.py +54 -0
  69. sf_veritas/patches/web_frameworks/aiohttp.py +793 -0
  70. sf_veritas/patches/web_frameworks/async_websocket_consumer.py +317 -0
  71. sf_veritas/patches/web_frameworks/blacksheep.py +527 -0
  72. sf_veritas/patches/web_frameworks/bottle.py +502 -0
  73. sf_veritas/patches/web_frameworks/cherrypy.py +678 -0
  74. sf_veritas/patches/web_frameworks/cors_utils.py +122 -0
  75. sf_veritas/patches/web_frameworks/django.py +944 -0
  76. sf_veritas/patches/web_frameworks/eve.py +395 -0
  77. sf_veritas/patches/web_frameworks/falcon.py +926 -0
  78. sf_veritas/patches/web_frameworks/fastapi.py +724 -0
  79. sf_veritas/patches/web_frameworks/flask.py +520 -0
  80. sf_veritas/patches/web_frameworks/klein.py +501 -0
  81. sf_veritas/patches/web_frameworks/litestar.py +551 -0
  82. sf_veritas/patches/web_frameworks/pyramid.py +428 -0
  83. sf_veritas/patches/web_frameworks/quart.py +824 -0
  84. sf_veritas/patches/web_frameworks/robyn.py +697 -0
  85. sf_veritas/patches/web_frameworks/sanic.py +857 -0
  86. sf_veritas/patches/web_frameworks/starlette.py +723 -0
  87. sf_veritas/patches/web_frameworks/strawberry.py +813 -0
  88. sf_veritas/patches/web_frameworks/tornado.py +481 -0
  89. sf_veritas/patches/web_frameworks/utils.py +91 -0
  90. sf_veritas/print_override.py +13 -0
  91. sf_veritas/regular_data_transmitter.py +409 -0
  92. sf_veritas/request_interceptor.py +401 -0
  93. sf_veritas/request_utils.py +550 -0
  94. sf_veritas/server_status.py +1 -0
  95. sf_veritas/shutdown_flag.py +11 -0
  96. sf_veritas/subprocess_startup.py +3 -0
  97. sf_veritas/test_cli.py +145 -0
  98. sf_veritas/thread_local.py +970 -0
  99. sf_veritas/timeutil.py +114 -0
  100. sf_veritas/transmit_exception_to_sailfish.py +28 -0
  101. sf_veritas/transmitter.py +132 -0
  102. sf_veritas/types.py +47 -0
  103. sf_veritas/unified_interceptor.py +1580 -0
  104. sf_veritas/utils.py +39 -0
  105. sf_veritas-0.10.3.dist-info/METADATA +97 -0
  106. sf_veritas-0.10.3.dist-info/RECORD +132 -0
  107. sf_veritas-0.10.3.dist-info/WHEEL +5 -0
  108. sf_veritas-0.10.3.dist-info/entry_points.txt +2 -0
  109. sf_veritas-0.10.3.dist-info/top_level.txt +1 -0
  110. sf_veritas.libs/libbrotlicommon-6ce2a53c.so.1.0.6 +0 -0
  111. sf_veritas.libs/libbrotlidec-811d1be3.so.1.0.6 +0 -0
  112. sf_veritas.libs/libcom_err-730ca923.so.2.1 +0 -0
  113. sf_veritas.libs/libcrypt-52aca757.so.1.1.0 +0 -0
  114. sf_veritas.libs/libcrypto-bdaed0ea.so.1.1.1k +0 -0
  115. sf_veritas.libs/libcurl-eaa3cf66.so.4.5.0 +0 -0
  116. sf_veritas.libs/libgssapi_krb5-323bbd21.so.2.2 +0 -0
  117. sf_veritas.libs/libidn2-2f4a5893.so.0.3.6 +0 -0
  118. sf_veritas.libs/libk5crypto-9a74ff38.so.3.1 +0 -0
  119. sf_veritas.libs/libkeyutils-2777d33d.so.1.6 +0 -0
  120. sf_veritas.libs/libkrb5-a55300e8.so.3.3 +0 -0
  121. sf_veritas.libs/libkrb5support-e6594cfc.so.0.1 +0 -0
  122. sf_veritas.libs/liblber-2-d20824ef.4.so.2.10.9 +0 -0
  123. sf_veritas.libs/libldap-2-cea2a960.4.so.2.10.9 +0 -0
  124. sf_veritas.libs/libnghttp2-39367a22.so.14.17.0 +0 -0
  125. sf_veritas.libs/libpcre2-8-516f4c9d.so.0.7.1 +0 -0
  126. sf_veritas.libs/libpsl-99becdd3.so.5.3.1 +0 -0
  127. sf_veritas.libs/libsasl2-7de4d792.so.3.0.0 +0 -0
  128. sf_veritas.libs/libselinux-d0805dcb.so.1 +0 -0
  129. sf_veritas.libs/libssh-c11d285b.so.4.8.7 +0 -0
  130. sf_veritas.libs/libssl-60250281.so.1.1.1k +0 -0
  131. sf_veritas.libs/libunistring-05abdd40.so.2.1.0 +0 -0
  132. sf_veritas.libs/libuuid-95b83d40.so.1.3.0 +0 -0
@@ -0,0 +1,550 @@
1
+ # request_utils.py
2
+ import logging
3
+ import os
4
+ import threading
5
+ import time
6
+ from typing import Any, Callable, List, Optional, Tuple
7
+
8
+ import orjson
9
+
10
+ from .env_vars import SF_DEBUG
11
+ from .server_status import server_running
12
+ from .shutdown_flag import is_shutting_down
13
+ from .thread_local import _thread_locals, suppress_logs, suppress_network_recording
14
+
15
# ==========================================================
# Tunables
# ==========================================================
# Max number of queued operations coalesced into a single batched POST.
BATCH_MAX = int(os.getenv("SF_NBPOST_BATCH_MAX", "512"))
# How long (milliseconds) a non-empty batch may wait before being flushed.
BATCH_FLUSH_MS = float(os.getenv("SF_NBPOST_FLUSH_MS", "2"))
# Per-request timeouts (seconds) for outbound telemetry POSTs; deliberately
# tight so telemetry can never stall application traffic for long.
CONNECT_TIMEOUT_S = float(os.getenv("SF_NBPOST_CONNECT_TIMEOUT", "0.1"))
READ_TIMEOUT_S = float(os.getenv("SF_NBPOST_READ_TIMEOUT", "0.3"))
TOTAL_TIMEOUT_S = float(os.getenv("SF_NBPOST_TOTAL_TIMEOUT", "0.7"))

# Check if h2 is available for HTTP/2 support
_HAS_H2 = False
try:
    import h2  # noqa: F401

    _HAS_H2 = True
except ImportError:
    pass

# Only enable HTTP/2 if h2 is installed AND env var is set
HTTP2_ENABLED = os.getenv("SF_NBPOST_HTTP2", "0") == "1" and _HAS_H2
# When "1", every operation is POSTed individually instead of being batched.
DISABLE_BATCHING = os.getenv("SF_NBPOST_DISABLE_BATCHING", "0") == "1"
# Keep gzip OFF by default per perf tests
GZIP_ENABLED = os.getenv("SF_NBPOST_GZIP", "0") == "1"
37
+
38
+
39
+ # ==========================================================
40
+ # Minimal public helpers
41
+ # ==========================================================
42
def get_header(request, header_name):
    """Return the value of *header_name* from the request's headers, or None."""
    headers = request.headers
    return headers.get(header_name)
44
+
45
+
46
def set_header(request, header_name, header_value):
    """Assign *header_value* under *header_name* in the request's header mapping."""
    headers = request.headers
    headers[header_name] = header_value
48
+
49
+
50
def is_server_running(url="http://localhost:8000/healthz"):
    """
    Lightweight liveness probe using stdlib to avoid async spin in caller thread.

    Returns True once a probe has succeeded (result is cached positively and
    never re-checked); returns False on any failure without raising.
    """
    global server_running
    from . import server_status

    # Fast path: either this module's cached copy or the shared flag says "up".
    if server_running or server_status.server_running:
        return True
    try:
        import urllib.request

        # Suppress our own instrumentation so the probe is not recorded/logged.
        with suppress_network_recording(), suppress_logs():
            with urllib.request.urlopen(url, timeout=0.5) as r:  # nosec
                if getattr(r, "status", 0) == 200:
                    # BUGFIX: `global server_running` only rebinds this module's
                    # imported copy of the flag; publish on server_status as well
                    # so other readers of server_status.server_running see it.
                    server_running = True
                    server_status.server_running = True
                    return True
    except Exception:
        # Best-effort probe: any connection/timeout error just means "not up".
        pass
    return False
68
+
69
+
70
# ==========================================================
# Queue & worker
# ==========================================================
try:
    from queue import SimpleQueue  # C fast path
except Exception:  # pragma: no cover
    from queue import Queue as SimpleQueue  # fallback

# item variants:
# ("POST", url, op, query, variables, wants_response, future)
# ("DEFER", builder_callable)  # builder returns (url, op, query, variables)
_Item = Tuple[Any, ...]

# Single process-wide queue feeding the background sender thread.
_q: "SimpleQueue[_Item]" = SimpleQueue()
# Whether the sender thread has been launched; guarded by _start_lock.
_started = False
_start_lock = threading.Lock()

# Worker backends
_HAS_PYCURL = False
try:
    import pycurl  # type: ignore

    _HAS_PYCURL = True
except Exception:
    _HAS_PYCURL = False

# httpx client (fallback path); created/destroyed by _run_worker_httpx.
_client = None  # type: ignore[assignment]
98
+
99
+
100
def _ensure_started():
    """
    Lazily start the background sender thread exactly once.

    Uses double-checked locking: a lock-free fast path for the common case,
    then a re-check under _start_lock before spawning the daemon thread.
    """
    global _started
    if _started:
        return
    with _start_lock:
        if _started:
            return
        t = threading.Thread(target=_bg_thread, name="nbpost-batcher", daemon=True)
        t.start()
        _started = True
        if SF_DEBUG:
            http2_status = HTTP2_ENABLED
            # Make the debug line explain *why* HTTP/2 is off when requested.
            if not _HAS_H2 and os.getenv("SF_NBPOST_HTTP2", "0") == "1":
                http2_status = "disabled (h2 not installed)"
            # NOTE: this is the project's overridden print (see custom_print);
            # log=False keeps the debug line out of the log capture pipeline.
            print(
                f"[nbpost] started batcher (HTTP/2={http2_status}, batching={'off' if DISABLE_BATCHING else 'on'}, backend={'pycurl' if _HAS_PYCURL else 'httpx'})",
                log=False,
            )
118
+
119
+
120
def _bg_thread():
    """
    Background thread body: prefer the pycurl multi worker (C fast path via
    libcurl); otherwise run the async httpx worker on its own event loop.
    """
    if _HAS_PYCURL:
        _run_worker_pycurl()
        return

    # httpx fallback: opportunistically swap in uvloop for a faster loop.
    try:
        import uvloop  # type: ignore

        uvloop.install()
    except Exception:
        pass

    import asyncio

    asyncio.run(_run_worker_httpx())
137
+
138
+
139
+ # ==========================================================
140
+ # pycurl backend (C, HTTP/2 via libcurl/nghttp2)
141
+ # ==========================================================
142
def _run_worker_pycurl():
    """
    Dedicated sender loop built on pycurl's multi interface.

    Forever: drain one item from the queue, either send it immediately or fold
    it into the current batch, flush the batch on size/age/URL-change, then pump
    the curl multi handle to make progress on all in-flight transfers and
    harvest completions. Never returns; runs on the daemon thread started by
    _ensure_started().
    """
    import pycurl  # type: ignore

    # CRITICAL: Set suppress flag for the ENTIRE thread since this thread is dedicated to sending telemetry
    # This prevents the C preload library from capturing our telemetry requests
    from .thread_local import suppress_network_recording_ctx
    suppress_network_recording_ctx.set(True)
    if SF_DEBUG:
        print(f"[pycurl worker] Set suppress_network_recording_ctx to True, current value: {suppress_network_recording_ctx.get()}", log=False)

    m = pycurl.CurlMulti()
    m.setopt(pycurl.M_MAX_HOST_CONNECTIONS, 1024)
    m.setopt(pycurl.M_MAXCONNECTS, 1024)

    # CRITICAL: Add marker header so C preload library can identify and skip telemetry requests
    base_headers = [
        "Content-Type: application/json",
        "X-Sf3-TelemetryOutbound: True"
    ]
    # Easy-handle -> Future for callers that asked for a response.
    in_flight = {}

    last_flush = time.monotonic()
    batch: List[dict] = []
    # All queued batch entries share one URL; a URL change forces a flush.
    url_for_batch: Optional[str] = None

    def _add_easy(url: str, body: bytes, future):
        # Configure one easy handle for a fire-and-forget JSON POST and attach
        # it to the multi handle; response bodies/headers are discarded.
        c = pycurl.Curl()
        c.setopt(pycurl.CONNECTTIMEOUT_MS, int(CONNECT_TIMEOUT_S * 1000))
        c.setopt(pycurl.TIMEOUT_MS, int(TOTAL_TIMEOUT_S * 1000))
        if HTTP2_ENABLED:
            c.setopt(pycurl.HTTP_VERSION, pycurl.CURL_HTTP_VERSION_2TLS)
        c.setopt(pycurl.URL, url.encode("utf-8"))
        c.setopt(pycurl.NOPROGRESS, True)
        # NOSIGNAL: required for timeouts in multi-threaded use of libcurl.
        c.setopt(pycurl.NOSIGNAL, 1)
        c.setopt(pycurl.POST, 1)
        c.setopt(pycurl.POSTFIELDS, body)
        c.setopt(pycurl.POSTFIELDSIZE, len(body))
        # Swallow response body and headers (returning the length consumes them).
        c.setopt(pycurl.WRITEFUNCTION, lambda _b: len(_b))
        c.setopt(pycurl.HEADERFUNCTION, lambda _b: len(_b))

        headers = list(base_headers)
        if GZIP_ENABLED:
            headers.append("Content-Encoding: gzip")
        c.setopt(pycurl.HTTPHEADER, headers)

        m.add_handle(c)
        if future is not None:
            in_flight[c] = future

    def _flush_batch(url: Optional[str], batch_payload: List[dict]):
        # Serialize and enqueue the accumulated batch as one POST (no future).
        if not batch_payload or not url:
            return
        body = orjson.dumps(batch_payload)
        if GZIP_ENABLED:
            import gzip

            body = gzip.compress(body)
        # Note: suppress_network_recording_ctx is already set for the entire thread
        with suppress_logs():
            _add_easy(url, body, future=None)

    def _send_one(url: str, op: str, query: str, variables: dict, future):
        # Send a single GraphQL operation, optionally resolving *future* later.
        payload = {"query": query, "variables": variables, "operationName": op}
        body = orjson.dumps(payload)
        if GZIP_ENABLED:
            import gzip

            body = gzip.compress(body)
        # Note: suppress_network_recording_ctx is already set for the entire thread
        with suppress_logs():
            _add_easy(url, body, future=future)

    while True:
        now = time.monotonic()
        elapsed_ms = (now - last_flush) * 1000.0
        # Flush decision is made from the *pre-drain* batch state; the flush
        # itself happens after this iteration's item has been folded in.
        should_flush = len(batch) >= BATCH_MAX or (
            batch and elapsed_ms >= BATCH_FLUSH_MS
        )

        drained = 0
        try:
            # Blocking get doubles as the loop's pacing delay.
            item = _q.get(timeout=max(0.0005, BATCH_FLUSH_MS / 1000.0))
            drained += 1

            tag = item[0]
            if tag == "DEFER":
                # Build tuple on the worker; keeps request thread extremely light
                builder: Callable[[], Tuple[str, str, str, dict]] = item[1]
                url, op, query, variables = builder()
                if DISABLE_BATCHING:
                    _send_one(url, op, query, variables, None)
                else:
                    if url_for_batch is None:
                        url_for_batch = url
                    elif url != url_for_batch:
                        # Target URL changed: flush what we have, start fresh.
                        _flush_batch(url_for_batch, batch)
                        last_flush = time.monotonic()
                        batch.clear()
                        url_for_batch = url
                    batch.append(
                        {"query": query, "variables": variables, "operationName": op}
                    )
            else:
                # ("POST", url, op, query, variables, wants_response, future)
                _, url, op, query, variables, wants_response, fut = item
                if getattr(_thread_locals, "reentrancy_guard_logging_preactive", False):
                    variables["reentrancyGuardPreactive"] = True
                if wants_response and fut is not None:
                    # Response wanted: bypass batching so the future maps 1:1
                    # to a single transfer.
                    _send_one(url, op, query, variables, fut)
                else:
                    if DISABLE_BATCHING:
                        _send_one(url, op, query, variables, None)
                    else:
                        if url_for_batch is None:
                            url_for_batch = url
                        elif url != url_for_batch:
                            _flush_batch(url_for_batch, batch)
                            last_flush = time.monotonic()
                            batch.clear()
                            url_for_batch = url
                        batch.append(
                            {
                                "query": query,
                                "variables": variables,
                                "operationName": op,
                            }
                        )
        except Exception:
            # Covers queue.Empty (the common timeout case) — but NOTE(review):
            # it also silently swallows any error raised by builder() or the
            # batching logic above.
            pass

        if should_flush and not DISABLE_BATCHING:
            _flush_batch(url_for_batch, batch)
            last_flush = time.monotonic()
            batch.clear()
            url_for_batch = None

        # Pump the multi interface
        import pycurl as _pc  # local alias for speed

        while True:
            stat, _ = m.perform()
            if stat != _pc.E_CALL_MULTI_PERFORM:
                break

        # Completed transfers
        while True:
            num_q, ok_list, err_list = m.info_read()
            for c in ok_list:
                fut = in_flight.pop(c, None)
                if fut is not None:
                    try:
                        code = c.getinfo(_pc.RESPONSE_CODE)
                        fut.set_result(code == 200)
                    except Exception as e:
                        fut.set_exception(e)
                m.remove_handle(c)
                c.close()

            for c, errno, errmsg in err_list:
                fut = in_flight.pop(c, None)
                if fut is not None:
                    try:
                        # Transport failure resolves the future to False rather
                        # than raising into the caller.
                        fut.set_result(False)
                    except Exception:
                        pass
                if SF_DEBUG:
                    print(f"[nbpost] pycurl error {errno}: {errmsg}", log=False)
                m.remove_handle(c)
                c.close()

            if num_q == 0:
                break

        if drained == 0 and not batch:
            # Idle: let libcurl wait for socket activity instead of spinning.
            try:
                m.select(0.01)  # wait for activity (max 10ms)
            except Exception:
                pass
320
+
321
+
322
+ # ==========================================================
323
+ # httpx backend (fallback)
324
+ # ==========================================================
325
async def _run_worker_httpx():
    """
    Fallback sender loop using a shared httpx.AsyncClient.

    Mirrors the pycurl worker's drain/batch/flush logic, but awaits each send
    inline. NOTE(review): _q.get(timeout=...) is a *blocking* call inside a
    coroutine — acceptable here only because this loop owns its event loop and
    awaits all sends sequentially; verify if concurrent tasks are ever added.
    """
    global _client
    import asyncio

    import httpx

    # CRITICAL: Set suppress flag for the ENTIRE thread since this thread is dedicated to sending telemetry
    # This prevents the C preload library from capturing our telemetry requests
    from .thread_local import suppress_network_recording_ctx
    suppress_network_recording_ctx.set(True)
    if SF_DEBUG:
        print(f"[httpx worker] Set suppress_network_recording_ctx to True, current value: {suppress_network_recording_ctx.get()}", log=False)

    limits = httpx.Limits(
        max_connections=1024,
        max_keepalive_connections=1024,
        keepalive_expiry=30.0,
    )
    timeout = httpx.Timeout(
        connect=CONNECT_TIMEOUT_S,
        read=READ_TIMEOUT_S,
        write=READ_TIMEOUT_S,
        pool=TOTAL_TIMEOUT_S,
    )
    # CRITICAL: Add marker header so C preload library can identify and skip telemetry requests
    _client = httpx.AsyncClient(
        http2=HTTP2_ENABLED,
        limits=limits,
        timeout=timeout,
        headers={
            "Content-Type": "application/json",
            "X-Sf3-TelemetryOutbound": "True"
        },
    )

    try:
        last_flush = time.monotonic()
        batch: List[dict] = []
        # All queued batch entries share one URL; a URL change forces a flush.
        url_for_batch: Optional[str] = None

        while True:
            now = time.monotonic()
            elapsed_ms = (now - last_flush) * 1000.0
            # Flush decision is made from the pre-drain batch state.
            should_flush = len(batch) >= BATCH_MAX or (
                batch and elapsed_ms >= BATCH_FLUSH_MS
            )

            drained = 0
            try:
                # Blocking get doubles as the loop's pacing delay.
                item = _q.get(timeout=max(0.0005, BATCH_FLUSH_MS / 1000.0))
                drained += 1

                tag = item[0]
                if tag == "DEFER":
                    # Builder runs here so the request thread stays light.
                    builder: Callable[[], Tuple[str, str, str, dict]] = item[1]
                    url, op, query, variables = builder()
                    if DISABLE_BATCHING:
                        await _send_one_httpx(_client, url, op, query, variables, None)
                    else:
                        if url_for_batch is None:
                            url_for_batch = url
                        elif url != url_for_batch:
                            # Target URL changed: flush and restart the batch.
                            await _flush_batch_httpx(_client, url_for_batch, batch)
                            last_flush = time.monotonic()
                            batch.clear()
                            url_for_batch = url
                        batch.append(
                            {
                                "query": query,
                                "variables": variables,
                                "operationName": op,
                            }
                        )
                else:
                    # ("POST", url, op, query, variables, wants_response, future)
                    _, url, op, query, variables, wants_response, fut = item
                    if getattr(
                        _thread_locals, "reentrancy_guard_logging_preactive", False
                    ):
                        variables["reentrancyGuardPreactive"] = True
                    if wants_response and fut is not None:
                        # Response wanted: unbatched so the future maps 1:1.
                        await _send_one_httpx(_client, url, op, query, variables, fut)
                    else:
                        if DISABLE_BATCHING:
                            await _send_one_httpx(
                                _client, url, op, query, variables, None
                            )
                        else:
                            if url_for_batch is None:
                                url_for_batch = url
                            elif url != url_for_batch:
                                await _flush_batch_httpx(_client, url_for_batch, batch)
                                last_flush = time.monotonic()
                                batch.clear()
                                url_for_batch = url
                            batch.append(
                                {
                                    "query": query,
                                    "variables": variables,
                                    "operationName": op,
                                }
                            )
            except Exception:
                # Covers queue.Empty (timeout) — NOTE(review): also swallows
                # errors from builder() and the batching logic above.
                pass

            if should_flush and not DISABLE_BATCHING:
                if batch:
                    await _flush_batch_httpx(_client, url_for_batch, batch)
                last_flush = time.monotonic()
                batch.clear()
                url_for_batch = None

            if drained == 0 and not batch:
                # Idle: yield to the loop without sleeping.
                await asyncio.sleep(0)
    finally:
        # Best-effort teardown of the shared client on loop exit.
        try:
            await _client.aclose()
        except Exception:
            pass
        _client = None
444
+
445
+
446
async def _flush_batch_httpx(client, url: Optional[str], batch: List[dict]):
    """
    POST the accumulated GraphQL operations to *url* as one JSON array.

    Best-effort: any failure is swallowed (logged only under SF_DEBUG).
    """
    if not url or not batch:
        return
    import httpx  # noqa: F401  (kept from the original import site)

    body = orjson.dumps(batch)
    extra_headers = {}
    if GZIP_ENABLED:
        import gzip

        body = gzip.compress(body)
        extra_headers["Content-Encoding"] = "gzip"
    try:
        # suppress_network_recording_ctx is already set thread-wide by the worker.
        with suppress_logs():
            r = await client.post(url, content=body, headers=extra_headers)
        try:
            await r.aclose()
        except Exception:
            pass
        if SF_DEBUG:
            print(f"[nbpost] batch -> {r.status_code}, items={len(batch)}", log=False)
    except Exception as e:
        if SF_DEBUG:
            print(f"[nbpost] batch POST failed: {e}", log=False)
471
+
472
+
473
async def _send_one_httpx(
    client, url: str, op: str, query: str, variables: dict, future=None
):
    """
    POST a single GraphQL operation. When *future* is provided, resolve it with
    True iff the server answered 200, or with the raised exception on failure.
    """
    import httpx  # noqa: F401  (kept from the original import site)

    body = orjson.dumps(
        {"query": query, "variables": variables, "operationName": op}
    )
    extra_headers = {}
    if GZIP_ENABLED:
        import gzip

        body = gzip.compress(body)
        extra_headers["Content-Encoding"] = "gzip"
    try:
        # suppress_network_recording_ctx is already set thread-wide by the worker.
        with suppress_logs():
            r = await client.post(url, content=body, headers=extra_headers)
        status = r.status_code
        try:
            await r.aclose()
        except Exception:
            pass
        if future is not None:
            future.set_result(status == 200)
    except Exception as e:
        if future is not None:
            future.set_exception(e)
        if SF_DEBUG:
            print(f"[nbpost] POST failed: {op} {e}", log=False)
502
+
503
+
504
+ # ==========================================================
505
+ # Public API (hot path)
506
+ # ==========================================================
507
def non_blocking_post(url, operation_name, query, variables):
    """Enqueue a GraphQL POST and return immediately (fire-and-forget)."""
    # BUGFIX: read the live flag off the module. The file-level
    # `from .shutdown_flag import is_shutting_down` binds a snapshot taken at
    # import time (always False), so the atexit hook's update to
    # shutdown_flag.is_shutting_down would never be observed here.
    from . import shutdown_flag
    if shutdown_flag.is_shutting_down:
        return None
    _ensure_started()
    try:
        _q.put_nowait(("POST", url, operation_name, query, variables, False, None))
    except Exception:
        if SF_DEBUG:
            print("[nbpost] queue put failed", log=False)
    return None
518
+
519
+
520
def non_blocking_post_with_response(url, operation_name, query, variables):
    """Enqueue and return a Future-like that resolves to bool (success)."""
    # BUGFIX: read the live flag off the module; the file-level import binds a
    # snapshot of `is_shutting_down` that the atexit hook never updates.
    from . import shutdown_flag
    if shutdown_flag.is_shutting_down:
        return None
    _ensure_started()
    from concurrent.futures import Future

    fut = Future()
    try:
        _q.put_nowait(("POST", url, operation_name, query, variables, True, fut))
    except Exception as e:
        # Resolve the future with the enqueue error so callers don't hang.
        fut.set_exception(e)
    return fut
533
+
534
+
535
def non_blocking_post_deferred(builder: Callable[[], Tuple[str, str, str, dict]]):
    """
    Enqueue a zero-alloc builder closure; the worker will call it to build
    (url, operation_name, query, variables) just-in-time before sending.

    This moves dict construction off the request thread entirely.
    """
    # BUGFIX: read the live flag off the module; the file-level import binds a
    # snapshot of `is_shutting_down` that the atexit hook never updates.
    from . import shutdown_flag
    if shutdown_flag.is_shutting_down:
        return None
    _ensure_started()
    try:
        _q.put_nowait(("DEFER", builder))
    except Exception:
        if SF_DEBUG:
            print("[nbpost] queue put failed (deferred)", log=False)
    return None
@@ -0,0 +1 @@
1
# Cached liveness flag for the local health endpoint; intended to be flipped to
# True after a successful probe (see request_utils.is_server_running) — NOTE:
# confirm callers actually write through this module rather than a local copy.
server_running = False
@@ -0,0 +1,11 @@
1
import atexit

# Process-wide flag: becomes True once interpreter shutdown has begun.
# Producers consult it to stop enqueueing new background work.
is_shutting_down = False


def set_shutdown_flag():
    """Record that the interpreter has started shutting down."""
    global is_shutting_down
    is_shutting_down = True


# Flip the flag automatically at interpreter exit.
atexit.register(set_shutdown_flag)
@@ -0,0 +1,3 @@
1
# Startup shim executed inside freshly spawned subprocesses so the Sailfish
# instrumentation is installed there as well.
from sf_veritas import setup_interceptors

setup_interceptors()  # Set up the interceptors immediately