pulse-framework 0.1.71__py3-none-any.whl → 0.1.72__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pulse/proxy.py CHANGED
@@ -1,47 +1,125 @@
 """
-Proxy handler for forwarding requests to React Router server in single-server mode.
+ASGI proxy for forwarding requests to the React Router server in single-server mode.
+
+Design goals:
+- ASGI-only surface area.
+- Avoid upstream cookie persistence and connection/stream leaks.
+- Shut down cleanly even with open dev connections.
 """

 import asyncio
 import logging
-from typing import cast
+from collections.abc import AsyncGenerator, Iterable
+from contextlib import suppress
+from dataclasses import dataclass
+from typing import Any, cast

-import httpx
-import websockets
-from fastapi.responses import StreamingResponse
-from starlette.requests import Request
-from starlette.responses import PlainTextResponse, Response
+import aiohttp
+from starlette.datastructures import URL
+from starlette.types import Receive, Scope, Send
 from starlette.websockets import WebSocket, WebSocketDisconnect
-from websockets.typing import Subprotocol

 from pulse.context import PulseContext
 from pulse.cookies import parse_cookie_header

 logger = logging.getLogger(__name__)

+# Streaming/concurrency defaults, informed by asgiproxy.
+_INCOMING_STREAMING_THRESHOLD = 512 * 1024
+_OUTGOING_STREAMING_THRESHOLD = 5 * 1024 * 1024
+_STREAM_CHUNK_SIZE = 512 * 1024
+_MAX_CONCURRENCY = 100
+
+
+@dataclass
+class ProxyConfig:
+    """Configuration for the React proxy in single-server mode."""
+
+    max_concurrency: int = _MAX_CONCURRENCY
+    incoming_streaming_threshold: int = _INCOMING_STREAMING_THRESHOLD
+    outgoing_streaming_threshold: int = _OUTGOING_STREAMING_THRESHOLD
+    stream_chunk_size: int = _STREAM_CHUNK_SIZE
+
+
+# Hop-by-hop headers should not be proxied per RFC 7230.
+_HOP_BY_HOP_HEADERS = {
+    "connection",
+    "keep-alive",
+    "proxy-authenticate",
+    "proxy-authorization",
+    "te",
+    "trailers",
+    "transfer-encoding",
+    "upgrade",
+}
+
+# WebSocket-specific headers to drop when dialing upstream.
+_WEBSOCKET_EXCLUDED_HEADERS = {
+    "host",
+    "upgrade",
+    "connection",
+    "sec-websocket-key",
+    "sec-websocket-version",
+    "sec-websocket-protocol",
+    "sec-websocket-extensions",
+}
+
+_URL_REWRITE_HEADERS = {"location", "content-location"}
+
+
+def _http_to_ws_url(http_url: str) -> str:
+    if http_url.startswith("https://"):
+        return http_url.replace("https://", "wss://", 1)
+    if http_url.startswith("http://"):
+        return http_url.replace("http://", "ws://", 1)
+    return http_url
+
+
+def _decode_header(value: bytes) -> str:
+    return value.decode("latin-1")

-class ReactProxy:
-    """
-    Handles proxying HTTP requests and WebSocket connections to React Router server.

-    In single-server mode, the Python server proxies unmatched routes to the React
-    dev server. This proxy rewrites URLs in responses to use the external server
-    address instead of the internal React server address.
-    """
+def _encode_header(value: str) -> bytes:
+    return value.encode("latin-1")
+
+
+class ReactProxy:
+    """ASGI-level proxy for React Router HTTP/WebSocket requests."""

     react_server_address: str
     server_address: str
-    _client: httpx.AsyncClient | None
+    config: ProxyConfig
+    _session: aiohttp.ClientSession | None
+    _active_responses: set[aiohttp.ClientResponse]
+    _active_websockets: set[aiohttp.ClientWebSocketResponse]
+    _tasks: set[asyncio.Task[Any]]
+    _closing: asyncio.Event

-    def __init__(self, react_server_address: str, server_address: str):
+    def __init__(
+        self,
+        react_server_address: str,
+        server_address: str,
+        *,
+        config: ProxyConfig | None = None,
+    ) -> None:
         """
         Args:
             react_server_address: Internal React Router server URL (e.g., http://localhost:5173)
             server_address: External server URL exposed to clients (e.g., http://localhost:8000)
+            config: Proxy configuration (uses defaults if not provided).
         """
         self.react_server_address = react_server_address
         self.server_address = server_address
-        self._client = None
+        self.config = config or ProxyConfig()
+        self._session = None
+        self._active_responses = set()
+        self._active_websockets = set()
+        self._tasks = set()
+        self._closing = asyncio.Event()
+
+    def _track_task(self, task: asyncio.Task[Any]) -> None:
+        self._tasks.add(task)
+        task.add_done_callback(self._tasks.discard)

     def rewrite_url(self, url: str) -> str:
         """Rewrite internal React server URLs to external server address."""
@@ -50,200 +128,656 @@ class ReactProxy:
         return url

     @property
-    def client(self) -> httpx.AsyncClient:
-        """Lazy initialization of HTTP client."""
-        if self._client is None:
-            self._client = httpx.AsyncClient(
-                timeout=httpx.Timeout(30.0),
-                follow_redirects=False,
-            )
-        return self._client
-
-    def _is_websocket_upgrade(self, request: Request) -> bool:
-        """Check if request is a WebSocket upgrade."""
-        upgrade = request.headers.get("upgrade", "").lower()
-        connection = request.headers.get("connection", "").lower()
-        return upgrade == "websocket" and "upgrade" in connection
-
-    def _http_to_ws_url(self, http_url: str) -> str:
-        """Convert HTTP URL to WebSocket URL."""
-        if http_url.startswith("https://"):
-            return http_url.replace("https://", "wss://", 1)
-        elif http_url.startswith("http://"):
-            return http_url.replace("http://", "ws://", 1)
-        return http_url
+    def session(self) -> aiohttp.ClientSession:
+        """Lazy initialization of upstream HTTP/WebSocket client session."""
+        if self._session is None:
+            # Keep connect timeouts; avoid total/read timeouts for long streams.
+            timeout = aiohttp.ClientTimeout(total=None, sock_connect=30)
+            connector = aiohttp.TCPConnector(
+                limit=self.config.max_concurrency,
+                limit_per_host=self.config.max_concurrency,
+            )
+            self._session = aiohttp.ClientSession(
+                connector=connector,
+                cookie_jar=aiohttp.DummyCookieJar(),
+                auto_decompress=False,
+                timeout=timeout,
+            )
+        return self._session
+
+    def _determine_incoming_streaming(
+        self, method: str, content_length: int | None
+    ) -> bool:
+        if method in ("GET", "HEAD"):
+            return False
+        if content_length is None:
+            return True
+        return content_length > self.config.incoming_streaming_threshold
+
+    def _determine_outgoing_streaming(self, response: aiohttp.ClientResponse) -> bool:
+        if response.status != 200:
+            return False
+        content_length = response.headers.get("content-length")
+        if not content_length:
+            return True
+        try:
+            return int(content_length) > self.config.outgoing_streaming_threshold
+        except Exception:
+            return True
+
+    def _rewrite_raw_headers(
+        self, raw_headers: Iterable[tuple[bytes, bytes]]
+    ) -> list[tuple[bytes, bytes]]:
+        result: list[tuple[bytes, bytes]] = []
+        for key_bytes, value_bytes in raw_headers:
+            key = _decode_header(key_bytes)
+            key_lower = key.lower()
+            if key_lower in _HOP_BY_HOP_HEADERS:
+                continue
+            value = _decode_header(value_bytes)
+            if key_lower in _URL_REWRITE_HEADERS:
+                value = self.rewrite_url(value)
+            result.append((key_bytes, _encode_header(value)))
+        return result
+
+    def _merge_session_cookie(
+        self, cookie_header: str | None, cookie_name: str, cookie_value: str | None
+    ) -> str | None:
+        if not cookie_value:
+            return cookie_header
+        existing = parse_cookie_header(cookie_header)
+        if existing.get(cookie_name) == cookie_value:
+            return cookie_header
+        existing[cookie_name] = cookie_value
+        return "; ".join(f"{key}={value}" for key, value in existing.items())

     async def proxy_websocket(self, websocket: WebSocket) -> None:
-        """
-        Proxy WebSocket connection to React Router server.
-        Only allowed in dev mode and on root path "/".
-        """
+        """Proxy a WebSocket connection to the React Router server."""
+        if self._closing.is_set():
+            await websocket.close(code=1012, reason="Proxy shutting down")
+            return

-        # Build target WebSocket URL
-        ws_url = self._http_to_ws_url(self.react_server_address)
+        ws_url = _http_to_ws_url(self.react_server_address)
         target_url = ws_url.rstrip("/") + websocket.url.path
         if websocket.url.query:
             target_url += "?" + websocket.url.query

-        # Extract subprotocols from client request
-        subprotocol_header = websocket.headers.get("sec-websocket-protocol")
-        subprotocols: list[Subprotocol] | None = None
-        if subprotocol_header:
-            # Parse comma-separated list of subprotocols
-            # Subprotocol is a NewType (just a type annotation), so cast strings to it
-            subprotocols = cast(
-                list[Subprotocol], [p.strip() for p in subprotocol_header.split(",")]
-            )
-
-        # Extract headers for WebSocket connection (excluding WebSocket-specific headers)
-        headers = {
-            k: v
-            for k, v in websocket.headers.items()
-            if k.lower()
-            not in (
-                "host",
-                "upgrade",
-                "connection",
-                "sec-websocket-key",
-                "sec-websocket-version",
-                "sec-websocket-protocol",
-            )
-        }
-
-        # Connect to target WebSocket server first to negotiate subprotocol
+        # Prefer negotiated subprotocols from ASGI scope.
+        scope_subprotocols = cast(list[str] | None, websocket.scope.get("subprotocols"))
+        subprotocols = list(scope_subprotocols or [])
+        if not subprotocols:
+            subprotocol_header = websocket.headers.get("sec-websocket-protocol")
+            if subprotocol_header:
+                subprotocols = [
+                    p.strip() for p in subprotocol_header.split(",") if p.strip()
+                ]
+
+        headers: list[tuple[str, str]] = []
+        cookie_header: str | None = None
+        for key, value in websocket.headers.items():
+            key_lower = key.lower()
+            if key_lower in _WEBSOCKET_EXCLUDED_HEADERS:
+                continue
+            if key_lower in _HOP_BY_HOP_HEADERS:
+                continue
+            if key_lower == "cookie":
+                cookie_header = value
+                continue
+            headers.append((key, value))
+
+        ctx = PulseContext.get()
+        session = ctx.session
+        if session is not None:
+            session_cookie = session.get_cookie_value(ctx.app.cookie.name)
+            cookie_header = self._merge_session_cookie(
+                cookie_header,
+                ctx.app.cookie.name,
+                session_cookie,
+            )
+        if cookie_header:
+            headers.append(("cookie", cookie_header))
+
+        upstream_ws: aiohttp.ClientWebSocketResponse | None = None
+        client_to_upstream_task: asyncio.Task[Any] | None = None
+        upstream_to_client_task: asyncio.Task[Any] | None = None
+
         try:
-            async with websockets.connect(
+            upstream_ws = await self.session.ws_connect(
                 target_url,
-                additional_headers=headers,
-                subprotocols=subprotocols,
-                ping_interval=None,  # Let the target server handle ping/pong
-            ) as target_ws:
-                # Accept client connection with the negotiated subprotocol
-                await websocket.accept(subprotocol=target_ws.subprotocol)
-
-                # Forward messages bidirectionally
-                async def forward_client_to_target():
-                    try:
-                        async for message in websocket.iter_text():
-                            await target_ws.send(message)
-                    except (WebSocketDisconnect, websockets.ConnectionClosed):
-                        # Client disconnected, close target connection
-                        logger.debug("Client disconnected, closing target connection")
-                        try:
-                            await target_ws.close()
-                        except Exception:
-                            pass
-                    except Exception as e:
-                        logger.error(f"Error forwarding client message: {e}")
-                        raise
-
-                async def forward_target_to_client():
+                headers=headers,
+                protocols=subprotocols,
+            )
+            self._active_websockets.add(upstream_ws)
+
+            await websocket.accept(subprotocol=upstream_ws.protocol)
+
+            async def _client_to_upstream() -> None:
+                assert upstream_ws is not None
+                while not self._closing.is_set():
                     try:
-                        async for message in target_ws:
-                            if isinstance(message, str):
-                                await websocket.send_text(message)
-                            else:
-                                await websocket.send_bytes(message)
-                    except (WebSocketDisconnect, websockets.ConnectionClosed) as e:
-                        # Client or target disconnected, stop forwarding
-                        logger.debug(
-                            "Connection closed, stopping forward_target_to_client"
-                        )
-                        # If target disconnected, close client connection
-                        if isinstance(e, websockets.ConnectionClosed):
-                            try:
-                                await websocket.close()
-                            except Exception:
-                                pass
-                    except Exception as e:
-                        logger.error(f"Error forwarding target message: {e}")
-                        raise
-
-                # Run both forwarding tasks concurrently
-                # If one side closes, the other will detect it and stop gracefully
-                await asyncio.gather(
-                    forward_client_to_target(),
-                    forward_target_to_client(),
-                    return_exceptions=True,
+                        message = await websocket.receive()
+                    except WebSocketDisconnect:
+                        return
+                    message_type = message.get("type")
+                    if message_type == "websocket.disconnect":
+                        return
+                    if message_type != "websocket.receive":
+                        continue
+                    text = message.get("text")
+                    if text is not None:
+                        await upstream_ws.send_str(text)
+                        continue
+                    data = message.get("bytes")
+                    if data is not None:
+                        await upstream_ws.send_bytes(data)
+
+            async def _upstream_to_client() -> None:
+                assert upstream_ws is not None
+                while not self._closing.is_set():
+                    msg = await upstream_ws.receive()
+                    msg_type = msg.type
+                    if msg_type == aiohttp.WSMsgType.TEXT:
+                        await websocket.send_text(msg.data)
+                        continue
+                    if msg_type == aiohttp.WSMsgType.BINARY:
+                        await websocket.send_bytes(msg.data)
+                        continue
+                    if msg_type in (
+                        aiohttp.WSMsgType.CLOSE,
+                        aiohttp.WSMsgType.CLOSED,
+                        aiohttp.WSMsgType.CLOSING,
+                    ):
+                        return
+                    if msg_type == aiohttp.WSMsgType.ERROR:
+                        exc = upstream_ws.exception()
+                        if exc:
+                            logger.debug("Upstream websocket error", exc_info=exc)
+                        return
+
+            client_to_upstream_task = asyncio.create_task(_client_to_upstream())
+            upstream_to_client_task = asyncio.create_task(_upstream_to_client())
+            self._track_task(client_to_upstream_task)
+            self._track_task(upstream_to_client_task)
+
+            done, pending = await asyncio.wait(
+                {client_to_upstream_task, upstream_to_client_task},
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            for task in pending:
+                task.cancel()
+            with suppress(Exception):
+                await asyncio.gather(*pending, return_exceptions=True)
+            # Surface unexpected errors from the completed task.
+            for task in done:
+                exc = task.exception()
+                if exc and not isinstance(exc, asyncio.CancelledError):
+                    raise exc
+
+        except (aiohttp.ClientError, asyncio.TimeoutError, TimeoutError) as exc:
+            logger.error("WebSocket proxy connection failed: %s", exc)
+            with suppress(asyncio.CancelledError, Exception):
+                await websocket.close(
+                    code=1014,
+                    reason="Bad Gateway: Could not connect to React Router server",
                 )
+        except Exception as exc:
+            logger.error("WebSocket proxy error: %s", exc)
+            with suppress(asyncio.CancelledError, Exception):
+                await websocket.close(code=1011, reason="Bad Gateway: Proxy error")
+        finally:
+            if client_to_upstream_task is not None:
+                client_to_upstream_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await client_to_upstream_task
+            if upstream_to_client_task is not None:
+                upstream_to_client_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await upstream_to_client_task
+            if upstream_ws is not None:
+                self._active_websockets.discard(upstream_ws)
+                with suppress(asyncio.CancelledError, Exception):
+                    await upstream_ws.close()
+            with suppress(asyncio.CancelledError, Exception):
+                await websocket.close()
+
+    async def close(self) -> None:
+        """Stop accepting work, cancel tasks, and close upstream resources."""
+        self._closing.set()
+
+        tasks = list(self._tasks)
+        for task in tasks:
+            task.cancel()
+        if tasks:
+            with suppress(Exception):
+                await asyncio.gather(*tasks, return_exceptions=True)
+        self._tasks.clear()
+
+        for response in list(self._active_responses):
+            self._active_responses.discard(response)
+            with suppress(Exception):
+                response.close()
+
+        for websocket in list(self._active_websockets):
+            self._active_websockets.discard(websocket)
+            with suppress(Exception):
+                await websocket.close()
+
+        if self._session is not None:
+            with suppress(Exception):
+                await self._session.close()
+            self._session = None

-        except (websockets.WebSocketException, websockets.ConnectionClosedError) as e:
-            logger.error(f"WebSocket proxy connection failed: {e}")
-            await websocket.close(
-                code=1014,  # Bad Gateway
-                reason="Bad Gateway: Could not connect to React Router server",
+    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
+        scope_type = scope["type"]
+        if scope_type == "websocket":
+            websocket = WebSocket(scope, receive=receive, send=send)
+            await self.proxy_websocket(websocket)
+            return
+        if scope_type != "http":
+            return
+        if self._closing.is_set():
+            await send(
+                {
+                    "type": "http.response.start",
+                    "status": 503,
+                    "headers": [(b"content-type", b"text/plain; charset=utf-8")],
+                }
             )
-        except Exception as e:
-            logger.error(f"WebSocket proxy error: {e}")
-            await websocket.close(
-                code=1011,  # Internal Server Error
-                reason="Bad Gateway: Proxy error",
+            await send(
+                {
+                    "type": "http.response.body",
+                    "body": b"Service Unavailable: Proxy shutting down",
+                    "more_body": False,
+                }
             )
+            return

-    async def __call__(self, request: Request) -> Response:
-        """
-        Forward HTTP request to React Router server and stream response back.
-        """
-        # Build target URL
-        url = self.react_server_address.rstrip("/") + request.url.path
-        if request.url.query:
-            url += "?" + request.url.query
+        request_url = URL(scope=scope)
+        root_path = scope.get("root_path", "")
+        path = request_url.path
+        if root_path and not path.startswith(root_path):
+            path = root_path.rstrip("/") + path
+        url = self.react_server_address.rstrip("/") + path
+        if request_url.query:
+            url += "?" + request_url.query
+
+        raw_headers = cast(list[tuple[bytes, bytes]], scope.get("headers") or [])
+        headers: list[tuple[str, str]] = []
+        cookie_header: str | None = None
+        content_length: int | None = None
+        for key_bytes, value_bytes in raw_headers:
+            key = _decode_header(key_bytes)
+            key_lower = key.lower()
+            if key_lower == "host":
+                continue
+            if key_lower in _HOP_BY_HOP_HEADERS:
+                continue
+            value = _decode_header(value_bytes)
+            if key_lower == "cookie":
+                cookie_header = value
+                continue
+            if key_lower == "content-length":
+                try:
+                    content_length = int(value)
+                except Exception:
+                    content_length = None
+            headers.append((key, value))

-        # Extract headers, skip host header (will be set by httpx)
-        headers = {k: v for k, v in request.headers.items() if k.lower() != "host"}
         ctx = PulseContext.get()
         session = ctx.session
         if session is not None:
             session_cookie = session.get_cookie_value(ctx.app.cookie.name)
-            if session_cookie:
-                existing = parse_cookie_header(headers.get("cookie"))
-                if existing.get(ctx.app.cookie.name) != session_cookie:
-                    existing[ctx.app.cookie.name] = session_cookie
-                    headers["cookie"] = "; ".join(
-                        f"{key}={value}" for key, value in existing.items()
-                    )
+            cookie_header = self._merge_session_cookie(
+                cookie_header,
+                ctx.app.cookie.name,
+                session_cookie,
+            )
+        if cookie_header:
+            headers.append(("cookie", cookie_header))

-        try:
-            # Build request
-            req = self.client.build_request(
-                method=request.method,
+        disconnect_event = asyncio.Event()
+        body_complete = asyncio.Event()
+
+        async def _stream_body() -> AsyncGenerator[bytes, None]:
+            try:
+                while not self._closing.is_set():
+                    message = await receive()
+                    if message["type"] == "http.disconnect":
+                        disconnect_event.set()
+                        return
+                    if message["type"] != "http.request":
+                        continue
+                    body = message.get("body", b"")
+                    if body:
+                        yield body
+                    if not message.get("more_body", False):
+                        return
+            finally:
+                body_complete.set()
+
+        async def _read_full_body() -> bytes:
+            parts: list[bytes] = []
+            try:
+                while not self._closing.is_set():
+                    message = await receive()
+                    if message["type"] == "http.disconnect":
+                        disconnect_event.set()
+                        return b""
+                    if message["type"] != "http.request":
+                        continue
+                    body = message.get("body", b"")
+                    if body:
+                        parts.append(body)
+                    if not message.get("more_body", False):
+                        break
+                return b"".join(parts)
+            finally:
+                body_complete.set()
+
+        async def _watch_disconnect() -> None:
+            await body_complete.wait()
+            if disconnect_event.is_set() or self._closing.is_set():
+                return
+            while not self._closing.is_set():
+                message = await receive()
+                if message["type"] == "http.disconnect":
+                    disconnect_event.set()
+                    return
+                if message["type"] != "http.request":
+                    continue
+                if not message.get("more_body", False):
+                    continue
+
+        watch_task = asyncio.create_task(_watch_disconnect())
+        self._track_task(watch_task)
+
+        should_stream_incoming = self._determine_incoming_streaming(
+            scope["method"], content_length
+        )
+        if should_stream_incoming:
+            headers = [
+                (key, value)
+                for key, value in headers
+                if key.lower() != "content-length"
+            ]
+
+        data: AsyncGenerator[bytes, None] | bytes | None = None
+        if scope["method"] not in ("GET", "HEAD"):
+            if should_stream_incoming:
+                data = _stream_body()
+            else:
+                data = await _read_full_body()
+
+            if disconnect_event.is_set() or self._closing.is_set():
+                watch_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await watch_task
+                return
+        else:
+            body_complete.set()
+
+        proxy_response: aiohttp.ClientResponse | None = None
+        request_task = asyncio.create_task(
+            self.session.request(
+                method=scope["method"],
                 url=url,
                 headers=headers,
-                content=request.stream(),
+                data=data,
+                allow_redirects=False,
             )
+        )
+        self._track_task(request_task)
+        disconnect_task = asyncio.create_task(disconnect_event.wait())
+        closing_task = asyncio.create_task(self._closing.wait())
+        self._track_task(disconnect_task)
+        self._track_task(closing_task)

-            # Send request with streaming
-            r = await self.client.send(req, stream=True)
-
-            # Rewrite headers that may contain internal React server URLs
-            response_headers: dict[str, str] = {}
-            for k, v in r.headers.items():
-                if k.lower() in ("location", "content-location"):
-                    v = self.rewrite_url(v)
-                response_headers[k] = v
+        try:
+            done, pending = await asyncio.wait(
+                {request_task, disconnect_task, closing_task},
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            closing = self._closing.is_set()
+            disconnect_done = (
+                disconnect_task in done and not disconnect_task.cancelled()
+            )
+            disconnected = disconnect_event.is_set() or disconnect_done
+            should_send_unavailable = closing and not disconnected
+            if request_task not in done or request_task.cancelled():
+                request_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await request_task
+                for task in pending:
+                    task.cancel()
+                for task in pending:
+                    with suppress(asyncio.CancelledError, Exception):
+                        await task
+                watch_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await watch_task
+                if should_send_unavailable:
+                    with suppress(Exception):
+                        await send(
+                            {
+                                "type": "http.response.start",
+                                "status": 503,
+                                "headers": [
+                                    (b"content-type", b"text/plain; charset=utf-8")
+                                ],
+                            }
+                        )
+                        await send(
+                            {
+                                "type": "http.response.body",
+                                "body": b"Service Unavailable: Proxy shutting down",
+                                "more_body": False,
+                            }
+                        )
+                return
+            proxy_response = request_task.result()
+        except asyncio.CancelledError:
+            disconnect_task.cancel()
+            closing_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await asyncio.gather(
+                    disconnect_task, closing_task, return_exceptions=True
+                )
+            watch_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await watch_task
+            raise
+        except (asyncio.TimeoutError, TimeoutError) as exc:
+            logger.error("Proxy request timed out: %s", exc)
+            disconnect_task.cancel()
+            closing_task.cancel()
+            await send(
+                {
+                    "type": "http.response.start",
+                    "status": 504,
+                    "headers": [(b"content-type", b"text/plain; charset=utf-8")],
+                }
+            )
+            await send(
+                {
+                    "type": "http.response.body",
+                    "body": b"Gateway Timeout: React Router server took too long to respond",
+                    "more_body": False,
+                }
+            )
+            with suppress(asyncio.CancelledError, Exception):
+                await asyncio.gather(
+                    disconnect_task, closing_task, return_exceptions=True
+                )
+            watch_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await watch_task
+            return
+        except aiohttp.ClientError as exc:
+            logger.error("Proxy request failed: %s", exc)
+            disconnect_task.cancel()
+            closing_task.cancel()
+            await send(
+                {
+                    "type": "http.response.start",
+                    "status": 502,
+                    "headers": [(b"content-type", b"text/plain; charset=utf-8")],
+                }
+            )
+            await send(
+                {
+                    "type": "http.response.body",
+                    "body": b"Bad Gateway: Could not reach React Router server",
+                    "more_body": False,
+                }
+            )
+            with suppress(asyncio.CancelledError, Exception):
+                await asyncio.gather(
+                    disconnect_task, closing_task, return_exceptions=True
+                )
+            watch_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await watch_task
+            return

-            async def _iter():
-                try:
-                    async for chunk in r.aiter_raw():
-                        if await request.is_disconnected():
-                            break
-                        yield chunk
-                finally:
-                    await r.aclose()
+        assert proxy_response is not None
+        closing = self._closing.is_set()
+        disconnected = disconnect_event.is_set()
+        if disconnected or closing:
+            proxy_response.close()
+            disconnect_task.cancel()
+            closing_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await asyncio.gather(
+                    disconnect_task, closing_task, return_exceptions=True
+                )
+            watch_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await watch_task
+            if closing and not disconnected:
+                with suppress(Exception):
+                    await send(
+                        {
+                            "type": "http.response.start",
+                            "status": 503,
+                            "headers": [
+                                (b"content-type", b"text/plain; charset=utf-8")
+                            ],
+                        }
+                    )
+                    await send(
+                        {
+                            "type": "http.response.body",
+                            "body": b"Service Unavailable: Proxy shutting down",
+                            "more_body": False,
+                        }
+                    )
+            return
+        self._active_responses.add(proxy_response)

-            return StreamingResponse(
-                _iter(),
-                status_code=r.status_code,
-                headers=response_headers,
+        response_headers = self._rewrite_raw_headers(proxy_response.raw_headers)
+        try:
+            await send(
+                {
+                    "type": "http.response.start",
+                    "status": proxy_response.status,
+                    "headers": response_headers,
+                }
             )
+        except Exception:
+            proxy_response.close()
+            self._active_responses.discard(proxy_response)
+            disconnect_task.cancel()
+            closing_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await asyncio.gather(
+                    disconnect_task, closing_task, return_exceptions=True
+                )
+            watch_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await watch_task
+            return
+
+        should_stream_outgoing = self._determine_outgoing_streaming(proxy_response)
+        if not should_stream_outgoing:
+            try:
+                body = await proxy_response.read()
+                await send(
+                    {
+                        "type": "http.response.body",
+                        "body": body,
+                        "more_body": False,
+                    }
+                )
+            finally:
+                disconnect_task.cancel()
+                closing_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await asyncio.gather(
+                        disconnect_task, closing_task, return_exceptions=True
+                    )
+                watch_task.cancel()
+                with suppress(asyncio.CancelledError, Exception):
+                    await watch_task
+                proxy_response.close()
+                self._active_responses.discard(proxy_response)
+            return
+
+        aiter = proxy_response.content.iter_chunked(
+            self.config.stream_chunk_size
+        ).__aiter__()
+
+        async def _next_chunk() -> bytes:
+            return await aiter.__anext__()
+
+        try:
+            while True:
+                next_chunk_task = asyncio.create_task(_next_chunk())
+                self._track_task(next_chunk_task)
+                done, _ = await asyncio.wait(
+                    {next_chunk_task, disconnect_task, closing_task},
+                    return_when=asyncio.FIRST_COMPLETED,
+                )
+                if disconnect_task in done or closing_task in done:
+                    if not next_chunk_task.done():
+                        next_chunk_task.cancel()
+                        with suppress(asyncio.CancelledError, Exception):
+                            await next_chunk_task
+                    break
+                try:
+                    chunk = next_chunk_task.result()
+                except StopAsyncIteration:
+                    break
+                if disconnect_event.is_set() or self._closing.is_set():
+                    break
+                await send(
+                    {
+                        "type": "http.response.body",
+                        "body": chunk,
+                        "more_body": True,
+                    }
+                )
+            if not disconnect_event.is_set() and not self._closing.is_set():
+                await send(
+                    {
+                        "type": "http.response.body",
+                        "body": b"",
+                        "more_body": False,
+                    }
+                )
+        finally:
+            disconnect_task.cancel()
+            closing_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await disconnect_task
+            with suppress(asyncio.CancelledError, Exception):
+                await closing_task
+            watch_task.cancel()
+            with suppress(asyncio.CancelledError, Exception):
+                await watch_task
+            proxy_response.close()
+            self._active_responses.discard(proxy_response)

-        except httpx.RequestError as e:
-            logger.error(f"Proxy request failed: {e}")
-            return PlainTextResponse(
-                "Bad Gateway: Could not reach React Router server", status_code=502
-            )

-    async def close(self):
-        """Close the HTTP client."""
-        if self._client is not None:
-            await self._client.aclose()
+# Backwards-friendly alias inside the repo; ASGI-only implementation.
+class ReactAsgiProxy(ReactProxy):
+    pass
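Editorial note after the diff: the second hunk turns ReactProxy into a plain ASGI callable (__call__ takes scope/receive/send and dispatches "http" and "websocket" scopes) with an explicit async close(). A minimal wiring sketch under stated assumptions: the Starlette app, mount path, and lifespan hook below are illustrative and not taken from the package, and a standalone mount like this glosses over the PulseContext that the proxy reads (in the framework itself the Pulse app provides that context).

# Illustrative sketch only; not the package's own wiring.
import contextlib

from starlette.applications import Starlette
from starlette.routing import Mount

from pulse.proxy import ReactProxy

proxy = ReactProxy("http://localhost:5173", "http://localhost:8000")

@contextlib.asynccontextmanager
async def lifespan(app: Starlette):
    yield
    # close() sets the internal closing flag, cancels tracked tasks, and closes
    # any open upstream responses/websockets plus the aiohttp session.
    await proxy.close()

# ReactProxy implements __call__(scope, receive, send), so it can be mounted
# directly as a catch-all ASGI app for routes not handled by the Python server.
app = Starlette(routes=[Mount("/", app=proxy)], lifespan=lifespan)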