redis 7.0.0b2__py3-none-any.whl → 7.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. redis/__init__.py +1 -1
  2. redis/_parsers/base.py +6 -0
  3. redis/_parsers/helpers.py +64 -6
  4. redis/asyncio/client.py +14 -5
  5. redis/asyncio/cluster.py +5 -1
  6. redis/asyncio/connection.py +19 -1
  7. redis/asyncio/http/__init__.py +0 -0
  8. redis/asyncio/http/http_client.py +265 -0
  9. redis/asyncio/multidb/__init__.py +0 -0
  10. redis/asyncio/multidb/client.py +530 -0
  11. redis/asyncio/multidb/command_executor.py +339 -0
  12. redis/asyncio/multidb/config.py +210 -0
  13. redis/asyncio/multidb/database.py +69 -0
  14. redis/asyncio/multidb/event.py +84 -0
  15. redis/asyncio/multidb/failover.py +125 -0
  16. redis/asyncio/multidb/failure_detector.py +38 -0
  17. redis/asyncio/multidb/healthcheck.py +285 -0
  18. redis/background.py +204 -0
  19. redis/client.py +49 -27
  20. redis/cluster.py +9 -1
  21. redis/commands/core.py +64 -29
  22. redis/commands/json/commands.py +2 -2
  23. redis/commands/search/__init__.py +2 -2
  24. redis/commands/search/aggregation.py +24 -26
  25. redis/commands/search/commands.py +10 -10
  26. redis/commands/search/field.py +2 -2
  27. redis/commands/search/query.py +12 -12
  28. redis/connection.py +1613 -1263
  29. redis/data_structure.py +81 -0
  30. redis/event.py +84 -10
  31. redis/exceptions.py +8 -0
  32. redis/http/__init__.py +0 -0
  33. redis/http/http_client.py +425 -0
  34. redis/maint_notifications.py +18 -7
  35. redis/multidb/__init__.py +0 -0
  36. redis/multidb/circuit.py +144 -0
  37. redis/multidb/client.py +526 -0
  38. redis/multidb/command_executor.py +350 -0
  39. redis/multidb/config.py +207 -0
  40. redis/multidb/database.py +130 -0
  41. redis/multidb/event.py +89 -0
  42. redis/multidb/exception.py +17 -0
  43. redis/multidb/failover.py +125 -0
  44. redis/multidb/failure_detector.py +104 -0
  45. redis/multidb/healthcheck.py +282 -0
  46. redis/retry.py +14 -1
  47. redis/utils.py +34 -0
  48. {redis-7.0.0b2.dist-info → redis-7.0.1.dist-info}/METADATA +17 -4
  49. {redis-7.0.0b2.dist-info → redis-7.0.1.dist-info}/RECORD +51 -25
  50. {redis-7.0.0b2.dist-info → redis-7.0.1.dist-info}/WHEEL +0 -0
  51. {redis-7.0.0b2.dist-info → redis-7.0.1.dist-info}/licenses/LICENSE +0 -0
redis/data_structure.py ADDED
@@ -0,0 +1,81 @@
+ import threading
+ from typing import Any, Generic, List, TypeVar
+
+ from redis.typing import Number
+
+ T = TypeVar("T")
+
+
+ class WeightedList(Generic[T]):
+     """
+     Thread-safe weighted list.
+     """
+
+     def __init__(self):
+         self._items: List[tuple[Any, Number]] = []
+         self._lock = threading.RLock()
+
+     def add(self, item: Any, weight: float) -> None:
+         """Add item with weight, maintaining sorted order"""
+         with self._lock:
+             # Find insertion point using binary search
+             left, right = 0, len(self._items)
+             while left < right:
+                 mid = (left + right) // 2
+                 if self._items[mid][1] < weight:
+                     right = mid
+                 else:
+                     left = mid + 1
+
+             self._items.insert(left, (item, weight))
+
+     def remove(self, item):
+         """Remove first occurrence of item"""
+         with self._lock:
+             for i, (stored_item, weight) in enumerate(self._items):
+                 if stored_item == item:
+                     self._items.pop(i)
+                     return weight
+             raise ValueError("Item not found")
+
+     def get_by_weight_range(
+         self, min_weight: float, max_weight: float
+     ) -> List[tuple[Any, Number]]:
+         """Get all items within weight range"""
+         with self._lock:
+             result = []
+             for item, weight in self._items:
+                 if min_weight <= weight <= max_weight:
+                     result.append((item, weight))
+             return result
+
+     def get_top_n(self, n: int) -> List[tuple[Any, Number]]:
+         """Get top N the highest weighted items"""
+         with self._lock:
+             return [(item, weight) for item, weight in self._items[:n]]
+
+     def update_weight(self, item, new_weight: float):
+         with self._lock:
+             """Update weight of an item"""
+             old_weight = self.remove(item)
+             self.add(item, new_weight)
+             return old_weight
+
+     def __iter__(self):
+         """Iterate in descending weight order"""
+         with self._lock:
+             items_copy = (
+                 self._items.copy()
+             )  # Create snapshot as lock released after each 'yield'
+
+         for item, weight in items_copy:
+             yield item, weight
+
+     def __len__(self):
+         with self._lock:
+             return len(self._items)
+
+     def __getitem__(self, index) -> tuple[Any, Number]:
+         with self._lock:
+             item, weight = self._items[index]
+             return item, weight
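For reference, a minimal usage sketch of the new WeightedList (not taken from the package; the item names are placeholders, and the import path follows the redis/data_structure.py location listed above). Items are kept sorted by descending weight, so the highest-weighted entries come first:

    from redis.data_structure import WeightedList

    wl = WeightedList()
    wl.add("db-primary", weight=1.0)
    wl.add("db-replica", weight=0.5)
    wl.add("db-dr", weight=0.1)

    # Highest-weighted items first
    print(wl.get_top_n(2))                    # [("db-primary", 1.0), ("db-replica", 0.5)]
    print(wl.get_by_weight_range(0.4, 1.0))   # [("db-primary", 1.0), ("db-replica", 0.5)]

    # Iteration yields (item, weight) pairs in descending weight order,
    # over a snapshot taken while the lock is held.
    for item, weight in wl:
        print(item, weight)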
redis/event.py CHANGED
@@ -2,7 +2,7 @@ import asyncio
  import threading
  from abc import ABC, abstractmethod
  from enum import Enum
- from typing import List, Optional, Union
+ from typing import Dict, List, Optional, Type, Union
 
  from redis.auth.token import TokenInterface
  from redis.credentials import CredentialProvider, StreamingCredentialProvider
@@ -42,6 +42,17 @@ class EventDispatcherInterface(ABC):
      async def dispatch_async(self, event: object):
          pass
 
+     @abstractmethod
+     def register_listeners(
+         self,
+         mappings: Dict[
+             Type[object],
+             List[Union[EventListenerInterface, AsyncEventListenerInterface]],
+         ],
+     ):
+         """Register additional listeners."""
+         pass
+
 
  class EventException(Exception):
      """
@@ -56,11 +67,18 @@ class EventException(Exception):
 
  class EventDispatcher(EventDispatcherInterface):
      # TODO: Make dispatcher to accept external mappings.
-     def __init__(self):
+     def __init__(
+         self,
+         event_listeners: Optional[
+             Dict[Type[object], List[EventListenerInterface]]
+         ] = None,
+     ):
          """
-         Mapping should be extended for any new events or listeners to be added.
+         Dispatcher that dispatches events to listeners associated with given event.
          """
-         self._event_listeners_mapping = {
+         self._event_listeners_mapping: Dict[
+             Type[object], List[EventListenerInterface]
+         ] = {
              AfterConnectionReleasedEvent: [
                  ReAuthConnectionListener(),
              ],
@@ -77,17 +95,47 @@ class EventDispatcher(EventDispatcherInterface):
              ],
          }
 
+         self._lock = threading.Lock()
+         self._async_lock = None
+
+         if event_listeners:
+             self.register_listeners(event_listeners)
+
      def dispatch(self, event: object):
-         listeners = self._event_listeners_mapping.get(type(event))
+         with self._lock:
+             listeners = self._event_listeners_mapping.get(type(event), [])
 
-         for listener in listeners:
-             listener.listen(event)
+             for listener in listeners:
+                 listener.listen(event)
 
      async def dispatch_async(self, event: object):
-         listeners = self._event_listeners_mapping.get(type(event))
+         if self._async_lock is None:
+             self._async_lock = asyncio.Lock()
+
+         async with self._async_lock:
+             listeners = self._event_listeners_mapping.get(type(event), [])
 
-         for listener in listeners:
-             await listener.listen(event)
+             for listener in listeners:
+                 await listener.listen(event)
+
+     def register_listeners(
+         self,
+         mappings: Dict[
+             Type[object],
+             List[Union[EventListenerInterface, AsyncEventListenerInterface]],
+         ],
+     ):
+         with self._lock:
+             for event_type in mappings:
+                 if event_type in self._event_listeners_mapping:
+                     self._event_listeners_mapping[event_type] = list(
+                         set(
+                             self._event_listeners_mapping[event_type]
+                             + mappings[event_type]
+                         )
+                     )
+                 else:
+                     self._event_listeners_mapping[event_type] = mappings[event_type]
 
 
  class AfterConnectionReleasedEvent:
@@ -226,6 +274,32 @@ class AfterAsyncClusterInstantiationEvent:
          return self._credential_provider
 
 
+ class OnCommandsFailEvent:
+     """
+     Event fired whenever a command fails during the execution.
+     """
+
+     def __init__(
+         self,
+         commands: tuple,
+         exception: Exception,
+     ):
+         self._commands = commands
+         self._exception = exception
+
+     @property
+     def commands(self) -> tuple:
+         return self._commands
+
+     @property
+     def exception(self) -> Exception:
+         return self._exception
+
+
+ class AsyncOnCommandsFailEvent(OnCommandsFailEvent):
+     pass
+
+
  class ReAuthConnectionListener(EventListenerInterface):
      """
      Listener that performs re-authentication of given connection.
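A short sketch of the new registration hook (not from the package; QueryFailed and LogListener are made-up placeholders). EventDispatcher now accepts extra listener mappings either through the new event_listeners constructor argument or later via register_listeners:

    from redis.event import EventDispatcher, EventListenerInterface

    class QueryFailed:                              # placeholder event type
        pass

    class LogListener(EventListenerInterface):      # placeholder listener
        def listen(self, event):
            print("handled", type(event).__name__)

    dispatcher = EventDispatcher()
    dispatcher.register_listeners({QueryFailed: [LogListener()]})
    dispatcher.dispatch(QueryFailed())              # -> "handled QueryFailed"

The same mapping could instead be passed directly as EventDispatcher(event_listeners={QueryFailed: [LogListener()]}).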
redis/exceptions.py CHANGED
@@ -245,3 +245,11 @@ class InvalidPipelineStack(RedisClusterException):
      """
 
      pass
+
+
+ class ExternalAuthProviderError(ConnectionError):
+     """
+     Raised when an external authentication provider returns an error.
+     """
+
+     pass
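Because ExternalAuthProviderError subclasses redis's ConnectionError, existing handlers keep catching it; a minimal sketch of how calling code might distinguish the two (client setup omitted):

    from redis.exceptions import ConnectionError, ExternalAuthProviderError

    try:
        client.ping()                    # any command issued over a connection
    except ExternalAuthProviderError:
        ...                              # the external auth provider itself failed
    except ConnectionError:
        ...                              # other connection-level failures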
redis/http/__init__.py ADDED
File without changes
redis/http/http_client.py ADDED
@@ -0,0 +1,425 @@
+ from __future__ import annotations
+
+ import base64
+ import gzip
+ import json
+ import ssl
+ import zlib
+ from dataclasses import dataclass
+ from typing import Any, Dict, Mapping, Optional, Tuple, Union
+ from urllib.error import HTTPError, URLError
+ from urllib.parse import urlencode, urljoin
+ from urllib.request import Request, urlopen
+
+ __all__ = ["HttpClient", "HttpResponse", "HttpError", "DEFAULT_TIMEOUT"]
+
+ from redis.backoff import ExponentialWithJitterBackoff
+ from redis.retry import Retry
+ from redis.utils import dummy_fail
+
+ DEFAULT_USER_AGENT = "HttpClient/1.0 (+https://example.invalid)"
+ DEFAULT_TIMEOUT = 30.0
+ RETRY_STATUS_CODES = {429, 500, 502, 503, 504}
+
+
+ @dataclass
+ class HttpResponse:
+     status: int
+     headers: Dict[str, str]
+     url: str
+     content: bytes
+
+     def text(self, encoding: Optional[str] = None) -> str:
+         enc = encoding or self._get_encoding()
+         return self.content.decode(enc, errors="replace")
+
+     def json(self) -> Any:
+         return json.loads(self.text(encoding=self._get_encoding()))
+
+     def _get_encoding(self) -> str:
+         # Try to infer encoding from headers; default to utf-8
+         ctype = self.headers.get("content-type", "")
+         # Example: application/json; charset=utf-8
+         for part in ctype.split(";"):
+             p = part.strip()
+             if p.lower().startswith("charset="):
+                 return p.split("=", 1)[1].strip() or "utf-8"
+         return "utf-8"
+
+
+ class HttpError(Exception):
+     def __init__(self, status: int, url: str, message: Optional[str] = None):
+         self.status = status
+         self.url = url
+         self.message = message or f"HTTP {status} for {url}"
+         super().__init__(self.message)
+
+
+ class HttpClient:
+     """
+     A lightweight HTTP client for REST API calls.
+     """
+
+     def __init__(
+         self,
+         base_url: str = "",
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: float = DEFAULT_TIMEOUT,
+         retry: Retry = Retry(
+             backoff=ExponentialWithJitterBackoff(base=1, cap=10), retries=3
+         ),
+         verify_tls: bool = True,
+         # TLS verification (server) options
+         ca_file: Optional[str] = None,
+         ca_path: Optional[str] = None,
+         ca_data: Optional[Union[str, bytes]] = None,
+         # Mutual TLS (client cert) options
+         client_cert_file: Optional[str] = None,
+         client_key_file: Optional[str] = None,
+         client_key_password: Optional[str] = None,
+         auth_basic: Optional[Tuple[str, str]] = None,  # (username, password)
+         user_agent: str = DEFAULT_USER_AGENT,
+     ) -> None:
+         """
+         Initialize a new HTTP client instance.
+
+         Args:
+             base_url: Base URL for all requests. Will be prefixed to all paths.
+             headers: Default headers to include in all requests.
+             timeout: Default timeout in seconds for requests.
+             retry: Retry configuration for failed requests.
+             verify_tls: Whether to verify TLS certificates.
+             ca_file: Path to CA certificate file for TLS verification.
+             ca_path: Path to a directory containing CA certificates.
+             ca_data: CA certificate data as string or bytes.
+             client_cert_file: Path to client certificate for mutual TLS.
+             client_key_file: Path to a client private key for mutual TLS.
+             client_key_password: Password for an encrypted client private key.
+             auth_basic: Tuple of (username, password) for HTTP basic auth.
+             user_agent: User-Agent header value for requests.
+
+         The client supports both regular HTTPS with server verification and mutual TLS
+         authentication. For server verification, provide CA certificate information via
+         ca_file, ca_path or ca_data. For mutual TLS, additionally provide a client
+         certificate and key via client_cert_file and client_key_file.
+         """
+         self.base_url = (
+             base_url.rstrip() + "/"
+             if base_url and not base_url.endswith("/")
+             else base_url
+         )
+         self._default_headers = {k.lower(): v for k, v in (headers or {}).items()}
+         self.timeout = timeout
+         self.retry = retry
+         self.retry.update_supported_errors((HTTPError, URLError, ssl.SSLError))
+         self.verify_tls = verify_tls
+
+         # TLS settings
+         self.ca_file = ca_file
+         self.ca_path = ca_path
+         self.ca_data = ca_data
+         self.client_cert_file = client_cert_file
+         self.client_key_file = client_key_file
+         self.client_key_password = client_key_password
+
+         self.auth_basic = auth_basic
+         self.user_agent = user_agent
+
+     # Public JSON-centric helpers
+     def get(
+         self,
+         path: str,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: Optional[float] = None,
+         expect_json: bool = True,
+     ) -> Union[HttpResponse, Any]:
+         return self._json_call(
+             "GET",
+             path,
+             params=params,
+             headers=headers,
+             timeout=timeout,
+             body=None,
+             expect_json=expect_json,
+         )
+
+     def delete(
+         self,
+         path: str,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: Optional[float] = None,
+         expect_json: bool = True,
+     ) -> Union[HttpResponse, Any]:
+         return self._json_call(
+             "DELETE",
+             path,
+             params=params,
+             headers=headers,
+             timeout=timeout,
+             body=None,
+             expect_json=expect_json,
+         )
+
+     def post(
+         self,
+         path: str,
+         json_body: Optional[Any] = None,
+         data: Optional[Union[bytes, str]] = None,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: Optional[float] = None,
+         expect_json: bool = True,
+     ) -> Union[HttpResponse, Any]:
+         return self._json_call(
+             "POST",
+             path,
+             params=params,
+             headers=headers,
+             timeout=timeout,
+             body=self._prepare_body(json_body=json_body, data=data),
+             expect_json=expect_json,
+         )
+
+     def put(
+         self,
+         path: str,
+         json_body: Optional[Any] = None,
+         data: Optional[Union[bytes, str]] = None,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: Optional[float] = None,
+         expect_json: bool = True,
+     ) -> Union[HttpResponse, Any]:
+         return self._json_call(
+             "PUT",
+             path,
+             params=params,
+             headers=headers,
+             timeout=timeout,
+             body=self._prepare_body(json_body=json_body, data=data),
+             expect_json=expect_json,
+         )
+
+     def patch(
+         self,
+         path: str,
+         json_body: Optional[Any] = None,
+         data: Optional[Union[bytes, str]] = None,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: Optional[float] = None,
+         expect_json: bool = True,
+     ) -> Union[HttpResponse, Any]:
+         return self._json_call(
+             "PATCH",
+             path,
+             params=params,
+             headers=headers,
+             timeout=timeout,
+             body=self._prepare_body(json_body=json_body, data=data),
+             expect_json=expect_json,
+         )
+
+     # Low-level request
+     def request(
+         self,
+         method: str,
+         path: str,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         body: Optional[Union[bytes, str]] = None,
+         timeout: Optional[float] = None,
+     ) -> HttpResponse:
+         url = self._build_url(path, params)
+         all_headers = self._prepare_headers(headers, body)
+         data = body.encode("utf-8") if isinstance(body, str) else body
+
+         req = Request(url=url, method=method.upper(), data=data, headers=all_headers)
+
+         context: Optional[ssl.SSLContext] = None
+         if url.lower().startswith("https"):
+             if self.verify_tls:
+                 # Use provided CA material if any; fall back to system defaults
+                 context = ssl.create_default_context(
+                     cafile=self.ca_file,
+                     capath=self.ca_path,
+                     cadata=self.ca_data,
+                 )
+                 # Load client certificate for mTLS if configured
+                 if self.client_cert_file:
+                     context.load_cert_chain(
+                         certfile=self.client_cert_file,
+                         keyfile=self.client_key_file,
+                         password=self.client_key_password,
+                     )
+             else:
+                 # Verification disabled
+                 context = ssl.create_default_context()
+                 context.check_hostname = False
+                 context.verify_mode = ssl.CERT_NONE
+
+         try:
+             return self.retry.call_with_retry(
+                 lambda: self._make_request(req, context=context, timeout=timeout),
+                 lambda _: dummy_fail(),
+                 lambda error: self._is_retryable_http_error(error),
+             )
+         except HTTPError as e:
+             # Read error body, build response, and decide on retry
+             err_body = b""
+             try:
+                 err_body = e.read()
+             except Exception:
+                 pass
+             headers_map = {k.lower(): v for k, v in (e.headers or {}).items()}
+             err_body = self._maybe_decompress(err_body, headers_map)
+             status = getattr(e, "code", 0) or 0
+             response = HttpResponse(
+                 status=status,
+                 headers=headers_map,
+                 url=url,
+                 content=err_body,
+             )
+             return response
+
+     def _make_request(
+         self,
+         request: Request,
+         context: Optional[ssl.SSLContext] = None,
+         timeout: Optional[float] = None,
+     ):
+         with urlopen(request, timeout=timeout or self.timeout, context=context) as resp:
+             raw = resp.read()
+             headers_map = {k.lower(): v for k, v in resp.headers.items()}
+             raw = self._maybe_decompress(raw, headers_map)
+             return HttpResponse(
+                 status=resp.status,
+                 headers=headers_map,
+                 url=resp.geturl(),
+                 content=raw,
+             )
+
+     def _is_retryable_http_error(self, error: Exception) -> bool:
+         if isinstance(error, HTTPError):
+             return self._should_retry_status(error.code)
+         return False
+
+     # Internal utilities
+     def _json_call(
+         self,
+         method: str,
+         path: str,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+         headers: Optional[Mapping[str, str]] = None,
+         timeout: Optional[float] = None,
+         body: Optional[Union[bytes, str]] = None,
+         expect_json: bool = True,
+     ) -> Union[HttpResponse, Any]:
+         resp = self.request(
+             method=method,
+             path=path,
+             params=params,
+             headers=headers,
+             body=body,
+             timeout=timeout,
+         )
+         if not (200 <= resp.status < 400):
+             raise HttpError(resp.status, resp.url, resp.text())
+         if expect_json:
+             return resp.json()
+         return resp
+
+     def _prepare_body(
+         self, json_body: Optional[Any] = None, data: Optional[Union[bytes, str]] = None
+     ) -> Optional[Union[bytes, str]]:
+         if json_body is not None and data is not None:
+             raise ValueError("Provide either json_body or data, not both.")
+         if json_body is not None:
+             return json.dumps(json_body, ensure_ascii=False, separators=(",", ":"))
+         return data
+
+     def _build_url(
+         self,
+         path: str,
+         params: Optional[
+             Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+         ] = None,
+     ) -> str:
+         url = urljoin(self.base_url or "", path)
+         if params:
+             # urlencode with doseq=True supports list/tuple values
+             query = urlencode(
+                 {k: v for k, v in params.items() if v is not None}, doseq=True
+             )
+             separator = "&" if ("?" in url) else "?"
+             url = f"{url}{separator}{query}" if query else url
+         return url
+
+     def _prepare_headers(
+         self, headers: Optional[Mapping[str, str]], body: Optional[Union[bytes, str]]
+     ) -> Dict[str, str]:
+         # Start with defaults
+         prepared: Dict[str, str] = {}
+         prepared.update(self._default_headers)
+
+         # Standard defaults for JSON REST usage
+         prepared.setdefault("accept", "application/json")
+         prepared.setdefault("user-agent", self.user_agent)
+         # We will send gzip accept-encoding; handle decompression manually
+         prepared.setdefault("accept-encoding", "gzip, deflate")
+
+         # If we have a string body and content-type not specified, assume JSON
+         if body is not None and isinstance(body, str):
+             prepared.setdefault("content-type", "application/json; charset=utf-8")
+
+         # Basic authentication if provided and not overridden
+         if self.auth_basic and "authorization" not in prepared:
+             user, pwd = self.auth_basic
+             token = base64.b64encode(f"{user}:{pwd}".encode("utf-8")).decode("ascii")
+             prepared["authorization"] = f"Basic {token}"
+
+         # Merge per-call headers (case-insensitive)
+         if headers:
+             for k, v in headers.items():
+                 prepared[k.lower()] = v
+
+         # urllib expects header keys in canonical capitalization sometimes; but it’s tolerant.
+         # We'll return as provided; urllib will handle it.
+         return prepared
+
+     def _should_retry_status(self, status: int) -> bool:
+         return status in RETRY_STATUS_CODES
+
+     def _maybe_decompress(self, content: bytes, headers: Mapping[str, str]) -> bytes:
+         if not content:
+             return content
+         encoding = (headers.get("content-encoding") or "").lower()
+         try:
+             if "gzip" in encoding:
+                 return gzip.decompress(content)
+             if "deflate" in encoding:
+                 # Try raw deflate, then zlib-wrapped
+                 try:
+                     return zlib.decompress(content, -zlib.MAX_WBITS)
+                 except zlib.error:
+                     return zlib.decompress(content)
+         except Exception:
+             # If decompression fails, return original bytes
+             return content
+         return content
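Finally, a usage sketch for the new HttpClient (not part of the package; the base URL, endpoint names, and credentials are placeholders), showing the JSON-centric helpers and basic auth:

    from redis.http.http_client import HttpClient, HttpError

    client = HttpClient(
        base_url="https://api.example.invalid/v1/",   # placeholder
        auth_basic=("user", "secret"),                # placeholder credentials
        timeout=10.0,
    )

    try:
        # GET <base_url>databases?limit=10; parsed JSON is returned because expect_json=True
        databases = client.get("databases", params={"limit": 10})
    except HttpError as e:
        print(f"request failed: HTTP {e.status} for {e.url}")

    # POST with a JSON body; expect_json=False returns the raw HttpResponse instead
    resp = client.post("databases", json_body={"name": "db1"}, expect_json=False)
    print(resp.status, resp.headers.get("content-type"))

TLS and mutual-TLS material would be supplied through the ca_file / ca_path / ca_data and client_cert_file / client_key_file constructor options documented in the diff above.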