affinity_sdk-0.9.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- affinity/__init__.py +139 -0
- affinity/cli/__init__.py +7 -0
- affinity/cli/click_compat.py +27 -0
- affinity/cli/commands/__init__.py +1 -0
- affinity/cli/commands/_entity_files_dump.py +219 -0
- affinity/cli/commands/_list_entry_fields.py +41 -0
- affinity/cli/commands/_v1_parsing.py +77 -0
- affinity/cli/commands/company_cmds.py +2139 -0
- affinity/cli/commands/completion_cmd.py +33 -0
- affinity/cli/commands/config_cmds.py +540 -0
- affinity/cli/commands/entry_cmds.py +33 -0
- affinity/cli/commands/field_cmds.py +413 -0
- affinity/cli/commands/interaction_cmds.py +875 -0
- affinity/cli/commands/list_cmds.py +3152 -0
- affinity/cli/commands/note_cmds.py +433 -0
- affinity/cli/commands/opportunity_cmds.py +1174 -0
- affinity/cli/commands/person_cmds.py +1980 -0
- affinity/cli/commands/query_cmd.py +444 -0
- affinity/cli/commands/relationship_strength_cmds.py +62 -0
- affinity/cli/commands/reminder_cmds.py +595 -0
- affinity/cli/commands/resolve_url_cmd.py +127 -0
- affinity/cli/commands/session_cmds.py +84 -0
- affinity/cli/commands/task_cmds.py +110 -0
- affinity/cli/commands/version_cmd.py +29 -0
- affinity/cli/commands/whoami_cmd.py +36 -0
- affinity/cli/config.py +108 -0
- affinity/cli/context.py +749 -0
- affinity/cli/csv_utils.py +195 -0
- affinity/cli/date_utils.py +42 -0
- affinity/cli/decorators.py +77 -0
- affinity/cli/errors.py +28 -0
- affinity/cli/field_utils.py +355 -0
- affinity/cli/formatters.py +551 -0
- affinity/cli/help_json.py +283 -0
- affinity/cli/logging.py +100 -0
- affinity/cli/main.py +261 -0
- affinity/cli/options.py +53 -0
- affinity/cli/paths.py +32 -0
- affinity/cli/progress.py +183 -0
- affinity/cli/query/__init__.py +163 -0
- affinity/cli/query/aggregates.py +357 -0
- affinity/cli/query/dates.py +194 -0
- affinity/cli/query/exceptions.py +147 -0
- affinity/cli/query/executor.py +1236 -0
- affinity/cli/query/filters.py +248 -0
- affinity/cli/query/models.py +333 -0
- affinity/cli/query/output.py +331 -0
- affinity/cli/query/parser.py +619 -0
- affinity/cli/query/planner.py +430 -0
- affinity/cli/query/progress.py +270 -0
- affinity/cli/query/schema.py +439 -0
- affinity/cli/render.py +1589 -0
- affinity/cli/resolve.py +222 -0
- affinity/cli/resolvers.py +249 -0
- affinity/cli/results.py +308 -0
- affinity/cli/runner.py +218 -0
- affinity/cli/serialization.py +65 -0
- affinity/cli/session_cache.py +276 -0
- affinity/cli/types.py +70 -0
- affinity/client.py +771 -0
- affinity/clients/__init__.py +19 -0
- affinity/clients/http.py +3664 -0
- affinity/clients/pipeline.py +165 -0
- affinity/compare.py +501 -0
- affinity/downloads.py +114 -0
- affinity/exceptions.py +615 -0
- affinity/filters.py +1128 -0
- affinity/hooks.py +198 -0
- affinity/inbound_webhooks.py +302 -0
- affinity/models/__init__.py +163 -0
- affinity/models/entities.py +798 -0
- affinity/models/pagination.py +513 -0
- affinity/models/rate_limit_snapshot.py +48 -0
- affinity/models/secondary.py +413 -0
- affinity/models/types.py +663 -0
- affinity/policies.py +40 -0
- affinity/progress.py +22 -0
- affinity/py.typed +0 -0
- affinity/services/__init__.py +42 -0
- affinity/services/companies.py +1286 -0
- affinity/services/lists.py +1892 -0
- affinity/services/opportunities.py +1330 -0
- affinity/services/persons.py +1348 -0
- affinity/services/rate_limits.py +173 -0
- affinity/services/tasks.py +193 -0
- affinity/services/v1_only.py +2445 -0
- affinity/types.py +83 -0
- affinity_sdk-0.9.5.dist-info/METADATA +622 -0
- affinity_sdk-0.9.5.dist-info/RECORD +92 -0
- affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
- affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
- affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
affinity/clients/http.py
ADDED
|
@@ -0,0 +1,3664 @@
|
|
|
1
|
+
"""
|
|
2
|
+
HTTP client implementation for the Affinity API.
|
|
3
|
+
|
|
4
|
+
Handles:
|
|
5
|
+
- Authentication
|
|
6
|
+
- Rate limiting with automatic retries
|
|
7
|
+
- Request/response logging
|
|
8
|
+
- V1/V2 API routing
|
|
9
|
+
- Optional response caching
|
|
10
|
+
- Request/response hooks (DX-008)
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import asyncio
|
|
16
|
+
import base64
|
|
17
|
+
import email.utils
|
|
18
|
+
import hashlib
|
|
19
|
+
import inspect
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
import math
|
|
23
|
+
import re
|
|
24
|
+
import threading
|
|
25
|
+
import time
|
|
26
|
+
import uuid
|
|
27
|
+
from collections.abc import AsyncIterator, Awaitable, Callable, Iterator, Mapping, Sequence
|
|
28
|
+
from dataclasses import dataclass, field, replace
|
|
29
|
+
from datetime import datetime, timezone
|
|
30
|
+
from enum import Enum
|
|
31
|
+
from typing import Any, Literal, TypeAlias, TypeVar, cast
|
|
32
|
+
from urllib.parse import urljoin, urlsplit, urlunsplit
|
|
33
|
+
|
|
34
|
+
import httpx
|
|
35
|
+
|
|
36
|
+
from ..downloads import (
|
|
37
|
+
AsyncDownloadedFile,
|
|
38
|
+
DownloadedFile,
|
|
39
|
+
_download_info_from_headers,
|
|
40
|
+
)
|
|
41
|
+
from ..exceptions import (
|
|
42
|
+
AffinityError,
|
|
43
|
+
ConfigurationError,
|
|
44
|
+
ErrorDiagnostics,
|
|
45
|
+
NetworkError,
|
|
46
|
+
RateLimitError,
|
|
47
|
+
TimeoutError,
|
|
48
|
+
UnsafeUrlError,
|
|
49
|
+
VersionCompatibilityError,
|
|
50
|
+
WriteNotAllowedError,
|
|
51
|
+
error_from_response,
|
|
52
|
+
)
|
|
53
|
+
from ..hooks import (
|
|
54
|
+
AnyEventHook,
|
|
55
|
+
ErrorHook,
|
|
56
|
+
ErrorInfo,
|
|
57
|
+
HookEvent,
|
|
58
|
+
RedirectFollowed,
|
|
59
|
+
RequestFailed,
|
|
60
|
+
RequestHook,
|
|
61
|
+
RequestInfo,
|
|
62
|
+
RequestRetrying,
|
|
63
|
+
RequestStarted,
|
|
64
|
+
RequestSucceeded,
|
|
65
|
+
ResponseHeadersReceived,
|
|
66
|
+
ResponseHook,
|
|
67
|
+
ResponseInfo,
|
|
68
|
+
StreamAborted,
|
|
69
|
+
StreamCompleted,
|
|
70
|
+
StreamFailed,
|
|
71
|
+
)
|
|
72
|
+
from ..models.types import V1_BASE_URL, V2_BASE_URL
|
|
73
|
+
from ..policies import ExternalHookPolicy, Policies, WritePolicy
|
|
74
|
+
from ..progress import ProgressCallback
|
|
75
|
+
from .pipeline import (
|
|
76
|
+
AsyncMiddleware,
|
|
77
|
+
Middleware,
|
|
78
|
+
RequestContext,
|
|
79
|
+
SDKBaseResponse,
|
|
80
|
+
SDKRawResponse,
|
|
81
|
+
SDKRawStreamResponse,
|
|
82
|
+
SDKRequest,
|
|
83
|
+
SDKResponse,
|
|
84
|
+
compose,
|
|
85
|
+
compose_async,
|
|
86
|
+
)
|
|
87
|
+
|
|
88
|
+
logger = logging.getLogger("affinity_sdk")
|
|
89
|
+
|
|
90
|
+
RepeatableQueryParam: TypeAlias = Literal["fieldIds", "fieldTypes"]
|
|
91
|
+
REPEATABLE_QUERY_PARAMS: frozenset[str] = frozenset({"fieldIds", "fieldTypes"})
|
|
92
|
+
|
|
93
|
+
_RETRYABLE_METHODS: frozenset[str] = frozenset({"GET", "HEAD"})
|
|
94
|
+
_WRITE_METHODS: frozenset[str] = frozenset({"POST", "PUT", "PATCH", "DELETE"})
|
|
95
|
+
_MAX_RETRY_DELAY_SECONDS: float = 60.0
|
|
96
|
+
_MAX_DOWNLOAD_REDIRECTS: int = 10
|
|
97
|
+
|
|
98
|
+
_DEFAULT_HEADERS: dict[str, str] = {"Accept": "application/json"}
|
|
99
|
+
|
|
100
|
+
T = TypeVar("T")
|
|
101
|
+
R_resp = TypeVar("R_resp", bound=SDKBaseResponse)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _to_wire_value(value: Any) -> str:
|
|
105
|
+
if isinstance(value, Enum):
|
|
106
|
+
return str(value.value)
|
|
107
|
+
return str(value)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def _encode_query_params(
|
|
111
|
+
params: Mapping[str, Any] | Sequence[tuple[str, Any]] | None,
|
|
112
|
+
) -> list[tuple[str, str]] | None:
|
|
113
|
+
"""
|
|
114
|
+
Convert params into deterministic ordered key/value pairs for `httpx`.
|
|
115
|
+
|
|
116
|
+
- Repeatable params are encoded as repeated keys (e.g., fieldIds=a&fieldIds=b).
|
|
117
|
+
- Repeatable values are de-duplicated while preserving caller order.
|
|
118
|
+
- Non-repeatable params are emitted in sorted-key order for determinism.
|
|
119
|
+
"""
|
|
120
|
+
if params is None:
|
|
121
|
+
return None
|
|
122
|
+
|
|
123
|
+
if isinstance(params, Mapping):
|
|
124
|
+
ordered: list[tuple[str, str]] = []
|
|
125
|
+
for key in sorted(params.keys()):
|
|
126
|
+
value = params[key]
|
|
127
|
+
if value is None:
|
|
128
|
+
continue
|
|
129
|
+
|
|
130
|
+
if key in REPEATABLE_QUERY_PARAMS:
|
|
131
|
+
raw_values: Sequence[Any]
|
|
132
|
+
if isinstance(value, Sequence) and not isinstance(value, (str, bytes)):
|
|
133
|
+
raw_values = value
|
|
134
|
+
else:
|
|
135
|
+
raw_values = [value]
|
|
136
|
+
|
|
137
|
+
seen: set[str] = set()
|
|
138
|
+
for item in raw_values:
|
|
139
|
+
wire = _to_wire_value(item)
|
|
140
|
+
if wire in seen:
|
|
141
|
+
continue
|
|
142
|
+
ordered.append((key, wire))
|
|
143
|
+
seen.add(wire)
|
|
144
|
+
else:
|
|
145
|
+
ordered.append((key, _to_wire_value(value)))
|
|
146
|
+
return ordered
|
|
147
|
+
|
|
148
|
+
return [(key, _to_wire_value(value)) for key, value in params]
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def _freeze_v1_query_signature(
|
|
152
|
+
params: Mapping[str, Any] | Sequence[tuple[str, Any]] | None,
|
|
153
|
+
) -> list[tuple[str, str]]:
|
|
154
|
+
"""
|
|
155
|
+
Freeze the canonical v1 query signature for token pagination.
|
|
156
|
+
|
|
157
|
+
The returned sequence MUST NOT include the v1 `page_token` param so that the
|
|
158
|
+
signature can be reused verbatim across pages (TR-017/TR-017a).
|
|
159
|
+
"""
|
|
160
|
+
encoded = _encode_query_params(params) or []
|
|
161
|
+
return [(key, value) for (key, value) in encoded if key != "page_token"]
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _compute_backoff_seconds(attempt: int, *, base: float) -> float:
|
|
165
|
+
# "Full jitter": random(0, min(cap, base * 2^attempt))
|
|
166
|
+
max_delay = float(min(_MAX_RETRY_DELAY_SECONDS, base * (2**attempt)))
|
|
167
|
+
jitter = float((time.time_ns() % 1_000_000) / 1_000_000.0)
|
|
168
|
+
return jitter * max_delay
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def _throttle_jitter(delay: float) -> float:
|
|
172
|
+
if delay <= 0:
|
|
173
|
+
return 0.0
|
|
174
|
+
cap = float(min(1.0, delay * 0.1))
|
|
175
|
+
jitter = float((time.time_ns() % 1_000_000) / 1_000_000.0)
|
|
176
|
+
return jitter * cap
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
@dataclass(frozen=True, slots=True)
|
|
180
|
+
class _RetryOutcome:
|
|
181
|
+
action: Literal["sleep", "break", "raise", "raise_wrapped"]
|
|
182
|
+
wait_time: float | None = None
|
|
183
|
+
last_error: Exception | None = None
|
|
184
|
+
log_message: str | None = None
|
|
185
|
+
wrapped_error: Exception | None = None
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def _retry_outcome(
|
|
189
|
+
*,
|
|
190
|
+
method: str,
|
|
191
|
+
attempt: int,
|
|
192
|
+
max_retries: int,
|
|
193
|
+
retry_delay: float,
|
|
194
|
+
error: Exception,
|
|
195
|
+
) -> _RetryOutcome:
|
|
196
|
+
"""
|
|
197
|
+
Decide whether to retry after an exception.
|
|
198
|
+
|
|
199
|
+
The caller is responsible for raising via `raise` (to preserve tracebacks) when
|
|
200
|
+
outcome.action == "raise", and for chaining `from error` when
|
|
201
|
+
outcome.action == "raise_wrapped".
|
|
202
|
+
"""
|
|
203
|
+
if isinstance(error, RateLimitError):
|
|
204
|
+
if method not in _RETRYABLE_METHODS:
|
|
205
|
+
return _RetryOutcome(action="raise")
|
|
206
|
+
if attempt >= max_retries:
|
|
207
|
+
return _RetryOutcome(action="break", last_error=error)
|
|
208
|
+
wait_time = (
|
|
209
|
+
float(error.retry_after)
|
|
210
|
+
if error.retry_after is not None
|
|
211
|
+
else _compute_backoff_seconds(attempt, base=retry_delay)
|
|
212
|
+
)
|
|
213
|
+
return _RetryOutcome(
|
|
214
|
+
action="sleep",
|
|
215
|
+
wait_time=wait_time,
|
|
216
|
+
last_error=error,
|
|
217
|
+
log_message=f"Rate limited, waiting {wait_time}s (attempt {attempt + 1})",
|
|
218
|
+
)
|
|
219
|
+
|
|
220
|
+
if isinstance(error, AffinityError):
|
|
221
|
+
status = error.status_code
|
|
222
|
+
if method not in _RETRYABLE_METHODS or status is None or status < 500 or status >= 600:
|
|
223
|
+
return _RetryOutcome(action="raise")
|
|
224
|
+
if attempt >= max_retries:
|
|
225
|
+
return _RetryOutcome(action="break", last_error=error)
|
|
226
|
+
wait_time = _compute_backoff_seconds(attempt, base=retry_delay)
|
|
227
|
+
return _RetryOutcome(
|
|
228
|
+
action="sleep",
|
|
229
|
+
wait_time=wait_time,
|
|
230
|
+
last_error=error,
|
|
231
|
+
log_message=f"Server error {status}, waiting {wait_time}s (attempt {attempt + 1})",
|
|
232
|
+
)
|
|
233
|
+
|
|
234
|
+
if isinstance(error, httpx.TimeoutException):
|
|
235
|
+
if method not in _RETRYABLE_METHODS:
|
|
236
|
+
return _RetryOutcome(
|
|
237
|
+
action="raise_wrapped",
|
|
238
|
+
wrapped_error=TimeoutError(f"Request timed out: {error}"),
|
|
239
|
+
)
|
|
240
|
+
if attempt >= max_retries:
|
|
241
|
+
timeout_error = TimeoutError(f"Request timed out: {error}")
|
|
242
|
+
timeout_error.__cause__ = error
|
|
243
|
+
return _RetryOutcome(action="break", last_error=timeout_error)
|
|
244
|
+
wait_time = _compute_backoff_seconds(attempt, base=retry_delay)
|
|
245
|
+
return _RetryOutcome(action="sleep", wait_time=wait_time, last_error=error)
|
|
246
|
+
|
|
247
|
+
if isinstance(error, httpx.NetworkError):
|
|
248
|
+
if method not in _RETRYABLE_METHODS:
|
|
249
|
+
return _RetryOutcome(
|
|
250
|
+
action="raise_wrapped",
|
|
251
|
+
wrapped_error=NetworkError(f"Network error: {error}"),
|
|
252
|
+
)
|
|
253
|
+
if attempt >= max_retries:
|
|
254
|
+
network_error = NetworkError(f"Network error: {error}")
|
|
255
|
+
network_error.__cause__ = error
|
|
256
|
+
return _RetryOutcome(action="break", last_error=network_error)
|
|
257
|
+
wait_time = _compute_backoff_seconds(attempt, base=retry_delay)
|
|
258
|
+
return _RetryOutcome(action="sleep", wait_time=wait_time, last_error=error)
|
|
259
|
+
|
|
260
|
+
return _RetryOutcome(action="raise")
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def _parse_retry_after(value: str) -> int | None:
|
|
264
|
+
"""
|
|
265
|
+
Parse `Retry-After` header per RFC7231.
|
|
266
|
+
|
|
267
|
+
Supports:
|
|
268
|
+
- delta-seconds (e.g. "60")
|
|
269
|
+
- HTTP-date (e.g. "Wed, 21 Oct 2015 07:28:00 GMT")
|
|
270
|
+
"""
|
|
271
|
+
candidate = value.strip()
|
|
272
|
+
if not candidate:
|
|
273
|
+
return None
|
|
274
|
+
|
|
275
|
+
if candidate.isdigit():
|
|
276
|
+
return int(candidate)
|
|
277
|
+
|
|
278
|
+
try:
|
|
279
|
+
parsed = email.utils.parsedate_to_datetime(candidate)
|
|
280
|
+
except (TypeError, ValueError):
|
|
281
|
+
return None
|
|
282
|
+
|
|
283
|
+
if parsed.tzinfo is None:
|
|
284
|
+
parsed = parsed.replace(tzinfo=timezone.utc)
|
|
285
|
+
|
|
286
|
+
now = datetime.now(timezone.utc)
|
|
287
|
+
delta = (parsed.astimezone(timezone.utc) - now).total_seconds()
|
|
288
|
+
return max(0, math.ceil(delta))
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def _default_port(scheme: str) -> int | None:
|
|
292
|
+
if scheme == "http":
|
|
293
|
+
return 80
|
|
294
|
+
if scheme == "https":
|
|
295
|
+
return 443
|
|
296
|
+
return None
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def _host_port(url: str) -> tuple[str, int | None]:
|
|
300
|
+
parts = urlsplit(url)
|
|
301
|
+
host = parts.hostname or ""
|
|
302
|
+
port = parts.port
|
|
303
|
+
if port is None:
|
|
304
|
+
port = _default_port(parts.scheme)
|
|
305
|
+
return (host, port)
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
def _safe_follow_url(
|
|
309
|
+
url: str,
|
|
310
|
+
*,
|
|
311
|
+
v1_base_url: str,
|
|
312
|
+
v2_base_url: str,
|
|
313
|
+
) -> tuple[str, bool]:
|
|
314
|
+
"""
|
|
315
|
+
Validate and normalize a server-provided URL under SafeFollowUrl policy.
|
|
316
|
+
|
|
317
|
+
Returns: (absolute_url_without_fragment, is_v1)
|
|
318
|
+
"""
|
|
319
|
+
v1_base = v1_base_url.rstrip("/") + "/"
|
|
320
|
+
v2_base = v2_base_url.rstrip("/") + "/"
|
|
321
|
+
|
|
322
|
+
parsed = urlsplit(url)
|
|
323
|
+
# Relative URL: resolve against v2 base by default (v2 pagination/task URLs)
|
|
324
|
+
absolute = urljoin(v2_base, url) if not parsed.scheme and not parsed.netloc else url
|
|
325
|
+
|
|
326
|
+
parts = urlsplit(absolute)
|
|
327
|
+
if parts.username is not None or parts.password is not None:
|
|
328
|
+
raise UnsafeUrlError("Refusing URL with userinfo", url=absolute)
|
|
329
|
+
|
|
330
|
+
# Strip fragments (never sent, never used for routing decisions)
|
|
331
|
+
absolute = urlunsplit(parts._replace(fragment=""))
|
|
332
|
+
parts = urlsplit(absolute)
|
|
333
|
+
|
|
334
|
+
v2_parts = urlsplit(v2_base)
|
|
335
|
+
v1_parts = urlsplit(v1_base)
|
|
336
|
+
|
|
337
|
+
v2_host, v2_port = _host_port(v2_base)
|
|
338
|
+
v1_host, v1_port = _host_port(v1_base)
|
|
339
|
+
host, port = _host_port(absolute)
|
|
340
|
+
|
|
341
|
+
if host not in {v1_host, v2_host} or port not in {v1_port, v2_port}:
|
|
342
|
+
raise UnsafeUrlError("Refusing URL with unexpected host", url=absolute)
|
|
343
|
+
|
|
344
|
+
v2_prefix = (v2_parts.path or "").rstrip("/") + "/"
|
|
345
|
+
is_v1 = not (parts.path or "").startswith(v2_prefix)
|
|
346
|
+
expected_scheme = v1_parts.scheme if is_v1 else v2_parts.scheme
|
|
347
|
+
if parts.scheme != expected_scheme:
|
|
348
|
+
raise UnsafeUrlError("Refusing URL with unexpected scheme", url=absolute)
|
|
349
|
+
|
|
350
|
+
return absolute, is_v1
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
def _redact_url(url: str, api_key: str) -> str:
|
|
354
|
+
parts = urlsplit(url)
|
|
355
|
+
query_params = []
|
|
356
|
+
if parts.query:
|
|
357
|
+
for pair in parts.query.split("&"):
|
|
358
|
+
if "=" in pair:
|
|
359
|
+
key, _value = pair.split("=", 1)
|
|
360
|
+
lowered = key.lower()
|
|
361
|
+
if any(token in lowered for token in ("key", "token", "authorization")):
|
|
362
|
+
query_params.append(f"{key}=[REDACTED]")
|
|
363
|
+
else:
|
|
364
|
+
query_params.append(pair)
|
|
365
|
+
else:
|
|
366
|
+
query_params.append(pair)
|
|
367
|
+
redacted = urlunsplit(
|
|
368
|
+
parts._replace(
|
|
369
|
+
netloc=parts.hostname or parts.netloc,
|
|
370
|
+
query="&".join(query_params),
|
|
371
|
+
)
|
|
372
|
+
)
|
|
373
|
+
return redacted.replace(api_key, "[REDACTED]")
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
def _select_response_headers(headers: Mapping[str, str]) -> dict[str, str]:
|
|
377
|
+
allow = [
|
|
378
|
+
"Retry-After",
|
|
379
|
+
"Date",
|
|
380
|
+
"X-Ratelimit-Limit-User",
|
|
381
|
+
"X-Ratelimit-Limit-User-Remaining",
|
|
382
|
+
"X-Ratelimit-Limit-User-Reset",
|
|
383
|
+
"X-Ratelimit-Limit-Org",
|
|
384
|
+
"X-Ratelimit-Limit-Org-Remaining",
|
|
385
|
+
"X-Ratelimit-Limit-Org-Reset",
|
|
386
|
+
"X-Request-Id",
|
|
387
|
+
"Request-Id",
|
|
388
|
+
]
|
|
389
|
+
selected: dict[str, str] = {}
|
|
390
|
+
for name in allow:
|
|
391
|
+
value = headers.get(name) or headers.get(name.lower())
|
|
392
|
+
if value is not None:
|
|
393
|
+
selected[name] = value
|
|
394
|
+
return selected
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
def _extract_request_id(headers: dict[str, str]) -> str | None:
|
|
398
|
+
for key in ("X-Request-Id", "Request-Id"):
|
|
399
|
+
if key in headers:
|
|
400
|
+
return headers[key]
|
|
401
|
+
return None
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def _diagnostic_request_params(
|
|
405
|
+
params: Sequence[tuple[str, str]] | None,
|
|
406
|
+
) -> dict[str, Any] | None:
|
|
407
|
+
if not params:
|
|
408
|
+
return None
|
|
409
|
+
out: dict[str, Any] = {}
|
|
410
|
+
for k, v in params:
|
|
411
|
+
if k in out:
|
|
412
|
+
existing = out[k]
|
|
413
|
+
if isinstance(existing, list):
|
|
414
|
+
existing.append(v)
|
|
415
|
+
else:
|
|
416
|
+
out[k] = [existing, v]
|
|
417
|
+
else:
|
|
418
|
+
out[k] = v
|
|
419
|
+
return out or None
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
def _redact_external_url(url: str) -> str:
|
|
423
|
+
"""
|
|
424
|
+
Redact external URLs for logs/diagnostics.
|
|
425
|
+
|
|
426
|
+
External download URLs are often signed; stripping the query avoids leaking tokens.
|
|
427
|
+
"""
|
|
428
|
+
parts = urlsplit(url)
|
|
429
|
+
return urlunsplit((parts.scheme, parts.netloc, parts.path, "", ""))
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
_URL_IN_TEXT_RE = re.compile(r"https?://[^\s\"']+")
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
def _safe_body_preview(content: bytes, *, api_key: str, external: bool) -> str:
|
|
436
|
+
"""
|
|
437
|
+
Produce a small, safe body snippet for diagnostics/logging.
|
|
438
|
+
|
|
439
|
+
- Internal: redact known-sensitive query parameters + API key.
|
|
440
|
+
- External: scrub any URLs to remove query/fragment (signed URLs often embed secrets there).
|
|
441
|
+
"""
|
|
442
|
+
text = content[:512].decode("utf-8", errors="replace")
|
|
443
|
+
if not external:
|
|
444
|
+
return _redact_url(text, api_key)[:512]
|
|
445
|
+
|
|
446
|
+
def _scrub(match: re.Match[str]) -> str:
|
|
447
|
+
try:
|
|
448
|
+
return _redact_external_url(match.group(0))
|
|
449
|
+
except Exception:
|
|
450
|
+
return "[REDACTED_URL]"
|
|
451
|
+
|
|
452
|
+
return _URL_IN_TEXT_RE.sub(_scrub, text)[:512]
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
def _sanitize_hook_url(
|
|
456
|
+
url: str,
|
|
457
|
+
*,
|
|
458
|
+
api_key: str,
|
|
459
|
+
external: bool,
|
|
460
|
+
external_hook_policy: ExternalHookPolicy,
|
|
461
|
+
) -> str | None:
|
|
462
|
+
if not external:
|
|
463
|
+
return _redact_url(url, api_key)
|
|
464
|
+
if external_hook_policy is ExternalHookPolicy.SUPPRESS:
|
|
465
|
+
return None
|
|
466
|
+
if external_hook_policy is ExternalHookPolicy.EMIT_UNSAFE:
|
|
467
|
+
return url
|
|
468
|
+
return _redact_external_url(url)
|
|
469
|
+
|
|
470
|
+
|
|
471
|
+
def _sanitize_hook_headers(headers: Sequence[tuple[str, str]]) -> dict[str, str]:
|
|
472
|
+
sanitized: dict[str, str] = {}
|
|
473
|
+
for key, value in headers:
|
|
474
|
+
if key.lower() == "authorization":
|
|
475
|
+
continue
|
|
476
|
+
sanitized[key] = value
|
|
477
|
+
return sanitized
|
|
478
|
+
|
|
479
|
+
|
|
480
|
+
_CREDENTIAL_HEADER_NAMES: frozenset[str] = frozenset(
|
|
481
|
+
{
|
|
482
|
+
"authorization",
|
|
483
|
+
"proxy-authorization",
|
|
484
|
+
"cookie",
|
|
485
|
+
"set-cookie",
|
|
486
|
+
"x-api-key",
|
|
487
|
+
}
|
|
488
|
+
)
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
def _strip_credential_headers(headers: Sequence[tuple[str, str]]) -> list[tuple[str, str]]:
|
|
492
|
+
return [(k, v) for (k, v) in headers if k.lower() not in _CREDENTIAL_HEADER_NAMES]
|
|
493
|
+
|
|
494
|
+
|
|
495
|
+
def _extract_bytes_total(headers: Sequence[tuple[str, str]]) -> int | None:
|
|
496
|
+
"""
|
|
497
|
+
Extract a safe `bytes_total` from headers.
|
|
498
|
+
|
|
499
|
+
Rules:
|
|
500
|
+
- If Transfer-Encoding: chunked is present => unknown
|
|
501
|
+
- If Content-Encoding is present => unknown (httpx may decode bytes)
|
|
502
|
+
- If Content-Length is missing/invalid/multiple conflicting => unknown
|
|
503
|
+
"""
|
|
504
|
+
transfer_encodings: list[str] = []
|
|
505
|
+
content_encodings: list[str] = []
|
|
506
|
+
content_lengths: list[str] = []
|
|
507
|
+
|
|
508
|
+
for key, value in headers:
|
|
509
|
+
lowered = key.lower()
|
|
510
|
+
if lowered == "transfer-encoding":
|
|
511
|
+
transfer_encodings.append(value)
|
|
512
|
+
elif lowered == "content-encoding":
|
|
513
|
+
content_encodings.append(value)
|
|
514
|
+
elif lowered == "content-length":
|
|
515
|
+
content_lengths.append(value)
|
|
516
|
+
|
|
517
|
+
if any("chunked" in v.lower() for v in transfer_encodings):
|
|
518
|
+
return None
|
|
519
|
+
if content_encodings:
|
|
520
|
+
return None
|
|
521
|
+
if not content_lengths:
|
|
522
|
+
return None
|
|
523
|
+
|
|
524
|
+
parsed: set[int] = set()
|
|
525
|
+
for raw in content_lengths:
|
|
526
|
+
raw = raw.strip()
|
|
527
|
+
if not raw.isdigit():
|
|
528
|
+
return None
|
|
529
|
+
parsed.add(int(raw))
|
|
530
|
+
if len(parsed) != 1:
|
|
531
|
+
return None
|
|
532
|
+
return next(iter(parsed))
|
|
533
|
+
|
|
534
|
+
|
|
535
|
+
class _HTTPXSyncStream:
|
|
536
|
+
def __init__(
|
|
537
|
+
self,
|
|
538
|
+
*,
|
|
539
|
+
context_manager: Any,
|
|
540
|
+
response: httpx.Response,
|
|
541
|
+
headers: list[tuple[str, str]],
|
|
542
|
+
request_info: RequestInfo | None,
|
|
543
|
+
client_request_id: str,
|
|
544
|
+
external: bool,
|
|
545
|
+
started_at: float,
|
|
546
|
+
deadline_seconds: float | None,
|
|
547
|
+
on_progress: ProgressCallback | None,
|
|
548
|
+
emit_event: Callable[[HookEvent], Any] | None,
|
|
549
|
+
):
|
|
550
|
+
self._cm = context_manager
|
|
551
|
+
self._resp = response
|
|
552
|
+
self._headers = headers
|
|
553
|
+
self._bytes_total = _extract_bytes_total(headers)
|
|
554
|
+
self._request_info = request_info
|
|
555
|
+
self._client_request_id = client_request_id
|
|
556
|
+
self._external = external
|
|
557
|
+
self._started_at = started_at
|
|
558
|
+
self._deadline_at = started_at + deadline_seconds if deadline_seconds is not None else None
|
|
559
|
+
self._on_progress = on_progress
|
|
560
|
+
self._emit_event = emit_event
|
|
561
|
+
self._closed = False
|
|
562
|
+
self._iterated = False
|
|
563
|
+
|
|
564
|
+
def __enter__(self) -> _HTTPXSyncStream:
|
|
565
|
+
return self
|
|
566
|
+
|
|
567
|
+
def __exit__(self, exc_type: object, exc: object, tb: object) -> None:
|
|
568
|
+
self.close()
|
|
569
|
+
|
|
570
|
+
def close(self) -> None:
|
|
571
|
+
if self._closed:
|
|
572
|
+
return
|
|
573
|
+
self._closed = True
|
|
574
|
+
try:
|
|
575
|
+
self._cm.__exit__(None, None, None)
|
|
576
|
+
except Exception:
|
|
577
|
+
self._resp.close()
|
|
578
|
+
|
|
579
|
+
def iter_bytes(self, *, chunk_size: int) -> Iterator[bytes]:
|
|
580
|
+
self._iterated = True
|
|
581
|
+
bytes_read = 0
|
|
582
|
+
completed = False
|
|
583
|
+
aborted_reason: str | None = None
|
|
584
|
+
raised: BaseException | None = None
|
|
585
|
+
|
|
586
|
+
def check_deadline() -> None:
|
|
587
|
+
if self._deadline_at is None:
|
|
588
|
+
return
|
|
589
|
+
if time.monotonic() >= self._deadline_at:
|
|
590
|
+
raise TimeoutError("Download deadline exceeded")
|
|
591
|
+
|
|
592
|
+
if self._on_progress:
|
|
593
|
+
self._on_progress(0, self._bytes_total, phase="download")
|
|
594
|
+
|
|
595
|
+
try:
|
|
596
|
+
for chunk in self._resp.iter_bytes(chunk_size=chunk_size):
|
|
597
|
+
check_deadline()
|
|
598
|
+
bytes_read += len(chunk)
|
|
599
|
+
if self._on_progress:
|
|
600
|
+
self._on_progress(bytes_read, self._bytes_total, phase="download")
|
|
601
|
+
yield chunk
|
|
602
|
+
completed = True
|
|
603
|
+
except GeneratorExit:
|
|
604
|
+
aborted_reason = "closed"
|
|
605
|
+
raise
|
|
606
|
+
except KeyboardInterrupt:
|
|
607
|
+
aborted_reason = "keyboard_interrupt"
|
|
608
|
+
raise
|
|
609
|
+
except BaseException as exc:
|
|
610
|
+
if isinstance(exc, httpx.TimeoutException):
|
|
611
|
+
raised = TimeoutError(f"Request timed out: {exc}")
|
|
612
|
+
elif isinstance(exc, httpx.NetworkError):
|
|
613
|
+
raised = NetworkError(f"Network error: {exc}")
|
|
614
|
+
else:
|
|
615
|
+
raised = exc
|
|
616
|
+
raise raised from exc
|
|
617
|
+
finally:
|
|
618
|
+
elapsed_ms = (time.monotonic() - self._started_at) * 1000
|
|
619
|
+
if self._emit_event is not None and self._request_info is not None:
|
|
620
|
+
if completed:
|
|
621
|
+
self._emit_event(
|
|
622
|
+
StreamCompleted(
|
|
623
|
+
client_request_id=self._client_request_id,
|
|
624
|
+
request=self._request_info,
|
|
625
|
+
bytes_read=bytes_read,
|
|
626
|
+
bytes_total=self._bytes_total,
|
|
627
|
+
elapsed_ms=elapsed_ms,
|
|
628
|
+
external=self._external,
|
|
629
|
+
)
|
|
630
|
+
)
|
|
631
|
+
elif aborted_reason is not None:
|
|
632
|
+
self._emit_event(
|
|
633
|
+
StreamAborted(
|
|
634
|
+
client_request_id=self._client_request_id,
|
|
635
|
+
request=self._request_info,
|
|
636
|
+
reason=aborted_reason,
|
|
637
|
+
bytes_read=bytes_read,
|
|
638
|
+
bytes_total=self._bytes_total,
|
|
639
|
+
elapsed_ms=elapsed_ms,
|
|
640
|
+
external=self._external,
|
|
641
|
+
)
|
|
642
|
+
)
|
|
643
|
+
elif raised is not None:
|
|
644
|
+
self._emit_event(
|
|
645
|
+
StreamFailed(
|
|
646
|
+
client_request_id=self._client_request_id,
|
|
647
|
+
request=self._request_info,
|
|
648
|
+
error=raised,
|
|
649
|
+
bytes_read=bytes_read,
|
|
650
|
+
bytes_total=self._bytes_total,
|
|
651
|
+
elapsed_ms=elapsed_ms,
|
|
652
|
+
external=self._external,
|
|
653
|
+
)
|
|
654
|
+
)
|
|
655
|
+
self.close()
|
|
656
|
+
|
|
657
|
+
|
|
658
|
+
class _HTTPXAsyncStream:
|
|
659
|
+
def __init__(
|
|
660
|
+
self,
|
|
661
|
+
*,
|
|
662
|
+
context_manager: Any,
|
|
663
|
+
response: httpx.Response,
|
|
664
|
+
headers: list[tuple[str, str]],
|
|
665
|
+
request_info: RequestInfo | None,
|
|
666
|
+
client_request_id: str,
|
|
667
|
+
external: bool,
|
|
668
|
+
started_at: float,
|
|
669
|
+
deadline_seconds: float | None,
|
|
670
|
+
on_progress: ProgressCallback | None,
|
|
671
|
+
emit_event: Callable[[HookEvent], Any] | None,
|
|
672
|
+
):
|
|
673
|
+
self._cm = context_manager
|
|
674
|
+
self._resp = response
|
|
675
|
+
self._headers = headers
|
|
676
|
+
self._bytes_total = _extract_bytes_total(headers)
|
|
677
|
+
self._request_info = request_info
|
|
678
|
+
self._client_request_id = client_request_id
|
|
679
|
+
self._external = external
|
|
680
|
+
self._started_at = started_at
|
|
681
|
+
self._deadline_at = started_at + deadline_seconds if deadline_seconds is not None else None
|
|
682
|
+
self._on_progress = on_progress
|
|
683
|
+
self._emit_event = emit_event
|
|
684
|
+
self._closed = False
|
|
685
|
+
self._iterated = False
|
|
686
|
+
|
|
687
|
+
async def __aenter__(self) -> _HTTPXAsyncStream:
|
|
688
|
+
return self
|
|
689
|
+
|
|
690
|
+
async def __aexit__(self, exc_type: object, exc: object, tb: object) -> None:
|
|
691
|
+
await self.aclose()
|
|
692
|
+
|
|
693
|
+
async def aclose(self) -> None:
|
|
694
|
+
if self._closed:
|
|
695
|
+
return
|
|
696
|
+
self._closed = True
|
|
697
|
+
await self._cm.__aexit__(None, None, None)
|
|
698
|
+
|
|
699
|
+
def aiter_bytes(self, *, chunk_size: int) -> AsyncIterator[bytes]:
|
|
700
|
+
async def _gen() -> AsyncIterator[bytes]:
|
|
701
|
+
self._iterated = True
|
|
702
|
+
bytes_read = 0
|
|
703
|
+
completed = False
|
|
704
|
+
aborted_reason: str | None = None
|
|
705
|
+
raised: BaseException | None = None
|
|
706
|
+
|
|
707
|
+
def check_deadline() -> None:
|
|
708
|
+
if self._deadline_at is None:
|
|
709
|
+
return
|
|
710
|
+
if time.monotonic() >= self._deadline_at:
|
|
711
|
+
raise TimeoutError("Download deadline exceeded")
|
|
712
|
+
|
|
713
|
+
if self._on_progress:
|
|
714
|
+
self._on_progress(0, self._bytes_total, phase="download")
|
|
715
|
+
|
|
716
|
+
try:
|
|
717
|
+
async for chunk in self._resp.aiter_bytes(chunk_size=chunk_size):
|
|
718
|
+
check_deadline()
|
|
719
|
+
bytes_read += len(chunk)
|
|
720
|
+
if self._on_progress:
|
|
721
|
+
self._on_progress(bytes_read, self._bytes_total, phase="download")
|
|
722
|
+
yield chunk
|
|
723
|
+
completed = True
|
|
724
|
+
except asyncio.CancelledError:
|
|
725
|
+
aborted_reason = "cancelled"
|
|
726
|
+
raise
|
|
727
|
+
except KeyboardInterrupt:
|
|
728
|
+
aborted_reason = "keyboard_interrupt"
|
|
729
|
+
raise
|
|
730
|
+
except BaseException as exc:
|
|
731
|
+
if isinstance(exc, httpx.TimeoutException):
|
|
732
|
+
raised = TimeoutError(f"Request timed out: {exc}")
|
|
733
|
+
elif isinstance(exc, httpx.NetworkError):
|
|
734
|
+
raised = NetworkError(f"Network error: {exc}")
|
|
735
|
+
else:
|
|
736
|
+
raised = exc
|
|
737
|
+
raise raised from exc
|
|
738
|
+
finally:
|
|
739
|
+
elapsed_ms = (time.monotonic() - self._started_at) * 1000
|
|
740
|
+
if self._emit_event is not None and self._request_info is not None:
|
|
741
|
+
if completed:
|
|
742
|
+
maybe = self._emit_event(
|
|
743
|
+
StreamCompleted(
|
|
744
|
+
client_request_id=self._client_request_id,
|
|
745
|
+
request=self._request_info,
|
|
746
|
+
bytes_read=bytes_read,
|
|
747
|
+
bytes_total=self._bytes_total,
|
|
748
|
+
elapsed_ms=elapsed_ms,
|
|
749
|
+
external=self._external,
|
|
750
|
+
)
|
|
751
|
+
)
|
|
752
|
+
if inspect.isawaitable(maybe):
|
|
753
|
+
await cast(Awaitable[None], maybe)
|
|
754
|
+
elif aborted_reason is not None:
|
|
755
|
+
maybe = self._emit_event(
|
|
756
|
+
StreamAborted(
|
|
757
|
+
client_request_id=self._client_request_id,
|
|
758
|
+
request=self._request_info,
|
|
759
|
+
reason=aborted_reason,
|
|
760
|
+
bytes_read=bytes_read,
|
|
761
|
+
bytes_total=self._bytes_total,
|
|
762
|
+
elapsed_ms=elapsed_ms,
|
|
763
|
+
external=self._external,
|
|
764
|
+
)
|
|
765
|
+
)
|
|
766
|
+
if inspect.isawaitable(maybe):
|
|
767
|
+
await cast(Awaitable[None], maybe)
|
|
768
|
+
elif raised is not None:
|
|
769
|
+
maybe = self._emit_event(
|
|
770
|
+
StreamFailed(
|
|
771
|
+
client_request_id=self._client_request_id,
|
|
772
|
+
request=self._request_info,
|
|
773
|
+
error=raised,
|
|
774
|
+
bytes_read=bytes_read,
|
|
775
|
+
bytes_total=self._bytes_total,
|
|
776
|
+
elapsed_ms=elapsed_ms,
|
|
777
|
+
external=self._external,
|
|
778
|
+
)
|
|
779
|
+
)
|
|
780
|
+
if inspect.isawaitable(maybe):
|
|
781
|
+
await cast(Awaitable[None], maybe)
|
|
782
|
+
await asyncio.shield(self.aclose())
|
|
783
|
+
|
|
784
|
+
return _gen()
|
|
785
|
+
|
|
786
|
+
|
|
787
|
+
# =============================================================================
|
|
788
|
+
# Rate Limit Tracking
|
|
789
|
+
# =============================================================================
|
|
790
|
+
|
|
791
|
+
|
|
792
|
+
@dataclass
|
|
793
|
+
class RateLimitState:
|
|
794
|
+
"""Tracks rate limit status from response headers."""
|
|
795
|
+
|
|
796
|
+
user_limit: int | None = None
|
|
797
|
+
user_remaining: int | None = None
|
|
798
|
+
user_reset_seconds: int | None = None
|
|
799
|
+
org_limit: int | None = None
|
|
800
|
+
org_remaining: int | None = None
|
|
801
|
+
org_reset_seconds: int | None = None
|
|
802
|
+
last_updated: float | None = None
|
|
803
|
+
last_request_id: str | None = None
|
|
804
|
+
_lock: threading.Lock = field(default_factory=threading.Lock, repr=False, compare=False)
|
|
805
|
+
|
|
806
|
+
def update_from_headers(self, headers: Mapping[str, str]) -> None:
|
|
807
|
+
"""Update state from response headers."""
|
|
808
|
+
|
|
809
|
+
# Handle both uppercase (current) and lowercase (future) headers.
|
|
810
|
+
def get_int(name: str) -> int | None:
|
|
811
|
+
value = headers.get(name) or headers.get(name.lower())
|
|
812
|
+
return int(value) if value else None
|
|
813
|
+
|
|
814
|
+
request_id = _extract_request_id(_select_response_headers(headers))
|
|
815
|
+
observed_any = False
|
|
816
|
+
|
|
817
|
+
user_limit = get_int("X-Ratelimit-Limit-User")
|
|
818
|
+
user_remaining = get_int("X-Ratelimit-Limit-User-Remaining")
|
|
819
|
+
user_reset = get_int("X-Ratelimit-Limit-User-Reset")
|
|
820
|
+
org_limit = get_int("X-Ratelimit-Limit-Org")
|
|
821
|
+
org_remaining = get_int("X-Ratelimit-Limit-Org-Remaining")
|
|
822
|
+
org_reset = get_int("X-Ratelimit-Limit-Org-Reset")
|
|
823
|
+
|
|
824
|
+
with self._lock:
|
|
825
|
+
if request_id is not None:
|
|
826
|
+
self.last_request_id = request_id
|
|
827
|
+
|
|
828
|
+
# Only update timestamps when we actually observed rate limit headers.
|
|
829
|
+
for v in (user_limit, user_remaining, user_reset, org_limit, org_remaining, org_reset):
|
|
830
|
+
if v is not None:
|
|
831
|
+
observed_any = True
|
|
832
|
+
break
|
|
833
|
+
|
|
834
|
+
if not observed_any:
|
|
835
|
+
return
|
|
836
|
+
|
|
837
|
+
self.last_updated = time.time()
|
|
838
|
+
|
|
839
|
+
if user_limit is not None:
|
|
840
|
+
self.user_limit = user_limit
|
|
841
|
+
if user_remaining is not None:
|
|
842
|
+
self.user_remaining = user_remaining
|
|
843
|
+
if user_reset is not None:
|
|
844
|
+
self.user_reset_seconds = user_reset
|
|
845
|
+
if org_limit is not None:
|
|
846
|
+
self.org_limit = org_limit
|
|
847
|
+
if org_remaining is not None:
|
|
848
|
+
self.org_remaining = org_remaining
|
|
849
|
+
if org_reset is not None:
|
|
850
|
+
self.org_reset_seconds = org_reset
|
|
851
|
+
|
|
852
|
+
@property
|
|
853
|
+
def should_throttle(self) -> bool:
|
|
854
|
+
"""Whether we should slow down requests."""
|
|
855
|
+
return (self.user_remaining is not None and self.user_remaining < 50) or (
|
|
856
|
+
self.org_remaining is not None and self.org_remaining < 1000
|
|
857
|
+
)
|
|
858
|
+
|
|
859
|
+
@property
|
|
860
|
+
def seconds_until_user_reset(self) -> float:
|
|
861
|
+
"""Seconds until per-minute limit resets."""
|
|
862
|
+
if self.user_reset_seconds is None or self.last_updated is None:
|
|
863
|
+
return 0.0
|
|
864
|
+
elapsed = time.time() - self.last_updated
|
|
865
|
+
return max(0.0, float(self.user_reset_seconds) - elapsed)
|
|
866
|
+
|
|
867
|
+
def snapshot(self) -> dict[str, Any]:
|
|
868
|
+
"""Return a coherent snapshot of the tracked state."""
|
|
869
|
+
with self._lock:
|
|
870
|
+
return {
|
|
871
|
+
"user_limit": self.user_limit,
|
|
872
|
+
"user_remaining": self.user_remaining,
|
|
873
|
+
"user_reset_seconds": self.user_reset_seconds,
|
|
874
|
+
"org_limit": self.org_limit,
|
|
875
|
+
"org_remaining": self.org_remaining,
|
|
876
|
+
"org_reset_seconds": self.org_reset_seconds,
|
|
877
|
+
"last_updated": self.last_updated,
|
|
878
|
+
"last_request_id": self.last_request_id,
|
|
879
|
+
}
|
|
880
|
+
|
|
881
|
+
|
|
882
|
+
class _RateLimitGateSync:
|
|
883
|
+
def __init__(self) -> None:
|
|
884
|
+
self._blocked_until: float = 0.0
|
|
885
|
+
self._lock = threading.Lock()
|
|
886
|
+
|
|
887
|
+
def note(self, wait_seconds: float) -> None:
|
|
888
|
+
if wait_seconds <= 0:
|
|
889
|
+
return
|
|
890
|
+
now = time.monotonic()
|
|
891
|
+
with self._lock:
|
|
892
|
+
self._blocked_until = max(self._blocked_until, now + wait_seconds)
|
|
893
|
+
|
|
894
|
+
def delay(self) -> float:
|
|
895
|
+
now = time.monotonic()
|
|
896
|
+
with self._lock:
|
|
897
|
+
return max(0.0, self._blocked_until - now)
|
|
898
|
+
|
|
899
|
+
|
|
900
|
+
class _RateLimitGateAsync:
|
|
901
|
+
def __init__(self) -> None:
|
|
902
|
+
self._blocked_until: float = 0.0
|
|
903
|
+
self._lock = asyncio.Lock()
|
|
904
|
+
|
|
905
|
+
async def note(self, wait_seconds: float) -> None:
|
|
906
|
+
if wait_seconds <= 0:
|
|
907
|
+
return
|
|
908
|
+
now = time.monotonic()
|
|
909
|
+
async with self._lock:
|
|
910
|
+
self._blocked_until = max(self._blocked_until, now + wait_seconds)
|
|
911
|
+
|
|
912
|
+
async def delay(self) -> float:
|
|
913
|
+
now = time.monotonic()
|
|
914
|
+
async with self._lock:
|
|
915
|
+
return max(0.0, self._blocked_until - now)
|
|
916
|
+
|
|
917
|
+
|
|
918
|
+
# =============================================================================
|
|
919
|
+
# Simple TTL Cache
|
|
920
|
+
# =============================================================================
|
|
921
|
+
|
|
922
|
+
|
|
923
|
+
@dataclass
|
|
924
|
+
class CacheEntry:
|
|
925
|
+
"""Single cache entry with TTL."""
|
|
926
|
+
|
|
927
|
+
value: dict[str, Any]
|
|
928
|
+
expires_at: float
|
|
929
|
+
|
|
930
|
+
|
|
931
|
+
class SimpleCache:
|
|
932
|
+
"""
|
|
933
|
+
Simple in-memory cache with TTL.
|
|
934
|
+
|
|
935
|
+
Used for caching field metadata and other rarely-changing data.
|
|
936
|
+
"""
|
|
937
|
+
|
|
938
|
+
def __init__(self, default_ttl: float = 300.0):
|
|
939
|
+
self._cache: dict[str, CacheEntry] = {}
|
|
940
|
+
self._default_ttl = default_ttl
|
|
941
|
+
|
|
942
|
+
def get(self, key: str) -> dict[str, Any] | None:
|
|
943
|
+
"""Get value if not expired."""
|
|
944
|
+
entry = self._cache.get(key)
|
|
945
|
+
if entry is None:
|
|
946
|
+
return None
|
|
947
|
+
if time.time() > entry.expires_at:
|
|
948
|
+
del self._cache[key]
|
|
949
|
+
return None
|
|
950
|
+
return entry.value
|
|
951
|
+
|
|
952
|
+
def set(self, key: str, value: dict[str, Any], ttl: float | None = None) -> None:
|
|
953
|
+
"""Set value with TTL."""
|
|
954
|
+
expires_at = time.time() + (ttl or self._default_ttl)
|
|
955
|
+
self._cache[key] = CacheEntry(value=value, expires_at=expires_at)
|
|
956
|
+
|
|
957
|
+
def delete(self, key: str) -> None:
|
|
958
|
+
"""Delete a cache entry."""
|
|
959
|
+
self._cache.pop(key, None)
|
|
960
|
+
|
|
961
|
+
def clear(self) -> None:
|
|
962
|
+
"""Clear all cache entries."""
|
|
963
|
+
self._cache.clear()
|
|
964
|
+
|
|
965
|
+
def invalidate_prefix(self, prefix: str) -> None:
|
|
966
|
+
"""Invalidate all entries with the given prefix."""
|
|
967
|
+
keys_to_delete = [k for k in self._cache if k.startswith(prefix)]
|
|
968
|
+
for key in keys_to_delete:
|
|
969
|
+
del self._cache[key]
|
|
970
|
+
|
|
971
|
+
|
|
972
|
+
# =============================================================================
|
|
973
|
+
# HTTP Client Configuration
|
|
974
|
+
# =============================================================================
|
|
975
|
+
|
|
976
|
+
|
|
977
|
+
@dataclass
|
|
978
|
+
class ClientConfig:
|
|
979
|
+
"""Configuration for the HTTP client."""
|
|
980
|
+
|
|
981
|
+
api_key: str
|
|
982
|
+
v1_base_url: str = V1_BASE_URL
|
|
983
|
+
v2_base_url: str = V2_BASE_URL
|
|
984
|
+
http2: bool = False
|
|
985
|
+
v1_auth_mode: Literal["bearer", "basic"] = "bearer"
|
|
986
|
+
timeout: httpx.Timeout | float = field(
|
|
987
|
+
default_factory=lambda: httpx.Timeout(
|
|
988
|
+
30.0,
|
|
989
|
+
connect=10.0,
|
|
990
|
+
read=30.0,
|
|
991
|
+
write=30.0,
|
|
992
|
+
pool=10.0,
|
|
993
|
+
)
|
|
994
|
+
)
|
|
995
|
+
limits: httpx.Limits = field(
|
|
996
|
+
default_factory=lambda: httpx.Limits(
|
|
997
|
+
max_connections=20,
|
|
998
|
+
max_keepalive_connections=10,
|
|
999
|
+
keepalive_expiry=30.0,
|
|
1000
|
+
)
|
|
1001
|
+
)
|
|
1002
|
+
transport: httpx.BaseTransport | None = None
|
|
1003
|
+
async_transport: httpx.AsyncBaseTransport | None = None
|
|
1004
|
+
max_retries: int = 3
|
|
1005
|
+
retry_delay: float = 1.0
|
|
1006
|
+
enable_cache: bool = False
|
|
1007
|
+
cache_ttl: float = 300.0
|
|
1008
|
+
log_requests: bool = False
|
|
1009
|
+
enable_beta_endpoints: bool = False
|
|
1010
|
+
# If True, allows following `http://` redirects when downloading files (not recommended).
|
|
1011
|
+
allow_insecure_download_redirects: bool = False
|
|
1012
|
+
# Request/response hooks (DX-008)
|
|
1013
|
+
on_request: RequestHook | None = None
|
|
1014
|
+
on_response: ResponseHook | None = None
|
|
1015
|
+
on_error: ErrorHook | None = None
|
|
1016
|
+
on_event: AnyEventHook | None = None
|
|
1017
|
+
hook_error_policy: Literal["swallow", "raise"] = "swallow"
|
|
1018
|
+
# TR-015: Expected v2 API version for diagnostics and safety checks
|
|
1019
|
+
expected_v2_version: str | None = None
|
|
1020
|
+
policies: Policies = field(default_factory=Policies)
|
|
1021
|
+
|
|
1022
|
+
def __post_init__(self) -> None:
|
|
1023
|
+
if isinstance(self.timeout, (int, float)):
|
|
1024
|
+
self.timeout = httpx.Timeout(float(self.timeout))
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
def _cache_key_suffix(v1_base_url: str, v2_base_url: str, api_key: str) -> str:
|
|
1028
|
+
digest = hashlib.sha256(api_key.encode("utf-8")).hexdigest()
|
|
1029
|
+
return f"|v1={v1_base_url}|v2={v2_base_url}|tenant={digest}"
|
|
1030
|
+
|
|
1031
|
+
|
|
1032
|
+
# =============================================================================
|
|
1033
|
+
# Synchronous HTTP Client
|
|
1034
|
+
# =============================================================================
|
|
1035
|
+
|
|
1036
|
+
|
|
1037
|
+
class HTTPClient:
|
|
1038
|
+
"""
|
|
1039
|
+
Synchronous HTTP client for Affinity API.
|
|
1040
|
+
|
|
1041
|
+
Handles authentication, rate limiting, retries, and caching.
|
|
1042
|
+
"""
|
|
1043
|
+
|
|
1044
|
+
def __init__(self, config: ClientConfig):
|
|
1045
|
+
self._config = config
|
|
1046
|
+
self._rate_limit = RateLimitState()
|
|
1047
|
+
self._rate_limit_gate = _RateLimitGateSync()
|
|
1048
|
+
self._cache = SimpleCache(config.cache_ttl) if config.enable_cache else None
|
|
1049
|
+
self._cache_suffix = _cache_key_suffix(
|
|
1050
|
+
self._config.v1_base_url,
|
|
1051
|
+
self._config.v2_base_url,
|
|
1052
|
+
self._config.api_key,
|
|
1053
|
+
)
|
|
1054
|
+
|
|
1055
|
+
# Configure httpx client (auth is applied per-request)
|
|
1056
|
+
self._client = httpx.Client(
|
|
1057
|
+
http2=config.http2,
|
|
1058
|
+
timeout=config.timeout,
|
|
1059
|
+
limits=config.limits,
|
|
1060
|
+
transport=config.transport,
|
|
1061
|
+
headers=dict(_DEFAULT_HEADERS),
|
|
1062
|
+
)
|
|
1063
|
+
self._pipeline = self._build_pipeline()
|
|
1064
|
+
self._raw_buffered_pipeline = self._build_raw_buffered_pipeline()
|
|
1065
|
+
self._raw_stream_pipeline = self._build_raw_stream_pipeline()
|
|
1066
|
+
|
|
1067
|
+
def _request_id_middleware(
|
|
1068
|
+
self,
|
|
1069
|
+
) -> Middleware[SDKBaseResponse]:
|
|
1070
|
+
def middleware(
|
|
1071
|
+
req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
|
|
1072
|
+
) -> SDKBaseResponse:
|
|
1073
|
+
context: RequestContext = cast(RequestContext, dict(req.context))
|
|
1074
|
+
client_request_id = context.get("client_request_id")
|
|
1075
|
+
if not isinstance(client_request_id, str) or not client_request_id:
|
|
1076
|
+
try:
|
|
1077
|
+
client_request_id = uuid.uuid4().hex
|
|
1078
|
+
except Exception:
|
|
1079
|
+
client_request_id = "unknown"
|
|
1080
|
+
context["client_request_id"] = client_request_id
|
|
1081
|
+
|
|
1082
|
+
headers = list(req.headers)
|
|
1083
|
+
if not any(k.lower() == "x-client-request-id" for (k, _v) in headers):
|
|
1084
|
+
headers.append(("X-Client-Request-Id", client_request_id))
|
|
1085
|
+
|
|
1086
|
+
return next(replace(req, headers=headers, context=context))
|
|
1087
|
+
|
|
1088
|
+
return middleware
|
|
1089
|
+
|
|
1090
|
+
def _hooks_middleware(
|
|
1091
|
+
self,
|
|
1092
|
+
) -> Middleware[SDKBaseResponse]:
|
|
1093
|
+
config = self._config
|
|
1094
|
+
|
|
1095
|
+
def middleware(
|
|
1096
|
+
req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
|
|
1097
|
+
) -> SDKBaseResponse:
|
|
1098
|
+
context: RequestContext = cast(RequestContext, dict(req.context))
|
|
1099
|
+
started_at = context.get("started_at")
|
|
1100
|
+
if started_at is None:
|
|
1101
|
+
started_at = time.monotonic()
|
|
1102
|
+
context["started_at"] = started_at
|
|
1103
|
+
|
|
1104
|
+
client_request_id = context.get("client_request_id") or "unknown"
|
|
1105
|
+
|
|
1106
|
+
def emit_event(event: HookEvent) -> None:
|
|
1107
|
+
if config.on_event is None:
|
|
1108
|
+
return
|
|
1109
|
+
if (
|
|
1110
|
+
getattr(event, "external", False)
|
|
1111
|
+
and config.policies.external_hooks is ExternalHookPolicy.SUPPRESS
|
|
1112
|
+
):
|
|
1113
|
+
return
|
|
1114
|
+
try:
|
|
1115
|
+
result = config.on_event(event)
|
|
1116
|
+
if inspect.isawaitable(result):
|
|
1117
|
+
if inspect.iscoroutine(result):
|
|
1118
|
+
result.close()
|
|
1119
|
+
raise ConfigurationError(
|
|
1120
|
+
"Sync clients require a synchronous `on_event` handler"
|
|
1121
|
+
)
|
|
1122
|
+
except Exception:
|
|
1123
|
+
if config.hook_error_policy == "raise":
|
|
1124
|
+
raise
|
|
1125
|
+
logger.warning(
|
|
1126
|
+
"Hook error suppressed (hook_error_policy=swallow)", exc_info=True
|
|
1127
|
+
)
|
|
1128
|
+
|
|
1129
|
+
context["emit_event"] = emit_event
|
|
1130
|
+
|
|
1131
|
+
external = bool(context.get("external", False))
|
|
1132
|
+
sanitized_url = _sanitize_hook_url(
|
|
1133
|
+
req.url,
|
|
1134
|
+
api_key=config.api_key,
|
|
1135
|
+
external=external,
|
|
1136
|
+
external_hook_policy=config.policies.external_hooks,
|
|
1137
|
+
)
|
|
1138
|
+
request_info = (
|
|
1139
|
+
RequestInfo(
|
|
1140
|
+
method=req.method.upper(),
|
|
1141
|
+
url=sanitized_url,
|
|
1142
|
+
headers=_sanitize_hook_headers(req.headers),
|
|
1143
|
+
)
|
|
1144
|
+
if sanitized_url is not None
|
|
1145
|
+
else None
|
|
1146
|
+
)
|
|
1147
|
+
context["hook_request_info"] = request_info
|
|
1148
|
+
|
|
1149
|
+
if request_info is not None:
|
|
1150
|
+
if config.on_request:
|
|
1151
|
+
config.on_request(request_info)
|
|
1152
|
+
emit_event(
|
|
1153
|
+
RequestStarted(
|
|
1154
|
+
client_request_id=client_request_id,
|
|
1155
|
+
request=request_info,
|
|
1156
|
+
api_version=req.api_version if not external else "external",
|
|
1157
|
+
)
|
|
1158
|
+
)
|
|
1159
|
+
|
|
1160
|
+
try:
|
|
1161
|
+
resp = next(replace(req, context=context))
|
|
1162
|
+
except Exception as exc:
|
|
1163
|
+
elapsed_ms = (time.monotonic() - started_at) * 1000
|
|
1164
|
+
if request_info is not None:
|
|
1165
|
+
emit_event(
|
|
1166
|
+
RequestFailed(
|
|
1167
|
+
client_request_id=client_request_id,
|
|
1168
|
+
request=request_info,
|
|
1169
|
+
error=exc,
|
|
1170
|
+
elapsed_ms=elapsed_ms,
|
|
1171
|
+
external=external,
|
|
1172
|
+
)
|
|
1173
|
+
)
|
|
1174
|
+
if config.on_error and request_info is not None:
|
|
1175
|
+
config.on_error(
|
|
1176
|
+
ErrorInfo(error=exc, elapsed_ms=elapsed_ms, request=request_info)
|
|
1177
|
+
)
|
|
1178
|
+
raise
|
|
1179
|
+
|
|
1180
|
+
elapsed_ms = (time.monotonic() - started_at) * 1000
|
|
1181
|
+
resp.context.setdefault("client_request_id", client_request_id)
|
|
1182
|
+
resp.context.setdefault("external", external)
|
|
1183
|
+
resp.context.setdefault("elapsed_seconds", elapsed_ms / 1000.0)
|
|
1184
|
+
|
|
1185
|
+
if config.on_response and request_info is not None:
|
|
1186
|
+
config.on_response(
|
|
1187
|
+
ResponseInfo(
|
|
1188
|
+
status_code=resp.status_code,
|
|
1189
|
+
headers=dict(resp.headers),
|
|
1190
|
+
elapsed_ms=elapsed_ms,
|
|
1191
|
+
cache_hit=bool(resp.context.get("cache_hit", False)),
|
|
1192
|
+
request=request_info,
|
|
1193
|
+
)
|
|
1194
|
+
)
|
|
1195
|
+
|
|
1196
|
+
if request_info is not None:
|
|
1197
|
+
emit_event(
|
|
1198
|
+
ResponseHeadersReceived(
|
|
1199
|
+
client_request_id=client_request_id,
|
|
1200
|
+
request=request_info,
|
|
1201
|
+
status_code=resp.status_code,
|
|
1202
|
+
headers=list(resp.headers),
|
|
1203
|
+
elapsed_ms=elapsed_ms,
|
|
1204
|
+
external=bool(resp.context.get("external", False)),
|
|
1205
|
+
cache_hit=bool(resp.context.get("cache_hit", False)),
|
|
1206
|
+
request_id=resp.context.get("request_id"),
|
|
1207
|
+
)
|
|
1208
|
+
)
|
|
1209
|
+
|
|
1210
|
+
if not isinstance(resp, SDKRawStreamResponse):
|
|
1211
|
+
emit_event(
|
|
1212
|
+
RequestSucceeded(
|
|
1213
|
+
client_request_id=client_request_id,
|
|
1214
|
+
request=request_info,
|
|
1215
|
+
status_code=resp.status_code,
|
|
1216
|
+
elapsed_ms=elapsed_ms,
|
|
1217
|
+
external=bool(resp.context.get("external", False)),
|
|
1218
|
+
)
|
|
1219
|
+
)
|
|
1220
|
+
|
|
1221
|
+
return resp
|
|
1222
|
+
|
|
1223
|
+
return middleware
|
|
1224
|
+
|
|
1225
|
+
def _retry_middleware(
|
|
1226
|
+
self,
|
|
1227
|
+
) -> Middleware[SDKBaseResponse]:
|
|
1228
|
+
config = self._config
|
|
1229
|
+
|
|
1230
|
+
def middleware(
|
|
1231
|
+
req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
|
|
1232
|
+
) -> SDKBaseResponse:
|
|
1233
|
+
last_error: Exception | None = None
|
|
1234
|
+
for attempt in range(config.max_retries + 1):
|
|
1235
|
+
if attempt == 0:
|
|
1236
|
+
throttle_delay = self._rate_limit_gate.delay()
|
|
1237
|
+
if throttle_delay > 0:
|
|
1238
|
+
time.sleep(throttle_delay + _throttle_jitter(throttle_delay))
|
|
1239
|
+
try:
|
|
1240
|
+
resp = next(req)
|
|
1241
|
+
resp.context["retry_count"] = attempt
|
|
1242
|
+
return resp
|
|
1243
|
+
except (
|
|
1244
|
+
RateLimitError,
|
|
1245
|
+
AffinityError,
|
|
1246
|
+
httpx.TimeoutException,
|
|
1247
|
+
httpx.NetworkError,
|
|
1248
|
+
) as e:
|
|
1249
|
+
outcome = _retry_outcome(
|
|
1250
|
+
method=req.method.upper(),
|
|
1251
|
+
attempt=attempt,
|
|
1252
|
+
max_retries=config.max_retries,
|
|
1253
|
+
retry_delay=config.retry_delay,
|
|
1254
|
+
error=e,
|
|
1255
|
+
)
|
|
1256
|
+
if isinstance(e, RateLimitError):
|
|
1257
|
+
rate_limit_wait = (
|
|
1258
|
+
float(e.retry_after) if e.retry_after is not None else outcome.wait_time
|
|
1259
|
+
)
|
|
1260
|
+
if rate_limit_wait is not None:
|
|
1261
|
+
self._rate_limit_gate.note(rate_limit_wait)
|
|
1262
|
+
if outcome.action == "raise":
|
|
1263
|
+
raise
|
|
1264
|
+
if outcome.action == "raise_wrapped":
|
|
1265
|
+
assert outcome.wrapped_error is not None
|
|
1266
|
+
raise outcome.wrapped_error from e
|
|
1267
|
+
|
|
1268
|
+
assert outcome.last_error is not None
|
|
1269
|
+
last_error = outcome.last_error
|
|
1270
|
+
if outcome.action == "break":
|
|
1271
|
+
break
|
|
1272
|
+
|
|
1273
|
+
assert outcome.wait_time is not None
|
|
1274
|
+
emit = req.context.get("emit_event")
|
|
1275
|
+
request_info = cast(RequestInfo | None, req.context.get("hook_request_info"))
|
|
1276
|
+
if emit is not None and request_info is not None:
|
|
1277
|
+
emit(
|
|
1278
|
+
RequestRetrying(
|
|
1279
|
+
client_request_id=req.context.get("client_request_id") or "unknown",
|
|
1280
|
+
request=request_info,
|
|
1281
|
+
attempt=attempt + 1,
|
|
1282
|
+
wait_seconds=outcome.wait_time,
|
|
1283
|
+
reason=outcome.log_message or type(e).__name__,
|
|
1284
|
+
)
|
|
1285
|
+
)
|
|
1286
|
+
|
|
1287
|
+
if outcome.log_message:
|
|
1288
|
+
logger.warning(outcome.log_message)
|
|
1289
|
+
time.sleep(outcome.wait_time)
|
|
1290
|
+
|
|
1291
|
+
if last_error:
|
|
1292
|
+
raise last_error
|
|
1293
|
+
raise AffinityError("Request failed after retries")
|
|
1294
|
+
|
|
1295
|
+
return middleware
|
|
1296
|
+
|
|
1297
|
+
def _auth_middleware(
|
|
1298
|
+
self,
|
|
1299
|
+
) -> Middleware[SDKBaseResponse]:
|
|
1300
|
+
config = self._config
|
|
1301
|
+
|
|
1302
|
+
def middleware(
|
|
1303
|
+
req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
|
|
1304
|
+
) -> SDKBaseResponse:
|
|
1305
|
+
headers = [(k, v) for (k, v) in req.headers if k.lower() != "authorization"]
|
|
1306
|
+
if req.api_version == "v1" and config.v1_auth_mode == "basic":
|
|
1307
|
+
token = base64.b64encode(f":{config.api_key}".encode()).decode("ascii")
|
|
1308
|
+
headers.append(("Authorization", f"Basic {token}"))
|
|
1309
|
+
else:
|
|
1310
|
+
headers.append(("Authorization", f"Bearer {config.api_key}"))
|
|
1311
|
+
return next(replace(req, headers=headers))
|
|
1312
|
+
|
|
1313
|
+
return middleware
|
|
1314
|
+
|
|
1315
|
+
def _write_guard_middleware(
|
|
1316
|
+
self,
|
|
1317
|
+
) -> Middleware[SDKBaseResponse]:
|
|
1318
|
+
config = self._config
|
|
1319
|
+
|
|
1320
|
+
def middleware(
|
|
1321
|
+
req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
|
|
1322
|
+
) -> SDKBaseResponse:
|
|
1323
|
+
if config.policies.write is WritePolicy.DENY and req.write_intent:
|
|
1324
|
+
method_upper = req.method.upper()
|
|
1325
|
+
raise WriteNotAllowedError(
|
|
1326
|
+
f"Cannot {method_upper} while writes are disabled by policy",
|
|
1327
|
+
method=method_upper,
|
|
1328
|
+
url=_redact_url(req.url, config.api_key),
|
|
1329
|
+
)
|
|
1330
|
+
return next(req)
|
|
1331
|
+
|
|
1332
|
+
return middleware
|
|
1333
|
+
|
|
1334
|
+
def _build_pipeline(self) -> Callable[[SDKRequest], SDKResponse]:
|
|
1335
|
+
config = self._config
|
|
1336
|
+
|
|
1337
|
+
def terminal(req: SDKRequest) -> SDKResponse:
|
|
1338
|
+
external = bool(req.context.get("external", False))
|
|
1339
|
+
if config.log_requests and not external:
|
|
1340
|
+
logger.debug(f"{req.method} {req.url}")
|
|
1341
|
+
|
|
1342
|
+
request_kwargs: dict[str, Any] = {}
|
|
1343
|
+
if req.headers:
|
|
1344
|
+
request_kwargs["headers"] = req.headers
|
|
1345
|
+
if req.params is not None:
|
|
1346
|
+
request_kwargs["params"] = req.params
|
|
1347
|
+
if req.json is not None:
|
|
1348
|
+
request_kwargs["json"] = req.json
|
|
1349
|
+
if req.files is not None:
|
|
1350
|
+
request_kwargs["files"] = req.files
|
|
1351
|
+
if req.data is not None:
|
|
1352
|
+
request_kwargs["data"] = req.data
|
|
1353
|
+
|
|
1354
|
+
timeout_seconds = req.context.get("timeout_seconds")
|
|
1355
|
+
if timeout_seconds is not None:
|
|
1356
|
+
request_kwargs["timeout"] = float(timeout_seconds)
|
|
1357
|
+
|
|
1358
|
+
response = self._client.request(req.method, req.url, **request_kwargs)
|
|
1359
|
+
return SDKResponse(
|
|
1360
|
+
status_code=response.status_code,
|
|
1361
|
+
headers=list(response.headers.multi_items()),
|
|
1362
|
+
content=response.content,
|
|
1363
|
+
context={"external": external, "http_version": response.http_version},
|
|
1364
|
+
)
|
|
1365
|
+
|
|
1366
|
+
        def response_mapping(
            req: SDKRequest, next: Callable[[SDKRequest], SDKResponse]
        ) -> SDKResponse:
            resp = next(req)
            headers_map = dict(resp.headers)
            external = bool(resp.context.get("external", False))
            if not external:
                self._rate_limit.update_from_headers(headers_map)

            if req.context.get("safe_follow", False):
                location = headers_map.get("Location") or headers_map.get("location")
                if 300 <= resp.status_code < 400 and location:
                    raise UnsafeUrlError(
                        "Refusing to follow redirect for server-provided URL",
                        url=req.url,
                    )

            if resp.status_code >= 400:
                try:
                    body = json.loads(resp.content) if resp.content else {}
                except Exception:
                    body = {"message": resp.content.decode("utf-8", errors="replace")}

                retry_after = None
                if resp.status_code == 429:
                    header_value = headers_map.get("Retry-After") or headers_map.get("retry-after")
                    if header_value is not None:
                        retry_after = _parse_retry_after(header_value)

                selected_headers = _select_response_headers(headers_map)
                request_id = _extract_request_id(selected_headers)
                if request_id is not None:
                    resp.context["request_id"] = request_id

                diagnostics = ErrorDiagnostics(
                    method=req.method.upper(),
                    url=_redact_url(req.url, config.api_key),
                    request_params=_diagnostic_request_params(req.params),
                    api_version=req.api_version,
                    base_url=config.v1_base_url if req.api_version == "v1" else config.v2_base_url,
                    request_id=request_id,
                    http_version=resp.context.get("http_version"),
                    response_headers=selected_headers,
                    response_body_snippet=str(body)[:512].replace(config.api_key, "[REDACTED]"),
                )
                raise error_from_response(
                    resp.status_code,
                    body,
                    retry_after=retry_after,
                    diagnostics=diagnostics,
                )

            if resp.status_code == 204 or not resp.content:
                resp.json = {}
                return resp

            try:
                payload = json.loads(resp.content)
            except Exception as e:
                raise AffinityError("Expected JSON object/array response") from e

            if isinstance(payload, dict):
                resp.json = payload
                return resp
            if isinstance(payload, list):
                resp.json = {"data": payload}
                return resp
            raise AffinityError("Expected JSON object/array response")

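Every 4xx/5xx is converted into a typed exception via `error_from_response`, carrying the `ErrorDiagnostics` built above (redacted URL, request id, HTTP version, header subset, truncated body snippet). A consumer sketch; exposing the diagnostics as an attribute on the raised error is an assumption here:

```python
from affinity.exceptions import AffinityError  # export path is an assumption

try:
    client.get("/companies/0")  # illustrative path
except AffinityError as exc:
    diag = getattr(exc, "diagnostics", None)  # attribute exposure is an assumption
    if diag is not None:
        print(diag.method, diag.url, diag.request_id, diag.api_version)
```
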
        def cache_middleware(
            req: SDKRequest, next: Callable[[SDKRequest], SDKResponse]
        ) -> SDKResponse:
            cache_key = req.context.get("cache_key")
            if cache_key and self._cache:
                cached = self._cache.get(f"{cache_key}{self._cache_suffix}")
                if cached is not None:
                    return SDKResponse(
                        status_code=200,
                        headers=[],
                        content=b"",
                        json=cached,
                        context={
                            "cache_hit": True,
                            "external": bool(req.context.get("external", False)),
                        },
                    )

            resp = next(req)
            if cache_key and self._cache and isinstance(resp.json, dict):
                self._cache.set(
                    f"{cache_key}{self._cache_suffix}",
                    cast(dict[str, Any], resp.json),
                    req.context.get("cache_ttl"),
                )
            return resp

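A cache hit short-circuits the rest of the chain with a synthesized 200 whose context records `cache_hit: True`; only dict payloads are stored, keyed per base-URL/API-key via `_cache_suffix`. Callers opt in per request, as the `get()` method further down exposes; a sketch with an arbitrary key:

```python
# Second call is served from SimpleCache until the 30s TTL lapses
# (assuming the client was built with enable_cache on).
first = client.get("/lists", cache_key="lists:all", cache_ttl=30.0)
second = client.get("/lists", cache_key="lists:all", cache_ttl=30.0)
```
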
        middlewares: list[Middleware[SDKResponse]] = [
            cast(Middleware[SDKResponse], self._request_id_middleware()),
            cast(Middleware[SDKResponse], self._hooks_middleware()),
            cast(Middleware[SDKResponse], self._write_guard_middleware()),
            cast(Middleware[SDKResponse], self._retry_middleware()),
            cast(Middleware[SDKResponse], self._auth_middleware()),
            cache_middleware,
            response_mapping,
        ]
        return compose(middlewares, terminal)

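Order matters: `_request_id_middleware` runs first and `response_mapping` last, so hooks observe retries, and the cache can answer before auth or I/O happen. `compose` lives in `affinity/clients/pipeline.py` per the file list; a minimal sketch of what such a right-to-left fold typically looks like (the real helper's signature may differ):

```python
from typing import Callable, TypeVar

R = TypeVar("R")

def compose_sketch(middlewares: list, terminal: Callable[..., R]) -> Callable[..., R]:
    handler = terminal
    for mw in reversed(middlewares):  # wrap innermost (last listed) outward
        def make(mw: Callable, nxt: Callable[..., R]) -> Callable[..., R]:
            return lambda req: mw(req, nxt)
        handler = make(mw, handler)
    return handler
```
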
    def _build_raw_buffered_pipeline(self) -> Callable[[SDKRequest], SDKRawResponse]:
        config = self._config
        internal_hosts = {
            urlsplit(config.v1_base_url).netloc,
            urlsplit(config.v2_base_url).netloc,
        }

        def terminal(req: SDKRequest) -> SDKRawResponse:
            external = bool(req.context.get("external", False))
            if config.log_requests and not external:
                logger.debug(f"{req.method} {req.url}")

            request_kwargs: dict[str, Any] = {"follow_redirects": False}
            if req.headers:
                request_kwargs["headers"] = req.headers
            if req.params is not None:
                request_kwargs["params"] = req.params
            if req.files is not None:
                request_kwargs["files"] = req.files
            if req.data is not None:
                request_kwargs["data"] = req.data
            if req.json is not None:
                request_kwargs["json"] = req.json

            timeout = req.context.get("timeout")
            if timeout is not None:
                request_kwargs["timeout"] = timeout
            timeout_seconds = req.context.get("timeout_seconds")
            if timeout_seconds is not None and timeout is None:
                request_kwargs["timeout"] = float(timeout_seconds)

            response = self._client.request(req.method, req.url, **request_kwargs)
            return SDKRawResponse(
                status_code=response.status_code,
                headers=list(response.headers.multi_items()),
                content=response.content,
                context={"external": external, "http_version": response.http_version},
            )

        def raw_response_mapping(
            req: SDKRequest, next: Callable[[SDKRequest], SDKRawResponse]
        ) -> SDKRawResponse:
            resp = next(req)
            headers_map = dict(resp.headers)
            external = bool(resp.context.get("external", False))
            if not external:
                self._rate_limit.update_from_headers(headers_map)

            if resp.status_code >= 400:
                try:
                    body: Any = json.loads(resp.content) if resp.content else {}
                except Exception:
                    body = {
                        "message": _safe_body_preview(
                            resp.content, api_key=config.api_key, external=external
                        )
                    }

                retry_after = None
                if resp.status_code == 429:
                    header_value = headers_map.get("Retry-After") or headers_map.get("retry-after")
                    if header_value is not None:
                        retry_after = _parse_retry_after(header_value)

                selected_headers = _select_response_headers(headers_map)
                request_id = _extract_request_id(selected_headers)
                if request_id is not None:
                    resp.context["request_id"] = request_id

                if external:
                    api_version: Literal["v1", "v2", "external"] = "external"
                    base_url = f"{urlsplit(req.url).scheme}://{urlsplit(req.url).netloc}"
                    redacted_url = _redact_external_url(req.url)
                else:
                    api_version = req.api_version
                    base_url = config.v1_base_url if req.api_version == "v1" else config.v2_base_url
                    redacted_url = _redact_url(req.url, config.api_key)

                diagnostics = ErrorDiagnostics(
                    method=req.method.upper(),
                    url=redacted_url,
                    request_params=_diagnostic_request_params(req.params),
                    api_version=api_version,
                    base_url=base_url,
                    request_id=request_id,
                    http_version=resp.context.get("http_version"),
                    response_headers=selected_headers,
                    response_body_snippet=str(body)[:512].replace(config.api_key, "[REDACTED]"),
                )
                raise error_from_response(
                    resp.status_code,
                    body,
                    retry_after=retry_after,
                    diagnostics=diagnostics,
                )

            return resp

        def redirect_policy(
            req: SDKRequest, next: Callable[[SDKRequest], SDKRawResponse]
        ) -> SDKRawResponse:
            current_req = req
            redirects_followed = 0
            ever_external = bool(current_req.context.get("ever_external", False))

            while True:
                deadline_seconds = current_req.context.get("deadline_seconds")
                if deadline_seconds is not None:
                    started_at = current_req.context.get("started_at") or time.monotonic()
                    if (time.monotonic() - started_at) >= deadline_seconds:
                        raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

                resp = next(current_req)
                if not (300 <= resp.status_code < 400):
                    resp.context["ever_external"] = ever_external
                    return resp

                location = dict(resp.headers).get("Location") or dict(resp.headers).get("location")
                if not location:
                    resp.context["ever_external"] = ever_external
                    return resp

                if redirects_followed >= _MAX_DOWNLOAD_REDIRECTS:
                    raise UnsafeUrlError(
                        "Refusing to follow too many redirects for download",
                        url=_redact_external_url(current_req.url),
                    )

                absolute = str(urljoin(current_req.url, location))
                scheme = urlsplit(absolute).scheme.lower()
                if scheme and scheme not in ("https", "http"):
                    raise UnsafeUrlError("Refusing to follow non-http(s) redirect", url=absolute)
                if scheme == "http" and not config.allow_insecure_download_redirects:
                    raise UnsafeUrlError(
                        "Refusing to follow non-https redirect for download",
                        url=_redact_external_url(absolute),
                    )

                to_host = urlsplit(absolute).netloc
                to_external = to_host not in internal_hosts
                ever_external = ever_external or to_external

                emit = current_req.context.get("emit_event")
                if emit is not None:
                    from_url = _sanitize_hook_url(
                        current_req.url,
                        api_key=config.api_key,
                        external=bool(current_req.context.get("external", False)),
                        external_hook_policy=config.policies.external_hooks,
                    )
                    to_url = _sanitize_hook_url(
                        absolute,
                        api_key=config.api_key,
                        external=to_external,
                        external_hook_policy=config.policies.external_hooks,
                    )
                    if from_url is not None and to_url is not None:
                        emit(
                            RedirectFollowed(
                                client_request_id=current_req.context.get("client_request_id")
                                or "unknown",
                                from_url=from_url,
                                to_url=to_url,
                                hop=redirects_followed + 1,
                                external=to_external,
                            )
                        )

                next_headers = (
                    _strip_credential_headers(current_req.headers)
                    if to_external
                    else list(current_req.headers)
                )
                next_context: RequestContext = cast(RequestContext, dict(current_req.context))
                next_context["external"] = to_external
                next_context["ever_external"] = ever_external
                current_req = replace(
                    current_req, url=absolute, headers=next_headers, context=next_context
                )
                redirects_followed += 1

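Three invariants hold per hop: a budget (`_MAX_DOWNLOAD_REDIRECTS`), https-only unless `allow_insecure_download_redirects` is set, and credential stripping the moment a hop leaves the Affinity hosts. A standalone sketch of the host test with illustrative URLs:

```python
from urllib.parse import urljoin, urlsplit

internal_hosts = {"api.affinity.co", "v2.api.affinity.co"}    # illustrative hosts
start = "https://v2.api.affinity.co/entity-files/1/download"  # illustrative URL
location = "https://files.example-cdn.example/abc"            # illustrative redirect
absolute = urljoin(start, location)
to_external = urlsplit(absolute).netloc not in internal_hosts
# to_external is True, so the next hop must go out without Authorization headers.
```
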
        middlewares: list[Middleware[SDKRawResponse]] = [
            cast(Middleware[SDKRawResponse], self._request_id_middleware()),
            cast(Middleware[SDKRawResponse], self._hooks_middleware()),
            cast(Middleware[SDKRawResponse], self._write_guard_middleware()),
            cast(Middleware[SDKRawResponse], self._retry_middleware()),
            cast(Middleware[SDKRawResponse], self._auth_middleware()),
            redirect_policy,
            raw_response_mapping,
        ]
        return compose(middlewares, terminal)

    def _build_raw_stream_pipeline(self) -> Callable[[SDKRequest], SDKBaseResponse]:
        config = self._config
        internal_hosts = {
            urlsplit(config.v1_base_url).netloc,
            urlsplit(config.v2_base_url).netloc,
        }

        def terminal(req: SDKRequest) -> SDKBaseResponse:
            external = bool(req.context.get("external", False))
            if config.log_requests and not external:
                logger.debug(f"{req.method} {req.url}")

            request_kwargs: dict[str, Any] = {"follow_redirects": False}
            if req.headers:
                request_kwargs["headers"] = req.headers
            if req.params is not None:
                request_kwargs["params"] = req.params

            timeout = req.context.get("timeout")
            if timeout is not None:
                request_kwargs["timeout"] = timeout

            cm = self._client.stream(req.method, req.url, **request_kwargs)
            response = cm.__enter__()
            headers = list(response.headers.multi_items())

            if response.status_code >= 400:
                try:
                    content = response.read()
                finally:
                    cm.__exit__(None, None, None)
                return SDKRawResponse(
                    status_code=response.status_code,
                    headers=headers,
                    content=content,
                    context={"external": external, "http_version": response.http_version},
                )

            request_info = cast(RequestInfo | None, req.context.get("hook_request_info"))
            client_request_id = req.context.get("client_request_id") or "unknown"
            started_at = req.context.get("started_at") or time.monotonic()
            deadline_seconds = req.context.get("deadline_seconds")
            on_progress = cast(ProgressCallback | None, req.context.get("on_progress"))
            emit = cast(Callable[[HookEvent], Any] | None, req.context.get("emit_event"))

            stream = _HTTPXSyncStream(
                context_manager=cm,
                response=response,
                headers=headers,
                request_info=request_info,
                client_request_id=client_request_id,
                external=external,
                started_at=started_at,
                deadline_seconds=deadline_seconds,
                on_progress=on_progress,
                emit_event=emit,
            )
            return SDKRawStreamResponse(
                status_code=response.status_code,
                headers=headers,
                stream=stream,
                context={"external": external, "http_version": response.http_version},
            )

        def raw_response_mapping(
            req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
        ) -> SDKBaseResponse:
            resp = next(req)
            headers_map = dict(resp.headers)
            external = bool(resp.context.get("external", False))
            if not external:
                self._rate_limit.update_from_headers(headers_map)

            if resp.status_code >= 400:
                assert isinstance(resp, SDKRawResponse)
                try:
                    body: Any = json.loads(resp.content) if resp.content else {}
                except Exception:
                    body = {
                        "message": _safe_body_preview(
                            resp.content, api_key=config.api_key, external=external
                        )
                    }

                retry_after = None
                if resp.status_code == 429:
                    header_value = headers_map.get("Retry-After") or headers_map.get("retry-after")
                    if header_value is not None:
                        retry_after = _parse_retry_after(header_value)

                selected_headers = _select_response_headers(headers_map)
                request_id = _extract_request_id(selected_headers)
                if request_id is not None:
                    resp.context["request_id"] = request_id

                if external:
                    api_version: Literal["v1", "v2", "external"] = "external"
                    base_url = f"{urlsplit(req.url).scheme}://{urlsplit(req.url).netloc}"
                    redacted_url = _redact_external_url(req.url)
                else:
                    api_version = req.api_version
                    base_url = config.v1_base_url if req.api_version == "v1" else config.v2_base_url
                    redacted_url = _redact_url(req.url, config.api_key)

                diagnostics = ErrorDiagnostics(
                    method=req.method.upper(),
                    url=redacted_url,
                    request_params=_diagnostic_request_params(req.params),
                    api_version=api_version,
                    base_url=base_url,
                    request_id=request_id,
                    http_version=resp.context.get("http_version"),
                    response_headers=selected_headers,
                    response_body_snippet=str(body)[:512].replace(config.api_key, "[REDACTED]"),
                )
                raise error_from_response(
                    resp.status_code,
                    body,
                    retry_after=retry_after,
                    diagnostics=diagnostics,
                )

            return resp

        def redirect_policy(
            req: SDKRequest, next: Callable[[SDKRequest], SDKBaseResponse]
        ) -> SDKBaseResponse:
            current_req = req
            redirects_followed = 0
            ever_external = bool(current_req.context.get("ever_external", False))

            while True:
                deadline_seconds = current_req.context.get("deadline_seconds")
                if deadline_seconds is not None:
                    started_at = current_req.context.get("started_at") or time.monotonic()
                    if (time.monotonic() - started_at) >= deadline_seconds:
                        raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

                resp = next(current_req)
                if not (300 <= resp.status_code < 400):
                    resp.context["ever_external"] = ever_external
                    return resp

                location = dict(resp.headers).get("Location") or dict(resp.headers).get("location")
                if not location:
                    resp.context["ever_external"] = ever_external
                    return resp

                if redirects_followed >= _MAX_DOWNLOAD_REDIRECTS:
                    raise UnsafeUrlError(
                        "Refusing to follow too many redirects for download",
                        url=_redact_external_url(current_req.url),
                    )

                absolute = str(urljoin(current_req.url, location))
                scheme = urlsplit(absolute).scheme.lower()
                if scheme and scheme not in ("https", "http"):
                    raise UnsafeUrlError("Refusing to follow non-http(s) redirect", url=absolute)
                if scheme == "http" and not config.allow_insecure_download_redirects:
                    raise UnsafeUrlError(
                        "Refusing to follow non-https redirect for download",
                        url=_redact_external_url(absolute),
                    )

                to_host = urlsplit(absolute).netloc
                to_external = to_host not in internal_hosts
                ever_external = ever_external or to_external

                if isinstance(resp, SDKRawStreamResponse):
                    resp.stream.close()

                emit = current_req.context.get("emit_event")
                if emit is not None:
                    from_url = _sanitize_hook_url(
                        current_req.url,
                        api_key=config.api_key,
                        external=bool(current_req.context.get("external", False)),
                        external_hook_policy=config.policies.external_hooks,
                    )
                    to_url = _sanitize_hook_url(
                        absolute,
                        api_key=config.api_key,
                        external=to_external,
                        external_hook_policy=config.policies.external_hooks,
                    )
                    if from_url is not None and to_url is not None:
                        emit(
                            RedirectFollowed(
                                client_request_id=current_req.context.get("client_request_id")
                                or "unknown",
                                from_url=from_url,
                                to_url=to_url,
                                hop=redirects_followed + 1,
                                external=to_external,
                            )
                        )

                next_headers = (
                    _strip_credential_headers(current_req.headers)
                    if to_external
                    else list(current_req.headers)
                )
                next_context: RequestContext = cast(RequestContext, dict(current_req.context))
                next_context["external"] = to_external
                next_context["ever_external"] = ever_external
                current_req = replace(
                    current_req, url=absolute, headers=next_headers, context=next_context
                )
                redirects_followed += 1

        middlewares: list[Middleware[SDKBaseResponse]] = [
            self._request_id_middleware(),
            self._hooks_middleware(),
            self._write_guard_middleware(),
            self._retry_middleware(),
            self._auth_middleware(),
            redirect_policy,
            raw_response_mapping,
        ]
        return compose(middlewares, terminal)

    def close(self) -> None:
        """Close the HTTP client."""
        self._client.close()

    def __enter__(self) -> HTTPClient:
        return self

    def __exit__(self, *args: Any) -> None:
        self.close()

    @property
    def cache(self) -> SimpleCache | None:
        """Access to the cache for invalidation."""
        return self._cache

    @property
    def rate_limit_state(self) -> RateLimitState:
        """Current rate limit state."""
        return self._rate_limit

    @property
    def enable_beta_endpoints(self) -> bool:
        """Whether beta endpoints are enabled for this client."""
        return self._config.enable_beta_endpoints

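`__enter__`/`__exit__` make the sync client a context manager, so the underlying httpx client is always closed. A usage sketch; the `ClientConfig(api_key=...)` constructor shape is an assumption (only the `config` parameter is confirmed by `AsyncHTTPClient.__init__` below), and the path is illustrative:

```python
config = ClientConfig(api_key="my-api-key")  # constructor shape is an assumption
with HTTPClient(config) as client:
    client.get("/auth/whoami", v1=True)      # illustrative endpoint
    print(client.rate_limit_state)           # updated from response headers
# __exit__ has closed the httpx client here.
```
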
    def _build_url(self, path: str, *, v1: bool = False) -> str:
        """Build full URL from path."""
        # V1 paths don't have a /v1 prefix in the base URL, so both API versions
        # join the same way: base URL plus the path with its leading slash stripped.
        base = self._config.v1_base_url if v1 else self._config.v2_base_url
        return f"{base}/{path.lstrip('/')}"

    def _handle_response(
        self,
        response: httpx.Response,
        *,
        method: str,
        url: str,
        v1: bool,
    ) -> dict[str, Any]:
        """Process response and handle errors."""
        # Update rate limit state
        self._rate_limit.update_from_headers(response.headers)

        # Check for errors
        if response.status_code >= 400:
            try:
                body = response.json()
            except Exception:
                body = {"message": response.text}

            retry_after = None
            if response.status_code == 429:
                header_value = response.headers.get("Retry-After")
                if header_value is not None:
                    retry_after = _parse_retry_after(header_value)

            selected_headers = _select_response_headers(response.headers)
            request_id = _extract_request_id(selected_headers)
            diagnostics = ErrorDiagnostics(
                method=method,
                url=_redact_url(url, self._config.api_key),
                api_version="v1" if v1 else "v2",
                base_url=self._config.v1_base_url if v1 else self._config.v2_base_url,
                request_id=request_id,
                http_version=response.http_version,
                response_headers=selected_headers,
                response_body_snippet=str(body)[:512].replace(self._config.api_key, "[REDACTED]"),
            )

            raise error_from_response(
                response.status_code,
                body,
                retry_after=retry_after,
                diagnostics=diagnostics,
            )

        # Empty response (204 No Content, etc.)
        if response.status_code == 204 or not response.content:
            return {}

        payload = response.json()
        if isinstance(payload, dict):
            return cast(dict[str, Any], payload)
        if isinstance(payload, list):
            # Some V1 endpoints return top-level arrays. Normalize into an object
            # wrapper so call sites can consistently access `data`.
            return {"data": payload}
        raise AffinityError("Expected JSON object/array response")

    def _request_with_retry(
        self,
        method: str,
        url: str,
        *,
        v1: bool,
        safe_follow: bool = False,
        write_intent: bool = False,
        cache_key: str | None = None,
        cache_ttl: float | None = None,
        **kwargs: Any,
    ) -> dict[str, Any]:
        headers = kwargs.pop("headers", None) or {}
        params = kwargs.pop("params", None)
        json_payload = kwargs.pop("json", None)
        files = kwargs.pop("files", None)
        data = kwargs.pop("data", None)
        timeout = kwargs.pop("timeout", None)
        if kwargs:
            raise TypeError(f"Unsupported request kwargs: {sorted(kwargs.keys())}")

        context: RequestContext = {}
        if safe_follow:
            context["safe_follow"] = True
        if cache_key is not None:
            context["cache_key"] = cache_key
        if cache_ttl is not None:
            context["cache_ttl"] = float(cache_ttl)
        if timeout is not None:
            if isinstance(timeout, (int, float)):
                context["timeout_seconds"] = float(timeout)
            else:
                raise TypeError("timeout must be float seconds for JSON requests")

        req = SDKRequest(
            method=method.upper(),
            url=url,
            headers=list(headers.items()),
            params=params,
            json=json_payload,
            files=files,
            data=data,
            api_version="v1" if v1 else "v2",
            write_intent=write_intent,
            context=context,
        )
        resp = self._pipeline(req)
        payload = resp.json
        if not isinstance(payload, dict):
            raise AffinityError("Expected JSON object response")
        return cast(dict[str, Any], payload)

    # =========================================================================
    # Public Request Methods
    # =========================================================================

    def get(
        self,
        path: str,
        *,
        params: Mapping[str, Any] | Sequence[tuple[str, Any]] | None = None,
        v1: bool = False,
        cache_key: str | None = None,
        cache_ttl: float | None = None,
    ) -> dict[str, Any]:
        """
        Make a GET request.

        Args:
            path: API path (e.g., "/companies")
            params: Query parameters
            v1: Use V1 API endpoint
            cache_key: If provided, cache the response with this key
            cache_ttl: Cache TTL override

        Returns:
            Parsed JSON response
        """
        url = self._build_url(path, v1=v1)
        encoded_params = _encode_query_params(params)
        return self._request_with_retry(
            "GET",
            url,
            v1=v1,
            params=encoded_params,
            cache_key=cache_key,
            cache_ttl=cache_ttl,
        )

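`params` accepts either a mapping or a `(key, value)` sequence; both are normalized by `_encode_query_params` before hitting the pipeline. An illustrative call (the path and parameter are placeholders, not documented Affinity query names):

```python
page = client.get("/companies", params={"limit": 50})
rows = page.get("data", [])  # list payloads are wrapped under "data" by the pipeline
```
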
    def get_v1_page(
        self,
        path: str,
        *,
        signature: Sequence[tuple[str, str]],
        page_token: str | None = None,
    ) -> dict[str, Any]:
        """
        Fetch a v1 paginated page using a frozen canonical query signature.

        This enforces TR-017a by reusing the exact same query signature across
        pages, varying only the `page_token`.
        """
        params = list(signature)
        if page_token is not None:
            params.append(("page_token", page_token))
        url = self._build_url(path, v1=True)
        return self._request_with_retry("GET", url, v1=True, params=params)

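With the signature frozen, a pagination loop only ever swaps the token. A sketch; the `next_page_token` response key is an assumption about the v1 payload shape:

```python
signature = [("term", "acme")]  # illustrative canonical query
token: str | None = None
while True:
    page = client.get_v1_page("/organizations", signature=signature, page_token=token)
    # ... consume the page's rows here ...
    token = page.get("next_page_token")  # response key is an assumption
    if not token:
        break
```
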
    def get_url(self, url: str) -> dict[str, Any]:
        """
        Make a GET request to a full URL.

        Used for following pagination URLs.
        """
        absolute, is_v1 = _safe_follow_url(
            url,
            v1_base_url=self._config.v1_base_url,
            v2_base_url=self._config.v2_base_url,
        )
        return self._request_with_retry("GET", absolute, v1=is_v1, safe_follow=True)

    def post(
        self,
        path: str,
        *,
        json: Any = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        """Make a POST request."""
        url = self._build_url(path, v1=v1)
        return self._request_with_retry("POST", url, v1=v1, json=json, write_intent=True)

    def put(
        self,
        path: str,
        *,
        json: Any = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        """Make a PUT request."""
        url = self._build_url(path, v1=v1)
        return self._request_with_retry("PUT", url, v1=v1, json=json, write_intent=True)

    def patch(
        self,
        path: str,
        *,
        json: Any = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        """Make a PATCH request."""
        url = self._build_url(path, v1=v1)
        return self._request_with_retry("PATCH", url, v1=v1, json=json, write_intent=True)

    def delete(
        self,
        path: str,
        *,
        params: Mapping[str, Any] | Sequence[tuple[str, Any]] | None = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        """Make a DELETE request."""
        url = self._build_url(path, v1=v1)
        return self._request_with_retry(
            "DELETE",
            url,
            v1=v1,
            params=_encode_query_params(params),
            write_intent=True,
        )

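All four helpers mark `write_intent=True`, the flag the write-guard middleware checks, and route through the same retry/diagnostics pipeline as reads. An illustrative round trip (endpoint and body fields are placeholders, not documented Affinity schemas):

```python
created = client.post("/notes", json={"content": "Intro call went well"}, v1=True)
note_id = created.get("id")  # response shape is an assumption
if note_id is not None:
    client.delete(f"/notes/{note_id}", v1=True)
```
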
    def upload_file(
        self,
        path: str,
        *,
        files: dict[str, Any],
        data: dict[str, Any] | None = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        """Upload files with multipart form data."""
        url = self._build_url(path, v1=v1)

        # Ensure we don't force a Content-Type; httpx must generate multipart boundaries.
        headers = dict(self._client.headers)
        headers.pop("Content-Type", None)
        return self._request_with_retry(
            "POST",
            url,
            v1=v1,
            files=files,
            data=data,
            headers=headers,
            write_intent=True,
        )

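The `files` mapping is handed to httpx unchanged, so the standard httpx multipart shapes apply, including `(filename, fileobj, content_type)` tuples. A sketch with an illustrative endpoint and form field:

```python
with open("deck.pdf", "rb") as fh:
    client.upload_file(
        "/entity-files",  # illustrative path
        files={"file": ("deck.pdf", fh, "application/pdf")},
        data={"organization_id": "123"},  # illustrative form field
        v1=True,
    )
```
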
    def download_file(
        self,
        path: str,
        *,
        v1: bool = False,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> bytes:
        """
        Download file content.

        Notes:
            - The initial Affinity API response may redirect to an external signed URL.
              Redirects are followed without forwarding credentials.
            - Uses the standard retry/diagnostics policy for GET requests.
        """
        if deadline_seconds is not None and deadline_seconds <= 0:
            raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

        url = self._build_url(path, v1=v1)
        context: RequestContext = {}
        if timeout is not None:
            context["timeout"] = timeout
        if deadline_seconds is not None:
            context["deadline_seconds"] = float(deadline_seconds)

        req = SDKRequest(
            method="GET",
            url=url,
            headers=[("Accept", "*/*")],
            api_version="v1" if v1 else "v2",
            write_intent=False,
            context=context,
        )
        resp = self._raw_buffered_pipeline(req)
        return resp.content

    def stream_download(
        self,
        path: str,
        *,
        v1: bool = False,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> Iterator[bytes]:
        """
        Stream-download file content in chunks.

        Notes:
            - The initial Affinity API response may redirect to an external signed URL.
              Redirects are followed without forwarding credentials.
            - External signed URLs are protected via ExternalHookPolicy (redaction by default).
        """
        if deadline_seconds is not None and deadline_seconds <= 0:
            raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

        url = self._build_url(path, v1=v1)
        context: RequestContext = {"streaming": True}
        if timeout is not None:
            context["timeout"] = timeout
        if deadline_seconds is not None:
            context["deadline_seconds"] = float(deadline_seconds)
        if on_progress is not None:
            context["on_progress"] = on_progress

        req = SDKRequest(
            method="GET",
            url=url,
            headers=[("Accept", "*/*")],
            api_version="v1" if v1 else "v2",
            write_intent=False,
            context=context,
        )
        resp = self._raw_stream_pipeline(req)
        if not isinstance(resp, SDKRawStreamResponse):
            return iter(())

        def _iter() -> Iterator[bytes]:
            yield from resp.stream.iter_bytes(chunk_size=chunk_size)

        return _iter()

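A consumption sketch: chunks arrive lazily from `_HTTPXSyncStream`, and the deadline is enforced across redirects and reads. The download path is illustrative:

```python
received = 0
with open("deck.pdf", "wb") as out:
    for chunk in client.stream_download(
        "/entity-files/1/download", v1=True, deadline_seconds=120.0
    ):
        out.write(chunk)
        received += len(chunk)
```
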
    def stream_download_with_info(
        self,
        path: str,
        *,
        v1: bool = False,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> DownloadedFile:
        """
        Stream-download file content and return response metadata (headers/filename/size).

        Notes:
            - The initial Affinity API response may redirect to an external signed URL.
              Redirects are followed without forwarding credentials.
            - External signed URLs are protected via ExternalHookPolicy (redaction by default).
        """
        if deadline_seconds is not None and deadline_seconds <= 0:
            raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

        url = self._build_url(path, v1=v1)
        context: RequestContext = {"streaming": True}
        if timeout is not None:
            context["timeout"] = timeout
        if deadline_seconds is not None:
            context["deadline_seconds"] = float(deadline_seconds)
        if on_progress is not None:
            context["on_progress"] = on_progress

        req = SDKRequest(
            method="GET",
            url=url,
            headers=[("Accept", "*/*")],
            api_version="v1" if v1 else "v2",
            write_intent=False,
            context=context,
        )
        resp = self._raw_stream_pipeline(req)
        if not isinstance(resp, SDKRawStreamResponse):
            info = _download_info_from_headers([])
            return DownloadedFile(
                headers=info["headers"],
                raw_headers=[],
                content_type=info["content_type"],
                filename=info["filename"],
                size=info["size"],
                iter_bytes=iter(()),
            )

        info = _download_info_from_headers(resp.headers)

        def _iter() -> Iterator[bytes]:
            yield from resp.stream.iter_bytes(chunk_size=chunk_size)

        return DownloadedFile(
            headers=info["headers"],
            raw_headers=list(resp.headers),
            content_type=info["content_type"],
            filename=info["filename"],
            size=info["size"],
            iter_bytes=_iter(),
        )

    def wrap_validation_error(
        self,
        error: Exception,
        *,
        context: str | None = None,
    ) -> VersionCompatibilityError:
        """
        Wrap a validation error with version compatibility context.

        TR-015: If expected_v2_version is configured, validation failures
        are wrapped with actionable guidance about checking API version.
        """
        expected = self._config.expected_v2_version
        message = (
            f"Response parsing failed: {error}. "
            "This may indicate a v2 API version mismatch. "
            "Check your API key's Default API Version in the Affinity dashboard."
        )
        if context:
            message = f"[{context}] {message}"
        return VersionCompatibilityError(
            message,
            expected_version=expected,
            parsing_error=str(error),
        )

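A service-layer sketch of the wrapper; the model-parsing call is a hypothetical stand-in for whatever validation the caller performs:

```python
def parse_company_payload(client, payload: dict) -> dict:
    try:
        return validate_company(payload)  # hypothetical validation helper
    except Exception as err:
        raise client.wrap_validation_error(err, context="companies.get") from err
```
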
    @property
    def expected_v2_version(self) -> str | None:
        """Expected V2 API version for diagnostics."""
        return self._config.expected_v2_version


# =============================================================================
# Asynchronous HTTP Client
# =============================================================================


class AsyncHTTPClient:
    """
    Asynchronous HTTP client for Affinity API.

    Same functionality as HTTPClient but with async/await support.
    """

    def __init__(self, config: ClientConfig):
        self._config = config
        self._rate_limit = RateLimitState()
        self._rate_limit_gate = _RateLimitGateAsync()
        self._cache = SimpleCache(config.cache_ttl) if config.enable_cache else None
        self._cache_suffix = _cache_key_suffix(
            self._config.v1_base_url,
            self._config.v2_base_url,
            self._config.api_key,
        )
        self._client: httpx.AsyncClient | None = None
        self._pipeline = self._build_pipeline()
        self._raw_buffered_pipeline = self._build_raw_buffered_pipeline()
        self._raw_stream_pipeline = self._build_raw_stream_pipeline()

    def _request_id_middleware(
        self,
    ) -> AsyncMiddleware[SDKBaseResponse]:
        async def middleware(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            context: RequestContext = cast(RequestContext, dict(req.context))
            client_request_id = context.get("client_request_id")
            if not isinstance(client_request_id, str) or not client_request_id:
                try:
                    client_request_id = uuid.uuid4().hex
                except Exception:
                    client_request_id = "unknown"
                context["client_request_id"] = client_request_id

            headers = list(req.headers)
            if not any(k.lower() == "x-client-request-id" for (k, _v) in headers):
                headers.append(("X-Client-Request-Id", client_request_id))

            return await next(replace(req, headers=headers, context=context))

        return middleware

    def _hooks_middleware(
        self,
    ) -> AsyncMiddleware[SDKBaseResponse]:
        config = self._config

        async def middleware(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            context: RequestContext = cast(RequestContext, dict(req.context))
            started_at = context.get("started_at")
            if started_at is None:
                started_at = time.monotonic()
                context["started_at"] = started_at

            client_request_id = context.get("client_request_id") or "unknown"

            async def emit_event(event: HookEvent) -> None:
                if config.on_event is None:
                    return
                if (
                    getattr(event, "external", False)
                    and config.policies.external_hooks is ExternalHookPolicy.SUPPRESS
                ):
                    return
                try:
                    result = config.on_event(event)
                    if inspect.isawaitable(result):
                        await result
                except Exception:
                    if config.hook_error_policy == "raise":
                        raise
                    logger.warning(
                        "Hook error suppressed (hook_error_policy=swallow)", exc_info=True
                    )

            context["emit_event"] = emit_event

            external = bool(context.get("external", False))
            sanitized_url = _sanitize_hook_url(
                req.url,
                api_key=config.api_key,
                external=external,
                external_hook_policy=config.policies.external_hooks,
            )
            request_info = (
                RequestInfo(
                    method=req.method.upper(),
                    url=sanitized_url,
                    headers=_sanitize_hook_headers(req.headers),
                )
                if sanitized_url is not None
                else None
            )
            context["hook_request_info"] = request_info

            if request_info is not None:
                if config.on_request:
                    config.on_request(request_info)
                await emit_event(
                    RequestStarted(
                        client_request_id=client_request_id,
                        request=request_info,
                        api_version=req.api_version if not external else "external",
                    )
                )

            try:
                resp = await next(replace(req, context=context))
            except asyncio.CancelledError as exc:
                elapsed_ms = (time.monotonic() - started_at) * 1000
                if request_info is not None:
                    await emit_event(
                        RequestFailed(
                            client_request_id=client_request_id,
                            request=request_info,
                            error=exc,
                            elapsed_ms=elapsed_ms,
                            external=external,
                        )
                    )
                if config.on_error and request_info is not None:
                    config.on_error(
                        ErrorInfo(error=exc, elapsed_ms=elapsed_ms, request=request_info)
                    )
                raise
            except Exception as exc:
                elapsed_ms = (time.monotonic() - started_at) * 1000
                if request_info is not None:
                    await emit_event(
                        RequestFailed(
                            client_request_id=client_request_id,
                            request=request_info,
                            error=exc,
                            elapsed_ms=elapsed_ms,
                            external=external,
                        )
                    )
                if config.on_error and request_info is not None:
                    config.on_error(
                        ErrorInfo(error=exc, elapsed_ms=elapsed_ms, request=request_info)
                    )
                raise

            elapsed_ms = (time.monotonic() - started_at) * 1000
            resp.context.setdefault("client_request_id", client_request_id)
            resp.context.setdefault("external", external)
            resp.context.setdefault("elapsed_seconds", elapsed_ms / 1000.0)

            if config.on_response and request_info is not None:
                config.on_response(
                    ResponseInfo(
                        status_code=resp.status_code,
                        headers=dict(resp.headers),
                        elapsed_ms=elapsed_ms,
                        cache_hit=bool(resp.context.get("cache_hit", False)),
                        request=request_info,
                    )
                )

            if request_info is not None:
                await emit_event(
                    ResponseHeadersReceived(
                        client_request_id=client_request_id,
                        request=request_info,
                        status_code=resp.status_code,
                        headers=list(resp.headers),
                        elapsed_ms=elapsed_ms,
                        external=bool(resp.context.get("external", False)),
                        cache_hit=bool(resp.context.get("cache_hit", False)),
                        request_id=resp.context.get("request_id"),
                    )
                )
                if not isinstance(resp, SDKRawStreamResponse):
                    await emit_event(
                        RequestSucceeded(
                            client_request_id=client_request_id,
                            request=request_info,
                            status_code=resp.status_code,
                            elapsed_ms=elapsed_ms,
                            external=bool(resp.context.get("external", False)),
                        )
                    )

            return resp

        return middleware

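`on_event` may be sync or async; the middleware awaits the result only when it is awaitable, and `ExternalHookPolicy.SUPPRESS` drops events for external hops entirely. An observer sketch, assuming only the `client_request_id` field used above:

```python
import logging

async def on_event(event) -> None:
    # RequestStarted, RequestRetrying, RedirectFollowed, RequestFailed, ...
    logging.getLogger("affinity.hooks").info(
        "%s client_request_id=%s",
        type(event).__name__,
        getattr(event, "client_request_id", "?"),
    )
# Wiring this in via ClientConfig(on_event=on_event) is an assumed constructor shape.
```
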
    def _retry_middleware(
        self,
    ) -> AsyncMiddleware[SDKBaseResponse]:
        config = self._config

        async def middleware(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            last_error: Exception | None = None
            for attempt in range(config.max_retries + 1):
                if attempt == 0:
                    throttle_delay = await self._rate_limit_gate.delay()
                    if throttle_delay > 0:
                        await asyncio.sleep(throttle_delay + _throttle_jitter(throttle_delay))
                try:
                    resp = await next(req)
                    resp.context["retry_count"] = attempt
                    return resp
                except (
                    RateLimitError,
                    AffinityError,
                    httpx.TimeoutException,
                    httpx.NetworkError,
                ) as e:
                    outcome = _retry_outcome(
                        method=req.method.upper(),
                        attempt=attempt,
                        max_retries=config.max_retries,
                        retry_delay=config.retry_delay,
                        error=e,
                    )
                    if isinstance(e, RateLimitError):
                        rate_limit_wait = (
                            float(e.retry_after) if e.retry_after is not None else outcome.wait_time
                        )
                        if rate_limit_wait is not None:
                            await self._rate_limit_gate.note(rate_limit_wait)
                    if outcome.action == "raise":
                        raise
                    if outcome.action == "raise_wrapped":
                        assert outcome.wrapped_error is not None
                        raise outcome.wrapped_error from e

                    assert outcome.last_error is not None
                    last_error = outcome.last_error
                    if outcome.action == "break":
                        break

                    assert outcome.wait_time is not None
                    emit = req.context.get("emit_event")
                    request_info = cast(RequestInfo | None, req.context.get("hook_request_info"))
                    if emit is not None and request_info is not None:
                        await cast(Callable[[HookEvent], Awaitable[None]], emit)(
                            RequestRetrying(
                                client_request_id=req.context.get("client_request_id") or "unknown",
                                request=request_info,
                                attempt=attempt + 1,
                                wait_seconds=outcome.wait_time,
                                reason=outcome.log_message or type(e).__name__,
                            )
                        )

                    if outcome.log_message:
                        logger.warning(outcome.log_message)
                    await asyncio.sleep(outcome.wait_time)

            if last_error:
                raise last_error
            raise AffinityError("Request failed after retries")

        return middleware

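The gate is exercised only through `delay()` (how long to hold new work) and `note(seconds)` (record a server-mandated wait), which is why attempt 0 sleeps before sending when a 429 was seen recently. The real `_RateLimitGateAsync` is not shown in this hunk; a minimal sketch honoring that contract, with assumed internals:

```python
import asyncio
import time

class RateLimitGateSketch:
    """Shared cool-down: note() records a wait, delay() reports what remains."""

    def __init__(self) -> None:
        self._until = 0.0
        self._lock = asyncio.Lock()

    async def note(self, wait_seconds: float) -> None:
        async with self._lock:
            self._until = max(self._until, time.monotonic() + wait_seconds)

    async def delay(self) -> float:
        async with self._lock:
            return max(0.0, self._until - time.monotonic())
```
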
    def _auth_middleware(
        self,
    ) -> AsyncMiddleware[SDKBaseResponse]:
        config = self._config

        async def middleware(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            headers = [(k, v) for (k, v) in req.headers if k.lower() != "authorization"]
            if req.api_version == "v1" and config.v1_auth_mode == "basic":
                token = base64.b64encode(f":{config.api_key}".encode()).decode("ascii")
                headers.append(("Authorization", f"Basic {token}"))
            else:
                headers.append(("Authorization", f"Bearer {config.api_key}"))
            return await next(replace(req, headers=headers))

        return middleware

    def _write_guard_middleware(
        self,
    ) -> AsyncMiddleware[SDKBaseResponse]:
        config = self._config

        async def middleware(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            if config.policies.write is WritePolicy.DENY and req.write_intent:
                method_upper = req.method.upper()
                raise WriteNotAllowedError(
                    f"Cannot {method_upper} while writes are disabled by policy",
                    method=method_upper,
                    url=_redact_url(req.url, config.api_key),
                )
            return await next(req)

        return middleware

def _build_pipeline(self) -> Callable[[SDKRequest], Awaitable[SDKResponse]]:
|
|
2659
|
+
config = self._config
|
|
2660
|
+
|
|
2661
|
+
async def terminal(req: SDKRequest) -> SDKResponse:
|
|
2662
|
+
external = bool(req.context.get("external", False))
|
|
2663
|
+
if config.log_requests and not external:
|
|
2664
|
+
logger.debug(f"{req.method} {req.url}")
|
|
2665
|
+
|
|
2666
|
+
request_kwargs: dict[str, Any] = {}
|
|
2667
|
+
if req.headers:
|
|
2668
|
+
request_kwargs["headers"] = req.headers
|
|
2669
|
+
if req.params is not None:
|
|
2670
|
+
request_kwargs["params"] = req.params
|
|
2671
|
+
if req.json is not None:
|
|
2672
|
+
request_kwargs["json"] = req.json
|
|
2673
|
+
if req.files is not None:
|
|
2674
|
+
request_kwargs["files"] = req.files
|
|
2675
|
+
if req.data is not None:
|
|
2676
|
+
request_kwargs["data"] = req.data
|
|
2677
|
+
|
|
2678
|
+
timeout_seconds = req.context.get("timeout_seconds")
|
|
2679
|
+
if timeout_seconds is not None:
|
|
2680
|
+
request_kwargs["timeout"] = float(timeout_seconds)
|
|
2681
|
+
|
|
2682
|
+
client = await self._get_client()
|
|
2683
|
+
response = await client.request(req.method, req.url, **request_kwargs)
|
|
2684
|
+
return SDKResponse(
|
|
2685
|
+
status_code=response.status_code,
|
|
2686
|
+
headers=list(response.headers.multi_items()),
|
|
2687
|
+
content=response.content,
|
|
2688
|
+
context={"external": external, "http_version": response.http_version},
|
|
2689
|
+
)
|
|
2690
|
+
|
|
2691
|
+
async def response_mapping(
|
|
2692
|
+
req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKResponse]]
|
|
2693
|
+
) -> SDKResponse:
|
|
2694
|
+
resp = await next(req)
|
|
2695
|
+
headers_map = dict(resp.headers)
|
|
2696
|
+
external = bool(resp.context.get("external", False))
|
|
2697
|
+
if not external:
|
|
2698
|
+
self._rate_limit.update_from_headers(headers_map)
|
|
2699
|
+
|
|
2700
|
+
if req.context.get("safe_follow", False):
|
|
2701
|
+
location = headers_map.get("Location") or headers_map.get("location")
|
|
2702
|
+
if 300 <= resp.status_code < 400 and location:
|
|
2703
|
+
raise UnsafeUrlError(
|
|
2704
|
+
"Refusing to follow redirect for server-provided URL",
|
|
2705
|
+
url=req.url,
|
|
2706
|
+
)
|
|
2707
|
+
|
|
2708
|
+
if resp.status_code >= 400:
|
|
2709
|
+
try:
|
|
2710
|
+
body = json.loads(resp.content) if resp.content else {}
|
|
2711
|
+
except Exception:
|
|
2712
|
+
body = {"message": resp.content.decode("utf-8", errors="replace")}
|
|
2713
|
+
|
|
2714
|
+
retry_after = None
|
|
2715
|
+
if resp.status_code == 429:
|
|
2716
|
+
header_value = headers_map.get("Retry-After") or headers_map.get("retry-after")
|
|
2717
|
+
if header_value is not None:
|
|
2718
|
+
retry_after = _parse_retry_after(header_value)
|
|
2719
|
+
|
|
2720
|
+
selected_headers = _select_response_headers(headers_map)
|
|
2721
|
+
request_id = _extract_request_id(selected_headers)
|
|
2722
|
+
if request_id is not None:
|
|
2723
|
+
resp.context["request_id"] = request_id
|
|
2724
|
+
|
|
2725
|
+
diagnostics = ErrorDiagnostics(
|
|
2726
|
+
method=req.method.upper(),
|
|
2727
|
+
url=_redact_url(req.url, config.api_key),
|
|
2728
|
+
request_params=_diagnostic_request_params(req.params),
|
|
2729
|
+
api_version=req.api_version,
|
|
2730
|
+
base_url=config.v1_base_url if req.api_version == "v1" else config.v2_base_url,
|
|
2731
|
+
request_id=request_id,
|
|
2732
|
+
http_version=resp.context.get("http_version"),
|
|
2733
|
+
response_headers=selected_headers,
|
|
2734
|
+
response_body_snippet=str(body)[:512].replace(config.api_key, "[REDACTED]"),
|
|
2735
|
+
)
|
|
2736
|
+
raise error_from_response(
|
|
2737
|
+
resp.status_code,
|
|
2738
|
+
body,
|
|
2739
|
+
retry_after=retry_after,
|
|
2740
|
+
diagnostics=diagnostics,
|
|
2741
|
+
)
|
|
2742
|
+
|
|
2743
|
+
if resp.status_code == 204 or not resp.content:
|
|
2744
|
+
resp.json = {}
|
|
2745
|
+
return resp
|
|
2746
|
+
|
|
2747
|
+
try:
|
|
2748
|
+
payload = json.loads(resp.content)
|
|
2749
|
+
except Exception as e:
|
|
2750
|
+
raise AffinityError("Expected JSON object/array response") from e
|
|
2751
|
+
|
|
2752
|
+
if isinstance(payload, dict):
|
|
2753
|
+
resp.json = payload
|
|
2754
|
+
return resp
|
|
2755
|
+
if isinstance(payload, list):
|
|
2756
|
+
resp.json = {"data": payload}
|
|
2757
|
+
return resp
|
|
2758
|
+
raise AffinityError("Expected JSON object/array response")

        async def cache_middleware(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKResponse]]
        ) -> SDKResponse:
            cache_key = req.context.get("cache_key")
            if cache_key and self._cache:
                cached = self._cache.get(f"{cache_key}{self._cache_suffix}")
                if cached is not None:
                    return SDKResponse(
                        status_code=200,
                        headers=[],
                        content=b"",
                        json=cached,
                        context={
                            "cache_hit": True,
                            "external": bool(req.context.get("external", False)),
                        },
                    )

            resp = await next(req)
            if cache_key and self._cache and isinstance(resp.json, dict):
                self._cache.set(
                    f"{cache_key}{self._cache_suffix}",
                    cast(dict[str, Any], resp.json),
                    req.context.get("cache_ttl"),
                )
            return resp
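
        # cache_middleware is driven entirely by the request context. A
        # minimal sketch of how a caller is assumed to opt in (key and TTL
        # values hypothetical):
        #
        #     req.context["cache_key"] = "GET:/v2/companies/123"
        #     req.context["cache_ttl"] = 60.0  # seconds, forwarded to SimpleCache.set
        #
        # Requests without a "cache_key" bypass the cache entirely.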

        middlewares: list[AsyncMiddleware[SDKResponse]] = [
            cast(AsyncMiddleware[SDKResponse], self._request_id_middleware()),
            cast(AsyncMiddleware[SDKResponse], self._hooks_middleware()),
            cast(AsyncMiddleware[SDKResponse], self._write_guard_middleware()),
            cast(AsyncMiddleware[SDKResponse], self._retry_middleware()),
            cast(AsyncMiddleware[SDKResponse], self._auth_middleware()),
            cache_middleware,
            response_mapping,
        ]
        return compose_async(middlewares, terminal)
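
    # compose_async is assumed to fold the middleware list so the first entry
    # observes the request first and the response last. A minimal sketch of a
    # custom middleware that fits the (req, next) contract used above
    # (hypothetical, for illustration):
    #
    #     async def logging_middleware(req, next):
    #         logger.debug(f"-> {req.method} {req.url}")
    #         resp = await next(req)
    #         logger.debug(f"<- {resp.status_code}")
    #         return resp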

    def _build_raw_buffered_pipeline(self) -> Callable[[SDKRequest], Awaitable[SDKRawResponse]]:
        config = self._config
        internal_hosts = {
            urlsplit(config.v1_base_url).netloc,
            urlsplit(config.v2_base_url).netloc,
        }

        async def terminal(req: SDKRequest) -> SDKRawResponse:
            external = bool(req.context.get("external", False))
            if config.log_requests and not external:
                logger.debug(f"{req.method} {req.url}")

            request_kwargs: dict[str, Any] = {"follow_redirects": False}
            if req.headers:
                request_kwargs["headers"] = req.headers
            if req.params is not None:
                request_kwargs["params"] = req.params
            if req.files is not None:
                request_kwargs["files"] = req.files
            if req.data is not None:
                request_kwargs["data"] = req.data
            if req.json is not None:
                request_kwargs["json"] = req.json

            timeout = req.context.get("timeout")
            if timeout is not None:
                request_kwargs["timeout"] = timeout
            timeout_seconds = req.context.get("timeout_seconds")
            if timeout_seconds is not None and timeout is None:
                request_kwargs["timeout"] = float(timeout_seconds)

            client = await self._get_client()
            response = await client.request(req.method, req.url, **request_kwargs)
            return SDKRawResponse(
                status_code=response.status_code,
                headers=list(response.headers.multi_items()),
                content=response.content,
                context={"external": external, "http_version": response.http_version},
            )
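
        # Timeout precedence in the terminal above: an explicit "timeout"
        # context value (httpx.Timeout or float) wins; "timeout_seconds" is
        # consulted only when no "timeout" is present. Sketch (values
        # hypothetical):
        #
        #     req.context["timeout"] = httpx.Timeout(30.0, connect=5.0)  # used as-is
        #     req.context["timeout_seconds"] = 10  # ignored while "timeout" is set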

        async def raw_response_mapping(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKRawResponse]]
        ) -> SDKRawResponse:
            resp = await next(req)
            headers_map = dict(resp.headers)
            external = bool(resp.context.get("external", False))
            if not external:
                self._rate_limit.update_from_headers(headers_map)

            if resp.status_code >= 400:
                try:
                    body: Any = json.loads(resp.content) if resp.content else {}
                except Exception:
                    body = {
                        "message": _safe_body_preview(
                            resp.content, api_key=config.api_key, external=external
                        )
                    }

                retry_after = None
                if resp.status_code == 429:
                    header_value = headers_map.get("Retry-After") or headers_map.get("retry-after")
                    if header_value is not None:
                        retry_after = _parse_retry_after(header_value)

                selected_headers = _select_response_headers(headers_map)
                request_id = _extract_request_id(selected_headers)
                if request_id is not None:
                    resp.context["request_id"] = request_id

                if external:
                    api_version: Literal["v1", "v2", "external"] = "external"
                    base_url = f"{urlsplit(req.url).scheme}://{urlsplit(req.url).netloc}"
                    redacted_url = _redact_external_url(req.url)
                else:
                    api_version = req.api_version
                    base_url = config.v1_base_url if req.api_version == "v1" else config.v2_base_url
                    redacted_url = _redact_url(req.url, config.api_key)

                diagnostics = ErrorDiagnostics(
                    method=req.method.upper(),
                    url=redacted_url,
                    request_params=_diagnostic_request_params(req.params),
                    api_version=api_version,
                    base_url=base_url,
                    request_id=request_id,
                    http_version=resp.context.get("http_version"),
                    response_headers=selected_headers,
                    response_body_snippet=str(body)[:512].replace(config.api_key, "[REDACTED]"),
                )
                raise error_from_response(
                    resp.status_code,
                    body,
                    retry_after=retry_after,
                    diagnostics=diagnostics,
                )

            return resp

        async def redirect_policy(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKRawResponse]]
        ) -> SDKRawResponse:
            current_req = req
            redirects_followed = 0
            ever_external = bool(current_req.context.get("ever_external", False))

            while True:
                deadline_seconds = current_req.context.get("deadline_seconds")
                if deadline_seconds is not None:
                    started_at = current_req.context.get("started_at") or time.monotonic()
                    if (time.monotonic() - started_at) >= deadline_seconds:
                        raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

                resp = await next(current_req)
                if not (300 <= resp.status_code < 400):
                    resp.context["ever_external"] = ever_external
                    return resp

                headers_dict = dict(resp.headers)
                location = headers_dict.get("Location") or headers_dict.get("location")
                if not location:
                    resp.context["ever_external"] = ever_external
                    return resp

                if redirects_followed >= _MAX_DOWNLOAD_REDIRECTS:
                    raise UnsafeUrlError(
                        "Refusing to follow too many redirects for download",
                        url=_redact_external_url(current_req.url),
                    )

                absolute = str(urljoin(current_req.url, location))
                scheme = urlsplit(absolute).scheme.lower()
                if scheme and scheme not in ("https", "http"):
                    raise UnsafeUrlError("Refusing to follow non-http(s) redirect", url=absolute)
                if scheme == "http" and not config.allow_insecure_download_redirects:
                    raise UnsafeUrlError(
                        "Refusing to follow non-https redirect for download",
                        url=_redact_external_url(absolute),
                    )

                to_host = urlsplit(absolute).netloc
                to_external = to_host not in internal_hosts
                ever_external = ever_external or to_external

                emit = current_req.context.get("emit_event")
                if emit is not None:
                    from_url = _sanitize_hook_url(
                        current_req.url,
                        api_key=config.api_key,
                        external=bool(current_req.context.get("external", False)),
                        external_hook_policy=config.policies.external_hooks,
                    )
                    to_url = _sanitize_hook_url(
                        absolute,
                        api_key=config.api_key,
                        external=to_external,
                        external_hook_policy=config.policies.external_hooks,
                    )
                    if from_url is not None and to_url is not None:
                        await cast(Callable[[HookEvent], Awaitable[None]], emit)(
                            RedirectFollowed(
                                client_request_id=current_req.context.get("client_request_id")
                                or "unknown",
                                from_url=from_url,
                                to_url=to_url,
                                hop=redirects_followed + 1,
                                external=to_external,
                            )
                        )

                next_headers = (
                    _strip_credential_headers(current_req.headers)
                    if to_external
                    else list(current_req.headers)
                )
                next_context: RequestContext = cast(RequestContext, dict(current_req.context))
                next_context["external"] = to_external
                next_context["ever_external"] = ever_external
                current_req = replace(
                    current_req, url=absolute, headers=next_headers, context=next_context
                )
                redirects_followed += 1
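
        # When a hop leaves the Affinity hosts, the loop above swaps in
        # _strip_credential_headers(...) so the API key is never forwarded to
        # the external host. Sketch of the assumed effect (header values
        # hypothetical):
        #
        #     [("Authorization", "Bearer ..."), ("Accept", "*/*")]
        #         -> [("Accept", "*/*")]  # credentials dropped for external hosts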

        middlewares: list[AsyncMiddleware[SDKRawResponse]] = [
            cast(AsyncMiddleware[SDKRawResponse], self._request_id_middleware()),
            cast(AsyncMiddleware[SDKRawResponse], self._hooks_middleware()),
            cast(AsyncMiddleware[SDKRawResponse], self._write_guard_middleware()),
            cast(AsyncMiddleware[SDKRawResponse], self._retry_middleware()),
            cast(AsyncMiddleware[SDKRawResponse], self._auth_middleware()),
            redirect_policy,
            raw_response_mapping,
        ]
        return compose_async(middlewares, terminal)

    def _build_raw_stream_pipeline(self) -> Callable[[SDKRequest], Awaitable[SDKBaseResponse]]:
        config = self._config
        internal_hosts = {
            urlsplit(config.v1_base_url).netloc,
            urlsplit(config.v2_base_url).netloc,
        }

        async def terminal(req: SDKRequest) -> SDKBaseResponse:
            external = bool(req.context.get("external", False))
            if config.log_requests and not external:
                logger.debug(f"{req.method} {req.url}")

            request_kwargs: dict[str, Any] = {"follow_redirects": False}
            if req.headers:
                request_kwargs["headers"] = req.headers
            if req.params is not None:
                request_kwargs["params"] = req.params

            timeout = req.context.get("timeout")
            if timeout is not None:
                request_kwargs["timeout"] = timeout

            client = await self._get_client()
            cm = client.stream(req.method, req.url, **request_kwargs)
            response = await cm.__aenter__()
            headers = list(response.headers.multi_items())

            if response.status_code >= 400:
                try:
                    content = await response.aread()
                finally:
                    await cm.__aexit__(None, None, None)
                return SDKRawResponse(
                    status_code=response.status_code,
                    headers=headers,
                    content=content,
                    context={"external": external, "http_version": response.http_version},
                )

            request_info = cast(RequestInfo | None, req.context.get("hook_request_info"))
            client_request_id = req.context.get("client_request_id") or "unknown"
            started_at = req.context.get("started_at") or time.monotonic()
            deadline_seconds = req.context.get("deadline_seconds")
            on_progress = cast(ProgressCallback | None, req.context.get("on_progress"))
            emit = cast(Callable[[HookEvent], Any] | None, req.context.get("emit_event"))

            stream = _HTTPXAsyncStream(
                context_manager=cm,
                response=response,
                headers=headers,
                request_info=request_info,
                client_request_id=client_request_id,
                external=external,
                started_at=started_at,
                deadline_seconds=deadline_seconds,
                on_progress=on_progress,
                emit_event=emit,
            )
            return SDKRawStreamResponse(
                status_code=response.status_code,
                headers=headers,
                stream=stream,
                context={"external": external, "http_version": response.http_version},
            )
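
        # The terminal drives the httpx stream context manager by hand
        # (cm.__aenter__/__aexit__) rather than `async with` because the body
        # must outlive this function: _HTTPXAsyncStream is assumed to invoke
        # cm.__aexit__ once the caller closes or exhausts the stream. The
        # short-lived equivalent, for contrast, would buffer eagerly:
        #
        #     async with client.stream(req.method, req.url) as response:
        #         content = await response.aread()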

        async def raw_response_mapping(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            resp = await next(req)
            headers_map = dict(resp.headers)
            external = bool(resp.context.get("external", False))
            if not external:
                self._rate_limit.update_from_headers(headers_map)

            if resp.status_code >= 400:
                assert isinstance(resp, SDKRawResponse)
                try:
                    body: Any = json.loads(resp.content) if resp.content else {}
                except Exception:
                    body = {
                        "message": _safe_body_preview(
                            resp.content, api_key=config.api_key, external=external
                        )
                    }

                retry_after = None
                if resp.status_code == 429:
                    header_value = headers_map.get("Retry-After") or headers_map.get("retry-after")
                    if header_value is not None:
                        retry_after = _parse_retry_after(header_value)

                selected_headers = _select_response_headers(headers_map)
                request_id = _extract_request_id(selected_headers)
                if request_id is not None:
                    resp.context["request_id"] = request_id

                if external:
                    api_version: Literal["v1", "v2", "external"] = "external"
                    base_url = f"{urlsplit(req.url).scheme}://{urlsplit(req.url).netloc}"
                    redacted_url = _redact_external_url(req.url)
                else:
                    api_version = req.api_version
                    base_url = config.v1_base_url if req.api_version == "v1" else config.v2_base_url
                    redacted_url = _redact_url(req.url, config.api_key)

                diagnostics = ErrorDiagnostics(
                    method=req.method.upper(),
                    url=redacted_url,
                    request_params=_diagnostic_request_params(req.params),
                    api_version=api_version,
                    base_url=base_url,
                    request_id=request_id,
                    http_version=resp.context.get("http_version"),
                    response_headers=selected_headers,
                    response_body_snippet=str(body)[:512].replace(config.api_key, "[REDACTED]"),
                )
                raise error_from_response(
                    resp.status_code,
                    body,
                    retry_after=retry_after,
                    diagnostics=diagnostics,
                )

            return resp

        async def redirect_policy(
            req: SDKRequest, next: Callable[[SDKRequest], Awaitable[SDKBaseResponse]]
        ) -> SDKBaseResponse:
            current_req = req
            redirects_followed = 0
            ever_external = bool(current_req.context.get("ever_external", False))

            while True:
                deadline_seconds = current_req.context.get("deadline_seconds")
                if deadline_seconds is not None:
                    started_at = current_req.context.get("started_at") or time.monotonic()
                    if (time.monotonic() - started_at) >= deadline_seconds:
                        raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

                resp = await next(current_req)
                if not (300 <= resp.status_code < 400):
                    resp.context["ever_external"] = ever_external
                    return resp

                headers_dict = dict(resp.headers)
                location = headers_dict.get("Location") or headers_dict.get("location")
                if not location:
                    resp.context["ever_external"] = ever_external
                    return resp

                if redirects_followed >= _MAX_DOWNLOAD_REDIRECTS:
                    raise UnsafeUrlError(
                        "Refusing to follow too many redirects for download",
                        url=_redact_external_url(current_req.url),
                    )

                absolute = str(urljoin(current_req.url, location))
                scheme = urlsplit(absolute).scheme.lower()
                if scheme and scheme not in ("https", "http"):
                    raise UnsafeUrlError("Refusing to follow non-http(s) redirect", url=absolute)
                if scheme == "http" and not config.allow_insecure_download_redirects:
                    raise UnsafeUrlError(
                        "Refusing to follow non-https redirect for download",
                        url=_redact_external_url(absolute),
                    )

                to_host = urlsplit(absolute).netloc
                to_external = to_host not in internal_hosts
                ever_external = ever_external or to_external

                if isinstance(resp, SDKRawStreamResponse):
                    await cast(_HTTPXAsyncStream, resp.stream).aclose()

                emit = current_req.context.get("emit_event")
                if emit is not None:
                    from_url = _sanitize_hook_url(
                        current_req.url,
                        api_key=config.api_key,
                        external=bool(current_req.context.get("external", False)),
                        external_hook_policy=config.policies.external_hooks,
                    )
                    to_url = _sanitize_hook_url(
                        absolute,
                        api_key=config.api_key,
                        external=to_external,
                        external_hook_policy=config.policies.external_hooks,
                    )
                    if from_url is not None and to_url is not None:
                        await cast(Callable[[HookEvent], Awaitable[None]], emit)(
                            RedirectFollowed(
                                client_request_id=current_req.context.get("client_request_id")
                                or "unknown",
                                from_url=from_url,
                                to_url=to_url,
                                hop=redirects_followed + 1,
                                external=to_external,
                            )
                        )

                next_headers = (
                    _strip_credential_headers(current_req.headers)
                    if to_external
                    else list(current_req.headers)
                )
                next_context: RequestContext = cast(RequestContext, dict(current_req.context))
                next_context["external"] = to_external
                next_context["ever_external"] = ever_external
                current_req = replace(
                    current_req, url=absolute, headers=next_headers, context=next_context
                )
                redirects_followed += 1

        middlewares: list[AsyncMiddleware[SDKBaseResponse]] = [
            self._request_id_middleware(),
            self._hooks_middleware(),
            self._write_guard_middleware(),
            self._retry_middleware(),
            self._auth_middleware(),
            redirect_policy,
            raw_response_mapping,
        ]
        return compose_async(middlewares, terminal)

    async def _get_client(self) -> httpx.AsyncClient:
        """Lazy initialization of async client."""
        if self._client is None:
            self._client = httpx.AsyncClient(
                http2=self._config.http2,
                timeout=self._config.timeout,
                limits=self._config.limits,
                transport=self._config.async_transport,
                headers=dict(_DEFAULT_HEADERS),
            )
        return self._client

    async def close(self) -> None:
        """Close the HTTP client."""
        if self._client:
            await self._client.aclose()
            self._client = None

    async def __aenter__(self) -> AsyncHTTPClient:
        return self

    async def __aexit__(self, *args: Any) -> None:
        await self.close()
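
    # Usage sketch: the client is an async context manager, so the lazily
    # created httpx.AsyncClient is closed deterministically (constructor
    # arguments and path hypothetical):
    #
    #     async with AsyncHTTPClient(config) as http:
    #         data = await http.get("/companies")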

    @property
    def cache(self) -> SimpleCache | None:
        return self._cache

    @property
    def rate_limit_state(self) -> RateLimitState:
        return self._rate_limit

    @property
    def enable_beta_endpoints(self) -> bool:
        return self._config.enable_beta_endpoints

    def _build_url(self, path: str, *, v1: bool = False) -> str:
        base = self._config.v1_base_url if v1 else self._config.v2_base_url
        return f"{base}/{path.lstrip('/')}"

    def _handle_response(
        self,
        response: httpx.Response,
        *,
        method: str,
        url: str,
        v1: bool,
    ) -> dict[str, Any]:
        self._rate_limit.update_from_headers(response.headers)

        if response.status_code >= 400:
            try:
                body = response.json()
            except Exception:
                body = {"message": response.text}

            retry_after = None
            if response.status_code == 429:
                header_value = response.headers.get("Retry-After")
                if header_value is not None:
                    retry_after = _parse_retry_after(header_value)

            selected_headers = _select_response_headers(response.headers)
            request_id = _extract_request_id(selected_headers)
            diagnostics = ErrorDiagnostics(
                method=method,
                url=_redact_url(url, self._config.api_key),
                api_version="v1" if v1 else "v2",
                base_url=self._config.v1_base_url if v1 else self._config.v2_base_url,
                request_id=request_id,
                http_version=response.http_version,
                response_headers=selected_headers,
                response_body_snippet=str(body)[:512].replace(self._config.api_key, "[REDACTED]"),
            )

            raise error_from_response(
                response.status_code,
                body,
                retry_after=retry_after,
                diagnostics=diagnostics,
            )

        if response.status_code == 204 or not response.content:
            return {}

        payload = response.json()
        if isinstance(payload, dict):
            return cast(dict[str, Any], payload)
        if isinstance(payload, list):
            return {"data": payload}
        raise AffinityError("Expected JSON object/array response")

    async def _request_with_retry(
        self,
        method: str,
        url: str,
        *,
        v1: bool,
        safe_follow: bool = False,
        write_intent: bool = False,
        cache_key: str | None = None,
        cache_ttl: float | None = None,
        **kwargs: Any,
    ) -> dict[str, Any]:
        headers = kwargs.pop("headers", None) or {}
        params = kwargs.pop("params", None)
        json_payload = kwargs.pop("json", None)
        files = kwargs.pop("files", None)
        data = kwargs.pop("data", None)
        timeout = kwargs.pop("timeout", None)
        if kwargs:
            raise TypeError(f"Unsupported request kwargs: {sorted(kwargs.keys())}")

        context: RequestContext = {}
        if safe_follow:
            context["safe_follow"] = True
        if cache_key is not None:
            context["cache_key"] = cache_key
        if cache_ttl is not None:
            context["cache_ttl"] = float(cache_ttl)
        if timeout is not None:
            if isinstance(timeout, (int, float)):
                context["timeout_seconds"] = float(timeout)
            else:
                raise TypeError("timeout must be float seconds for JSON requests")

        req = SDKRequest(
            method=method.upper(),
            url=url,
            headers=list(headers.items()),
            params=params,
            json=json_payload,
            files=files,
            data=data,
            api_version="v1" if v1 else "v2",
            write_intent=write_intent,
            context=context,
        )
        resp = await self._pipeline(req)
        payload = resp.json
        if not isinstance(payload, dict):
            raise AffinityError("Expected JSON object response")
        return cast(dict[str, Any], payload)
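
    # _request_with_retry translates keyword options into the request context
    # consumed by the pipeline middlewares above. Sketch of the mapping
    # (values hypothetical):
    #
    #     safe_follow=True  -> context["safe_follow"] = True  # 3xx raises UnsafeUrlError
    #     cache_key="k"     -> context["cache_key"] = "k"     # read by cache_middleware
    #     cache_ttl=60      -> context["cache_ttl"] = 60.0
    #     timeout=5.0       -> context["timeout_seconds"] = 5.0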

    # =========================================================================
    # Public Request Methods
    # =========================================================================

    async def get(
        self,
        path: str,
        *,
        params: Mapping[str, Any] | Sequence[tuple[str, Any]] | None = None,
        v1: bool = False,
        cache_key: str | None = None,
        cache_ttl: float | None = None,
    ) -> dict[str, Any]:
        url = self._build_url(path, v1=v1)
        encoded_params = _encode_query_params(params)
        return await self._request_with_retry(
            "GET",
            url,
            v1=v1,
            params=encoded_params,
            cache_key=cache_key,
            cache_ttl=cache_ttl,
        )
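
    # Usage sketch for a cached GET (path, key, and TTL hypothetical):
    #
    #     data = await client.get(
    #         "/lists",
    #         cache_key="GET:/v2/lists",
    #         cache_ttl=30.0,
    #     )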

    async def get_v1_page(
        self,
        path: str,
        *,
        signature: Sequence[tuple[str, str]],
        page_token: str | None = None,
    ) -> dict[str, Any]:
        """Async version of the synchronous client's `get_v1_page()`."""
        params = list(signature)
        if page_token is not None:
            params.append(("page_token", page_token))
        url = self._build_url(path, v1=True)
        return await self._request_with_retry("GET", url, v1=True, params=params)
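
    # Paging sketch: the v1 API is assumed to return a continuation token in
    # the body (field name illustrative) that is fed back as page_token until
    # exhausted:
    #
    #     token: str | None = None
    #     while True:
    #         page = await client.get_v1_page(
    #             "/notes", signature=[("person_id", "42")], page_token=token
    #         )
    #         token = page.get("next_page_token")
    #         if not token:
    #             break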

    async def get_url(self, url: str) -> dict[str, Any]:
        absolute, is_v1 = _safe_follow_url(
            url,
            v1_base_url=self._config.v1_base_url,
            v2_base_url=self._config.v2_base_url,
        )
        return await self._request_with_retry(
            "GET",
            absolute,
            v1=is_v1,
            safe_follow=True,
        )

    async def post(
        self,
        path: str,
        *,
        json: Any = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        url = self._build_url(path, v1=v1)
        return await self._request_with_retry("POST", url, v1=v1, json=json, write_intent=True)

    async def put(
        self,
        path: str,
        *,
        json: Any = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        url = self._build_url(path, v1=v1)
        return await self._request_with_retry("PUT", url, v1=v1, json=json, write_intent=True)

    async def patch(
        self,
        path: str,
        *,
        json: Any = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        url = self._build_url(path, v1=v1)
        return await self._request_with_retry("PATCH", url, v1=v1, json=json, write_intent=True)

    async def delete(
        self,
        path: str,
        *,
        params: Mapping[str, Any] | Sequence[tuple[str, Any]] | None = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        url = self._build_url(path, v1=v1)
        return await self._request_with_retry(
            "DELETE",
            url,
            v1=v1,
            params=_encode_query_params(params),
            write_intent=True,
        )

    async def upload_file(
        self,
        path: str,
        *,
        files: dict[str, Any],
        data: dict[str, Any] | None = None,
        v1: bool = False,
    ) -> dict[str, Any]:
        """Upload files with multipart form data."""
        url = self._build_url(path, v1=v1)
        client = await self._get_client()

        headers = dict(client.headers)
        headers.pop("Content-Type", None)
        return await self._request_with_retry(
            "POST",
            url,
            v1=v1,
            files=files,
            data=data,
            headers=headers,
            write_intent=True,
        )
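
    # The files mapping is handed through to httpx's multipart encoder, so
    # the standard (filename, content, content_type) tuple form applies.
    # Sketch (path, field name, and payload hypothetical):
    #
    #     await client.upload_file(
    #         "/entity-files",
    #         files={"file": ("report.pdf", pdf_bytes, "application/pdf")},
    #         v1=True,
    #     )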

    async def download_file(
        self,
        path: str,
        *,
        v1: bool = False,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> bytes:
        """
        Download file content.

        Notes:
            - The initial Affinity API response may redirect to an external signed URL.
              Redirects are followed without forwarding credentials.
            - External signed URLs are protected via ExternalHookPolicy (redaction by default).
        """
        if deadline_seconds is not None and deadline_seconds <= 0:
            raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

        url = self._build_url(path, v1=v1)
        context: RequestContext = {}
        if timeout is not None:
            context["timeout"] = timeout
        if deadline_seconds is not None:
            context["deadline_seconds"] = float(deadline_seconds)

        req = SDKRequest(
            method="GET",
            url=url,
            headers=[("Accept", "*/*")],
            api_version="v1" if v1 else "v2",
            write_intent=False,
            context=context,
        )
        resp = await self._raw_buffered_pipeline(req)
        return resp.content

    async def stream_download(
        self,
        path: str,
        *,
        v1: bool = False,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> AsyncIterator[bytes]:
        """
        Stream-download file content in chunks.

        Notes:
            - The initial Affinity API response may redirect to an external signed URL.
              Redirects are followed without forwarding credentials.
            - External signed URLs are protected via ExternalHookPolicy (redaction by default).
        """
        if deadline_seconds is not None and deadline_seconds <= 0:
            raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

        url = self._build_url(path, v1=v1)
        context: RequestContext = {"streaming": True}
        if timeout is not None:
            context["timeout"] = timeout
        if deadline_seconds is not None:
            context["deadline_seconds"] = float(deadline_seconds)
        if on_progress is not None:
            context["on_progress"] = on_progress

        req = SDKRequest(
            method="GET",
            url=url,
            headers=[("Accept", "*/*")],
            api_version="v1" if v1 else "v2",
            write_intent=False,
            context=context,
        )

        resp = await self._raw_stream_pipeline(req)
        if not isinstance(resp, SDKRawStreamResponse):
            return

        async for chunk in resp.stream.aiter_bytes(chunk_size=chunk_size):
            yield chunk
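
    # Usage sketch: stream a large attachment to disk without holding it in
    # memory (path and destination hypothetical):
    #
    #     with open("attachment.bin", "wb") as fh:
    #         async for chunk in client.stream_download("/entity-files/1/download", v1=True):
    #             fh.write(chunk)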

    async def stream_download_with_info(
        self,
        path: str,
        *,
        v1: bool = False,
        chunk_size: int = 65_536,
        on_progress: ProgressCallback | None = None,
        timeout: httpx.Timeout | float | None = None,
        deadline_seconds: float | None = None,
    ) -> AsyncDownloadedFile:
        """
        Stream-download file content and return response metadata (headers/filename/size).

        Notes:
            - The initial Affinity API response may redirect to an external signed URL.
              Redirects are followed without forwarding credentials.
            - External signed URLs are protected via ExternalHookPolicy (redaction by default).
        """
        if deadline_seconds is not None and deadline_seconds <= 0:
            raise TimeoutError(f"Download deadline exceeded: {deadline_seconds}s")

        url = self._build_url(path, v1=v1)
        context: RequestContext = {"streaming": True}
        if timeout is not None:
            context["timeout"] = timeout
        if deadline_seconds is not None:
            context["deadline_seconds"] = float(deadline_seconds)
        if on_progress is not None:
            context["on_progress"] = on_progress

        req = SDKRequest(
            method="GET",
            url=url,
            headers=[("Accept", "*/*")],
            api_version="v1" if v1 else "v2",
            write_intent=False,
            context=context,
        )
        resp = await self._raw_stream_pipeline(req)
        if not isinstance(resp, SDKRawStreamResponse):
            info = _download_info_from_headers([])

            async def _empty_iter_bytes() -> AsyncIterator[bytes]:
                # An async generator that never yields: the unreachable
                # `yield` makes this a generator function with an empty body.
                if False:
                    yield b""

            return AsyncDownloadedFile(
                headers=info["headers"],
                raw_headers=[],
                content_type=info["content_type"],
                filename=info["filename"],
                size=info["size"],
                iter_bytes=_empty_iter_bytes(),
            )

        info = _download_info_from_headers(resp.headers)

        async def _iter_bytes() -> AsyncIterator[bytes]:
            async for chunk in resp.stream.aiter_bytes(chunk_size=chunk_size):
                yield chunk

        return AsyncDownloadedFile(
            headers=info["headers"],
            raw_headers=list(resp.headers),
            content_type=info["content_type"],
            filename=info["filename"],
            size=info["size"],
            iter_bytes=_iter_bytes(),
        )
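
    # Usage sketch: AsyncDownloadedFile bundles parsed metadata with the byte
    # iterator (attribute access assumes a dataclass-style container;
    # destination hypothetical):
    #
    #     download = await client.stream_download_with_info("/entity-files/1/download", v1=True)
    #     with open(download.filename or "download.bin", "wb") as fh:
    #         async for chunk in download.iter_bytes:
    #             fh.write(chunk)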

    def wrap_validation_error(
        self,
        error: Exception,
        *,
        context: str | None = None,
    ) -> VersionCompatibilityError:
        """
        Wrap a validation error with version compatibility context.

        TR-015: If expected_v2_version is configured, validation failures
        are wrapped with actionable guidance about checking API version.
        """
        expected = self._config.expected_v2_version
        message = (
            f"Response parsing failed: {error}. "
            "This may indicate a v2 API version mismatch. "
            "Check your API key's Default API Version in the Affinity dashboard."
        )
        if context:
            message = f"[{context}] {message}"
        return VersionCompatibilityError(
            message,
            expected_version=expected,
            parsing_error=str(error),
        )
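
    # Usage sketch: callers are assumed to funnel model-validation failures
    # through this helper so users get version-mismatch guidance (model and
    # context label hypothetical):
    #
    #     try:
    #         company = Company.model_validate(payload)
    #     except Exception as exc:
    #         raise client.wrap_validation_error(exc, context="companies.get") from exc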

    @property
    def expected_v2_version(self) -> str | None:
        """Expected V2 API version for diagnostics."""
        return self._config.expected_v2_version