hishel 0.1.5__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- hishel/__init__.py +63 -52
- hishel/_async_cache.py +213 -0
- hishel/_async_httpx.py +236 -0
- hishel/{beta/_core → _core}/_headers.py +11 -1
- hishel/{beta/_core → _core}/_spec.py +270 -136
- hishel/_core/_storages/_async_base.py +71 -0
- hishel/_core/_storages/_async_sqlite.py +420 -0
- hishel/_core/_storages/_packing.py +144 -0
- hishel/_core/_storages/_sync_base.py +71 -0
- hishel/_core/_storages/_sync_sqlite.py +420 -0
- hishel/{beta/_core → _core}/models.py +100 -37
- hishel/_policies.py +49 -0
- hishel/_sync_cache.py +213 -0
- hishel/_sync_httpx.py +236 -0
- hishel/_utils.py +37 -366
- hishel/asgi.py +400 -0
- hishel/fastapi.py +263 -0
- hishel/httpx.py +12 -0
- hishel/{beta/requests.py → requests.py} +31 -25
- hishel-1.0.0.dist-info/METADATA +513 -0
- hishel-1.0.0.dist-info/RECORD +24 -0
- hishel/_async/__init__.py +0 -5
- hishel/_async/_client.py +0 -30
- hishel/_async/_mock.py +0 -43
- hishel/_async/_pool.py +0 -201
- hishel/_async/_storages.py +0 -768
- hishel/_async/_transports.py +0 -282
- hishel/_controller.py +0 -581
- hishel/_exceptions.py +0 -10
- hishel/_files.py +0 -54
- hishel/_headers.py +0 -215
- hishel/_lfu_cache.py +0 -71
- hishel/_lmdb_types_.pyi +0 -53
- hishel/_s3.py +0 -122
- hishel/_serializers.py +0 -329
- hishel/_sync/__init__.py +0 -5
- hishel/_sync/_client.py +0 -30
- hishel/_sync/_mock.py +0 -43
- hishel/_sync/_pool.py +0 -201
- hishel/_sync/_storages.py +0 -768
- hishel/_sync/_transports.py +0 -282
- hishel/_synchronization.py +0 -37
- hishel/beta/__init__.py +0 -59
- hishel/beta/_async_cache.py +0 -167
- hishel/beta/_core/__init__.py +0 -0
- hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
- hishel/beta/_core/_base/_storages/_base.py +0 -272
- hishel/beta/_core/_base/_storages/_packing.py +0 -165
- hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
- hishel/beta/_sync_cache.py +0 -167
- hishel/beta/httpx.py +0 -328
- hishel-0.1.5.dist-info/METADATA +0 -258
- hishel-0.1.5.dist-info/RECORD +0 -41
- {hishel-0.1.5.dist-info → hishel-1.0.0.dist-info}/WHEEL +0 -0
- {hishel-0.1.5.dist-info → hishel-1.0.0.dist-info}/licenses/LICENSE +0 -0
hishel/_sync_cache.py
ADDED
@@ -0,0 +1,213 @@
from __future__ import annotations

import hashlib
import logging
import time
from dataclasses import replace
from typing import Iterable, Iterator, Awaitable, Callable

from typing_extensions import assert_never

from hishel import (
    AnyState,
    SyncBaseStorage,
    SyncSqliteStorage,
    CacheMiss,
    CouldNotBeStored,
    FromCache,
    IdleClient,
    NeedRevalidation,
    NeedToBeUpdated,
    Request,
    Response,
    StoreAndUse,
)
from hishel._core._spec import InvalidateEntries, vary_headers_match
from hishel._core.models import Entry, ResponseMetadata
from hishel._policies import CachePolicy, FilterPolicy, SpecificationPolicy
from hishel._utils import make_sync_iterator

logger = logging.getLogger("hishel.integrations.clients")


class SyncCacheProxy:
    """
    A proxy for HTTP caching in clients.

    This class is independent of any specific HTTP library and works only with internal models.
    It delegates request execution to a user-provided callable, making it compatible with any
    HTTP client. Caching behavior is determined by the policy object.

    Args:
        request_sender: Callable that sends HTTP requests and returns responses.
        storage: Storage backend for cache entries. Defaults to SyncSqliteStorage.
        policy: Caching policy to use. Can be SpecificationPolicy (respects RFC 9111) or
            FilterPolicy (user-defined filtering). Defaults to SpecificationPolicy().
    """

    def __init__(
        self,
        request_sender: Callable[[Request], Response],
        storage: SyncBaseStorage | None = None,
        policy: CachePolicy | None = None,
    ) -> None:
        self.send_request = request_sender
        self.storage = storage if storage is not None else SyncSqliteStorage()
        self.policy = policy if policy is not None else SpecificationPolicy()

    def handle_request(self, request: Request) -> Response:
        if isinstance(self.policy, FilterPolicy):
            return self._handle_request_with_filters(request)
        return self._handle_request_respecting_spec(request)

    def _get_key_for_request(self, request: Request) -> str:
        if self.policy.use_body_key or request.metadata.get("hishel_body_key"):
            assert isinstance(request.stream, (Iterator, Iterable))
            collected = b"".join([chunk for chunk in request.stream])
            hash_ = hashlib.sha256(collected).hexdigest()
            request.stream = make_sync_iterator([collected])
            return hash_
        return hashlib.sha256(str(request.url).encode("utf-8")).hexdigest()

    def _maybe_refresh_entry_ttl(self, entry: Entry) -> None:
        if entry.request.metadata.get("hishel_refresh_ttl_on_access"):
            self.storage.update_entry(
                entry.id,
                lambda current_entry: replace(
                    current_entry,
                    meta=replace(current_entry.meta, created_at=time.time()),
                ),
            )

    def _handle_request_with_filters(self, request: Request) -> Response:
        assert isinstance(self.policy, FilterPolicy)

        for request_filter in self.policy.request_filters:
            if request_filter.needs_body():
                body = request.read()
                if not request_filter.apply(request, body):
                    logger.debug("Request filtered out by request filter")
                    return self.send_request(request)
            else:
                if not request_filter.apply(request, None):
                    logger.debug("Request filtered out by request filter")
                    return self.send_request(request)

        logger.debug("Trying to get cached response ignoring specification")
        cache_key = self._get_key_for_request(request)
        entries = self.storage.get_entries(cache_key)

        logger.debug(f"Found {len(entries)} cached entries for the request")

        for entry in entries:
            if (
                str(entry.request.url) == str(request.url)
                and entry.request.method == request.method
                and vary_headers_match(
                    request,
                    entry,
                )
            ):
                logger.debug(
                    "Found matching cached response for the request",
                )
                response_meta = ResponseMetadata(
                    hishel_from_cache=True,
                    hishel_created_at=entry.meta.created_at,
                    hishel_revalidated=False,
                    hishel_stored=False,
                )
                entry.response.metadata.update(response_meta)  # type: ignore
                self._maybe_refresh_entry_ttl(entry)
                return entry.response

        response = self.send_request(request)
        for response_filter in self.policy.response_filters:
            if response_filter.needs_body():
                body = response.read()
                if not response_filter.apply(response, body):
                    logger.debug("Response filtered out by response filter")
                    return response
            else:
                if not response_filter.apply(response, None):
                    logger.debug("Response filtered out by response filter")
                    return response
        response_meta = ResponseMetadata(
            hishel_from_cache=False,
            hishel_created_at=time.time(),
            hishel_revalidated=False,
            hishel_stored=True,
        )
        response.metadata.update(response_meta)  # type: ignore

        logger.debug("Storing response in cache ignoring specification")
        entry = self.storage.create_entry(
            request,
            response,
            cache_key,
        )
        return entry.response

    def _handle_request_respecting_spec(self, request: Request) -> Response:
        assert isinstance(self.policy, SpecificationPolicy)
        state: AnyState = IdleClient(options=self.policy.cache_options)

        while state:
            logger.debug(f"Handling state: {state.__class__.__name__}")
            if isinstance(state, IdleClient):
                state = self._handle_idle_state(state, request)
            elif isinstance(state, CacheMiss):
                state = self._handle_cache_miss(state)
            elif isinstance(state, StoreAndUse):
                return self._handle_store_and_use(state, request)
            elif isinstance(state, CouldNotBeStored):
                return state.response
            elif isinstance(state, NeedRevalidation):
                state = self._handle_revalidation(state)
            elif isinstance(state, FromCache):
                self._maybe_refresh_entry_ttl(state.entry)
                return state.entry.response
            elif isinstance(state, NeedToBeUpdated):
                state = self._handle_update(state)
            elif isinstance(state, InvalidateEntries):
                state = self._handle_invalidate_entries(state)
            else:
                assert_never(state)

        raise RuntimeError("Unreachable")

    def _handle_idle_state(self, state: IdleClient, request: Request) -> AnyState:
        stored_entries = self.storage.get_entries(self._get_key_for_request(request))
        return state.next(request, stored_entries)

    def _handle_cache_miss(self, state: CacheMiss) -> AnyState:
        response = self.send_request(state.request)
        return state.next(response)

    def _handle_store_and_use(self, state: StoreAndUse, request: Request) -> Response:
        entry = self.storage.create_entry(
            request,
            state.response,
            self._get_key_for_request(request),
        )
        return entry.response

    def _handle_revalidation(self, state: NeedRevalidation) -> AnyState:
        revalidation_response = self.send_request(state.request)
        return state.next(revalidation_response)

    def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
        for entry in state.updating_entries:
            self.storage.update_entry(
                entry.id,
                lambda entry: replace(
                    entry,
                    response=replace(entry.response, headers=entry.response.headers),
                ),
            )
        return state.next()

    def _handle_invalidate_entries(self, state: InvalidateEntries) -> AnyState:
        for entry_id in state.entry_ids:
            self.storage.remove_entry(entry_id)
        return state.next()
hishel/_sync_httpx.py
ADDED
@@ -0,0 +1,236 @@
from __future__ import annotations

import ssl
import typing as t
from typing import (
    Iterable,
    Iterator,
    Iterator,
    Union,
    cast,
    overload,
)

from httpx import RequestNotRead

from hishel import SyncCacheProxy, Headers, Request, Response
from hishel._core._storages._sync_base import SyncBaseStorage
from hishel._core.models import RequestMetadata, extract_metadata_from_headers
from hishel._policies import CachePolicy
from hishel._utils import (
    filter_mapping,
    make_sync_iterator,
)

try:
    import httpx
except ImportError as e:
    raise ImportError(
        "httpx is required to use hishel.httpx module. "
        "Please install hishel with the 'httpx' extra, "
        "e.g., 'pip install hishel[httpx]'."
    ) from e

SOCKET_OPTION = t.Union[
    t.Tuple[int, int, int],
    t.Tuple[int, int, t.Union[bytes, bytearray]],
    t.Tuple[int, int, None, int],
]

# 128 KB
CHUNK_SIZE = 131072


@overload
def _internal_to_httpx(
    value: Request,
) -> httpx.Request: ...
@overload
def _internal_to_httpx(
    value: Response,
) -> httpx.Response: ...
def _internal_to_httpx(
    value: Union[Request, Response],
) -> Union[httpx.Request, httpx.Response]:
    """
    Convert internal Request/Response to httpx.Request/httpx.Response.
    """
    if isinstance(value, Request):
        return httpx.Request(
            method=value.method,
            url=value.url,
            headers=value.headers,
            stream=_IteratorStream(value._iter_stream()),
            extensions=value.metadata,
        )
    elif isinstance(value, Response):
        return httpx.Response(
            status_code=value.status_code,
            headers=value.headers,
            stream=_IteratorStream(value._iter_stream()),
            extensions=value.metadata,
        )


@overload
def _httpx_to_internal(
    value: httpx.Request,
) -> Request: ...
@overload
def _httpx_to_internal(
    value: httpx.Response,
) -> Response: ...
def _httpx_to_internal(
    value: Union[httpx.Request, httpx.Response],
) -> Union[Request, Response]:
    """
    Convert httpx.Request/httpx.Response to internal Request/Response.
    """
    headers = Headers(
        filter_mapping(
            Headers({key: value for key, value in value.headers.items()}),
            ["Transfer-Encoding"],
        )
    )
    if isinstance(value, httpx.Request):
        extension_metadata = RequestMetadata(
            hishel_refresh_ttl_on_access=value.extensions.get("hishel_refresh_ttl_on_access"),
            hishel_ttl=value.extensions.get("hishel_ttl"),
            hishel_spec_ignore=value.extensions.get("hishel_spec_ignore"),
            hishel_body_key=value.extensions.get("hishel_body_key"),
        )
        headers_metadata = extract_metadata_from_headers(value.headers)

        for key, val in extension_metadata.items():
            if key in value.extensions:
                headers_metadata[key] = val  # type: ignore

        try:
            stream = make_sync_iterator([value.content])
        except RequestNotRead:
            stream = cast(Iterator[bytes], value.stream)

        return Request(
            method=value.method,
            url=str(value.url),
            headers=headers,
            stream=stream,
            metadata=headers_metadata,
        )
    elif isinstance(value, httpx.Response):
        if value.is_stream_consumed and "content-encoding" in value.headers:
            raise RuntimeError("Can't get the raw stream of a response with `Content-Encoding` header.")
        stream = (
            make_sync_iterator([value.content]) if value.is_stream_consumed else value.iter_raw(chunk_size=CHUNK_SIZE)
        )

        return Response(
            status_code=value.status_code,
            headers=headers,
            stream=stream,
            metadata={},
        )


class _IteratorStream(httpx.SyncByteStream, httpx.AsyncByteStream):
    def __init__(self, iterator: Iterator[bytes] | Iterator[bytes]) -> None:
        self.iterator = iterator

    def __iter__(self) -> Iterator[bytes]:
        assert isinstance(self.iterator, (Iterator, Iterable))
        for chunk in self.iterator:
            yield chunk


class SyncCacheTransport(httpx.BaseTransport):
    def __init__(
        self,
        next_transport: httpx.BaseTransport,
        storage: SyncBaseStorage | None = None,
        policy: CachePolicy | None = None,
    ) -> None:
        self.next_transport = next_transport
        self._cache_proxy: SyncCacheProxy = SyncCacheProxy(
            request_sender=self.request_sender,
            storage=storage,
            policy=policy,
        )
        self.storage = self._cache_proxy.storage

    def handle_request(
        self,
        request: httpx.Request,
    ) -> httpx.Response:
        internal_request = _httpx_to_internal(request)
        internal_response = self._cache_proxy.handle_request(internal_request)
        response = _internal_to_httpx(internal_response)
        return response

    def close(self) -> None:
        self.next_transport.close()
        self.storage.close()
        super().close()

    def request_sender(self, request: Request) -> Response:
        httpx_request = _internal_to_httpx(request)
        httpx_response = self.next_transport.handle_request(httpx_request)
        return _httpx_to_internal(httpx_response)


class SyncCacheClient(httpx.Client):
    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
        self.storage: SyncBaseStorage | None = kwargs.pop("storage", None)
        self.policy: CachePolicy | None = kwargs.pop("policy", None)
        super().__init__(*args, **kwargs)

    def _init_transport(
        self,
        verify: ssl.SSLContext | str | bool = True,
        cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
        trust_env: bool = True,
        http1: bool = True,
        http2: bool = False,
        limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
        transport: httpx.BaseTransport | None = None,
        **kwargs: t.Any,
    ) -> httpx.BaseTransport:
        if transport is not None:
            return transport

        return SyncCacheTransport(
            next_transport=httpx.HTTPTransport(
                verify=verify,
                cert=cert,
                trust_env=trust_env,
                http1=http1,
                http2=http2,
                limits=limits,
            ),
            storage=self.storage,
            policy=self.policy,
        )

    def _init_proxy_transport(
        self,
        proxy: httpx.Proxy,
        verify: ssl.SSLContext | str | bool = True,
        cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
        trust_env: bool = True,
        http1: bool = True,
        http2: bool = False,
        limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
        **kwargs: t.Any,
    ) -> httpx.BaseTransport:
        return SyncCacheTransport(
            next_transport=httpx.HTTPTransport(
                verify=verify,
                cert=cert,
                trust_env=trust_env,
                http1=http1,
                http2=http2,
                limits=limits,
                proxy=proxy,
            ),
            storage=self.storage,
            policy=self.policy,
        )
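
SyncCacheTransport and SyncCacheClient wrap the proxy for httpx: requests and responses pass through _httpx_to_internal / _internal_to_httpx, and the per-request keys read from value.extensions above (hishel_ttl, hishel_spec_ignore, hishel_body_key, hishel_refresh_ttl_on_access) can therefore be supplied as httpx extensions. A minimal, hypothetical sketch follows; the import path is taken from this module and may differ from whatever the thin hishel/httpx.py added in this release re-exports publicly, and the extension values shown are illustrative.

# Hypothetical sketch of the httpx integration added in this file.
import httpx

from hishel._sync_httpx import SyncCacheClient, SyncCacheTransport  # public re-export path may differ

# Drop-in client: the storage/policy kwargs are popped in __init__ and default to
# SyncSqliteStorage / SpecificationPolicy inside SyncCacheProxy.
client = SyncCacheClient()
first = client.get("https://example.org/")
second = client.get("https://example.org/")
# Cache outcome is surfaced through response extensions (ResponseMetadata keys).
print(second.extensions.get("hishel_from_cache"))

# Per-request controls travel as httpx extensions, mirroring the keys read in
# _httpx_to_internal above.
client.get("https://example.org/", extensions={"hishel_ttl": 3600})

# Wrapping an existing transport explicitly works the same way.
transport = SyncCacheTransport(next_transport=httpx.HTTPTransport())
with httpx.Client(transport=transport) as explicit_client:
    explicit_client.get("https://example.org/")

client.close()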