hishel 1.0.0.dev1__py3-none-any.whl → 1.0.0.dev3__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- hishel/__init__.py +15 -14
- hishel/_async_cache.py +50 -37
- hishel/_async_httpx.py +243 -0
- hishel/_core/_headers.py +11 -1
- hishel/_core/_spec.py +184 -127
- hishel/_core/_storages/_async_base.py +71 -0
- hishel/_core/{_async/_storages/_sqlite.py → _storages/_async_sqlite.py} +95 -132
- hishel/_core/_storages/_packing.py +144 -0
- hishel/_core/_storages/_sync_base.py +71 -0
- hishel/_core/{_sync/_storages/_sqlite.py → _storages/_sync_sqlite.py} +95 -132
- hishel/_core/models.py +13 -26
- hishel/_sync_cache.py +50 -37
- hishel/_sync_httpx.py +243 -0
- hishel/_utils.py +48 -137
- hishel/asgi.py +400 -0
- hishel/fastapi.py +263 -0
- hishel/httpx.py +3 -326
- hishel/requests.py +25 -17
- {hishel-1.0.0.dev1.dist-info → hishel-1.0.0.dev3.dist-info}/METADATA +139 -27
- hishel-1.0.0.dev3.dist-info/RECORD +23 -0
- hishel/_core/__init__.py +0 -59
- hishel/_core/_base/_storages/_base.py +0 -272
- hishel/_core/_base/_storages/_packing.py +0 -165
- hishel-1.0.0.dev1.dist-info/RECORD +0 -19
- {hishel-1.0.0.dev1.dist-info → hishel-1.0.0.dev3.dist-info}/WHEEL +0 -0
- {hishel-1.0.0.dev1.dist-info → hishel-1.0.0.dev3.dist-info}/licenses/LICENSE +0 -0
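The summary above shows the storage backends moving out of the `_core/_async` and `_core/_sync` subpackages into a single `hishel/_core/_storages` package, with the async/sync flavor encoded in the module name instead. A minimal import sketch of the new layout (the root-level re-exports are confirmed by the `__init__.py` diff below; the `AsyncSqliteStorage()` call mirrors the default used in `_async_cache.py`):

# New module locations in 1.0.0.dev3:
#   hishel/_core/_storages/_async_sqlite.py  (was hishel/_core/_async/_storages/_sqlite.py)
#   hishel/_core/_storages/_sync_sqlite.py   (was hishel/_core/_sync/_storages/_sqlite.py)

# The public re-exports from the package root:
from hishel import AsyncSqliteStorage, SyncSqliteStorage

# Default storage used by AsyncCacheProxy when none is passed explicitly.
storage = AsyncSqliteStorage()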
hishel/__init__.py
CHANGED
@@ -1,8 +1,7 @@
-from hishel._core.
-from hishel._core.
-
-
-)
+from hishel._core._storages._async_sqlite import AsyncSqliteStorage
+from hishel._core._storages._async_base import AsyncBaseStorage
+from hishel._core._storages._sync_sqlite import SyncSqliteStorage
+from hishel._core._storages._sync_base import SyncBaseStorage
 from hishel._core._headers import Headers as Headers
 from hishel._core._spec import (
     AnyState as AnyState,
@@ -11,21 +10,21 @@ from hishel._core._spec import (
     CouldNotBeStored as CouldNotBeStored,
     FromCache as FromCache,
     IdleClient as IdleClient,
+    InvalidateEntries,
     NeedRevalidation as NeedRevalidation,
     NeedToBeUpdated as NeedToBeUpdated,
     State as State,
     StoreAndUse as StoreAndUse,
     create_idle_state as create_idle_state,
 )
-from hishel._core._sync._storages._sqlite import SyncSqliteStorage
 from hishel._core.models import (
-
-
-    Pair as Pair,
-    PairMeta as PairMeta,
+    Entry as Entry,
+    EntryMeta as EntryMeta,
     Request as Request,
     Response,
 )
+from hishel._async_cache import AsyncCacheProxy as AsyncCacheProxy
+from hishel._sync_cache import SyncCacheProxy as SyncCacheProxy

 __all__ = (
     # New API
@@ -41,14 +40,13 @@ __all__ = (
     "State",
     "StoreAndUse",
     "CouldNotBeStored",
+    "InvalidateEntries",
     "create_idle_state",
     ## Models
     "Request",
     "Response",
-    "
-    "
-    "CompletePair",
-    "PairMeta",
+    "Entry",
+    "EntryMeta",
     ## Headers
     "Headers",
     ## Storages
@@ -56,4 +54,7 @@ __all__ = (
     "AsyncBaseStorage",
     "SyncSqliteStorage",
     "AsyncSqliteStorage",
+    # Proxy
+    "AsyncCacheProxy",
+    "SyncCacheProxy",
 )
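The public API in this release renames the `Pair`/`PairMeta` models to `Entry`/`EntryMeta` and exposes the new cache proxies plus the `InvalidateEntries` state at the package root. A short sketch of the 1.0.0.dev3 top-level imports (names taken from the `__all__` above; the grouping and comments are illustrative):

from hishel import (
    AsyncCacheProxy,    # new in dev3
    SyncCacheProxy,     # new in dev3
    Entry,              # replaces Pair
    EntryMeta,          # replaces PairMeta
    InvalidateEntries,  # new spec state
    Request,
    Response,
)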
hishel/_async_cache.py
CHANGED
@@ -4,7 +4,7 @@ import hashlib
 import logging
 import time
 from dataclasses import replace
-from typing import AsyncIterator, Awaitable, Callable
+from typing import AsyncIterable, AsyncIterator, Awaitable, Callable

 from typing_extensions import assert_never

@@ -24,8 +24,9 @@ from hishel import (
     StoreAndUse,
     create_idle_state,
 )
-from hishel._core._spec import
-from hishel._core.models import
+from hishel._core._spec import InvalidateEntries, vary_headers_match
+from hishel._core.models import Entry, ResponseMetadata
+from hishel._utils import make_async_iterator

 logger = logging.getLogger("hishel.integrations.clients")

@@ -42,12 +43,12 @@ class AsyncCacheProxy:

     def __init__(
         self,
-
+        request_sender: Callable[[Request], Awaitable[Response]],
         storage: AsyncBaseStorage | None = None,
         cache_options: CacheOptions | None = None,
         ignore_specification: bool = False,
     ) -> None:
-        self.send_request =
+        self.send_request = request_sender
         self.storage = storage if storage is not None else AsyncSqliteStorage()
         self.cache_options = cache_options if cache_options is not None else CacheOptions()
         self.ignore_specification = ignore_specification
@@ -59,26 +60,30 @@ class AsyncCacheProxy:

     async def _get_key_for_request(self, request: Request) -> str:
         if request.metadata.get("hishel_body_key"):
-            assert isinstance(request.stream, AsyncIterator)
+            assert isinstance(request.stream, (AsyncIterator, AsyncIterable))
             collected = b"".join([chunk async for chunk in request.stream])
             hash_ = hashlib.sha256(collected).hexdigest()
+            request.stream = make_async_iterator([collected])
             return f"{str(request.url)}-{hash_}"
-        return str(request.url)
+        return hashlib.sha256(str(request.url).encode("utf-8")).hexdigest()

-    async def _maybe_refresh_pair_ttl(self, pair:
+    async def _maybe_refresh_pair_ttl(self, pair: Entry) -> None:
         if pair.request.metadata.get("hishel_refresh_ttl_on_access"):
-            await self.storage.
+            await self.storage.update_entry(
                 pair.id,
-                lambda complete_pair: replace(
+                lambda complete_pair: replace(
+                    complete_pair,
+                    meta=replace(complete_pair.meta, created_at=time.time()),
+                ),
             )

     async def _handle_request_ignoring_spec(self, request: Request) -> Response:
         logger.debug("Trying to get cached response ignoring specification")
-
+        entries = await self.storage.get_entries(await self._get_key_for_request(request))

-        logger.debug(f"Found {len(
+        logger.debug(f"Found {len(entries)} cached entries for the request")

-        for pair in
+        for pair in entries:
             if (
                 str(pair.request.url) == str(request.url)
                 and pair.request.method == request.method
@@ -90,20 +95,26 @@ class AsyncCacheProxy:
                 logger.debug(
                     "Found matching cached response for the request",
                 )
-
+                response_meta = ResponseMetadata(
+                    hishel_spec_ignored=True,
+                    hishel_from_cache=True,
+                    hishel_created_at=pair.meta.created_at,
+                    hishel_revalidated=False,
+                    hishel_stored=False,
+                )
+                pair.response.metadata.update(response_meta) # type: ignore
                 await self._maybe_refresh_pair_ttl(pair)
                 return pair.response

-
-            request,
-        )
-        response = await self.send_request(incomplete_pair.request)
+        response = await self.send_request(request)

         logger.debug("Storing response in cache ignoring specification")
-
-
+        entry = await self.storage.create_entry(
+            request,
+            response,
+            await self._get_key_for_request(request),
         )
-        return
+        return entry.response

     async def _handle_request_respecting_spec(self, request: Request) -> Response:
         state: AnyState = create_idle_state("client", self.cache_options)
@@ -125,25 +136,26 @@ class AsyncCacheProxy:
                 return state.pair.response
             elif isinstance(state, NeedToBeUpdated):
                 state = await self._handle_update(state)
-            elif isinstance(state,
-                state = await self.
+            elif isinstance(state, InvalidateEntries):
+                state = await self._handle_invalidate_entries(state)
             else:
                 assert_never(state)

         raise RuntimeError("Unreachable")

     async def _handle_idle_state(self, state: IdleClient, request: Request) -> AnyState:
-
-        return state.next(request,
+        stored_entries = await self.storage.get_entries(await self._get_key_for_request(request))
+        return state.next(request, stored_entries)

     async def _handle_cache_miss(self, state: CacheMiss) -> AnyState:
-
-
-        return state.next(response, incomplete_pair.id)
+        response = await self.send_request(state.request)
+        return state.next(response)

     async def _handle_store_and_use(self, state: StoreAndUse, request: Request) -> Response:
-        complete_pair = await self.storage.
-
+        complete_pair = await self.storage.create_entry(
+            request,
+            state.response,
+            await self._get_key_for_request(request),
         )
         return complete_pair.response

@@ -152,16 +164,17 @@ class AsyncCacheProxy:
         return state.next(revalidation_response)

     async def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
-        for
-            await self.storage.
-
+        for entry in state.updating_entries:
+            await self.storage.update_entry(
+                entry.id,
                 lambda complete_pair: replace(
-                    complete_pair,
+                    complete_pair,
+                    response=replace(entry.response, headers=entry.response.headers),
                 ),
             )
         return state.next()

-    async def
-        for
-        await self.storage.
+    async def _handle_invalidate_entries(self, state: InvalidateEntries) -> AnyState:
+        for entry_id in state.entry_ids:
+            await self.storage.remove_entry(entry_id)
         return state.next()
hishel/_async_httpx.py
ADDED
@@ -0,0 +1,243 @@
+from __future__ import annotations
+
+import ssl
+import typing as t
+from typing import (
+    AsyncIterable,
+    AsyncIterator,
+    Iterator,
+    Union,
+    cast,
+    overload,
+)
+
+from httpx import RequestNotRead
+
+from hishel import AsyncCacheProxy, Headers, Request, Response
+from hishel._core._spec import (
+    CacheOptions,
+)
+from hishel._core._storages._async_base import AsyncBaseStorage
+from hishel._core.models import RequestMetadata, extract_metadata_from_headers
+from hishel._utils import (
+    filter_mapping,
+    make_async_iterator,
+)
+
+try:
+    import httpx
+except ImportError as e:
+    raise ImportError(
+        "httpx is required to use hishel.httpx module. "
+        "Please install hishel with the 'httpx' extra, "
+        "e.g., 'pip install hishel[httpx]'."
+    ) from e
+
+SOCKET_OPTION = t.Union[
+    t.Tuple[int, int, int],
+    t.Tuple[int, int, t.Union[bytes, bytearray]],
+    t.Tuple[int, int, None, int],
+]
+
+# 128 KB
+CHUNK_SIZE = 131072
+
+
+@overload
+def _internal_to_httpx(
+    value: Request,
+) -> httpx.Request: ...
+@overload
+def _internal_to_httpx(
+    value: Response,
+) -> httpx.Response: ...
+def _internal_to_httpx(
+    value: Union[Request, Response],
+) -> Union[httpx.Request, httpx.Response]:
+    """
+    Convert internal Request/Response to httpx.Request/httpx.Response.
+    """
+    if isinstance(value, Request):
+        return httpx.Request(
+            method=value.method,
+            url=value.url,
+            headers=value.headers,
+            stream=_IteratorStream(value.aiter_stream()),
+            extensions=value.metadata,
+        )
+    elif isinstance(value, Response):
+        return httpx.Response(
+            status_code=value.status_code,
+            headers=value.headers,
+            stream=_IteratorStream(value.aiter_stream()),
+            extensions=value.metadata,
+        )
+
+
+@overload
+def _httpx_to_internal(
+    value: httpx.Request,
+) -> Request: ...
+@overload
+def _httpx_to_internal(
+    value: httpx.Response,
+) -> Response: ...
+def _httpx_to_internal(
+    value: Union[httpx.Request, httpx.Response],
+) -> Union[Request, Response]:
+    """
+    Convert httpx.Request/httpx.Response to internal Request/Response.
+    """
+    headers = Headers(
+        filter_mapping(
+            Headers({key: value for key, value in value.headers.items()}),
+            ["Transfer-Encoding"],
+        )
+    )
+    if isinstance(value, httpx.Request):
+        extension_metadata = RequestMetadata(
+            hishel_refresh_ttl_on_access=value.extensions.get("hishel_refresh_ttl_on_access"),
+            hishel_ttl=value.extensions.get("hishel_ttl"),
+            hishel_spec_ignore=value.extensions.get("hishel_spec_ignore"),
+            hishel_body_key=value.extensions.get("hishel_body_key"),
+        )
+        headers_metadata = extract_metadata_from_headers(value.headers)
+
+        for key, val in extension_metadata.items():
+            if key in value.extensions:
+                headers_metadata[key] = val # type: ignore
+
+        try:
+            stream = make_async_iterator([value.content])
+        except RequestNotRead:
+            stream = cast(AsyncIterator[bytes], value.stream)
+
+        return Request(
+            method=value.method,
+            url=str(value.url),
+            headers=headers,
+            stream=stream,
+            metadata=headers_metadata,
+        )
+    elif isinstance(value, httpx.Response):
+        if value.is_stream_consumed and "content-encoding" in value.headers:
+            raise RuntimeError("Can't get the raw stream of a response with `Content-Encoding` header.")
+        stream = (
+            make_async_iterator([value.content]) if value.is_stream_consumed else value.aiter_raw(chunk_size=CHUNK_SIZE)
+        )
+
+        return Response(
+            status_code=value.status_code,
+            headers=headers,
+            stream=stream,
+            metadata={},
+        )
+
+
+class _IteratorStream(httpx.SyncByteStream, httpx.AsyncByteStream):
+    def __init__(self, iterator: Iterator[bytes] | AsyncIterator[bytes]) -> None:
+        self.iterator = iterator
+
+    async def __aiter__(self) -> AsyncIterator[bytes]:
+        assert isinstance(self.iterator, (AsyncIterator, AsyncIterable))
+        async for chunk in self.iterator:
+            yield chunk
+
+
+class AsyncCacheTransport(httpx.AsyncBaseTransport):
+    def __init__(
+        self,
+        next_transport: httpx.AsyncBaseTransport,
+        storage: AsyncBaseStorage | None = None,
+        cache_options: CacheOptions | None = None,
+        ignore_specification: bool = False,
+    ) -> None:
+        self.next_transport = next_transport
+        self._cache_proxy: AsyncCacheProxy = AsyncCacheProxy(
+            request_sender=self.request_sender,
+            storage=storage,
+            cache_options=cache_options,
+            ignore_specification=ignore_specification,
+        )
+        self.storage = self._cache_proxy.storage
+
+    async def handle_async_request(
+        self,
+        request: httpx.Request,
+    ) -> httpx.Response:
+        internal_request = _httpx_to_internal(request)
+        internal_response = await self._cache_proxy.handle_request(internal_request)
+        response = _internal_to_httpx(internal_response)
+        return response
+
+    async def aclose(self) -> None:
+        await self.next_transport.aclose()
+        await self.storage.close()
+        await super().aclose()
+
+    async def request_sender(self, request: Request) -> Response:
+        httpx_request = _internal_to_httpx(request)
+        httpx_response = await self.next_transport.handle_async_request(httpx_request)
+        return _httpx_to_internal(httpx_response)
+
+
+class AsyncCacheClient(httpx.AsyncClient):
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        self.storage: AsyncBaseStorage | None = kwargs.pop("storage", None)
+        self.cache_options: CacheOptions | None = kwargs.pop("cache_options", None)
+        self.ignore_specification: bool = kwargs.pop("ignore_specification", False)
+        super().__init__(*args, **kwargs)
+
+    def _init_transport(
+        self,
+        verify: ssl.SSLContext | str | bool = True,
+        cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
+        trust_env: bool = True,
+        http1: bool = True,
+        http2: bool = False,
+        limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
+        transport: httpx.AsyncBaseTransport | None = None,
+        **kwargs: t.Any,
+    ) -> httpx.AsyncBaseTransport:
+        if transport is not None:
+            return transport
+
+        return AsyncCacheTransport(
+            next_transport=httpx.AsyncHTTPTransport(
+                verify=verify,
+                cert=cert,
+                trust_env=trust_env,
+                http1=http1,
+                http2=http2,
+                limits=limits,
+            ),
+            storage=self.storage,
+            cache_options=self.cache_options,
+            ignore_specification=False,
+        )
+
+    def _init_proxy_transport(
+        self,
+        proxy: httpx.Proxy,
+        verify: ssl.SSLContext | str | bool = True,
+        cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
+        trust_env: bool = True,
+        http1: bool = True,
+        http2: bool = False,
+        limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
+        **kwargs: t.Any,
+    ) -> httpx.AsyncBaseTransport:
+        return AsyncCacheTransport(
+            next_transport=httpx.AsyncHTTPTransport(
+                verify=verify,
+                cert=cert,
+                trust_env=trust_env,
+                http1=http1,
+                http2=http2,
+                limits=limits,
+                proxy=proxy,
+            ),
+            storage=self.storage,
+            cache_options=self.cache_options,
+            ignore_specification=self.ignore_specification,
+        )
hishel/_core/_headers.py
CHANGED
@@ -1,7 +1,17 @@
 from __future__ import annotations

 from dataclasses import dataclass
-from typing import
+from typing import (
+    Any,
+    Iterator,
+    List,
+    Literal,
+    Mapping,
+    MutableMapping,
+    Optional,
+    Union,
+    cast,
+)

 """
 HTTP token and quoted-string parsing utilities.