hishel 0.1.1__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29) hide show
  1. {hishel-0.1.1 → hishel-0.1.3}/.gitignore +2 -1
  2. {hishel-0.1.1 → hishel-0.1.3}/CHANGELOG.md +16 -1
  3. {hishel-0.1.1 → hishel-0.1.3}/PKG-INFO +21 -6
  4. {hishel-0.1.1 → hishel-0.1.3}/hishel/__init__.py +1 -1
  5. {hishel-0.1.1 → hishel-0.1.3}/hishel/_async/_pool.py +1 -1
  6. {hishel-0.1.1 → hishel-0.1.3}/hishel/_async/_storages.py +7 -3
  7. {hishel-0.1.1 → hishel-0.1.3}/hishel/_async/_transports.py +2 -2
  8. {hishel-0.1.1 → hishel-0.1.3}/hishel/_controller.py +10 -3
  9. {hishel-0.1.1 → hishel-0.1.3}/hishel/_headers.py +2 -2
  10. {hishel-0.1.1 → hishel-0.1.3}/hishel/_s3.py +29 -9
  11. {hishel-0.1.1 → hishel-0.1.3}/hishel/_sync/_pool.py +1 -1
  12. {hishel-0.1.1 → hishel-0.1.3}/hishel/_sync/_storages.py +6 -2
  13. {hishel-0.1.1 → hishel-0.1.3}/hishel/_sync/_transports.py +4 -4
  14. {hishel-0.1.1 → hishel-0.1.3}/hishel/_utils.py +26 -8
  15. {hishel-0.1.1 → hishel-0.1.3}/pyproject.toml +20 -4
  16. {hishel-0.1.1 → hishel-0.1.3}/LICENSE +0 -0
  17. {hishel-0.1.1 → hishel-0.1.3}/README.md +0 -0
  18. {hishel-0.1.1 → hishel-0.1.3}/hishel/_async/__init__.py +0 -0
  19. {hishel-0.1.1 → hishel-0.1.3}/hishel/_async/_client.py +0 -0
  20. {hishel-0.1.1 → hishel-0.1.3}/hishel/_async/_mock.py +0 -0
  21. {hishel-0.1.1 → hishel-0.1.3}/hishel/_exceptions.py +0 -0
  22. {hishel-0.1.1 → hishel-0.1.3}/hishel/_files.py +0 -0
  23. {hishel-0.1.1 → hishel-0.1.3}/hishel/_lfu_cache.py +0 -0
  24. {hishel-0.1.1 → hishel-0.1.3}/hishel/_serializers.py +0 -0
  25. {hishel-0.1.1 → hishel-0.1.3}/hishel/_sync/__init__.py +0 -0
  26. {hishel-0.1.1 → hishel-0.1.3}/hishel/_sync/_client.py +0 -0
  27. {hishel-0.1.1 → hishel-0.1.3}/hishel/_sync/_mock.py +0 -0
  28. {hishel-0.1.1 → hishel-0.1.3}/hishel/_synchronization.py +0 -0
  29. {hishel-0.1.1 → hishel-0.1.3}/hishel/py.typed +0 -0
@@ -1,4 +1,5 @@
1
1
  venv/
2
2
  __pycache__/
3
3
  .coverage
4
- .cache/
4
+ .cache/
5
+ .idea/
@@ -1,8 +1,23 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.1.3 (1st July, 2025)
4
+
5
+ - Remove `types-redis` from dev dependencies (#336)
6
+ - Bump redis to 6.0.0 and address async `.close()` deprecation warning (#336)
7
+ - Avoid race condition when unlinking files in `FileStorage`. (#334)
8
+ - Allow providing a `path_prefix` in `S3Storage` and `AsyncS3Storage`. (#342)
9
+
10
+ ## 0.1.2 (5th April, 2025)
11
+
12
+ - Add check for fips compliant python. (#325)
13
+ - Fix compatibility with httpx. (#291)
14
+ - Use `SyncByteStream` instead of `ByteStream`. (#298)
15
+ - Don't raise exceptions if date-containing headers are invalid. (#318)
16
+ - Fix for S3 Storage missing metadata in API request. (#320)
17
+
3
18
  ## 0.1.1 (2nd Nov, 2024)
4
19
 
5
- - FIx typig extensions nor found. (#290)
20
+ - Fix typing extensions not found. (#290)
6
21
 
7
22
  ## 0.1.0 (2nd Nov, 2024)
8
23
 
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: hishel
3
- Version: 0.1.1
3
+ Version: 0.1.3
4
4
  Summary: Persistent cache implementation for httpx and httpcore
5
5
  Project-URL: Homepage, https://hishel.com
6
6
  Project-URL: Source, https://github.com/karpetrosyan/hishel
@@ -23,16 +23,16 @@ Classifier: Programming Language :: Python :: 3.12
23
23
  Classifier: Programming Language :: Python :: 3.13
24
24
  Classifier: Topic :: Internet :: WWW/HTTP
25
25
  Requires-Python: >=3.9
26
- Requires-Dist: httpx>=0.22.0
26
+ Requires-Dist: httpx>=0.28.0
27
27
  Provides-Extra: redis
28
- Requires-Dist: redis==5.0.1; extra == 'redis'
28
+ Requires-Dist: redis==6.2.0; extra == 'redis'
29
29
  Provides-Extra: s3
30
30
  Requires-Dist: boto3<=1.15.3,>=1.15.0; (python_version < '3.12') and extra == 's3'
31
31
  Requires-Dist: boto3>=1.15.3; (python_version >= '3.12') and extra == 's3'
32
32
  Provides-Extra: sqlite
33
33
  Requires-Dist: anysqlite>=0.0.5; extra == 'sqlite'
34
34
  Provides-Extra: yaml
35
- Requires-Dist: pyyaml==6.0.1; extra == 'yaml'
35
+ Requires-Dist: pyyaml==6.0.2; extra == 'yaml'
36
36
  Description-Content-Type: text/markdown
37
37
 
38
38
  <p align="center" class="logo">
@@ -181,9 +181,24 @@ Help us grow and continue developing good software for you ❤️
181
181
 
182
182
  # Changelog
183
183
 
184
+ ## 0.1.3 (1st July, 2025)
185
+
186
+ - Remove `types-redis` from dev dependencies (#336)
187
+ - Bump redis to 6.0.0 and address async `.close()` deprecation warning (#336)
188
+ - Avoid race condition when unlinking files in `FileStorage`. (#334)
189
+ - Allow providing a `path_prefix` in `S3Storage` and `AsyncS3Storage`. (#342)
190
+
191
+ ## 0.1.2 (5th April, 2025)
192
+
193
+ - Add check for fips compliant python. (#325)
194
+ - Fix compatibility with httpx. (#291)
195
+ - Use `SyncByteStream` instead of `ByteStream`. (#298)
196
+ - Don't raise exceptions if date-containing headers are invalid. (#318)
197
+ - Fix for S3 Storage missing metadata in API request. (#320)
198
+
184
199
  ## 0.1.1 (2nd Nov, 2024)
185
200
 
186
- - FIx typig extensions nor found. (#290)
201
+ - Fix typing extensions not found. (#290)
187
202
 
188
203
  ## 0.1.0 (2nd Nov, 2024)
189
204
 
@@ -14,4 +14,4 @@ def install_cache() -> None: # pragma: no cover
14
14
  httpx.Client = CacheClient # type: ignore
15
15
 
16
16
 
17
- __version__ = "0.1.1"
17
+ __version__ = "0.1.3"
@@ -47,7 +47,7 @@ class AsyncCacheConnectionPool(AsyncRequestInterface):
47
47
 
48
48
  self._storage = storage if storage is not None else AsyncFileStorage(serializer=JSONSerializer())
49
49
 
50
- if not isinstance(self._storage, AsyncBaseStorage):
50
+ if not isinstance(self._storage, AsyncBaseStorage): # pragma: no cover
51
51
  raise TypeError(f"Expected subclass of `AsyncBaseStorage` but got `{storage.__class__.__name__}`")
52
52
 
53
53
  self._controller = controller if controller is not None else Controller()
@@ -159,7 +159,7 @@ class AsyncFileStorage(AsyncBaseStorage):
159
159
 
160
160
  async with self._lock:
161
161
  if response_path.exists():
162
- response_path.unlink()
162
+ response_path.unlink(missing_ok=True)
163
163
 
164
164
  async def update_metadata(self, key: str, response: Response, request: Request, metadata: Metadata) -> None:
165
165
  """
@@ -222,7 +222,7 @@ class AsyncFileStorage(AsyncBaseStorage):
222
222
  if response_path.is_file():
223
223
  age = time.time() - response_path.stat().st_mtime
224
224
  if age > self._ttl:
225
- response_path.unlink()
225
+ response_path.unlink(missing_ok=True)
226
226
  return
227
227
 
228
228
  self._last_cleaned = time.monotonic()
@@ -507,7 +507,7 @@ class AsyncRedisStorage(AsyncBaseStorage):
507
507
  return self._serializer.loads(cached_response)
508
508
 
509
509
  async def aclose(self) -> None: # pragma: no cover
510
- await self._client.close()
510
+ await self._client.aclose()
511
511
 
512
512
 
513
513
  class AsyncInMemoryStorage(AsyncBaseStorage):
@@ -654,6 +654,8 @@ class AsyncS3Storage(AsyncBaseStorage): # pragma: no cover
654
654
  :type check_ttl_every: tp.Union[int, float]
655
655
  :param client: A client for S3, defaults to None
656
656
  :type client: tp.Optional[tp.Any], optional
657
+ :param path_prefix: A path prefix to use for S3 object keys, defaults to "hishel-"
658
+ :type path_prefix: str, optional
657
659
  """
658
660
 
659
661
  def __init__(
@@ -663,6 +665,7 @@ class AsyncS3Storage(AsyncBaseStorage): # pragma: no cover
663
665
  ttl: tp.Optional[tp.Union[int, float]] = None,
664
666
  check_ttl_every: tp.Union[int, float] = 60,
665
667
  client: tp.Optional[tp.Any] = None,
668
+ path_prefix: str = "hishel-",
666
669
  ) -> None:
667
670
  super().__init__(serializer, ttl)
668
671
 
@@ -680,6 +683,7 @@ class AsyncS3Storage(AsyncBaseStorage): # pragma: no cover
680
683
  bucket_name=bucket_name,
681
684
  is_binary=self._serializer.is_binary,
682
685
  check_ttl_every=check_ttl_every,
686
+ path_prefix=path_prefix,
683
687
  )
684
688
  self._lock = AsyncLock()
685
689
 
@@ -64,7 +64,7 @@ class AsyncCacheTransport(httpx.AsyncBaseTransport):
64
64
 
65
65
  self._storage = storage if storage is not None else AsyncFileStorage(serializer=JSONSerializer())
66
66
 
67
- if not isinstance(self._storage, AsyncBaseStorage):
67
+ if not isinstance(self._storage, AsyncBaseStorage): # pragma: no cover
68
68
  raise TypeError(f"Expected subclass of `AsyncBaseStorage` but got `{storage.__class__.__name__}`")
69
69
 
70
70
  self._controller = controller if controller is not None else Controller()
@@ -152,7 +152,7 @@ class AsyncCacheTransport(httpx.AsyncBaseTransport):
152
152
  # Controller has determined that the response needs to be re-validated.
153
153
  assert isinstance(res.stream, tp.AsyncIterable)
154
154
  revalidation_request = Request(
155
- method=res.method,
155
+ method=res.method.decode(),
156
156
  url=normalized_url(res.url),
157
157
  headers=res.headers,
158
158
  stream=AsyncCacheStream(res.stream),
@@ -60,8 +60,12 @@ def get_freshness_lifetime(response: Response) -> tp.Optional[int]:
60
60
  if header_presents(response.headers, b"expires"):
61
61
  expires = extract_header_values_decoded(response.headers, b"expires", single=True)[0]
62
62
  expires_timestamp = parse_date(expires)
63
+ if expires_timestamp is None:
64
+ return None
63
65
  date = extract_header_values_decoded(response.headers, b"date", single=True)[0]
64
66
  date_timestamp = parse_date(date)
67
+ if date_timestamp is None:
68
+ return None
65
69
 
66
70
  return expires_timestamp - date_timestamp
67
71
  return None
@@ -72,11 +76,12 @@ def get_heuristic_freshness(response: Response, clock: "BaseClock") -> int:
72
76
 
73
77
  if last_modified:
74
78
  last_modified_timestamp = parse_date(last_modified[0])
75
- now = clock.now()
79
+ if last_modified_timestamp is not None:
80
+ now = clock.now()
76
81
 
77
- ONE_WEEK = 604_800
82
+ ONE_WEEK = 604_800
78
83
 
79
- return min(ONE_WEEK, int((now - last_modified_timestamp) * 0.1))
84
+ return min(ONE_WEEK, int((now - last_modified_timestamp) * 0.1))
80
85
 
81
86
  ONE_DAY = 86_400
82
87
  return ONE_DAY
@@ -89,6 +94,8 @@ def get_age(response: Response, clock: "BaseClock") -> int:
89
94
  return float("inf") # type: ignore
90
95
 
91
96
  date = parse_date(extract_header_values_decoded(response.headers, b"date")[0])
97
+ if date is None:
98
+ return float("inf") # type: ignore
92
99
 
93
100
  now = clock.now()
94
101
 
@@ -94,13 +94,13 @@ def parse_cache_control(cache_control_values: List[str]) -> "CacheControl":
94
94
  for value_char in value:
95
95
  if value_char not in tchar:
96
96
  raise ParseError(
97
- f"The character '{value_char!r}' " "is not permitted for the unquoted values."
97
+ f"The character '{value_char!r}' is not permitted for the unquoted values."
98
98
  )
99
99
  else:
100
100
  for value_char in value[1:-1]:
101
101
  if value_char not in qdtext:
102
102
  raise ParseError(
103
- f"The character '{value_char!r}' " "is not permitted for the quoted values."
103
+ f"The character '{value_char!r}' is not permitted for the quoted values."
104
104
  )
105
105
  break
106
106
 
@@ -11,16 +11,22 @@ def get_timestamp_in_ms() -> float:
11
11
 
12
12
  class S3Manager:
13
13
  def __init__(
14
- self, client: tp.Any, bucket_name: str, check_ttl_every: tp.Union[int, float], is_binary: bool = False
14
+ self,
15
+ client: tp.Any,
16
+ bucket_name: str,
17
+ check_ttl_every: tp.Union[int, float],
18
+ is_binary: bool = False,
19
+ path_prefix: str = "hishel-",
15
20
  ):
16
21
  self._client = client
17
22
  self._bucket_name = bucket_name
18
23
  self._is_binary = is_binary
19
24
  self._last_cleaned = time.monotonic()
20
25
  self._check_ttl_every = check_ttl_every
26
+ self._path_prefix = path_prefix
21
27
 
22
28
  def write_to(self, path: str, data: tp.Union[bytes, str], only_metadata: bool = False) -> None:
23
- path = "hishel-" + path
29
+ path = self._path_prefix + path
24
30
  if isinstance(data, str):
25
31
  data = data.encode("utf-8")
26
32
 
@@ -43,7 +49,7 @@ class S3Manager:
43
49
  )
44
50
 
45
51
  def read_from(self, path: str) -> tp.Union[bytes, str]:
46
- path = "hishel-" + path
52
+ path = self._path_prefix + path
47
53
  response = self._client.get_object(
48
54
  Bucket=self._bucket_name,
49
55
  Key=path,
@@ -57,7 +63,7 @@ class S3Manager:
57
63
  return tp.cast(str, content.decode("utf-8"))
58
64
 
59
65
  def remove_expired(self, ttl: int, key: str) -> None:
60
- path = "hishel-" + key
66
+ path = self._path_prefix + key
61
67
 
62
68
  if time.monotonic() - self._last_cleaned < self._check_ttl_every:
63
69
  try:
@@ -72,22 +78,36 @@ class S3Manager:
72
78
 
73
79
  self._last_cleaned = time.monotonic()
74
80
  for obj in self._client.list_objects(Bucket=self._bucket_name).get("Contents", []):
75
- if not obj["Key"].startswith("hishel-"): # pragma: no cover
81
+ if not obj["Key"].startswith(self._path_prefix): # pragma: no cover
76
82
  continue
77
83
 
78
- if get_timestamp_in_ms() - float(obj["Metadata"]["created_at"]) > ttl:
84
+ try:
85
+ metadata_obj = self._client.head_object(Bucket=self._bucket_name, Key=obj["Key"]).get("Metadata", {})
86
+ except ClientError as e:
87
+ if e.response["Error"]["Code"] == "404":
88
+ continue
89
+
90
+ if not metadata_obj or "created_at" not in metadata_obj:
91
+ continue
92
+
93
+ if get_timestamp_in_ms() - float(metadata_obj["created_at"]) > ttl:
79
94
  self._client.delete_object(Bucket=self._bucket_name, Key=obj["Key"])
80
95
 
81
96
  def remove_entry(self, key: str) -> None:
82
- path = "hishel-" + key
97
+ path = self._path_prefix + key
83
98
  self._client.delete_object(Bucket=self._bucket_name, Key=path)
84
99
 
85
100
 
86
101
  class AsyncS3Manager: # pragma: no cover
87
102
  def __init__(
88
- self, client: tp.Any, bucket_name: str, check_ttl_every: tp.Union[int, float], is_binary: bool = False
103
+ self,
104
+ client: tp.Any,
105
+ bucket_name: str,
106
+ check_ttl_every: tp.Union[int, float],
107
+ is_binary: bool = False,
108
+ path_prefix: str = "hishel-",
89
109
  ):
90
- self._sync_manager = S3Manager(client, bucket_name, check_ttl_every, is_binary)
110
+ self._sync_manager = S3Manager(client, bucket_name, check_ttl_every, is_binary, path_prefix)
91
111
 
92
112
  async def write_to(self, path: str, data: tp.Union[bytes, str], only_metadata: bool = False) -> None:
93
113
  return await to_thread.run_sync(self._sync_manager.write_to, path, data, only_metadata)
@@ -47,7 +47,7 @@ class CacheConnectionPool(RequestInterface):
47
47
 
48
48
  self._storage = storage if storage is not None else FileStorage(serializer=JSONSerializer())
49
49
 
50
- if not isinstance(self._storage, BaseStorage):
50
+ if not isinstance(self._storage, BaseStorage): # pragma: no cover
51
51
  raise TypeError(f"Expected subclass of `BaseStorage` but got `{storage.__class__.__name__}`")
52
52
 
53
53
  self._controller = controller if controller is not None else Controller()
@@ -159,7 +159,7 @@ class FileStorage(BaseStorage):
159
159
 
160
160
  with self._lock:
161
161
  if response_path.exists():
162
- response_path.unlink()
162
+ response_path.unlink(missing_ok=True)
163
163
 
164
164
  def update_metadata(self, key: str, response: Response, request: Request, metadata: Metadata) -> None:
165
165
  """
@@ -222,7 +222,7 @@ class FileStorage(BaseStorage):
222
222
  if response_path.is_file():
223
223
  age = time.time() - response_path.stat().st_mtime
224
224
  if age > self._ttl:
225
- response_path.unlink()
225
+ response_path.unlink(missing_ok=True)
226
226
  return
227
227
 
228
228
  self._last_cleaned = time.monotonic()
@@ -654,6 +654,8 @@ class S3Storage(BaseStorage): # pragma: no cover
654
654
  :type check_ttl_every: tp.Union[int, float]
655
655
  :param client: A client for S3, defaults to None
656
656
  :type client: tp.Optional[tp.Any], optional
657
+ :param path_prefix: A path prefix to use for S3 object keys, defaults to "hishel-"
658
+ :type path_prefix: str, optional
657
659
  """
658
660
 
659
661
  def __init__(
@@ -663,6 +665,7 @@ class S3Storage(BaseStorage): # pragma: no cover
663
665
  ttl: tp.Optional[tp.Union[int, float]] = None,
664
666
  check_ttl_every: tp.Union[int, float] = 60,
665
667
  client: tp.Optional[tp.Any] = None,
668
+ path_prefix: str = "hishel-",
666
669
  ) -> None:
667
670
  super().__init__(serializer, ttl)
668
671
 
@@ -680,6 +683,7 @@ class S3Storage(BaseStorage): # pragma: no cover
680
683
  bucket_name=bucket_name,
681
684
  is_binary=self._serializer.is_binary,
682
685
  check_ttl_every=check_ttl_every,
686
+ path_prefix=path_prefix,
683
687
  )
684
688
  self._lock = Lock()
685
689
 
@@ -5,7 +5,7 @@ import typing as tp
5
5
 
6
6
  import httpcore
7
7
  import httpx
8
- from httpx import ByteStream, Request, Response
8
+ from httpx import SyncByteStream, Request, Response
9
9
  from httpx._exceptions import ConnectError
10
10
 
11
11
  from hishel._utils import extract_header_values_decoded, normalized_url
@@ -29,7 +29,7 @@ def generate_504() -> Response:
29
29
  return Response(status_code=504)
30
30
 
31
31
 
32
- class CacheStream(ByteStream):
32
+ class CacheStream(SyncByteStream):
33
33
  def __init__(self, httpcore_stream: tp.Iterable[bytes]):
34
34
  self._httpcore_stream = httpcore_stream
35
35
 
@@ -64,7 +64,7 @@ class CacheTransport(httpx.BaseTransport):
64
64
 
65
65
  self._storage = storage if storage is not None else FileStorage(serializer=JSONSerializer())
66
66
 
67
- if not isinstance(self._storage, BaseStorage):
67
+ if not isinstance(self._storage, BaseStorage): # pragma: no cover
68
68
  raise TypeError(f"Expected subclass of `BaseStorage` but got `{storage.__class__.__name__}`")
69
69
 
70
70
  self._controller = controller if controller is not None else Controller()
@@ -152,7 +152,7 @@ class CacheTransport(httpx.BaseTransport):
152
152
  # Controller has determined that the response needs to be re-validated.
153
153
  assert isinstance(res.stream, tp.Iterable)
154
154
  revalidation_request = Request(
155
- method=res.method,
155
+ method=res.method.decode(),
156
156
  url=normalized_url(res.url),
157
157
  headers=res.headers,
158
158
  stream=CacheStream(res.stream),
@@ -1,8 +1,8 @@
1
1
  import calendar
2
+ import hashlib
2
3
  import time
3
4
  import typing as tp
4
5
  from email.utils import parsedate_tz
5
- from hashlib import blake2b
6
6
 
7
7
  import anyio
8
8
  import httpcore
@@ -30,7 +30,7 @@ def normalized_url(url: tp.Union[httpcore.URL, str, bytes]) -> str:
30
30
 
31
31
  if isinstance(url, httpcore.URL):
32
32
  port = f":{url.port}" if url.port is not None else ""
33
- return f'{url.scheme.decode("ascii")}://{url.host.decode("ascii")}{port}{url.target.decode("ascii")}'
33
+ return f"{url.scheme.decode('ascii')}://{url.host.decode('ascii')}{port}{url.target.decode('ascii')}"
34
34
  assert False, "Invalid type for `normalized_url`" # pragma: no cover
35
35
 
36
36
 
@@ -49,10 +49,26 @@ def generate_key(request: httpcore.Request, body: bytes = b"") -> str:
49
49
 
50
50
  key_parts = [request.method, encoded_url, body]
51
51
 
52
- key = blake2b(digest_size=16, usedforsecurity=False)
53
- for part in key_parts:
54
- key.update(part)
55
- return key.hexdigest()
52
+ # FIPs mode disables blake2 algorithm, use sha256 instead when not found.
53
+ blake2b_hasher = None
54
+ sha256_hasher = hashlib.sha256(usedforsecurity=False)
55
+ try:
56
+ blake2b_hasher = hashlib.blake2b(digest_size=16, usedforsecurity=False)
57
+ except (ValueError, TypeError, AttributeError):
58
+ pass
59
+
60
+ hexdigest: str
61
+ if blake2b_hasher:
62
+ for part in key_parts:
63
+ blake2b_hasher.update(part)
64
+
65
+ hexdigest = blake2b_hasher.hexdigest()
66
+ else:
67
+ for part in key_parts:
68
+ sha256_hasher.update(part)
69
+
70
+ hexdigest = sha256_hasher.hexdigest()
71
+ return hexdigest
56
72
 
57
73
 
58
74
  def extract_header_values(
@@ -82,9 +98,11 @@ def header_presents(headers: tp.List[tp.Tuple[bytes, bytes]], header_key: bytes)
82
98
  return bool(extract_header_values(headers, header_key, single=True))
83
99
 
84
100
 
85
- def parse_date(date: str) -> int:
101
+ def parse_date(date: str) -> tp.Optional[int]:
86
102
  expires = parsedate_tz(date)
87
- timestamp = calendar.timegm(expires[:6]) # type: ignore
103
+ if expires is None:
104
+ return None
105
+ timestamp = calendar.timegm(expires[:6])
88
106
  return timestamp
89
107
 
90
108
 
@@ -29,17 +29,17 @@ classifiers = [
29
29
  "Topic :: Internet :: WWW/HTTP",
30
30
  ]
31
31
  dependencies = [
32
- "httpx>=0.22.0",
32
+ "httpx>=0.28.0",
33
33
  ]
34
34
 
35
35
  [project.optional-dependencies]
36
36
 
37
37
  yaml = [
38
- "pyyaml==6.0.1",
38
+ "pyyaml==6.0.2",
39
39
  ]
40
40
 
41
41
  redis = [
42
- "redis==5.0.1"
42
+ "redis==6.2.0"
43
43
  ]
44
44
 
45
45
  sqlite = [
@@ -92,7 +92,7 @@ filterwarnings = []
92
92
 
93
93
  [tool.coverage.run]
94
94
  omit = [
95
- "venv/*",
95
+ "venv/*",
96
96
  "hishel/_sync/*",
97
97
  "hishel/_s3.py"
98
98
  ]
@@ -122,3 +122,19 @@ select = [
122
122
 
123
123
  [tool.ruff.lint.isort]
124
124
  combine-as-imports = true
125
+
126
+ [dependency-groups]
127
+ dev = [
128
+ "anyio==4.7.0",
129
+ "coverage==7.6.10",
130
+ "hatch==1.9.3",
131
+ "mkdocs==1.6.1",
132
+ "mkdocs-material==9.5.1",
133
+ "mypy==1.14.1",
134
+ "pytest==8.3.4",
135
+ "ruff==0.11.0",
136
+ "trio==0.28.0",
137
+ "types-boto3==1.0.2",
138
+ "types-pyyaml==6.0.12.20240311",
139
+ "zipp>=3.19.1",
140
+ ]
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes