omlish 0.0.0.dev472__py3-none-any.whl → 0.0.0.dev474__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of omlish might be problematic. See the release details below for more information.

Files changed (37)
  1. omlish/__about__.py +3 -3
  2. omlish/collections/__init__.py +4 -0
  3. omlish/collections/attrregistry.py +32 -4
  4. omlish/diag/cmds/__init__.py +0 -0
  5. omlish/diag/{lslocks.py → cmds/lslocks.py} +6 -6
  6. omlish/diag/{lsof.py → cmds/lsof.py} +6 -6
  7. omlish/diag/{ps.py → cmds/ps.py} +6 -6
  8. omlish/dispatch/__init__.py +18 -12
  9. omlish/formats/json/stream/__init__.py +13 -0
  10. omlish/http/clients/asyncs.py +11 -17
  11. omlish/http/clients/coro/sync.py +3 -2
  12. omlish/http/clients/default.py +2 -2
  13. omlish/http/clients/executor.py +8 -2
  14. omlish/http/clients/httpx.py +29 -46
  15. omlish/http/clients/sync.py +11 -17
  16. omlish/http/clients/syncasync.py +8 -2
  17. omlish/http/clients/urllib.py +2 -1
  18. omlish/io/buffers.py +115 -0
  19. omlish/io/readers.py +29 -0
  20. omlish/lite/contextmanagers.py +4 -4
  21. omlish/os/pidfiles/pinning.py +2 -2
  22. omlish/text/docwrap/__init__.py +3 -0
  23. omlish/text/docwrap/api.py +77 -0
  24. omlish/text/docwrap/groups.py +84 -0
  25. omlish/text/docwrap/lists.py +167 -0
  26. omlish/text/docwrap/parts.py +139 -0
  27. omlish/text/docwrap/reflowing.py +103 -0
  28. omlish/text/docwrap/rendering.py +142 -0
  29. omlish/text/docwrap/utils.py +11 -0
  30. omlish/text/docwrap/wrapping.py +59 -0
  31. omlish/text/textwrap.py +51 -0
  32. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/METADATA +7 -6
  33. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/RECORD +37 -25
  34. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/WHEEL +0 -0
  35. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/entry_points.txt +0 -0
  36. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/licenses/LICENSE +0 -0
  37. {omlish-0.0.0.dev472.dist-info → omlish-0.0.0.dev474.dist-info}/top_level.txt +0 -0
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
1
- __version__ = '0.0.0.dev472'
2
- __revision__ = '78c62ff9afe49d9f0ae12b27ff595b2079e9c082'
1
+ __version__ = '0.0.0.dev474'
2
+ __revision__ = '5a7b02bc5884a6c8e41f2e2afd5aaf7f495cc28e'
3
3
 
4
4
 
5
5
  #
@@ -41,7 +41,7 @@ class Project(ProjectBase):
41
41
 
42
42
  'greenlet ~= 3.2',
43
43
 
44
- 'trio ~= 0.31',
44
+ 'trio ~= 0.32',
45
45
  'trio-asyncio ~= 0.15',
46
46
  ],
47
47
 
@@ -8,7 +8,9 @@ with _lang.auto_proxy_init(globals()):
8
8
 
9
9
  from .attrregistry import ( # noqa
10
10
  AttrRegistry,
11
+
11
12
  AttrRegistryCache,
13
+ SimpleAttrRegistryCache,
12
14
  )
13
15
 
14
16
  from .bimap import ( # noqa
@@ -17,6 +19,8 @@ with _lang.auto_proxy_init(globals()):
17
19
  make_bi_map,
18
20
  )
19
21
 
22
+ from . import cache # noqa
23
+
20
24
  from .coerce import ( # noqa
21
25
  abs_set,
22
26
  abs_set_of,
@@ -1,3 +1,7 @@
1
+ """
2
+ TODO:
3
+ - lock?
4
+ """
1
5
  import dataclasses as dc
2
6
  import typing as ta
3
7
  import weakref
@@ -50,13 +54,32 @@ class AttrRegistry(ta.Generic[K, V]):
50
54
  def add_invalidate_callback(self, callback: ta.Callable[[], None]) -> None:
51
55
  self._invalidate_callbacks.append(callback)
52
56
 
57
+ @ta.overload
53
58
  def register(self, obj: K, val: V) -> None:
54
- check.not_in(obj, self._objs)
59
+ ...
55
60
 
56
- self._objs[obj] = val
61
+ @ta.overload
62
+ def register(self, val: V) -> ta.Callable[[T], T]:
63
+ ...
57
64
 
58
- for iv in self._invalidate_callbacks:
59
- iv()
65
+ def register(self, *args):
66
+ def inner(obj, val):
67
+ check.not_in(obj, self._objs)
68
+
69
+ self._objs[obj] = val
70
+
71
+ for iv in self._invalidate_callbacks:
72
+ iv()
73
+
74
+ return obj
75
+
76
+ if len(args) == 1:
77
+ return lambda obj: inner(obj, args[0])
78
+ elif len(args) == 2:
79
+ inner = inner(*args)
80
+ return None
81
+ else:
82
+ raise TypeError(args)
60
83
 
61
84
  def _lookup(self, obj: ta.Any) -> lang.Maybe[V]:
62
85
  if not self._identity:
@@ -180,3 +203,8 @@ class AttrRegistryCache(ta.Generic[K, V, T]):
180
203
  out = self._prepare(instance_cls, collected)
181
204
  self._cache[weakref.ref(instance_cls, self._cache_remove)] = out
182
205
  return out
206
+
207
+
208
+ class SimpleAttrRegistryCache(AttrRegistryCache[K, V, dict[str, tuple[K, V]]], ta.Generic[K, V]):
209
+ def __init__(self, registry: AttrRegistry[K, V]) -> None:
210
+ super().__init__(registry, lambda _, dct: dct)
File without changes
@@ -7,12 +7,12 @@ import dataclasses as dc
7
7
  import json
8
8
  import typing as ta
9
9
 
10
- from ..lite.check import check
11
- from ..lite.marshal import OBJ_MARSHALER_FIELD_KEY
12
- from ..lite.marshal import unmarshal_obj
13
- from ..subprocesses.run import SubprocessRun
14
- from ..subprocesses.run import SubprocessRunnable
15
- from ..subprocesses.run import SubprocessRunOutput
10
+ from ...lite.check import check
11
+ from ...lite.marshal import OBJ_MARSHALER_FIELD_KEY
12
+ from ...lite.marshal import unmarshal_obj
13
+ from ...subprocesses.run import SubprocessRun
14
+ from ...subprocesses.run import SubprocessRunnable
15
+ from ...subprocesses.run import SubprocessRunOutput
16
16
 
17
17
 
18
18
  ##
@@ -7,12 +7,12 @@ import dataclasses as dc
7
7
  import enum
8
8
  import typing as ta
9
9
 
10
- from ..lite.check import check
11
- from ..lite.dataclasses import dataclass_repr_omit_falsey
12
- from ..lite.marshal import OBJ_MARSHALER_OMIT_IF_NONE
13
- from ..subprocesses.run import SubprocessRun
14
- from ..subprocesses.run import SubprocessRunnable
15
- from ..subprocesses.run import SubprocessRunOutput
10
+ from ...lite.check import check
11
+ from ...lite.dataclasses import dataclass_repr_omit_falsey
12
+ from ...lite.marshal import OBJ_MARSHALER_OMIT_IF_NONE
13
+ from ...subprocesses.run import SubprocessRun
14
+ from ...subprocesses.run import SubprocessRunnable
15
+ from ...subprocesses.run import SubprocessRunOutput
16
16
 
17
17
 
18
18
  ##
@@ -4,12 +4,12 @@ import dataclasses as dc
4
4
  import os
5
5
  import typing as ta
6
6
 
7
- from ..lite.check import check
8
- from ..lite.timeouts import Timeout
9
- from ..subprocesses.run import SubprocessRun
10
- from ..subprocesses.run import SubprocessRunnable
11
- from ..subprocesses.run import SubprocessRunOutput
12
- from ..subprocesses.sync import subprocesses
7
+ from ...lite.check import check
8
+ from ...lite.timeouts import Timeout
9
+ from ...subprocesses.run import SubprocessRun
10
+ from ...subprocesses.run import SubprocessRunnable
11
+ from ...subprocesses.run import SubprocessRunOutput
12
+ from ...subprocesses.sync import subprocesses
13
13
 
14
14
 
15
15
  ##
@@ -1,12 +1,18 @@
1
- from .dispatch import ( # noqa
2
- Dispatcher,
3
- )
4
-
5
- from .functions import ( # noqa
6
- function,
7
- )
8
-
9
- from .methods import ( # noqa
10
- install_method,
11
- method,
12
- )
1
+ from .. import lang as _lang
2
+
3
+
4
+ with _lang.auto_proxy_init(globals()):
5
+ ##
6
+
7
+ from .dispatch import ( # noqa
8
+ Dispatcher,
9
+ )
10
+
11
+ from .functions import ( # noqa
12
+ function,
13
+ )
14
+
15
+ from .methods import ( # noqa
16
+ install_method,
17
+ method,
18
+ )
@@ -1,3 +1,16 @@
1
+ """
2
+ A generator powered, configurable, mostly fully streaming JSON parser.
3
+
4
+ Regarding the 'streamyness' of the subsystems:
5
+ - Lexing only buffers for string and number literals.
6
+ - Parsing maintains only a stack that scales by nesting depth.
7
+ - Building values will obviously hold everything under the topmost object it's building until it's finished.
8
+
9
+ It's reasonably optimized, but performance is not a primary or even secondary goal: its goal is flexibility. If speed
10
+ matters use a native library.
11
+ """
12
+
13
+
1
14
  from .building import ( # noqa
2
15
  JsonValueBuilder,
3
16
  )
@@ -3,9 +3,9 @@
3
3
  import abc
4
4
  import contextlib
5
5
  import dataclasses as dc
6
- import io
7
6
  import typing as ta
8
7
 
8
+ from ...io.readers import AsyncBufferedBytesReader
9
9
  from ...lite.abstract import Abstract
10
10
  from ...lite.dataclasses import dataclass_shallow_asdict
11
11
  from .base import BaseHttpClient
@@ -27,25 +27,19 @@ AsyncHttpClientT = ta.TypeVar('AsyncHttpClientT', bound='AsyncHttpClient')
27
27
  @ta.final
28
28
  @dc.dataclass(frozen=True) # kw_only=True
29
29
  class AsyncStreamHttpResponse(BaseHttpResponse):
30
- class Stream(ta.Protocol):
31
- def read1(self, /, n: int = -1) -> ta.Awaitable[bytes]: ...
30
+ _stream: ta.Optional[AsyncBufferedBytesReader] = None
32
31
 
33
- @ta.final
34
- class _NullStream:
35
- def read1(self, /, n: int = -1) -> ta.Awaitable[bytes]:
36
- raise TypeError
37
-
38
- stream: Stream = _NullStream()
32
+ @property
33
+ def stream(self) -> 'AsyncBufferedBytesReader':
34
+ if (st := self._stream) is None:
35
+ raise TypeError('No data')
36
+ return st
39
37
 
40
38
  @property
41
39
  def has_data(self) -> bool:
42
- return not isinstance(self.stream, AsyncStreamHttpResponse._NullStream)
40
+ return self._stream is not None
43
41
 
44
- async def read_all(self) -> bytes:
45
- buf = io.BytesIO()
46
- while (b := await self.stream.read1()):
47
- buf.write(b)
48
- return buf.getvalue()
42
+ #
49
43
 
50
44
  _closer: ta.Optional[ta.Callable[[], ta.Awaitable[None]]] = None
51
45
 
@@ -96,8 +90,8 @@ async def async_read_http_client_response(resp: BaseHttpResponse) -> HttpRespons
96
90
 
97
91
  elif isinstance(resp, AsyncStreamHttpResponse):
98
92
  return HttpResponse(**{
99
- **{k: v for k, v in dataclass_shallow_asdict(resp).items() if k not in ('stream', '_closer')},
100
- **({'data': await resp.read_all()} if resp.has_data else {}),
93
+ **{k: v for k, v in dataclass_shallow_asdict(resp).items() if k not in ('_stream', '_closer')},
94
+ **({'data': await resp.stream.readall()} if resp.has_data else {}),
101
95
  })
102
96
 
103
97
  else:
@@ -5,6 +5,7 @@ import socket
5
5
  import typing as ta
6
6
  import urllib.parse
7
7
 
8
+ from ....io.buffers import ReadableListBuffer
8
9
  from ....lite.check import check
9
10
  from ...coro.client.connection import CoroHttpClientConnection
10
11
  from ...coro.client.response import CoroHttpClientResponse
@@ -92,7 +93,7 @@ class CoroHttpClient(HttpClient):
92
93
  headers=HttpHeaders(resp._state.headers.items()), # noqa
93
94
  request=self._req,
94
95
  underlying=self,
95
- stream=self,
96
+ _stream=ReadableListBuffer().new_buffered_reader(self),
96
97
  _closer=self.close,
97
98
  )
98
99
 
@@ -156,7 +157,7 @@ class CoroHttpClient(HttpClient):
156
157
  else:
157
158
  raise TypeError(o)
158
159
 
159
- def read1(self, /, n: int = -1) -> bytes:
160
+ def read1(self, n: int = -1, /) -> bytes:
160
161
  return self._run_coro(check.not_none(self._resp).read(n if n >= 0 else None))
161
162
 
162
163
  def close(self) -> None:
@@ -83,7 +83,7 @@ def client() -> HttpClient:
83
83
 
84
84
 
85
85
  @contextlib.contextmanager
86
- def manage_client(client: HttpClient | None) -> ta.Generator[HttpClient]: # noqa
86
+ def manage_client(client: HttpClient | None = None) -> ta.Generator[HttpClient]: # noqa
87
87
  if client is not None:
88
88
  yield client
89
89
 
@@ -167,7 +167,7 @@ def async_client() -> AsyncHttpClient:
167
167
 
168
168
 
169
169
  @contextlib.asynccontextmanager
170
- async def manage_async_client(client: AsyncHttpClient | None) -> ta.AsyncGenerator[AsyncHttpClient]: # noqa
170
+ async def manage_async_client(client: AsyncHttpClient | None = None) -> ta.AsyncGenerator[AsyncHttpClient]: # noqa
171
171
  if client is not None:
172
172
  yield client
173
173
 
@@ -30,9 +30,15 @@ class ExecutorAsyncHttpClient(AsyncHttpClient):
30
30
  owner: 'ExecutorAsyncHttpClient'
31
31
  resp: StreamHttpResponse
32
32
 
33
- async def read1(self, /, n: int = -1) -> bytes:
33
+ async def read1(self, n: int = -1, /) -> bytes:
34
34
  return await self.owner._run_in_executor(self.resp.stream.read1, n) # noqa
35
35
 
36
+ async def read(self, n: int = -1, /) -> bytes:
37
+ return await self.owner._run_in_executor(self.resp.stream.read, n) # noqa
38
+
39
+ async def readall(self) -> bytes:
40
+ return await self.owner._run_in_executor(self.resp.stream.readall) # noqa
41
+
36
42
  async def close(self) -> None:
37
43
  return await self.owner._run_in_executor(self.resp.close) # noqa
38
44
 
@@ -44,7 +50,7 @@ class ExecutorAsyncHttpClient(AsyncHttpClient):
44
50
  request=req,
45
51
  underlying=resp,
46
52
  **(dict( # type: ignore
47
- stream=(adapter := self._StreamAdapter(self, resp)),
53
+ _stream=(adapter := self._StreamAdapter(self, resp)),
48
54
  _closer=adapter.close,
49
55
  ) if resp.has_data else {}),
50
56
  )
@@ -32,27 +32,12 @@ class HttpxHttpClient(HttpClient):
32
32
  @dc.dataclass(frozen=True)
33
33
  class _StreamAdapter:
34
34
  it: ta.Iterator[bytes]
35
- buf: ReadableListBuffer = dc.field(default_factory=ReadableListBuffer)
36
-
37
- def read1(self, /, n: int = -1) -> bytes:
38
- if n < 0:
39
- if (b := self.buf.read(n)) is not None:
40
- return b
41
- try:
42
- return next(self.it)
43
- except StopIteration:
44
- return b''
45
-
46
- else:
47
- while len(self.buf) < n:
48
- try:
49
- b = next(self.it)
50
- except StopIteration:
51
- b = b''
52
- if not b:
53
- return self.buf.read() or b''
54
- self.buf.feed(b)
55
- return self.buf.read(n) or b''
35
+
36
+ def read1(self, n: int = -1, /) -> bytes:
37
+ try:
38
+ return next(self.it)
39
+ except StopIteration:
40
+ return b''
56
41
 
57
42
  def _stream_request(self, ctx: HttpClientContext, req: HttpRequest) -> StreamHttpResponse:
58
43
  try:
@@ -76,7 +61,7 @@ class HttpxHttpClient(HttpClient):
76
61
  headers=HttpHeaders(resp.headers.raw),
77
62
  request=req,
78
63
  underlying=resp,
79
- stream=self._StreamAdapter(resp.iter_bytes()),
64
+ _stream=ReadableListBuffer().new_buffered_reader(self._StreamAdapter(resp.iter_bytes())),
80
65
  _closer=resp_close, # type: ignore
81
66
  )
82
67
 
@@ -84,7 +69,7 @@ class HttpxHttpClient(HttpClient):
84
69
  resp_close()
85
70
  raise HttpClientError from e
86
71
 
87
- except Exception:
72
+ except BaseException:
88
73
  resp_close()
89
74
  raise
90
75
 
@@ -96,27 +81,12 @@ class HttpxAsyncHttpClient(AsyncHttpClient):
96
81
  @dc.dataclass(frozen=True)
97
82
  class _StreamAdapter:
98
83
  it: ta.AsyncIterator[bytes]
99
- buf: ReadableListBuffer = dc.field(default_factory=ReadableListBuffer)
100
-
101
- async def read1(self, /, n: int = -1) -> bytes:
102
- if n < 0:
103
- if (b := self.buf.read(n)) is not None:
104
- return b
105
- try:
106
- return await anext(self.it)
107
- except StopAsyncIteration:
108
- return b''
109
-
110
- else:
111
- while len(self.buf) < n:
112
- try:
113
- b = await anext(self.it)
114
- except StopAsyncIteration:
115
- b = b''
116
- if not b:
117
- return self.buf.read() or b''
118
- self.buf.feed(b)
119
- return self.buf.read(n) or b''
84
+
85
+ async def read1(self, n: int = -1, /) -> bytes:
86
+ try:
87
+ return await anext(self.it)
88
+ except StopAsyncIteration:
89
+ return b''
120
90
 
121
91
  async def _stream_request(self, ctx: HttpClientContext, req: HttpRequest) -> AsyncStreamHttpResponse:
122
92
  es = contextlib.AsyncExitStack()
@@ -132,12 +102,25 @@ class HttpxAsyncHttpClient(AsyncHttpClient):
132
102
  timeout=req.timeout_s,
133
103
  ))
134
104
 
105
+ it = resp.aiter_bytes()
106
+
107
+ # FIXME:
108
+ # this has a tendency to raise `RuntimeError: async generator ignored GeneratorExit` when all of the
109
+ # following conditions are met:
110
+ # - stopped iterating midway through
111
+ # - shutting down the event loop
112
+ # - debugging under pycharm / pydevd
113
+ # - running under asyncio
114
+ # it does not seem to happen unless all of these conditions are met. see:
115
+ # https://gist.github.com/wrmsr/a0578ee5d5371b53804cfb56aeb84cdf .
116
+ es.push_async_callback(it.aclose) # type: ignore[attr-defined]
117
+
135
118
  return AsyncStreamHttpResponse(
136
119
  status=resp.status_code,
137
120
  headers=HttpHeaders(resp.headers.raw),
138
121
  request=req,
139
122
  underlying=resp,
140
- stream=self._StreamAdapter(resp.aiter_bytes()),
123
+ _stream=ReadableListBuffer().new_async_buffered_reader(self._StreamAdapter(it)),
141
124
  _closer=es.aclose,
142
125
  )
143
126
 
@@ -145,6 +128,6 @@ class HttpxAsyncHttpClient(AsyncHttpClient):
145
128
  await es.aclose()
146
129
  raise HttpClientError from e
147
130
 
148
- except Exception:
131
+ except BaseException:
149
132
  await es.aclose()
150
133
  raise
@@ -3,9 +3,9 @@
3
3
  import abc
4
4
  import contextlib
5
5
  import dataclasses as dc
6
- import io
7
6
  import typing as ta
8
7
 
8
+ from ...io.readers import BufferedBytesReader
9
9
  from ...lite.abstract import Abstract
10
10
  from ...lite.dataclasses import dataclass_shallow_asdict
11
11
  from .base import BaseHttpClient
@@ -27,25 +27,19 @@ HttpClientT = ta.TypeVar('HttpClientT', bound='HttpClient')
27
27
  @ta.final
28
28
  @dc.dataclass(frozen=True) # kw_only=True
29
29
  class StreamHttpResponse(BaseHttpResponse):
30
- class Stream(ta.Protocol):
31
- def read1(self, /, n: int = -1) -> bytes: ...
30
+ _stream: ta.Optional[BufferedBytesReader] = None
32
31
 
33
- @ta.final
34
- class _NullStream:
35
- def read1(self, /, n: int = -1) -> bytes:
36
- raise TypeError
37
-
38
- stream: Stream = _NullStream()
32
+ @property
33
+ def stream(self) -> 'BufferedBytesReader':
34
+ if (st := self._stream) is None:
35
+ raise TypeError('No data')
36
+ return st
39
37
 
40
38
  @property
41
39
  def has_data(self) -> bool:
42
- return not isinstance(self.stream, StreamHttpResponse._NullStream)
40
+ return self._stream is not None
43
41
 
44
- def read_all(self) -> bytes:
45
- buf = io.BytesIO()
46
- while (b := self.stream.read1()):
47
- buf.write(b)
48
- return buf.getvalue()
42
+ #
49
43
 
50
44
  _closer: ta.Optional[ta.Callable[[], None]] = None
51
45
 
@@ -94,8 +88,8 @@ def read_http_client_response(resp: BaseHttpResponse) -> HttpResponse:
94
88
 
95
89
  elif isinstance(resp, StreamHttpResponse):
96
90
  return HttpResponse(**{
97
- **{k: v for k, v in dataclass_shallow_asdict(resp).items() if k not in ('stream', '_closer')},
98
- **({'data': resp.read_all()} if resp.has_data else {}),
91
+ **{k: v for k, v in dataclass_shallow_asdict(resp).items() if k not in ('_stream', '_closer')},
92
+ **({'data': resp.stream.readall()} if resp.has_data else {}),
99
93
  })
100
94
 
101
95
  else:
@@ -23,9 +23,15 @@ class SyncAsyncHttpClient(AsyncHttpClient):
23
23
  class _StreamAdapter:
24
24
  ul: StreamHttpResponse
25
25
 
26
- async def read1(self, /, n: int = -1) -> bytes:
26
+ async def read1(self, n: int = -1, /) -> bytes:
27
27
  return self.ul.stream.read1(n)
28
28
 
29
+ async def read(self, n: int = -1, /) -> bytes:
30
+ return self.ul.stream.read(n)
31
+
32
+ async def readall(self) -> bytes:
33
+ return self.ul.stream.readall()
34
+
29
35
  async def close(self) -> None:
30
36
  self.ul.close()
31
37
 
@@ -37,7 +43,7 @@ class SyncAsyncHttpClient(AsyncHttpClient):
37
43
  request=req,
38
44
  underlying=resp,
39
45
  **(dict( # type: ignore
40
- stream=(adapter := self._StreamAdapter(resp)),
46
+ _stream=(adapter := self._StreamAdapter(resp)),
41
47
  _closer=adapter.close,
42
48
  ) if resp.has_data else {}),
43
49
  )
@@ -5,6 +5,7 @@ import typing as ta
5
5
  import urllib.error
6
6
  import urllib.request
7
7
 
8
+ from ...io.buffers import ReadableListBuffer
8
9
  from ..headers import HttpHeaders
9
10
  from .base import DEFAULT_ENCODING
10
11
  from .base import HttpClientContext
@@ -72,7 +73,7 @@ class UrllibHttpClient(HttpClient):
72
73
  headers=HttpHeaders(resp.headers.items()),
73
74
  request=req,
74
75
  underlying=resp,
75
- stream=resp,
76
+ _stream=ReadableListBuffer().new_buffered_reader(resp),
76
77
  _closer=resp.close,
77
78
  )
78
79