isaacus 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
isaacus/_streaming.py ADDED
@@ -0,0 +1,333 @@
1
+ # Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import inspect
6
+ from types import TracebackType
7
+ from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast
8
+ from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable
9
+
10
+ import httpx
11
+
12
+ from ._utils import extract_type_var_from_base
13
+
14
+ if TYPE_CHECKING:
15
+ from ._client import Isaacus, AsyncIsaacus
16
+
17
+
18
+ _T = TypeVar("_T")
19
+
20
+
21
class Stream(Generic[_T]):
    """Synchronous iterator over a server-sent-events response.

    Each SSE payload is JSON-decoded and run through the client's
    response-processing hook to produce an instance of ``_T``.
    """

    response: httpx.Response

    # Decoder supplied by the client; turns raw bytes into `ServerSentEvent`s.
    _decoder: SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: Isaacus,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        self._iterator = self.__stream__()

    def __next__(self) -> _T:
        return next(self._iterator)

    def __iter__(self) -> Iterator[_T]:
        yield from self._iterator

    def _iter_events(self) -> Iterator[ServerSentEvent]:
        # Feed the raw response bytes through the SSE decoder.
        for event in self._decoder.iter_bytes(self.response.iter_bytes()):
            yield event

    def __stream__(self) -> Iterator[_T]:
        target_type = cast(Any, self._cast_to)
        response = self.response
        to_model = self._client._process_response_data
        events = self._iter_events()

        for event in events:
            yield to_model(data=event.json(), cast_to=target_type, response=response)

        # Drain anything left so the connection can be released cleanly.
        for _event in events:
            pass

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.close()

    def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        self.response.close()
82
+
83
+
84
class AsyncStream(Generic[_T]):
    """Asynchronous iterator over a server-sent-events response.

    Mirrors `Stream` but consumes the response via the async httpx API.
    """

    response: httpx.Response

    # Decoder supplied by the client; turns raw bytes into `ServerSentEvent`s.
    _decoder: SSEDecoder | SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: AsyncIsaacus,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        self._iterator = self.__stream__()

    async def __anext__(self) -> _T:
        return await self._iterator.__anext__()

    async def __aiter__(self) -> AsyncIterator[_T]:
        async for item in self._iterator:
            yield item

    async def _iter_events(self) -> AsyncIterator[ServerSentEvent]:
        # Feed the raw response bytes through the SSE decoder.
        async for event in self._decoder.aiter_bytes(self.response.aiter_bytes()):
            yield event

    async def __stream__(self) -> AsyncIterator[_T]:
        target_type = cast(Any, self._cast_to)
        response = self.response
        to_model = self._client._process_response_data
        events = self._iter_events()

        async for event in events:
            yield to_model(data=event.json(), cast_to=target_type, response=response)

        # Drain anything left so the connection can be released cleanly.
        async for _event in events:
            pass

    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.close()

    async def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        await self.response.aclose()
146
+
147
+
148
+ class ServerSentEvent:
149
+ def __init__(
150
+ self,
151
+ *,
152
+ event: str | None = None,
153
+ data: str | None = None,
154
+ id: str | None = None,
155
+ retry: int | None = None,
156
+ ) -> None:
157
+ if data is None:
158
+ data = ""
159
+
160
+ self._id = id
161
+ self._data = data
162
+ self._event = event or None
163
+ self._retry = retry
164
+
165
+ @property
166
+ def event(self) -> str | None:
167
+ return self._event
168
+
169
+ @property
170
+ def id(self) -> str | None:
171
+ return self._id
172
+
173
+ @property
174
+ def retry(self) -> int | None:
175
+ return self._retry
176
+
177
+ @property
178
+ def data(self) -> str:
179
+ return self._data
180
+
181
+ def json(self) -> Any:
182
+ return json.loads(self.data)
183
+
184
+ @override
185
+ def __repr__(self) -> str:
186
+ return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})"
187
+
188
+
189
class SSEDecoder:
    """Stateful decoder turning a raw SSE byte stream into `ServerSentEvent`s.

    Field lines are accumulated until a blank line terminates the event, per
    the WHATWG server-sent-events specification. Instances are not thread-safe;
    use one decoder per stream.
    """

    _data: list[str]
    _event: str | None
    _retry: int | None
    _last_event_id: str | None

    def __init__(self) -> None:
        # Per-event state starts empty; `_last_event_id` persists across events.
        self._event = None
        self._data = []
        self._last_event_id = None
        self._retry = None

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        for chunk in self._iter_chunks(iterator):
            # Split before decoding so splitlines() only uses \r and \n
            for raw_line in chunk.splitlines():
                line = raw_line.decode("utf-8")
                sse = self.decode(line)
                if sse:
                    yield sse

    def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        data = b""
        for chunk in iterator:
            # Re-split on line boundaries: network chunks may end mid-line.
            for line in chunk.splitlines(keepends=True):
                data += line
                # A doubled newline (any of the three SSE line-ending styles)
                # terminates an event, so emit the buffered chunk.
                if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield data
                    data = b""
        if data:
            # Trailing partial event with no terminating blank line.
            yield data

    async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        async for chunk in self._aiter_chunks(iterator):
            # Split before decoding so splitlines() only uses \r and \n
            for raw_line in chunk.splitlines():
                line = raw_line.decode("utf-8")
                sse = self.decode(line)
                if sse:
                    yield sse

    async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        data = b""
        async for chunk in iterator:
            # Re-split on line boundaries: network chunks may end mid-line.
            for line in chunk.splitlines(keepends=True):
                data += line
                # A doubled newline terminates an event, so emit the buffered chunk.
                if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield data
                    data = b""
        if data:
            # Trailing partial event with no terminating blank line.
            yield data

    def decode(self, line: str) -> ServerSentEvent | None:
        """Process one decoded line; return an event when a blank line completes one.

        Returns `None` for field/comment lines and for blank lines with no
        accumulated state.
        """
        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501

        if not line:
            # Blank line: dispatch the accumulated event, unless nothing was set.
            if not self._event and not self._data and not self._last_event_id and self._retry is None:
                return None

            sse = ServerSentEvent(
                event=self._event,
                data="\n".join(self._data),
                id=self._last_event_id,
                retry=self._retry,
            )

            # NOTE: as per the SSE spec, do not reset last_event_id.
            self._event = None
            self._data = []
            self._retry = None

            return sse

        if line.startswith(":"):
            # Comment line: ignored.
            return None

        fieldname, _, value = line.partition(":")

        # A single leading space after the colon is not part of the value.
        if value.startswith(" "):
            value = value[1:]

        if fieldname == "event":
            self._event = value
        elif fieldname == "data":
            self._data.append(value)
        elif fieldname == "id":
            if "\0" in value:
                # Per the spec, ids containing NULL are ignored.
                pass
            else:
                self._last_event_id = value
        elif fieldname == "retry":
            try:
                self._retry = int(value)
            except (TypeError, ValueError):
                # Non-integer retry values are ignored per the spec.
                pass
        else:
            pass  # Field is ignored.

        return None
292
+
293
+
294
@runtime_checkable
class SSEBytesDecoder(Protocol):
    """Structural interface for decoders that turn raw response bytes into SSE events."""

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...

    def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...
303
+
304
+
305
def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]:
    """TypeGuard reporting whether `typ` is (a parameterisation or subclass of) `Stream`/`AsyncStream`."""
    # Unwrap generic aliases such as `Stream[bytes]` to the underlying class.
    base = get_origin(typ) or typ
    if not inspect.isclass(base):
        return False
    return issubclass(base, (Stream, AsyncStream))
309
+
310
+
311
def extract_stream_chunk_type(
    stream_cls: type,
    *,
    failure_message: str | None = None,
) -> type:
    """Resolve the chunk type `T` from a `Stream[T]` / `AsyncStream[T]` type.

    Concrete subclasses are handled too, e.g.
    ```py
    class MyStream(Stream[bytes]):
        ...

    extract_stream_chunk_type(MyStream) -> bytes
    ```
    """
    # Imported lazily to avoid a circular import with `_base_client`.
    from ._base_client import Stream, AsyncStream

    stream_bases = cast("tuple[type, ...]", (Stream, AsyncStream))
    return extract_type_var_from_base(
        stream_cls,
        index=0,
        generic_bases=stream_bases,
        failure_message=failure_message,
    )
isaacus/_types.py ADDED
@@ -0,0 +1,217 @@
1
+ from __future__ import annotations
2
+
3
+ from os import PathLike
4
+ from typing import (
5
+ IO,
6
+ TYPE_CHECKING,
7
+ Any,
8
+ Dict,
9
+ List,
10
+ Type,
11
+ Tuple,
12
+ Union,
13
+ Mapping,
14
+ TypeVar,
15
+ Callable,
16
+ Optional,
17
+ Sequence,
18
+ )
19
+ from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
20
+
21
+ import httpx
22
+ import pydantic
23
+ from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport
24
+
25
+ if TYPE_CHECKING:
26
+ from ._models import BaseModel
27
+ from ._response import APIResponse, AsyncAPIResponse
28
+
29
# Convenience aliases over httpx transport types.
Transport = BaseTransport
AsyncTransport = AsyncBaseTransport
# Query string parameters and request body types.
Query = Mapping[str, object]
Body = object
AnyMapping = Mapping[str, object]
ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
_T = TypeVar("_T")


# Approximates httpx internal ProxiesTypes and RequestFiles types
# while adding support for `PathLike` instances
ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]]
ProxiesTypes = Union[str, Proxy, ProxiesDict]
if TYPE_CHECKING:
    Base64FileInput = Union[IO[bytes], PathLike[str]]
    FileContent = Union[IO[bytes], bytes, PathLike[str]]
else:
    Base64FileInput = Union[IO[bytes], PathLike]
    FileContent = Union[IO[bytes], bytes, PathLike]  # PathLike is not subscriptable in Python 3.8.
FileTypes = Union[
    # file (or bytes)
    FileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], FileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], FileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
]
RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]

# duplicate of the above but without our custom file support
HttpxFileContent = Union[IO[bytes], bytes]
HttpxFileTypes = Union[
    # file (or bytes)
    HttpxFileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], HttpxFileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], HttpxFileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]],
]
HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]]

# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT
# where ResponseT includes `None`. In order to support directly
# passing `None`, overloads would have to be defined for every
# method that uses `ResponseT` which would lead to an unacceptable
# amount of code duplication and make it unreadable. See _base_client.py
# for example usage.
#
# This unfortunately means that you will either have
# to import this type and pass it explicitly:
#
# from isaacus import NoneType
# client.get('/foo', cast_to=NoneType)
#
# or build it yourself:
#
# client.get('/foo', cast_to=type(None))
if TYPE_CHECKING:
    NoneType: Type[None]
else:
    NoneType = type(None)
94
+
95
+
96
class RequestOptions(TypedDict, total=False):
    """Per-request overrides accepted by the client's request methods; all keys optional."""

    headers: Headers
    max_retries: int
    timeout: float | Timeout | None
    params: Query
    # Extra key/value pairs merged into the JSON request body.
    extra_json: AnyMapping
    idempotency_key: str
103
+
104
+
105
+ # Sentinel class used until PEP 0661 is accepted
106
+ class NotGiven:
107
+ """
108
+ A sentinel singleton class used to distinguish omitted keyword arguments
109
+ from those passed in with the value None (which may have different behavior).
110
+
111
+ For example:
112
+
113
+ ```py
114
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
115
+
116
+
117
+ get(timeout=1) # 1s timeout
118
+ get(timeout=None) # No timeout
119
+ get() # Default timeout behavior, which may not be statically known at the method definition.
120
+ ```
121
+ """
122
+
123
+ def __bool__(self) -> Literal[False]:
124
+ return False
125
+
126
+ @override
127
+ def __repr__(self) -> str:
128
+ return "NOT_GIVEN"
129
+
130
+
131
# `_T | NotGiven`: the type of a parameter that may be omitted entirely.
NotGivenOr = Union[_T, NotGiven]
# Shared singleton used as the default for omittable parameters.
NOT_GIVEN = NotGiven()
133
+
134
+
135
class Omit:
    """Sentinel marking a default value that must be removed outright.

    `None` is not an appropriate substitute in cases like headers, where
    sending `None` differs from not sending the header at all:

    ```py
    # as the default `Content-Type` header is `application/json` that will be sent
    client.post("/upload/files", files={"file": b"my raw file content"})

    # you can't explicitly override the header as it has to be dynamically generated
    # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
    client.post(..., headers={"Content-Type": "multipart/form-data"})

    # instead you can remove the default `application/json` header by passing Omit
    client.post(..., headers={"Content-Type": Omit()})
    ```
    """

    def __bool__(self) -> Literal[False]:
        # Always falsy so omitted values are skipped by truthiness checks.
        return False
154
+
155
+
156
@runtime_checkable
class ModelBuilderProtocol(Protocol):
    """Structural type for models that construct themselves from a raw HTTP response."""

    @classmethod
    def build(
        cls: type[_T],
        *,
        response: Response,
        data: object,
    ) -> _T: ...
165
+
166
+
167
# Header values may be `Omit()` to explicitly strip a default header.
Headers = Mapping[str, Union[str, Omit]]


class HeadersLikeProtocol(Protocol):
    """Anything exposing `.get(key) -> str | None` (e.g. `httpx.Headers`)."""

    def get(self, __key: str) -> str | None: ...


# Accepted header inputs: a plain mapping or a headers-like object.
HeadersLike = Union[Headers, HeadersLikeProtocol]
175
+
176
# The set of shapes a response may be cast to; used by the client's request methods.
ResponseT = TypeVar(
    "ResponseT",
    bound=Union[
        object,
        str,
        None,
        "BaseModel",
        List[Any],
        Dict[str, Any],
        Response,
        ModelBuilderProtocol,
        "APIResponse[Any]",
        "AsyncAPIResponse[Any]",
    ],
)

StrBytesIntFloat = Union[str, bytes, int, float]

# Note: copied from Pydantic
# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79
# Recursive include/exclude specification for model serialization.
IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]]

# Hook applied to a parsed response body before it is returned to the caller.
PostParser = Callable[[Any], Any]
199
+
200
+
201
@runtime_checkable
class InheritsGeneric(Protocol):
    """Represents a type that has inherited from `Generic`

    The `__orig_bases__` property can be used to determine the resolved
    type variable for a given base class.
    """

    # e.g. for `class MyStream(Stream[bytes])` this holds `(Stream[bytes],)`.
    __orig_bases__: tuple[_GenericAlias]
210
+
211
+
212
class _GenericAlias(Protocol):
    """Structural stand-in for a parameterised generic alias (e.g. `Stream[bytes]`)."""

    # The unparameterised class the alias was built from.
    __origin__: type[object]
214
+
215
+
216
class HttpxSendArgs(TypedDict, total=False):
    """Extra keyword arguments forwarded to `httpx` when sending a request."""

    auth: httpx.Auth
@@ -0,0 +1,57 @@
1
+ from ._sync import asyncify as asyncify
2
+ from ._proxy import LazyProxy as LazyProxy
3
+ from ._utils import (
4
+ flatten as flatten,
5
+ is_dict as is_dict,
6
+ is_list as is_list,
7
+ is_given as is_given,
8
+ is_tuple as is_tuple,
9
+ json_safe as json_safe,
10
+ lru_cache as lru_cache,
11
+ is_mapping as is_mapping,
12
+ is_tuple_t as is_tuple_t,
13
+ parse_date as parse_date,
14
+ is_iterable as is_iterable,
15
+ is_sequence as is_sequence,
16
+ coerce_float as coerce_float,
17
+ is_mapping_t as is_mapping_t,
18
+ removeprefix as removeprefix,
19
+ removesuffix as removesuffix,
20
+ extract_files as extract_files,
21
+ is_sequence_t as is_sequence_t,
22
+ required_args as required_args,
23
+ coerce_boolean as coerce_boolean,
24
+ coerce_integer as coerce_integer,
25
+ file_from_path as file_from_path,
26
+ parse_datetime as parse_datetime,
27
+ strip_not_given as strip_not_given,
28
+ deepcopy_minimal as deepcopy_minimal,
29
+ get_async_library as get_async_library,
30
+ maybe_coerce_float as maybe_coerce_float,
31
+ get_required_header as get_required_header,
32
+ maybe_coerce_boolean as maybe_coerce_boolean,
33
+ maybe_coerce_integer as maybe_coerce_integer,
34
+ )
35
+ from ._typing import (
36
+ is_list_type as is_list_type,
37
+ is_union_type as is_union_type,
38
+ extract_type_arg as extract_type_arg,
39
+ is_iterable_type as is_iterable_type,
40
+ is_required_type as is_required_type,
41
+ is_annotated_type as is_annotated_type,
42
+ is_type_alias_type as is_type_alias_type,
43
+ strip_annotated_type as strip_annotated_type,
44
+ extract_type_var_from_base as extract_type_var_from_base,
45
+ )
46
+ from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
47
+ from ._transform import (
48
+ PropertyInfo as PropertyInfo,
49
+ transform as transform,
50
+ async_transform as async_transform,
51
+ maybe_transform as maybe_transform,
52
+ async_maybe_transform as async_maybe_transform,
53
+ )
54
+ from ._reflection import (
55
+ function_has_argument as function_has_argument,
56
+ assert_signatures_in_sync as assert_signatures_in_sync,
57
+ )
@@ -0,0 +1,25 @@
1
+ import os
2
+ import logging
3
+
4
# Package logger, plus the httpx logger so HTTP traffic can be surfaced too.
logger: logging.Logger = logging.getLogger("isaacus")
httpx_logger: logging.Logger = logging.getLogger("httpx")
6
+
7
+
8
def _basic_config() -> None:
    """Install the minimal root-logger configuration used for SDK log output."""
    # e.g. [2023-10-05 14:12:26 - isaacus._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK"
    log_format = "[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s"
    date_format = "%Y-%m-%d %H:%M:%S"
    logging.basicConfig(format=log_format, datefmt=date_format)
14
+
15
+
16
def setup_logging() -> None:
    """Enable SDK logging based on the `ISAACUS_LOG` environment variable.

    `debug` or `info` turns on the corresponding level for both the package
    logger and the httpx logger; any other value leaves logging untouched.
    """
    level_by_name = {"debug": logging.DEBUG, "info": logging.INFO}
    level = level_by_name.get(os.environ.get("ISAACUS_LOG"))
    if level is None:
        return
    _basic_config()
    logger.setLevel(level)
    httpx_logger.setLevel(level)
@@ -0,0 +1,62 @@
1
+ from __future__ import annotations
2
+
3
+ from abc import ABC, abstractmethod
4
+ from typing import Generic, TypeVar, Iterable, cast
5
+ from typing_extensions import override
6
+
7
+ T = TypeVar("T")
8
+
9
+
10
+ class LazyProxy(Generic[T], ABC):
11
+ """Implements data methods to pretend that an instance is another instance.
12
+
13
+ This includes forwarding attribute access and other methods.
14
+ """
15
+
16
+ # Note: we have to special case proxies that themselves return proxies
17
+ # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz`
18
+
19
+ def __getattr__(self, attr: str) -> object:
20
+ proxied = self.__get_proxied__()
21
+ if isinstance(proxied, LazyProxy):
22
+ return proxied # pyright: ignore
23
+ return getattr(proxied, attr)
24
+
25
+ @override
26
+ def __repr__(self) -> str:
27
+ proxied = self.__get_proxied__()
28
+ if isinstance(proxied, LazyProxy):
29
+ return proxied.__class__.__name__
30
+ return repr(self.__get_proxied__())
31
+
32
+ @override
33
+ def __str__(self) -> str:
34
+ proxied = self.__get_proxied__()
35
+ if isinstance(proxied, LazyProxy):
36
+ return proxied.__class__.__name__
37
+ return str(proxied)
38
+
39
+ @override
40
+ def __dir__(self) -> Iterable[str]:
41
+ proxied = self.__get_proxied__()
42
+ if isinstance(proxied, LazyProxy):
43
+ return []
44
+ return proxied.__dir__()
45
+
46
+ @property # type: ignore
47
+ @override
48
+ def __class__(self) -> type: # pyright: ignore
49
+ proxied = self.__get_proxied__()
50
+ if issubclass(type(proxied), LazyProxy):
51
+ return type(proxied)
52
+ return proxied.__class__
53
+
54
+ def __get_proxied__(self) -> T:
55
+ return self.__load__()
56
+
57
+ def __as_proxied__(self) -> T:
58
+ """Helper method that returns the current proxy, typed as the loaded object"""
59
+ return cast(T, self)
60
+
61
+ @abstractmethod
62
+ def __load__(self) -> T: ...