tokenator 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl

@@ -1,78 +0,0 @@
- import logging
- from typing import AsyncIterator, Callable, Generic, List, Optional, TypeVar
-
- from openai import AsyncStream, AsyncOpenAI
- from openai.types.chat import ChatCompletionChunk
-
- logger = logging.getLogger(__name__)
-
- _T = TypeVar("_T") # or you might specifically do _T = ChatCompletionChunk
-
-
- class AsyncStreamInterceptor(AsyncStream[_T]):
-     """
-     A wrapper around openai.AsyncStream that delegates all functionality
-     to the 'base_stream' but intercepts each chunk to handle usage or
-     logging logic. This preserves .response and other methods.
-
-     You can store aggregated usage in a local list and process it when
-     the stream ends (StopAsyncIteration).
-     """
-
-     def __init__(
-         self,
-         base_stream: AsyncStream[_T],
-         usage_callback: Optional[Callable[[List[_T]], None]] = None,
-     ):
-         # We do NOT call super().__init__() because openai.AsyncStream
-         # expects constructor parameters we don't want to re-initialize.
-         # Instead, we just store the base_stream and delegate everything to it.
-         self._base_stream = base_stream
-         self._usage_callback = usage_callback
-         self._chunks: List[_T] = []
-
-     @property
-     def response(self):
-         """Expose the original stream's 'response' so user code can do stream.response, etc."""
-         return self._base_stream.response
-
-     def __aiter__(self) -> AsyncIterator[_T]:
-         """
-         Called when we do 'async for chunk in wrapped_stream:'
-         We simply return 'self'. Then __anext__ does the rest.
-         """
-         return self
-
-     async def __anext__(self) -> _T:
-         """
-         Intercept iteration. We pull the next chunk from the base_stream.
-         If it's the end, do any final usage logging, then raise StopAsyncIteration.
-         Otherwise, we can accumulate usage info or do whatever we need with the chunk.
-         """
-         try:
-             chunk = await self._base_stream.__anext__()
-         except StopAsyncIteration:
-             # Once the base stream is fully consumed, we can do final usage/logging.
-             if self._usage_callback and self._chunks:
-                 self._usage_callback(self._chunks)
-             raise
-
-         # Intercept each chunk
-         self._chunks.append(chunk)
-         return chunk
-
-     async def __aenter__(self) -> "AsyncStreamInterceptor[_T]":
-         """Support async with ... : usage."""
-         await self._base_stream.__aenter__()
-         return self
-
-     async def __aexit__(self, exc_type, exc_val, exc_tb):
-         """
-         Ensure we propagate __aexit__ to the base stream,
-         so connections are properly closed.
-         """
-         return await self._base_stream.__aexit__(exc_type, exc_val, exc_tb)
-
-     async def close(self) -> None:
-         """Delegate close to the base_stream."""
-         await self._base_stream.close()
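
For context on how the removed interceptor above was intended to be used, here is a minimal, hypothetical sketch of wrapping an AsyncOpenAI streaming call with it. The log_usage callback and the model name are illustrative placeholders, not tokenator's actual integration, which lives in tokenator/openai/client_openai.py and is not shown in this diff.

import asyncio

from openai import AsyncOpenAI
from openai.types.chat import ChatCompletionChunk


def log_usage(chunks: list[ChatCompletionChunk]) -> None:
    # With stream_options={"include_usage": True}, the final chunk carries usage.
    usage = chunks[-1].usage if chunks else None
    print(f"collected {len(chunks)} chunks, usage: {usage}")


async def main() -> None:
    client = AsyncOpenAI()
    base_stream = await client.chat.completions.create(
        model="gpt-4o-mini",  # placeholder model name
        messages=[{"role": "user", "content": "Say hi"}],
        stream=True,
        stream_options={"include_usage": True},
    )
    stream = AsyncStreamInterceptor(base_stream, usage_callback=log_usage)
    async with stream:
        async for chunk in stream:
            # Each chunk is recorded by the interceptor before being yielded here.
            if chunk.choices and chunk.choices[0].delta.content:
                print(chunk.choices[0].delta.content, end="")


asyncio.run(main())
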
@@ -1,18 +0,0 @@
- tokenator/__init__.py,sha256=mYwK5EJTlbh_7WvylzxXcL-yzWe_fESSL6FLrlY1qck,583
- tokenator/base_wrapper.py,sha256=vSu_pStKYulho7_5g0jMCNf84KRxC4kTKep0v8YE61M,2377
- tokenator/client_anthropic.py,sha256=1ejWIZBxtk-mWTVaKWeMUvS2hZ_Dn-vNKYa3yopdjAU,6714
- tokenator/create_migrations.py,sha256=n1OVbWrdwvBdaN-Aqqt1gLCPQidfoQfeJtGsab_epGk,746
- tokenator/migrations/env.py,sha256=LR_hONDa8Saiq9CyNUpH8kZCi5PtXLaDlfABs_CePkk,1415
- tokenator/migrations/script.py.mako,sha256=nJL-tbLQE0Qy4P9S4r4ntNAcikPtoFUlvXe6xvm9ot8,635
- tokenator/migrations/versions/f6f1f2437513_initial_migration.py,sha256=DvHcjnREmUHZVX9q1e6PS4wNK_d4qGw-8pz0eS4_3mE,1860
- tokenator/migrations.py,sha256=BFgZRsdIx-Qs_WwDaH6cyi2124mLf5hA8VrIlW7f7Mg,1134
- tokenator/models.py,sha256=EprE_MMJxDS-YXlcIQLZzfekH7xTYbeOC3bx3B2osVw,1171
- tokenator/openai/AsyncStreamInterceptor.py,sha256=estfEFBFyo5BWqTNwHlCZ-wE0dRjtGeyQ0ihBeW3jrU,2842
- tokenator/openai/client_openai.py,sha256=q-0abTq54zRORPLeushdHx1UYq-hOAlp6qY8wAOP2GQ,6682
- tokenator/schemas.py,sha256=V7NYfY9eZvH3J6uOwXJz4dSAU6WYzINRnfFi1wWsTcc,2280
- tokenator/usage.py,sha256=aHjGwzDzaiVznahNk5HqVyk3IxDo5FtFVfOUCeE7DZ4,7833
- tokenator/utils.py,sha256=5mDiGHgt4koCY0onHwkRjwZIuAgP6QvrDZCwD20Sdk8,1969
- tokenator-0.1.9.dist-info/LICENSE,sha256=wdG-B6-ODk8RQ4jq5uXSn0w1UWTzCH_MMyvh7AwtGns,1074
- tokenator-0.1.9.dist-info/METADATA,sha256=A7x7gEjbTwOBoR7mxGiHKiZVvKL8ZD6ecL7Wd0y6jfM,3093
- tokenator-0.1.9.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- tokenator-0.1.9.dist-info/RECORD,,