unique_toolkit 0.5.5 → 0.5.7 (py3-none-any.whl)
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
- unique_toolkit/_common/_base_service.py +10 -0
- unique_toolkit/app/performance/async_tasks.py +70 -0
- unique_toolkit/app/performance/async_wrapper.py +19 -8
- unique_toolkit/chat/service.py +106 -103
- unique_toolkit/chat/state.py +0 -10
- unique_toolkit/content/service.py +89 -77
- unique_toolkit/embedding/service.py +31 -26
- unique_toolkit/language_model/service.py +96 -96
- {unique_toolkit-0.5.5.dist-info → unique_toolkit-0.5.7.dist-info}/METADATA +12 -4
- {unique_toolkit-0.5.5.dist-info → unique_toolkit-0.5.7.dist-info}/RECORD +12 -11
- unique_toolkit/app/performance/async_executor.py +0 -186
- {unique_toolkit-0.5.5.dist-info → unique_toolkit-0.5.7.dist-info}/LICENSE +0 -0
- {unique_toolkit-0.5.5.dist-info → unique_toolkit-0.5.7.dist-info}/WHEEL +0 -0
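The most significant structural change in the list above is in unique_toolkit/app/performance: the 186-line async_executor.py module was removed (its full contents appear in the last hunk below) in favour of a new, much smaller async_tasks.py and an expanded async_wrapper.py. The core technique of the removed module was bounding asyncio.gather with an asyncio.Semaphore. A minimal standalone sketch of that pattern follows; the helper name gather_bounded is illustrative only and is not the API of the new async_tasks.py, whose contents are not visible in this diff.

```python
import asyncio
from typing import Awaitable, Sequence, TypeVar, Union

T = TypeVar("T")
Result = Union[T, BaseException]


async def gather_bounded(tasks: Sequence[Awaitable[T]], max_tasks: int) -> list[Result]:
    """Run awaitables concurrently, at most `max_tasks` at a time (illustrative helper)."""
    sem = asyncio.Semaphore(max_tasks)

    async def bounded(task: Awaitable[T]) -> T:
        async with sem:
            return await task

    # return_exceptions=True mirrors the removed executor: failures come back
    # in the result list instead of cancelling the remaining tasks.
    return await asyncio.gather(*(bounded(t) for t in tasks), return_exceptions=True)
```

Minus the per-task logging and the pluggable context-manager hook, this is what the removed AsyncExecutor.run_async_tasks did.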
{unique_toolkit-0.5.5.dist-info → unique_toolkit-0.5.7.dist-info}/RECORD
@@ -1,24 +1,25 @@
 unique_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+unique_toolkit/_common/_base_service.py,sha256=SDhO69RA6E1ydUuV9N99RKreHZbQmMJj6SCdrYiakhU,283
 unique_toolkit/app/init_logging.py,sha256=Sh26SRxOj8i8dzobKhYha2lLrkrMTHfB1V4jR3h23gQ,678
 unique_toolkit/app/init_sdk.py,sha256=Nv4Now4pMfM0AgRhbtatLpm_39rKxn0WmRLwmPhRl-8,1285
-unique_toolkit/app/performance/
-unique_toolkit/app/performance/async_wrapper.py,sha256=
+unique_toolkit/app/performance/async_tasks.py,sha256=H0l3OAcosLwNHZ8d2pd-Di4wHIXfclEvagi5kfqLFPA,1941
+unique_toolkit/app/performance/async_wrapper.py,sha256=yVVcRDkcdyfjsxro-N29SBvi-7773wnfDplef6-y8xw,1077
 unique_toolkit/app/schemas.py,sha256=UAbIkrgdqwWEwKqnJwB4TNQWuJgEsFzDxqJbEC8xcOc,1098
 unique_toolkit/app/verification.py,sha256=UZqTHg3PX_QxMjeLH_BVBYoMVqMnMpeMoqvyTBKDqj8,1996
 unique_toolkit/chat/schemas.py,sha256=TYqrDy96ynP7XpnT_abOJlq6LkqBTdKwEft4YHeEgR8,801
-unique_toolkit/chat/service.py,sha256=
-unique_toolkit/chat/state.py,sha256=
+unique_toolkit/chat/service.py,sha256=jTxFchdjcGp3IQZxWR4x3XTdATxodP0xD4LGJZYltVw,13338
+unique_toolkit/chat/state.py,sha256=qrXkxmYlTM6PRpHX6j_vhJNWkJX4r-xgJ3NvqM80rAY,1441
 unique_toolkit/chat/utils.py,sha256=ihm-wQykBWhB4liR3LnwPVPt_qGW6ETq21Mw4HY0THE,854
 unique_toolkit/content/schemas.py,sha256=b6QiqZUBdYA14_CfJYeXEGSrPmT98n_Q8knugnP20yA,2093
-unique_toolkit/content/service.py,sha256=
+unique_toolkit/content/service.py,sha256=eRoh92c3sCz2Bz77VdzrUTTnJNWFdo0Ta5xZXMqq47U,13327
 unique_toolkit/content/utils.py,sha256=x3ABo8ZCRm3YJAQwDtrr82z77DmW4Mei7KCIITjP0fk,6897
 unique_toolkit/embedding/schemas.py,sha256=1GvKCaSk4jixzVQ2PKq8yDqwGEVY_hWclYtoAr6CC2g,96
-unique_toolkit/embedding/service.py,sha256=
+unique_toolkit/embedding/service.py,sha256=xcU84ANMIebBui_AWBhfN1rqz7evXYRgg_Z-132veNE,2703
 unique_toolkit/language_model/infos.py,sha256=NhAkeW7PyusSIHCMvwRikLlzGG4tOXSLf_Fnq7V9rNE,8881
 unique_toolkit/language_model/schemas.py,sha256=kTGSGT3ygrH3guQELOWpxN4MTgEPuudi-CTvRu-zCcI,4377
-unique_toolkit/language_model/service.py,sha256=
+unique_toolkit/language_model/service.py,sha256=0g0nAGj9Te16YHFhqO9pkszhEwtuZ6bSqor6hi49e6c,10549
 unique_toolkit/language_model/utils.py,sha256=WBPj1XKkDgxy_-T8HCZvsfkkSzj_1w4UZzNmyvdbBLY,1081
-unique_toolkit-0.5.
-unique_toolkit-0.5.
-unique_toolkit-0.5.
-unique_toolkit-0.5.
+unique_toolkit-0.5.7.dist-info/LICENSE,sha256=bIeCWCYuoUU_MzNdg48-ubJSVm7qxakaRbzTiJ5uxrs,1065
+unique_toolkit-0.5.7.dist-info/METADATA,sha256=4UJiE5Qwp0g2FWhtVO-EshsKsN5iuCGjH_wn0zQVahY,8977
+unique_toolkit-0.5.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+unique_toolkit-0.5.7.dist-info/RECORD,,
unique_toolkit/app/performance/async_executor.py
@@ -1,186 +0,0 @@
-import asyncio
-import contextlib
-import logging
-import threading
-import time
-from concurrent.futures import ThreadPoolExecutor
-from math import ceil
-from typing import (
-    AsyncContextManager,
-    Awaitable,
-    Callable,
-    Optional,
-    Sequence,
-    TypeVar,
-    Union,
-)
-
-T = TypeVar("T")
-Result = Union[T, BaseException]
-
-
-class AsyncExecutor:
-    """
-    A class for executing asynchronous tasks concurrently, with optional threading support.
-
-    This class provides methods to run multiple asynchronous tasks in parallel, with
-    the ability to limit the number of concurrent tasks and distribute work across
-    multiple threads if needed.
-
-    Attributes:
-        logger (logging.Logger): Logger instance for recording execution information.
-        context_manager (Callable[[], AsyncContextManager]): A factory function that returns
-            an async context manager to be used for each task execution, e.g., quart.current_app.app_context().
-
-    """
-
-    def __init__(
-        self,
-        logger: Optional[logging.Logger] = None,
-        context_manager: Optional[Callable[[], AsyncContextManager]] = None,
-    ) -> None:
-        self.logger = logger or logging.getLogger(__name__)
-        self.context_manager = context_manager or (lambda: contextlib.nullcontext())
-
-    async def run_async_tasks(
-        self,
-        tasks: Sequence[Awaitable[T]],
-        max_tasks: int,
-    ) -> list[Result]:
-        """
-        Executes the a set of given async tasks and returns the results.
-
-        Args:
-            tasks (list[Awaitable[T]]): list of async callables to execute in parallel.
-            max_tasks (int): Maximum number of tasks for the asyncio Semaphore.
-
-        Returns:
-            list[Result]: list of results from the executed tasks.
-        """
-
-        async def logging_wrapper(task: Awaitable[T], task_id: int) -> Result:
-            thread = threading.current_thread()
-            start_time = time.time()
-
-            self.logger.info(
-                f"Thread {thread.name} (ID: {thread.ident}) starting task {task_id}"
-            )
-
-            try:
-                async with self.context_manager():
-                    result = await task
-                    return result
-            except Exception as e:
-                self.logger.error(
-                    f"Thread {thread.name} (ID: {thread.ident}) - Task {task_id} failed with error: {e}"
-                )
-                return e
-            finally:
-                end_time = time.time()
-                duration = end_time - start_time
-                self.logger.debug(
-                    f"Thread {thread.name} (ID: {thread.ident}) - Task {task_id} finished in {duration:.2f} seconds"
-                )
-
-        sem = asyncio.Semaphore(max_tasks)
-
-        async def sem_task(task: Awaitable[T], task_id: int) -> Result:
-            async with sem:
-                return await logging_wrapper(task, task_id)
-
-        wrapped_tasks: list[Awaitable[Result]] = [
-            sem_task(task, i) for i, task in enumerate(tasks)
-        ]
-
-        results: list[Result] = await asyncio.gather(
-            *wrapped_tasks, return_exceptions=True
-        )
-
-        return results
-
-    async def run_async_tasks_in_threads(
-        self,
-        tasks: Sequence[Awaitable[T]],
-        max_threads: int,
-        max_tasks: int,
-    ) -> list[Result[T]]:
-        """
-        Executes the given async tasks in multiple threads and returns the results.
-
-        Args:
-            tasks (list[Awaitable[T]]): list of async callables to execute in parallel.
-            max_threads (int): Maximum number of threads.
-            max_tasks (int): Maximum number of tasks per thread run in parallel.
-
-        Returns:
-            list[Result]: list of results from the executed tasks.
-        """
-
-        async def run_in_thread(task_chunk: list[Awaitable[T]]) -> list[Result]:
-            loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(loop)
-            async with self.context_manager():
-                return await self.run_async_tasks(task_chunk, max_tasks)
-
-        def thread_worker(
-            task_chunk: list[Awaitable[T]], chunk_id: int
-        ) -> list[Result]:
-            thread = threading.current_thread()
-            self.logger.info(
-                f"Thread {thread.name} (ID: {thread.ident}) starting chunk {chunk_id} with {len(task_chunk)} tasks"
-            )
-
-            start_time = time.time()
-            loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(loop)
-
-            try:
-                results = loop.run_until_complete(run_in_thread(task_chunk))
-                end_time = time.time()
-                duration = end_time - start_time
-                self.logger.info(
-                    f"Thread {thread.name} (ID: {thread.ident}) finished chunk {chunk_id} in {duration:.2f} seconds"
-                )
-                return results
-            except Exception as e:
-                self.logger.error(
-                    f"Thread {thread.name} (ID: {thread.ident}) encountered an error in chunk {chunk_id}: {str(e)}"
-                )
-                raise
-            finally:
-                loop.close()
-
-        start_time = time.time()
-        # Calculate the number of tasks per thread
-        tasks_per_thread: int = ceil(len(tasks) / max_threads)
-
-        # Split tasks into chunks
-        task_chunks: list[Sequence[Awaitable[T]]] = [
-            tasks[i : i + tasks_per_thread]
-            for i in range(0, len(tasks), tasks_per_thread)
-        ]
-
-        self.logger.info(
-            f"Splitting {len(tasks)} tasks into {len(task_chunks)} chunks across {max_threads} threads"
-        )
-
-        # Use ThreadPoolExecutor to manage threads
-        with ThreadPoolExecutor(max_workers=max_threads) as executor:
-            # Submit each chunk of tasks to a thread
-            future_results: list[list[Result]] = list(
-                executor.map(
-                    thread_worker,
-                    task_chunks,
-                    range(len(task_chunks)),  # chunk_id
-                )
-            )
-
-        # Flatten the results from all threads
-        results: list[Result] = [item for sublist in future_results for item in sublist]
-        end_time = time.time()
-        duration = end_time - start_time
-        self.logger.info(
-            f"All threads completed. Total results: {len(results)}. Duration: {duration:.2f} seconds"
-        )
-
-        return results
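For reference, a minimal usage sketch of the class removed above. It runs only against unique_toolkit 0.5.5, where unique_toolkit.app.performance.async_executor still exists; the fetch coroutine is a hypothetical stand-in for real async work such as an API call.

```python
import asyncio
import logging

# Module removed in 0.5.7; this import only resolves on 0.5.5.
from unique_toolkit.app.performance.async_executor import AsyncExecutor


async def fetch(i: int) -> int:
    """Hypothetical task standing in for real async work."""
    await asyncio.sleep(0.1)
    return i * i


async def main() -> None:
    executor = AsyncExecutor(logger=logging.getLogger("demo"))

    # At most 3 of the 10 coroutines run at once (the asyncio.Semaphore inside
    # run_async_tasks); failed tasks are returned as exceptions, not raised.
    results = await executor.run_async_tasks(
        tasks=[fetch(i) for i in range(10)],
        max_tasks=3,
    )
    successes = [r for r in results if not isinstance(r, BaseException)]
    print(successes)


asyncio.run(main())
```

run_async_tasks_in_threads(tasks, max_threads, max_tasks) follows the same calling convention but splits the task list into chunks, drives each chunk on its own thread and event loop via a ThreadPoolExecutor, and flattens the per-thread results.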
File without changes
|
File without changes
|