fabricatio 0.3.13-cp312-cp312-win_amd64.whl → 0.3.14-cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fabricatio/__init__.py +6 -13
- fabricatio/actions/article.py +87 -50
- fabricatio/actions/article_rag.py +59 -68
- fabricatio/actions/output.py +58 -24
- fabricatio/actions/rag.py +2 -3
- fabricatio/capabilities/advanced_judge.py +4 -7
- fabricatio/capabilities/advanced_rag.py +2 -1
- fabricatio/capabilities/censor.py +5 -4
- fabricatio/capabilities/check.py +27 -27
- fabricatio/capabilities/correct.py +22 -22
- fabricatio/capabilities/extract.py +33 -33
- fabricatio/capabilities/persist.py +103 -0
- fabricatio/capabilities/propose.py +2 -2
- fabricatio/capabilities/rag.py +11 -10
- fabricatio/capabilities/rating.py +66 -70
- fabricatio/capabilities/review.py +12 -11
- fabricatio/capabilities/task.py +19 -18
- fabricatio/decorators.py +11 -9
- fabricatio/{core.py → emitter.py} +17 -19
- fabricatio/journal.py +2 -4
- fabricatio/models/action.py +15 -32
- fabricatio/models/extra/aricle_rag.py +13 -8
- fabricatio/models/extra/article_base.py +57 -25
- fabricatio/models/extra/article_essence.py +2 -1
- fabricatio/models/extra/article_main.py +24 -22
- fabricatio/models/extra/article_outline.py +2 -1
- fabricatio/models/extra/article_proposal.py +1 -1
- fabricatio/models/extra/rag.py +2 -2
- fabricatio/models/extra/rule.py +2 -1
- fabricatio/models/generic.py +55 -137
- fabricatio/models/kwargs_types.py +1 -54
- fabricatio/models/role.py +49 -28
- fabricatio/models/task.py +3 -4
- fabricatio/models/tool.py +6 -7
- fabricatio/models/usages.py +146 -149
- fabricatio/parser.py +59 -99
- fabricatio/rust.cp312-win_amd64.pyd +0 -0
- fabricatio/rust.pyi +58 -81
- fabricatio/utils.py +63 -162
- fabricatio-0.3.14.data/scripts/tdown.exe +0 -0
- fabricatio-0.3.14.data/scripts/ttm.exe +0 -0
- {fabricatio-0.3.13.dist-info → fabricatio-0.3.14.dist-info}/METADATA +10 -13
- fabricatio-0.3.14.dist-info/RECORD +64 -0
- {fabricatio-0.3.13.dist-info → fabricatio-0.3.14.dist-info}/WHEEL +1 -1
- fabricatio-0.3.13.data/scripts/tdown.exe +0 -0
- fabricatio-0.3.13.data/scripts/ttm.exe +0 -0
- fabricatio-0.3.13.dist-info/RECORD +0 -63
- {fabricatio-0.3.13.dist-info → fabricatio-0.3.14.dist-info}/licenses/LICENSE +0 -0
fabricatio/models/usages.py
CHANGED
@@ -1,13 +1,25 @@
 """This module contains classes that manage the usage of language models and tools in tasks."""

 import traceback
+from abc import ABC
 from asyncio import gather
 from typing import Callable, Dict, Iterable, List, Literal, Optional, Self, Sequence, Set, Union, Unpack, overload

 import asyncstdlib
-import …
+from fabricatio.decorators import logging_exec_time
+from fabricatio.journal import logger
+from fabricatio.models.generic import ScopedConfig, WithBriefing
+from fabricatio.models.kwargs_types import ChooseKwargs, EmbeddingKwargs, GenerateKwargs, LLMKwargs, ValidateKwargs
+from fabricatio.models.task import Task
+from fabricatio.models.tool import Tool, ToolBox
 from fabricatio.rust import CONFIG, TEMPLATE_MANAGER
-from …
+from fabricatio.utils import first_available, ok
+from litellm import (  # pyright: ignore [reportPrivateImportUsage]
+    RateLimitError,
+    Router,
+    aembedding,
+    stream_chunk_builder,
+)
 from litellm.types.router import Deployment, LiteLLM_Params, ModelInfo
 from litellm.types.utils import (
     Choices,
@@ -20,15 +32,6 @@ from litellm.utils import CustomStreamWrapper, token_counter # pyright: ignore
 from more_itertools import duplicates_everseen
 from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt, PositiveInt

-from fabricatio.decorators import logging_exec_time
-from fabricatio.journal import logger
-from fabricatio.models.generic import ScopedConfig, WithBriefing
-from fabricatio.models.kwargs_types import ChooseKwargs, EmbeddingKwargs, GenerateKwargs, LLMKwargs, ValidateKwargs
-from fabricatio.models.task import Task
-from fabricatio.models.tool import Tool, ToolBox
-from fabricatio.parser import GenericCapture, JsonCapture
-from fabricatio.utils import ok
-
 ROUTER = Router(
     routing_strategy="usage-based-routing-v2",
     default_max_parallel_requests=CONFIG.routing.max_parallel_requests,
@@ -38,7 +41,7 @@ ROUTER = Router(
 )


-class LLMUsage(ScopedConfig):
+class LLMUsage(ScopedConfig, ABC):
     """Class that manages LLM (Large Language Model) usage parameters and methods.

     This class provides methods to deploy LLMs, query them for responses, and handle various configurations
@@ -59,10 +62,10 @@ class LLMUsage(ScopedConfig):

     # noinspection PyTypeChecker,PydanticTypeChecker,t
     async def aquery(
-        …
-        …
-        …
-        …
+        self,
+        messages: List[Dict[str, str]],
+        n: PositiveInt | None = None,
+        **kwargs: Unpack[LLMKwargs],
     ) -> ModelResponse | CustomStreamWrapper:
         """Asynchronously queries the language model to generate a response based on the provided messages and parameters.

@@ -91,7 +94,7 @@ class LLMUsage(ScopedConfig):
                 api_base=ok(
                     self.llm_api_endpoint or CONFIG.llm.api_endpoint,
                     "llm api endpoint is not set at any place",
-                )
+                ),
                 model=m_name,
                 tpm=self.llm_tpm or CONFIG.llm.tpm,
                 rpm=self.llm_rpm or CONFIG.llm.rpm,
@@ -109,27 +112,27 @@ class LLMUsage(ScopedConfig):
             stop=kwargs.get("stop") or self.llm_stop_sign or CONFIG.llm.stop_sign,
             top_p=kwargs.get("top_p") or self.llm_top_p or CONFIG.llm.top_p,
             max_tokens=kwargs.get("max_tokens") or self.llm_max_tokens or CONFIG.llm.max_tokens,
-            stream=…
+            stream=first_available(
+                (kwargs.get("stream"), self.llm_stream, CONFIG.llm.stream), "stream is not set at any place"
+            ),
             cache={
                 "no-cache": kwargs.get("no_cache"),
                 "no-store": kwargs.get("no_store"),
                 "cache-ttl": kwargs.get("cache_ttl"),
                 "s-maxage": kwargs.get("s_maxage"),
             },
-            presence_penalty=kwargs.get("presence_penalty")
-            or self.llm_presence_penalty
-            or CONFIG.llm.presence_penalty,
+            presence_penalty=kwargs.get("presence_penalty") or self.llm_presence_penalty or CONFIG.llm.presence_penalty,
             frequency_penalty=kwargs.get("frequency_penalty")
-            …
-            …
+            or self.llm_frequency_penalty
+            or CONFIG.llm.frequency_penalty,
         )

     async def ainvoke(
-        …
-        …
-        …
-        …
-        …
+        self,
+        question: str,
+        system_message: str = "",
+        n: PositiveInt | None = None,
+        **kwargs: Unpack[LLMKwargs],
     ) -> Sequence[TextChoices | Choices | StreamingChoices]:
         """Asynchronously invokes the language model with a question and optional system message.

@@ -149,54 +152,49 @@ class LLMUsage(ScopedConfig):
         )
         if isinstance(resp, ModelResponse):
             return resp.choices
-        if isinstance(resp, CustomStreamWrapper):
-            …
-            return pack.choices
+        if isinstance(resp, CustomStreamWrapper) and (pack := stream_chunk_builder(await asyncstdlib.list(resp))):
+            return pack.choices
         logger.critical(err := f"Unexpected response type: {type(resp)}")
         raise ValueError(err)

     @overload
     async def aask(
-        …
-        …
-        …
-        …
-    ) -> List[str]:
-        ...
+        self,
+        question: List[str],
+        system_message: List[str],
+        **kwargs: Unpack[LLMKwargs],
+    ) -> List[str]: ...

     @overload
     async def aask(
-        …
-        …
-        …
-        …
-    ) -> List[str]:
-        ...
+        self,
+        question: str,
+        system_message: List[str],
+        **kwargs: Unpack[LLMKwargs],
+    ) -> List[str]: ...

     @overload
     async def aask(
-        …
-        …
-        …
-        …
-    ) -> List[str]:
-        ...
+        self,
+        question: List[str],
+        system_message: Optional[str] = None,
+        **kwargs: Unpack[LLMKwargs],
+    ) -> List[str]: ...

     @overload
     async def aask(
-        …
-        …
-        …
-        …
-    ) -> str:
-        ...
+        self,
+        question: str,
+        system_message: Optional[str] = None,
+        **kwargs: Unpack[LLMKwargs],
+    ) -> str: ...

     @logging_exec_time
     async def aask(
-        …
-        …
-        …
-        …
+        self,
+        question: str | List[str],
+        system_message: Optional[str | List[str]] = None,
+        **kwargs: Unpack[LLMKwargs],
     ) -> str | List[str]:
         """Asynchronously asks the language model a question and returns the response content.

@@ -224,8 +222,7 @@ class LLMUsage(ScopedConfig):
                 res = await gather(*[self.ainvoke(n=1, question=q, system_message=sm, **kwargs) for sm in sm_seq])
                 out = [r[0].message.content for r in res]  # pyright: ignore [reportAttributeAccessIssue]
             case (str(q), str(sm)):
-                out = ((await self.ainvoke(n=1, question=q, system_message=sm, **kwargs))[
-                    0]).message.content  # pyright: ignore [reportAttributeAccessIssue]
+                out = ((await self.ainvoke(n=1, question=q, system_message=sm, **kwargs))[0]).message.content  # pyright: ignore [reportAttributeAccessIssue]
             case _:
                 raise RuntimeError("Should not reach here.")

@@ -237,55 +234,51 @@ class LLMUsage(ScopedConfig):

     @overload
     async def aask_validate[T](
-        …
-        …
-        …
-        …
-        …
-        …
-    ) -> T:
-        ...
+        self,
+        question: str,
+        validator: Callable[[str], T | None],
+        default: T = ...,
+        max_validations: PositiveInt = 2,
+        **kwargs: Unpack[GenerateKwargs],
+    ) -> T: ...

     @overload
     async def aask_validate[T](
-        …
-        …
-        …
-        …
-        …
-        …
-    ) -> List[T]:
-        ...
+        self,
+        question: List[str],
+        validator: Callable[[str], T | None],
+        default: T = ...,
+        max_validations: PositiveInt = 2,
+        **kwargs: Unpack[GenerateKwargs],
+    ) -> List[T]: ...

     @overload
     async def aask_validate[T](
-        …
-        …
-        …
-        …
-        …
-        …
-    ) -> Optional[T]:
-        ...
+        self,
+        question: str,
+        validator: Callable[[str], T | None],
+        default: None = None,
+        max_validations: PositiveInt = 2,
+        **kwargs: Unpack[GenerateKwargs],
+    ) -> Optional[T]: ...

     @overload
     async def aask_validate[T](
-        …
-        …
-        …
-        …
-        …
-        …
-    ) -> List[Optional[T]]:
-        ...
+        self,
+        question: List[str],
+        validator: Callable[[str], T | None],
+        default: None = None,
+        max_validations: PositiveInt = 2,
+        **kwargs: Unpack[GenerateKwargs],
+    ) -> List[Optional[T]]: ...

     async def aask_validate[T](
-        …
-        …
-        …
-        …
-        …
-        …
+        self,
+        question: str | List[str],
+        validator: Callable[[str], T | None],
+        default: Optional[T] = None,
+        max_validations: PositiveInt = 3,
+        **kwargs: Unpack[GenerateKwargs],
     ) -> Optional[T] | List[Optional[T]] | List[T] | T:
         """Asynchronously asks a question and validates the response using a given validator.

@@ -325,7 +318,7 @@ class LLMUsage(ScopedConfig):
         return await (gather(*[_inner(q) for q in question]) if isinstance(question, list) else _inner(question))

     async def alist_str(
-        …
+        self, requirement: str, k: NonNegativeInt = 0, **kwargs: Unpack[ValidateKwargs[List[str]]]
     ) -> Optional[List[str]]:
         """Asynchronously generates a list of strings based on a given requirement.

@@ -337,6 +330,8 @@ class LLMUsage(ScopedConfig):
         Returns:
             Optional[List[str]]: The validated response as a list of strings.
         """
+        from fabricatio.parser import JsonCapture
+
         return await self.aask_validate(
             TEMPLATE_MANAGER.render_template(
                 CONFIG.templates.liststr_template,
@@ -375,9 +370,9 @@ class LLMUsage(ScopedConfig):
             Optional[str]: The validated response as a single string.
         """
         if paths := await self.apathstr(
-            …
-            …
-            …
+            requirement,
+            k=1,
+            **kwargs,
         ):
             return paths.pop()

@@ -393,6 +388,8 @@ class LLMUsage(ScopedConfig):
         Returns:
             Optional[str]: The generated string.
         """
+        from fabricatio.parser import GenericCapture
+
         return await self.aask_validate(  # pyright: ignore [reportReturnType]
             TEMPLATE_MANAGER.render_template(
                 CONFIG.templates.generic_string_template,
@@ -403,11 +400,11 @@ class LLMUsage(ScopedConfig):
         )

     async def achoose[T: WithBriefing](
-        …
-        …
-        …
-        …
-        …
+        self,
+        instruction: str,
+        choices: List[T],
+        k: NonNegativeInt = 0,
+        **kwargs: Unpack[ValidateKwargs[List[T]]],
     ) -> Optional[List[T]]:
         """Asynchronously executes a multi-choice decision-making process, generating a prompt based on the instruction and options, and validates the returned selection results.

@@ -420,6 +417,8 @@ class LLMUsage(ScopedConfig):
         Returns:
             Optional[List[T]]: The final validated selection result list, with element types matching the input `choices`.
         """
+        from fabricatio.parser import JsonCapture
+
         if dup := duplicates_everseen(choices, key=lambda x: x.name):
             logger.error(err := f"Redundant choices: {dup}")
             raise ValueError(err)
@@ -450,10 +449,10 @@ class LLMUsage(ScopedConfig):
         )

     async def apick[T: WithBriefing](
-        …
-        …
-        …
-        …
+        self,
+        instruction: str,
+        choices: List[T],
+        **kwargs: Unpack[ValidateKwargs[List[T]]],
     ) -> T:
         """Asynchronously picks a single choice from a list of options using AI validation.

@@ -478,11 +477,11 @@ class LLMUsage(ScopedConfig):
         )[0]

     async def ajudge(
-        …
-        …
-        …
-        …
-        …
+        self,
+        prompt: str,
+        affirm_case: str = "",
+        deny_case: str = "",
+        **kwargs: Unpack[ValidateKwargs[bool]],
     ) -> Optional[bool]:
         """Asynchronously judges a prompt using AI validation.

@@ -495,6 +494,8 @@ class LLMUsage(ScopedConfig):
         Returns:
             bool: The judgment result (True or False) based on the AI's response.
         """
+        from fabricatio.parser import JsonCapture
+
         return await self.aask_validate(
             question=TEMPLATE_MANAGER.render_template(
                 CONFIG.templates.make_judgment_template,
@@ -505,19 +506,19 @@ class LLMUsage(ScopedConfig):
         )


-class EmbeddingUsage(LLMUsage):
+class EmbeddingUsage(LLMUsage, ABC):
     """A class representing the embedding model.

     This class extends LLMUsage and provides methods to generate embeddings for input text using various models.
     """

     async def aembedding(
-        …
-        …
-        …
-        …
-        …
-        …
+        self,
+        input_text: List[str],
+        model: Optional[str] = None,
+        dimensions: Optional[int] = None,
+        timeout: Optional[PositiveInt] = None,
+        caching: Optional[bool] = False,
     ) -> EmbeddingResponse:
         """Asynchronously generates embeddings for the given input text.

@@ -537,16 +538,16 @@ class EmbeddingUsage(LLMUsage):
             logger.error(err := f"Input text exceeds maximum sequence length 42,336, got {length}.")
             raise ValueError(err)

-        return await …
+        return await aembedding(
             input=input_text,
             caching=caching or self.embedding_caching or CONFIG.embedding.caching,
             dimensions=dimensions or self.embedding_dimensions or CONFIG.embedding.dimensions,
             model=model or self.embedding_model or CONFIG.embedding.model or self.llm_model or CONFIG.llm.model,
             timeout=timeout
-            …
-            …
-            …
-            …
+            or self.embedding_timeout
+            or CONFIG.embedding.timeout
+            or self.llm_timeout
+            or CONFIG.llm.timeout,
             api_key=ok(
                 self.embedding_api_key or CONFIG.embedding.api_key or self.llm_api_key or CONFIG.llm.api_key
             ).get_secret_value(),
@@ -555,22 +556,18 @@ class EmbeddingUsage(LLMUsage):
                 or CONFIG.embedding.api_endpoint
                 or self.llm_api_endpoint
                 or CONFIG.llm.api_endpoint
-            )
-            .unicode_string()
-            .rstrip("/"),
+            ).rstrip("/"),
             # seems embedding function takes no base_url end with a slash
         )

     @overload
-    async def vectorize(self, input_text: List[str], **kwargs: Unpack[EmbeddingKwargs]) -> List[List[float]]:
-        ...
+    async def vectorize(self, input_text: List[str], **kwargs: Unpack[EmbeddingKwargs]) -> List[List[float]]: ...

     @overload
-    async def vectorize(self, input_text: str, **kwargs: Unpack[EmbeddingKwargs]) -> List[float]:
-        ...
+    async def vectorize(self, input_text: str, **kwargs: Unpack[EmbeddingKwargs]) -> List[float]: ...

     async def vectorize(
-        …
+        self, input_text: List[str] | str, **kwargs: Unpack[EmbeddingKwargs]
     ) -> List[List[float]] | List[float]:
         """Asynchronously generates vector embeddings for the given input text.

@@ -587,7 +584,7 @@ class EmbeddingUsage(LLMUsage):
         return [o.get("embedding") for o in (await self.aembedding(input_text, **kwargs)).data]


-class ToolBoxUsage(LLMUsage):
+class ToolBoxUsage(LLMUsage, ABC):
     """A class representing the usage of tools in a task.

     This class extends LLMUsage and provides methods to manage and use toolboxes and tools within tasks.
@@ -606,9 +603,9 @@ class ToolBoxUsage(LLMUsage):
         return [toolbox.name for toolbox in self.toolboxes]

     async def choose_toolboxes(
-        …
-        …
-        …
+        self,
+        task: Task,
+        **kwargs: Unpack[ChooseKwargs[List[ToolBox]]],
     ) -> Optional[List[ToolBox]]:
         """Asynchronously executes a multi-choice decision-making process to choose toolboxes.

@@ -629,10 +626,10 @@ class ToolBoxUsage(LLMUsage):
         )

     async def choose_tools(
-        …
-        …
-        …
-        …
+        self,
+        task: Task,
+        toolbox: ToolBox,
+        **kwargs: Unpack[ChooseKwargs[List[Tool]]],
     ) -> Optional[List[Tool]]:
         """Asynchronously executes a multi-choice decision-making process to choose tools.

@@ -654,10 +651,10 @@ class ToolBoxUsage(LLMUsage):
         )

     async def gather_tools_fine_grind(
-        …
-        …
-        …
-        …
+        self,
+        task: Task,
+        box_choose_kwargs: Optional[ChooseKwargs] = None,
+        tool_choose_kwargs: Optional[ChooseKwargs] = None,
     ) -> List[Tool]:
         """Asynchronously gathers tools based on the provided task and toolbox and tool selection criteria.

(Removed lines shown as "…" were elided by the diff viewer and are not recoverable from this page.)
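One change visible in the rewritten aquery() call site is that the chained `or` fallback for the `stream` flag is replaced with the new `first_available` helper imported from fabricatio.utils. The sketch below is only an illustration of that fallback pattern under assumed semantics (an iterable of candidates plus an error message, returning the first candidate that is not None and raising if all are unset); the diff shows just the call shape, not the helper's actual implementation.

```python
from typing import Iterable, Optional, TypeVar

T = TypeVar("T")


def first_available(candidates: Iterable[Optional[T]], msg: str) -> T:
    """Hypothetical stand-in for fabricatio.utils.first_available (assumed behavior)."""
    for candidate in candidates:
        if candidate is not None:
            # Unlike `a or b or c`, a falsy-but-set value such as False is kept.
            return candidate
    raise ValueError(msg)


# Mirrors the call shape in the diff:
# first_available((kwargs.get("stream"), self.llm_stream, CONFIG.llm.stream), "stream is not set at any place")
stream = first_available((False, True, None), "stream is not set at any place")
assert stream is False
```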