unique_toolkit 0.7.19__py3-none-any.whl → 0.7.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unique_toolkit/chat/functions.py +4 -3
- unique_toolkit/chat/service.py +5 -4
- unique_toolkit/language_model/__init__.py +3 -0
- unique_toolkit/language_model/functions.py +8 -6
- unique_toolkit/language_model/schemas.py +39 -1
- unique_toolkit/language_model/service.py +4 -3
- unique_toolkit/protocols/support.py +2 -1
- {unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/METADATA +6 -2
- {unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/RECORD +11 -11
- {unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/LICENSE +0 -0
- {unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/WHEEL +0 -0
unique_toolkit/chat/functions.py
CHANGED
@@ -27,6 +27,7 @@ from unique_toolkit.language_model.schemas import (
     LanguageModelMessages,
     LanguageModelStreamResponse,
     LanguageModelTool,
+    LanguageModelToolDescription,
 )
 
 logger = logging.getLogger(__name__)
@@ -685,7 +686,7 @@ def stream_complete_to_chat(
     debug_info: dict = {},
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-    tools: list[LanguageModelTool] | None = None,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
     start_text: str | None = None,
     other_options: dict | None = None,
 ) -> LanguageModelStreamResponse:
@@ -705,7 +706,7 @@ def stream_complete_to_chat(
         debug_info (dict): Debug information.
         temperature (float): Temperature setting.
         timeout (int): Timeout in milliseconds.
-        tools (Optional[list[LanguageModelTool]]): Optional tools.
+        tools (Optional[list[LanguageModelTool | LanguageModelToolDescription ]]): Optional tools.
         start_text (Optional[str]): Starting text.
         other_options (Optional[dict]): Additional options.
 
@@ -759,7 +760,7 @@ async def stream_complete_to_chat_async(
     debug_info: dict = {},
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-    tools: list[LanguageModelTool] | None = None,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
     start_text: str | None = None,
     other_options: dict | None = None,
 ) -> LanguageModelStreamResponse:
unique_toolkit/chat/service.py
CHANGED
@@ -44,6 +44,7 @@ from unique_toolkit.language_model.schemas import (
     LanguageModelResponse,
     LanguageModelStreamResponse,
     LanguageModelTool,
+    LanguageModelToolDescription,
 )
 
 from .functions import (
@@ -1091,7 +1092,7 @@ class ChatService:
         debug_info: dict = {},
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         start_text: Optional[str] = None,
         other_options: Optional[dict] = None,
     ) -> LanguageModelStreamResponse:
@@ -1142,7 +1143,7 @@ class ChatService:
         debug_info: dict = {},
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         start_text: Optional[str] = None,
         other_options: Optional[dict] = None,
     ) -> LanguageModelResponse:
@@ -1168,7 +1169,7 @@ class ChatService:
         debug_info: dict = {},
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         start_text: Optional[str] = None,
         other_options: Optional[dict] = None,
     ) -> LanguageModelStreamResponse:
@@ -1220,7 +1221,7 @@ class ChatService:
         debug_info: dict = {},
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         start_text: Optional[str] = None,
         other_options: Optional[dict] = None,
     ) -> LanguageModelResponse:
unique_toolkit/language_model/__init__.py
CHANGED
@@ -43,6 +43,9 @@ from .schemas import (
 from .schemas import (
     LanguageModelTool as LanguageModelTool,
 )
+from .schemas import (
+    LanguageModelToolDescription as LanguageModelToolDescription,
+)
 from .schemas import (
     LanguageModelToolMessage as LanguageModelToolMessage,
 )
unique_toolkit/language_model/functions.py
CHANGED
@@ -16,6 +16,7 @@ from .schemas import (
     LanguageModelMessages,
     LanguageModelResponse,
     LanguageModelTool,
+    LanguageModelToolDescription,
 )
 
 logger = logging.getLogger(f"toolkit.{DOMAIN_NAME}.{__name__}")
@@ -27,7 +28,7 @@ def complete(
     model_name: LanguageModelName | str,
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-    tools: list[LanguageModelTool] | None = None,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
     other_options: dict | None = None,
     structured_output_model: type[BaseModel] | None = None,
     structured_output_enforce_schema: bool = False,
@@ -40,7 +41,7 @@ def complete(
         model_name (LanguageModelName | str): The model name to use for the completion.
         temperature (float): The temperature setting for the completion. Defaults to 0.
         timeout (int): The timeout value in milliseconds. Defaults to 240_000.
-        tools (Optional[list[LanguageModelTool]]): Optional list of tools to include.
+        tools (Optional[list[LanguageModelTool | LanguageModelToolDescription ]]): Optional list of tools to include.
         other_options (Optional[dict]): Additional options to use. Defaults to None.
 
     Returns:
@@ -80,7 +81,7 @@ async def complete_async(
     model_name: LanguageModelName | str,
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-    tools: list[LanguageModelTool] | None = None,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
     other_options: dict | None = None,
     structured_output_model: type[BaseModel] | None = None,
     structured_output_enforce_schema: bool = False,
@@ -97,7 +98,7 @@ async def complete_async(
         model_name (LanguageModelName | str): The model name to use for the completion.
         temperature (float): The temperature setting for the completion. Defaults to 0.
         timeout (int): The timeout value in milliseconds for the request. Defaults to 240_000.
-        tools (Optional[list[LanguageModelTool]]): Optional list of tools to include in the request.
+        tools (Optional[list[LanguageModelTool | LanguageModelToolDescription ]]): Optional list of tools to include in the request.
         other_options (Optional[dict]): The other options to use. Defaults to None.
 
     Returns:
@@ -137,7 +138,7 @@ async def complete_async(
 
 def _add_tools_to_options(
     options: dict,
-    tools: list[LanguageModelTool] | None,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None,
 ) -> dict:
     if tools:
         options["tools"] = [
@@ -189,7 +190,7 @@ def _prepare_completion_params_util(
     messages: LanguageModelMessages,
     model_name: LanguageModelName | str,
     temperature: float,
-    tools: list[LanguageModelTool] | None = None,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
     other_options: dict | None = None,
     content_chunks: list[ContentChunk] | None = None,
     structured_output_model: type[BaseModel] | None = None,
@@ -206,6 +207,7 @@ def _prepare_completion_params_util(
 
     """
     options = _add_tools_to_options({}, tools)
+
     if structured_output_model:
         options = _add_response_format_to_options(
             options,
unique_toolkit/language_model/schemas.py
CHANGED
@@ -11,6 +11,7 @@ from pydantic import (
     Field,
     PrivateAttr,
     RootModel,
+    field_serializer,
     field_validator,
     model_serializer,
     model_validator,
@@ -344,6 +345,11 @@ class LanguageModelTokenLimits(BaseModel):
 
 
 # This is more restrictive than what openai allows
+
+
+@deprecated(
+    "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`"
+)
 class LanguageModelToolParameterProperty(BaseModel):
     type: str
     description: str
@@ -353,6 +359,9 @@ class LanguageModelToolParameterProperty(BaseModel):
 
 # Looks most like
 # from openai.types.shared.function_parameters import FunctionParameters
+@deprecated(
+    "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`"
+)
 class LanguageModelToolParameters(BaseModel):
     type: str = "object"
     properties: dict[str, LanguageModelToolParameterProperty]
@@ -362,6 +371,9 @@ class LanguageModelToolParameters(BaseModel):
 # Looks most like
 # from openai.types.shared_params.function_definition import FunctionDefinition
 # but returns parameter is not known
+@deprecated(
+    "Deprecated as `LanguageModelTool` use `LanguageModelToolDescription` instead"
+)
 class LanguageModelTool(BaseModel):
     name: str = Field(
         ...,
@@ -370,8 +382,34 @@ class LanguageModelTool(BaseModel):
     )
     description: str
     parameters: (
-        LanguageModelToolParameters | dict
+        LanguageModelToolParameters | dict[str, Any]
     )  # dict represents json schema dumped from pydantic
     returns: LanguageModelToolParameterProperty | LanguageModelToolParameters | None = (
        None
     )
+
+
+class LanguageModelToolDescription(BaseModel):
+    name: str = Field(
+        ...,
+        pattern=r"^[a-zA-Z1-9_-]+$",
+        description="Name must adhere to the pattern ^[a-zA-Z1-9_-]+$",
+    )
+    description: str = Field(
+        ...,
+        description="Description of what the tool is doing the tool",
+    )
+    parameters: type[BaseModel] = Field(
+        ...,
+        description="Pydantic model for the tool parameters",
+    )
+
+    # TODO: This should be default `True` but if this is the case the parameter_model needs to include additional properties
+    strict: bool = Field(
+        default=False,
+        description="Setting strict to true will ensure function calls reliably adhere to the function schema, instead of being best effort. If set to True the `parameter_model` set `model_config = {'extra':'forbid'}` must be set for on all BaseModels.",
+    )
+
+    @field_serializer("parameters")
+    def serialize_parameters(self, parameters: type[BaseModel]):
+        return parameters.model_json_schema()
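The heart of this release is the new `LanguageModelToolDescription` above: instead of a hand-written JSON-schema structure, its `parameters` field holds a Pydantic model class, and the `field_serializer` dumps that class to a JSON schema when the tool is serialized. A minimal sketch of how a caller might use it — `WeatherParams` and the `get_weather` tool are hypothetical, only the imported classes come from this diff:

```python
# Sketch only (not from the package): defining a tool with the new
# LanguageModelToolDescription. `WeatherParams` and `get_weather` are
# hypothetical examples.
from pydantic import BaseModel, Field

from unique_toolkit.language_model import LanguageModelToolDescription


class WeatherParams(BaseModel):
    # Needed when strict=True, per the `strict` field description:
    # every parameter model must forbid extra properties.
    model_config = {"extra": "forbid"}

    city: str = Field(description="City to look up")
    unit: str = Field(default="celsius", description="Temperature unit")


weather_tool = LanguageModelToolDescription(
    name="get_weather",
    description="Look up the current weather for a city",
    parameters=WeatherParams,  # the model class itself, not an instance
    strict=True,
)

# The @field_serializer("parameters") hook dumps the class to its JSON schema,
# so a serialized tool carries the schema rather than a Python type.
print(weather_tool.model_dump()["parameters"])
```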
unique_toolkit/language_model/service.py
CHANGED
@@ -20,6 +20,7 @@ from unique_toolkit.language_model.schemas import (
     LanguageModelMessages,
     LanguageModelResponse,
     LanguageModelTool,
+    LanguageModelToolDescription,
 )
 
 logger = logging.getLogger(f"toolkit.{DOMAIN_NAME}.{__name__}")
@@ -180,7 +181,7 @@ class LanguageModelService:
         model_name: LanguageModelName | str,
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         structured_output_model: Optional[Type[BaseModel]] = None,
         structured_output_enforce_schema: bool = False,
         other_options: Optional[dict] = None,
@@ -208,7 +209,7 @@ class LanguageModelService:
         model_name: LanguageModelName | str,
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         structured_output_model: Optional[Type[BaseModel]] = None,
         structured_output_enforce_schema: bool = False,
         other_options: Optional[dict] = None,
@@ -239,7 +240,7 @@ class LanguageModelService:
         model_name: LanguageModelName | str,
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: Optional[list[LanguageModelTool]] = None,
+        tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
         structured_output_model: Optional[Type[BaseModel]] = None,
         structured_output_enforce_schema: bool = False,
         other_options: Optional[dict] = None,
unique_toolkit/protocols/support.py
CHANGED
@@ -5,6 +5,7 @@ from unique_toolkit.language_model import (
     LanguageModelName,
     LanguageModelResponse,
     LanguageModelTool,
+    LanguageModelToolDescription,
 )
 from unique_toolkit.language_model.constants import (
     DEFAULT_COMPLETE_TEMPERATURE,
@@ -23,6 +24,6 @@ class SupportsComplete(Protocol):
         model_name: LanguageModelName | str,
         temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
         timeout: int = DEFAULT_COMPLETE_TIMEOUT,
-        tools: list[LanguageModelTool] | None = None,
+        tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
         **kwargs,
     ) -> LanguageModelResponse: ...
{unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: unique_toolkit
-Version: 0.7.19
+Version: 0.7.20
 Summary:
 License: Proprietary
 Author: Martin Fadler
@@ -17,7 +17,7 @@ Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
 Requires-Dist: regex (>=2024.5.15,<2025.0.0)
 Requires-Dist: tiktoken (>=0.7.0,<0.8.0)
 Requires-Dist: typing-extensions (>=4.9.0,<5.0.0)
-Requires-Dist: unique-sdk (>=0.9.
+Requires-Dist: unique-sdk (>=0.9.31,<0.10.0)
 Description-Content-Type: text/markdown
 
 # Unique Toolkit
@@ -111,6 +111,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+
+## [0.7.20] - 2025-05-21
+- Deprecate `LanguageModelTool` and associated models in favor of `LanguageModelToolDescription`
+
 ## [0.7.19] - 2025-05-20
 - Extend the `MessageBuilder` to allow for appending any `LanguageModelMessage`
 
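The changelog entry sums up the release: `LanguageModelTool` and its helper models are deprecated in favor of `LanguageModelToolDescription`, while the widened `tools: list[LanguageModelTool | LanguageModelToolDescription]` signatures keep the old form working. A rough migration sketch, assuming a hypothetical `add` tool; the dict arm of the legacy `parameters` union is used to keep it self-contained:

```python
# Migration sketch (illustrative only): the same hypothetical `add` tool
# written with the deprecated LanguageModelTool and with its replacement.
from pydantic import BaseModel

from unique_toolkit.language_model import (
    LanguageModelTool,
    LanguageModelToolDescription,
)

# Deprecated style: a hand-written JSON schema (the dict arm of the
# `parameters` union); the class is now marked @deprecated.
legacy_tool = LanguageModelTool(
    name="add",
    description="Add two integers",
    parameters={
        "type": "object",
        "properties": {
            "a": {"type": "integer", "description": "First addend"},
            "b": {"type": "integer", "description": "Second addend"},
        },
        "required": ["a", "b"],
    },
)


# New style: the schema is derived from a Pydantic parameter model.
class AddParams(BaseModel):
    a: int
    b: int


new_tool = LanguageModelToolDescription(
    name="add",
    description="Add two integers",
    parameters=AddParams,
)

# The widened signatures in this release accept either object in the `tools`
# list, e.g. tools=[legacy_tool] or tools=[new_tool].
```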
{unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/RECORD
CHANGED
@@ -13,9 +13,9 @@ unique_toolkit/app/schemas.py,sha256=fNPRQPrpJjYrtkkXPR7sNFjP0AYPZtKe3H1YZkXd2QQ
 unique_toolkit/app/verification.py,sha256=GxFFwcJMy25fCA_Xe89wKW7bgqOu8PAs5y8QpHF0GSc,3861
 unique_toolkit/chat/__init__.py,sha256=LRs2G-JTVuci4lbtHTkVUiNcZcSR6uqqfnAyo7af6nY,619
 unique_toolkit/chat/constants.py,sha256=05kq6zjqUVB2d6_P7s-90nbljpB3ryxwCI-CAz0r2O4,83
-unique_toolkit/chat/functions.py,sha256=
+unique_toolkit/chat/functions.py,sha256=TP55fSVXWTO3OoGUuYBuK9cBHUw96wlQGbVfhhMalCI,27332
 unique_toolkit/chat/schemas.py,sha256=MNcGAXjK1K8zOODeMFz3FHVQL5sIBQXRwkr_2hFkG8k,2672
-unique_toolkit/chat/service.py,sha256=
+unique_toolkit/chat/service.py,sha256=K7XtB3IdKznNu1r4dy2dXiwZYQg_vKgRUD52RoKewQU,40799
 unique_toolkit/chat/state.py,sha256=Cjgwv_2vhDFbV69xxsn7SefhaoIAEqLx3ferdVFCnOg,1445
 unique_toolkit/chat/utils.py,sha256=ihm-wQykBWhB4liR3LnwPVPt_qGW6ETq21Mw4HY0THE,854
 unique_toolkit/content/__init__.py,sha256=EdJg_A_7loEtCQf4cah3QARQreJx6pdz89Rm96YbMVg,940
@@ -44,22 +44,22 @@ unique_toolkit/evaluators/hallucination/service.py,sha256=k8qro5Lw4Ak58m4HYp3G4H
 unique_toolkit/evaluators/hallucination/utils.py,sha256=gO2AOzDQwVTev2_5vDKgJ9A6A9e0himJyAta_wglVG8,8326
 unique_toolkit/evaluators/output_parser.py,sha256=eI72qkzK1dZyUvnfP2SOAQCGBj_-PwX5wy_aLPMsJMY,883
 unique_toolkit/evaluators/schemas.py,sha256=Jaue6Uhx75X1CyHKWj8sT3RE1JZXTqoLtfLt2xQNCX8,2507
-unique_toolkit/language_model/__init__.py,sha256=
+unique_toolkit/language_model/__init__.py,sha256=lRQyLlbwHbNFf4-0foBU13UGb09lwEeodbVsfsSgaCk,1971
 unique_toolkit/language_model/builder.py,sha256=69WCcmkm2rMP2-YEH_EjHiEp6OzwjwCs8VbhjVJaCe0,3168
 unique_toolkit/language_model/constants.py,sha256=B-topqW0r83dkC_25DeQfnPk3n53qzIHUCBS7YJ0-1U,119
-unique_toolkit/language_model/functions.py,sha256=
+unique_toolkit/language_model/functions.py,sha256=KbCClXmRnrs-Ug8Wi9ehCLTqT2aIr3PQFc1rBuV3Om0,8172
 unique_toolkit/language_model/infos.py,sha256=qPf4Xlanet8jf0apZ6-qxS_6zmDd6p9D40it2TqmF3w,25910
 unique_toolkit/language_model/prompt.py,sha256=JSawaLjQg3VR-E2fK8engFyJnNdk21zaO8pPIodzN4Q,3991
-unique_toolkit/language_model/schemas.py,sha256=
-unique_toolkit/language_model/service.py,sha256=
+unique_toolkit/language_model/schemas.py,sha256=DJD2aoMfs2Irnc4rzOrVuV4Fbt84LQAiDGG5rse1dgk,12770
+unique_toolkit/language_model/service.py,sha256=9LS3ouRNtzqZaKrMFagLZS9gBvNC5e46Ut86YWHBBHY,8470
 unique_toolkit/language_model/utils.py,sha256=bPQ4l6_YO71w-zaIPanUUmtbXC1_hCvLK0tAFc3VCRc,1902
-unique_toolkit/protocols/support.py,sha256=
+unique_toolkit/protocols/support.py,sha256=SD17M8jgjtzCh0bgDXgKrX96n6DizF1PT2SZIhyt4n8,888
 unique_toolkit/short_term_memory/__init__.py,sha256=2mI3AUrffgH7Yt-xS57EGqnHf7jnn6xquoKEhJqk3Wg,185
 unique_toolkit/short_term_memory/constants.py,sha256=698CL6-wjup2MvU19RxSmQk3gX7aqW_OOpZB7sbz_Xg,34
 unique_toolkit/short_term_memory/functions.py,sha256=3WiK-xatY5nh4Dr5zlDUye1k3E6kr41RiscwtTplw5k,4484
 unique_toolkit/short_term_memory/schemas.py,sha256=OhfcXyF6ACdwIXW45sKzjtZX_gkcJs8FEZXcgQTNenw,1406
 unique_toolkit/short_term_memory/service.py,sha256=vEKFxP1SScPrFniso492fVthWR1sosdFibhiNF3zRvI,8081
-unique_toolkit-0.7.
-unique_toolkit-0.7.
-unique_toolkit-0.7.
-unique_toolkit-0.7.
+unique_toolkit-0.7.20.dist-info/LICENSE,sha256=GlN8wHNdh53xwOPg44URnwag6TEolCjoq3YD_KrWgss,193
+unique_toolkit-0.7.20.dist-info/METADATA,sha256=xu8iiH5H160McoUAkZB14EkYWfNqZ7uDhwzu_QmrCu8,22908
+unique_toolkit-0.7.20.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+unique_toolkit-0.7.20.dist-info/RECORD,,
{unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/LICENSE
File without changes
{unique_toolkit-0.7.19.dist-info → unique_toolkit-0.7.20.dist-info}/WHEEL
File without changes