unique_toolkit 0.7.17__py3-none-any.whl → 0.7.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -27,6 +27,7 @@ from unique_toolkit.language_model.schemas import (
27
27
  LanguageModelMessages,
28
28
  LanguageModelStreamResponse,
29
29
  LanguageModelTool,
30
+ LanguageModelToolDescription,
30
31
  )
31
32
 
32
33
  logger = logging.getLogger(__name__)
@@ -685,7 +686,7 @@ def stream_complete_to_chat(
685
686
  debug_info: dict = {},
686
687
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
687
688
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
688
- tools: list[LanguageModelTool] | None = None,
689
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
689
690
  start_text: str | None = None,
690
691
  other_options: dict | None = None,
691
692
  ) -> LanguageModelStreamResponse:
@@ -705,7 +706,7 @@ def stream_complete_to_chat(
705
706
  debug_info (dict): Debug information.
706
707
  temperature (float): Temperature setting.
707
708
  timeout (int): Timeout in milliseconds.
708
- tools (Optional[list[LanguageModelTool]]): Optional tools.
709
+ tools (Optional[list[LanguageModelTool | LanguageModelToolDescription]]): Optional tools.
709
710
  start_text (Optional[str]): Starting text.
710
711
  other_options (Optional[dict]): Additional options.
711
712
 
@@ -759,7 +760,7 @@ async def stream_complete_to_chat_async(
759
760
  debug_info: dict = {},
760
761
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
761
762
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
762
- tools: list[LanguageModelTool] | None = None,
763
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
763
764
  start_text: str | None = None,
764
765
  other_options: dict | None = None,
765
766
  ) -> LanguageModelStreamResponse:
@@ -44,6 +44,7 @@ from unique_toolkit.language_model.schemas import (
44
44
  LanguageModelResponse,
45
45
  LanguageModelStreamResponse,
46
46
  LanguageModelTool,
47
+ LanguageModelToolDescription,
47
48
  )
48
49
 
49
50
  from .functions import (
@@ -1091,7 +1092,7 @@ class ChatService:
1091
1092
  debug_info: dict = {},
1092
1093
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
1093
1094
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
1094
- tools: Optional[list[LanguageModelTool]] = None,
1095
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
1095
1096
  start_text: Optional[str] = None,
1096
1097
  other_options: Optional[dict] = None,
1097
1098
  ) -> LanguageModelStreamResponse:
@@ -1142,7 +1143,7 @@ class ChatService:
1142
1143
  debug_info: dict = {},
1143
1144
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
1144
1145
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
1145
- tools: Optional[list[LanguageModelTool]] = None,
1146
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
1146
1147
  start_text: Optional[str] = None,
1147
1148
  other_options: Optional[dict] = None,
1148
1149
  ) -> LanguageModelResponse:
@@ -1168,7 +1169,7 @@ class ChatService:
1168
1169
  debug_info: dict = {},
1169
1170
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
1170
1171
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
1171
- tools: Optional[list[LanguageModelTool]] = None,
1172
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
1172
1173
  start_text: Optional[str] = None,
1173
1174
  other_options: Optional[dict] = None,
1174
1175
  ) -> LanguageModelStreamResponse:
@@ -1220,7 +1221,7 @@ class ChatService:
1220
1221
  debug_info: dict = {},
1221
1222
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
1222
1223
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
1223
- tools: Optional[list[LanguageModelTool]] = None,
1224
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
1224
1225
  start_text: Optional[str] = None,
1225
1226
  other_options: Optional[dict] = None,
1226
1227
  ) -> LanguageModelResponse:
@@ -214,6 +214,7 @@ def upload_content_from_bytes(
214
214
  chat_id: str | None = None,
215
215
  skip_ingestion: bool = False,
216
216
  ingestion_config: unique_sdk.Content.IngestionConfig | None = None,
217
+ metadata: dict[str, any] | None = None,
217
218
  ):
218
219
  """
219
220
  Uploads content to the knowledge base.
@@ -227,6 +228,8 @@ def upload_content_from_bytes(
227
228
  scope_id (str | None): The scope ID. Defaults to None.
228
229
  chat_id (str | None): The chat ID. Defaults to None.
229
230
  skip_ingestion (bool): Whether to skip ingestion. Defaults to False.
231
+ ingestion_config (unique_sdk.Content.IngestionConfig | None): The ingestion configuration. Defaults to None.
232
+ metadata (dict[str, any] | None): The metadata for the content. Defaults to None.
230
233
 
231
234
  Returns:
232
235
  Content: The uploaded content.
@@ -243,6 +246,7 @@ def upload_content_from_bytes(
243
246
  chat_id=chat_id,
244
247
  skip_ingestion=skip_ingestion,
245
248
  ingestion_config=ingestion_config,
249
+ metadata=metadata,
246
250
  )
247
251
  except Exception as e:
248
252
  logger.error(f"Error while uploading content: {e}")
@@ -259,6 +263,7 @@ def upload_content(
259
263
  chat_id: str | None = None,
260
264
  skip_ingestion: bool = False,
261
265
  ingestion_config: unique_sdk.Content.IngestionConfig | None = None,
266
+ metadata: dict[str, any] | None = None,
262
267
  ):
263
268
  """
264
269
  Uploads content to the knowledge base.
@@ -272,6 +277,8 @@ def upload_content(
272
277
  scope_id (str | None): The scope ID. Defaults to None.
273
278
  chat_id (str | None): The chat ID. Defaults to None.
274
279
  skip_ingestion (bool): Whether to skip ingestion. Defaults to False.
280
+ ingestion_config (unique_sdk.Content.IngestionConfig | None): The ingestion configuration. Defaults to None.
281
+ metadata (dict[str, any] | None): The metadata for the content. Defaults to None.
275
282
 
276
283
  Returns:
277
284
  Content: The uploaded content.
@@ -288,6 +295,7 @@ def upload_content(
288
295
  chat_id=chat_id,
289
296
  skip_ingestion=skip_ingestion,
290
297
  ingestion_config=ingestion_config,
298
+ metadata=metadata,
291
299
  )
292
300
  except Exception as e:
293
301
  logger.error(f"Error while uploading content: {e}")
@@ -304,6 +312,7 @@ def _trigger_upload_content(
304
312
  chat_id: str | None = None,
305
313
  skip_ingestion: bool = False,
306
314
  ingestion_config: unique_sdk.Content.IngestionConfig | None = None,
315
+ metadata: dict[str, any] | None = None,
307
316
  ):
308
317
  """
309
318
  Uploads content to the knowledge base.
@@ -317,6 +326,8 @@ def _trigger_upload_content(
317
326
  scope_id (str | None): The scope ID. Defaults to None.
318
327
  chat_id (str | None): The chat ID. Defaults to None.
319
328
  skip_ingestion (bool): Whether to skip ingestion. Defaults to False.
329
+ ingestion_config (unique_sdk.Content.IngestionConfig | None): The ingestion configuration. Defaults to None.
330
+ metadata (dict[str, any] | None): The metadata for the content. Defaults to None.
320
331
 
321
332
  Returns:
322
333
  Content: The uploaded content.
@@ -385,6 +396,7 @@ def _trigger_upload_content(
385
396
  "mimeType": mime_type,
386
397
  "byteSize": byte_size,
387
398
  "ingestionConfig": ingestion_config,
399
+ "metadata": metadata,
388
400
  }
389
401
 
390
402
  if chat_id:
@@ -54,6 +54,8 @@ class Content(BaseModel):
54
54
  read_url: str | None = None
55
55
  created_at: datetime | None = None
56
56
  updated_at: datetime | None = None
57
+ metadata: dict[str, any] | None = None
58
+ ingestion_config: dict | None = None
57
59
 
58
60
 
59
61
  class ContentReference(BaseModel):
@@ -367,6 +367,7 @@ class ContentService:
367
367
  chat_id: str | None = None,
368
368
  skip_ingestion: bool = False,
369
369
  ingestion_config: unique_sdk.Content.IngestionConfig | None = None,
370
+ metadata: dict | None = None,
370
371
  ) -> Content:
371
372
  """
372
373
  Uploads content to the knowledge base.
@@ -378,6 +379,8 @@ class ContentService:
378
379
  scope_id (str | None): The scope ID. Defaults to None.
379
380
  chat_id (str | None): The chat ID. Defaults to None.
380
381
  skip_ingestion (bool): Whether to skip ingestion. Defaults to False.
382
+ ingestion_config (unique_sdk.Content.IngestionConfig | None): The ingestion configuration. Defaults to None.
383
+ metadata (dict | None): The metadata to associate with the content. Defaults to None.
381
384
 
382
385
  Returns:
383
386
  Content: The uploaded content.
@@ -393,6 +396,7 @@ class ContentService:
393
396
  chat_id=chat_id,
394
397
  skip_ingestion=skip_ingestion,
395
398
  ingestion_config=ingestion_config,
399
+ metadata=metadata,
396
400
  )
397
401
 
398
402
  def upload_content(
@@ -404,6 +408,7 @@ class ContentService:
404
408
  chat_id: str | None = None,
405
409
  skip_ingestion: bool = False,
406
410
  ingestion_config: unique_sdk.Content.IngestionConfig | None = None,
411
+ metadata: dict[str, any] | None = None,
407
412
  ):
408
413
  """
409
414
  Uploads content to the knowledge base.
@@ -415,6 +420,8 @@ class ContentService:
415
420
  scope_id (str | None): The scope ID. Defaults to None.
416
421
  chat_id (str | None): The chat ID. Defaults to None.
417
422
  skip_ingestion (bool): Whether to skip ingestion. Defaults to False.
423
+ ingestion_config (unique_sdk.Content.IngestionConfig | None): The ingestion configuration. Defaults to None.
424
+ metadata (dict[str, any] | None): The metadata to associate with the content. Defaults to None.
418
425
 
419
426
  Returns:
420
427
  Content: The uploaded content.
@@ -430,6 +437,7 @@ class ContentService:
430
437
  chat_id=chat_id,
431
438
  skip_ingestion=skip_ingestion,
432
439
  ingestion_config=ingestion_config,
440
+ metadata=metadata,
433
441
  )
434
442
 
435
443
  def request_content_by_id(
@@ -43,6 +43,9 @@ from .schemas import (
43
43
  from .schemas import (
44
44
  LanguageModelTool as LanguageModelTool,
45
45
  )
46
+ from .schemas import (
47
+ LanguageModelToolDescription as LanguageModelToolDescription,
48
+ )
46
49
  from .schemas import (
47
50
  LanguageModelToolMessage as LanguageModelToolMessage,
48
51
  )
@@ -17,6 +17,10 @@ class MessagesBuilder:
17
17
  def __init__(self):
18
18
  self.messages: list[LanguageModelMessage] = []
19
19
 
20
+ def append(self, message: LanguageModelMessage) -> Self:
21
+ self.messages.append(message)
22
+ return self
23
+
20
24
  def message_append(self, role: LanguageModelMessageRole, content: str):
21
25
  message = LanguageModelMessage(role=role, content=content)
22
26
  self.messages.append(message)
@@ -16,6 +16,7 @@ from .schemas import (
16
16
  LanguageModelMessages,
17
17
  LanguageModelResponse,
18
18
  LanguageModelTool,
19
+ LanguageModelToolDescription,
19
20
  )
20
21
 
21
22
  logger = logging.getLogger(f"toolkit.{DOMAIN_NAME}.{__name__}")
@@ -27,7 +28,7 @@ def complete(
27
28
  model_name: LanguageModelName | str,
28
29
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
29
30
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
30
- tools: list[LanguageModelTool] | None = None,
31
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
31
32
  other_options: dict | None = None,
32
33
  structured_output_model: type[BaseModel] | None = None,
33
34
  structured_output_enforce_schema: bool = False,
@@ -40,7 +41,7 @@ def complete(
40
41
  model_name (LanguageModelName | str): The model name to use for the completion.
41
42
  temperature (float): The temperature setting for the completion. Defaults to 0.
42
43
  timeout (int): The timeout value in milliseconds. Defaults to 240_000.
43
- tools (Optional[list[LanguageModelTool]]): Optional list of tools to include.
44
+ tools (Optional[list[LanguageModelTool | LanguageModelToolDescription]]): Optional list of tools to include.
44
45
  other_options (Optional[dict]): Additional options to use. Defaults to None.
45
46
 
46
47
  Returns:
@@ -80,7 +81,7 @@ async def complete_async(
80
81
  model_name: LanguageModelName | str,
81
82
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
82
83
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
83
- tools: list[LanguageModelTool] | None = None,
84
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
84
85
  other_options: dict | None = None,
85
86
  structured_output_model: type[BaseModel] | None = None,
86
87
  structured_output_enforce_schema: bool = False,
@@ -97,7 +98,7 @@ async def complete_async(
97
98
  model_name (LanguageModelName | str): The model name to use for the completion.
98
99
  temperature (float): The temperature setting for the completion. Defaults to 0.
99
100
  timeout (int): The timeout value in milliseconds for the request. Defaults to 240_000.
100
- tools (Optional[list[LanguageModelTool]]): Optional list of tools to include in the request.
101
+ tools (Optional[list[LanguageModelTool | LanguageModelToolDescription]]): Optional list of tools to include in the request.
101
102
  other_options (Optional[dict]): The other options to use. Defaults to None.
102
103
 
103
104
  Returns:
@@ -137,7 +138,7 @@ async def complete_async(
137
138
 
138
139
  def _add_tools_to_options(
139
140
  options: dict,
140
- tools: list[LanguageModelTool] | None,
141
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None,
141
142
  ) -> dict:
142
143
  if tools:
143
144
  options["tools"] = [
@@ -189,7 +190,7 @@ def _prepare_completion_params_util(
189
190
  messages: LanguageModelMessages,
190
191
  model_name: LanguageModelName | str,
191
192
  temperature: float,
192
- tools: list[LanguageModelTool] | None = None,
193
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
193
194
  other_options: dict | None = None,
194
195
  content_chunks: list[ContentChunk] | None = None,
195
196
  structured_output_model: type[BaseModel] | None = None,
@@ -206,6 +207,7 @@ def _prepare_completion_params_util(
206
207
 
207
208
  """
208
209
  options = _add_tools_to_options({}, tools)
210
+
209
211
  if structured_output_model:
210
212
  options = _add_response_format_to_options(
211
213
  options,
@@ -11,6 +11,7 @@ from pydantic import (
11
11
  Field,
12
12
  PrivateAttr,
13
13
  RootModel,
14
+ field_serializer,
14
15
  field_validator,
15
16
  model_serializer,
16
17
  model_validator,
@@ -344,6 +345,11 @@ class LanguageModelTokenLimits(BaseModel):
344
345
 
345
346
 
346
347
  # This is more restrictive than what openai allows
348
+
349
+
350
+ @deprecated(
351
+ "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`"
352
+ )
347
353
  class LanguageModelToolParameterProperty(BaseModel):
348
354
  type: str
349
355
  description: str
@@ -353,6 +359,9 @@ class LanguageModelToolParameterProperty(BaseModel):
353
359
 
354
360
  # Looks most like
355
361
  # from openai.types.shared.function_parameters import FunctionParameters
362
+ @deprecated(
363
+ "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`"
364
+ )
356
365
  class LanguageModelToolParameters(BaseModel):
357
366
  type: str = "object"
358
367
  properties: dict[str, LanguageModelToolParameterProperty]
@@ -362,6 +371,9 @@ class LanguageModelToolParameters(BaseModel):
362
371
  # Looks most like
363
372
  # from openai.types.shared_params.function_definition import FunctionDefinition
364
373
  # but returns parameter is not known
374
+ @deprecated(
375
+ "Deprecated as `LanguageModelTool` use `LanguageModelToolDescription` instead"
376
+ )
365
377
  class LanguageModelTool(BaseModel):
366
378
  name: str = Field(
367
379
  ...,
@@ -370,8 +382,34 @@ class LanguageModelTool(BaseModel):
370
382
  )
371
383
  description: str
372
384
  parameters: (
373
- LanguageModelToolParameters | dict
385
+ LanguageModelToolParameters | dict[str, Any]
374
386
  ) # dict represents json schema dumped from pydantic
375
387
  returns: LanguageModelToolParameterProperty | LanguageModelToolParameters | None = (
376
388
  None
377
389
  )
390
+
391
+
392
+ class LanguageModelToolDescription(BaseModel):
393
+ name: str = Field(
394
+ ...,
395
+ pattern=r"^[a-zA-Z1-9_-]+$",
396
+ description="Name must adhere to the pattern ^[a-zA-Z1-9_-]+$",
397
+ )
398
+ description: str = Field(
399
+ ...,
400
+ description="Description of what the tool is doing the tool",
401
+ )
402
+ parameters: type[BaseModel] = Field(
403
+ ...,
404
+ description="Pydantic model for the tool parameters",
405
+ )
406
+
407
+ # TODO: This should be default `True` but if this is the case the parameter_model needs to include additional properties
408
+ strict: bool = Field(
409
+ default=False,
410
+ description="Setting strict to true will ensure function calls reliably adhere to the function schema, instead of being best effort. If set to True the `parameter_model` set `model_config = {'extra':'forbid'}` must be set for on all BaseModels.",
411
+ )
412
+
413
+ @field_serializer("parameters")
414
+ def serialize_parameters(self, parameters: type[BaseModel]):
415
+ return parameters.model_json_schema()
@@ -20,6 +20,7 @@ from unique_toolkit.language_model.schemas import (
20
20
  LanguageModelMessages,
21
21
  LanguageModelResponse,
22
22
  LanguageModelTool,
23
+ LanguageModelToolDescription,
23
24
  )
24
25
 
25
26
  logger = logging.getLogger(f"toolkit.{DOMAIN_NAME}.{__name__}")
@@ -180,7 +181,7 @@ class LanguageModelService:
180
181
  model_name: LanguageModelName | str,
181
182
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
182
183
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
183
- tools: Optional[list[LanguageModelTool]] = None,
184
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
184
185
  structured_output_model: Optional[Type[BaseModel]] = None,
185
186
  structured_output_enforce_schema: bool = False,
186
187
  other_options: Optional[dict] = None,
@@ -208,7 +209,7 @@ class LanguageModelService:
208
209
  model_name: LanguageModelName | str,
209
210
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
210
211
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
211
- tools: Optional[list[LanguageModelTool]] = None,
212
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
212
213
  structured_output_model: Optional[Type[BaseModel]] = None,
213
214
  structured_output_enforce_schema: bool = False,
214
215
  other_options: Optional[dict] = None,
@@ -239,7 +240,7 @@ class LanguageModelService:
239
240
  model_name: LanguageModelName | str,
240
241
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
241
242
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
242
- tools: Optional[list[LanguageModelTool]] = None,
243
+ tools: Optional[list[LanguageModelTool | LanguageModelToolDescription]] = None,
243
244
  structured_output_model: Optional[Type[BaseModel]] = None,
244
245
  structured_output_enforce_schema: bool = False,
245
246
  other_options: Optional[dict] = None,
@@ -5,6 +5,7 @@ from unique_toolkit.language_model import (
5
5
  LanguageModelName,
6
6
  LanguageModelResponse,
7
7
  LanguageModelTool,
8
+ LanguageModelToolDescription,
8
9
  )
9
10
  from unique_toolkit.language_model.constants import (
10
11
  DEFAULT_COMPLETE_TEMPERATURE,
@@ -23,6 +24,6 @@ class SupportsComplete(Protocol):
23
24
  model_name: LanguageModelName | str,
24
25
  temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
25
26
  timeout: int = DEFAULT_COMPLETE_TIMEOUT,
26
- tools: list[LanguageModelTool] | None = None,
27
+ tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
27
28
  **kwargs,
28
29
  ) -> LanguageModelResponse: ...
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: unique_toolkit
3
- Version: 0.7.17
3
+ Version: 0.7.20
4
4
  Summary:
5
5
  License: Proprietary
6
6
  Author: Martin Fadler
@@ -17,7 +17,7 @@ Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
17
17
  Requires-Dist: regex (>=2024.5.15,<2025.0.0)
18
18
  Requires-Dist: tiktoken (>=0.7.0,<0.8.0)
19
19
  Requires-Dist: typing-extensions (>=4.9.0,<5.0.0)
20
- Requires-Dist: unique-sdk (>=0.9.26,<0.10.0)
20
+ Requires-Dist: unique-sdk (>=0.9.31,<0.10.0)
21
21
  Description-Content-Type: text/markdown
22
22
 
23
23
  # Unique Toolkit
@@ -111,6 +111,16 @@ All notable changes to this project will be documented in this file.
111
111
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
112
112
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
113
113
 
114
+
115
+ ## [0.7.20] - 2025-05-21
116
+ - Deprecate `LanguageModelTool` and associated models in favor of `LanguageModelToolDescription`
117
+
118
+ ## [0.7.19] - 2025-05-20
119
+ - Extend the `MessageBuilder` to allow for appending any `LanguageModelMessage`
120
+
121
+ ## [0.7.18] - 2025-05-20
122
+ - Add the possibility to specify metadata when creating or updating a Content.
123
+
114
124
  ## [0.7.17] - 2025-05-16
115
125
  - Change inheritance hierarchy of events for easier deprecation
116
126
 
@@ -13,16 +13,16 @@ unique_toolkit/app/schemas.py,sha256=fNPRQPrpJjYrtkkXPR7sNFjP0AYPZtKe3H1YZkXd2QQ
13
13
  unique_toolkit/app/verification.py,sha256=GxFFwcJMy25fCA_Xe89wKW7bgqOu8PAs5y8QpHF0GSc,3861
14
14
  unique_toolkit/chat/__init__.py,sha256=LRs2G-JTVuci4lbtHTkVUiNcZcSR6uqqfnAyo7af6nY,619
15
15
  unique_toolkit/chat/constants.py,sha256=05kq6zjqUVB2d6_P7s-90nbljpB3ryxwCI-CAz0r2O4,83
16
- unique_toolkit/chat/functions.py,sha256=J9Cmgkhj9bBxZja3ggkSp48af_LPU4Dfi9Sbc_WhhNY,27204
16
+ unique_toolkit/chat/functions.py,sha256=TP55fSVXWTO3OoGUuYBuK9cBHUw96wlQGbVfhhMalCI,27332
17
17
  unique_toolkit/chat/schemas.py,sha256=MNcGAXjK1K8zOODeMFz3FHVQL5sIBQXRwkr_2hFkG8k,2672
18
- unique_toolkit/chat/service.py,sha256=C8L5Alc9BKmXau5kcbQWKBjg1OGc5fmtO0F9xooxSCw,40641
18
+ unique_toolkit/chat/service.py,sha256=K7XtB3IdKznNu1r4dy2dXiwZYQg_vKgRUD52RoKewQU,40799
19
19
  unique_toolkit/chat/state.py,sha256=Cjgwv_2vhDFbV69xxsn7SefhaoIAEqLx3ferdVFCnOg,1445
20
20
  unique_toolkit/chat/utils.py,sha256=ihm-wQykBWhB4liR3LnwPVPt_qGW6ETq21Mw4HY0THE,854
21
21
  unique_toolkit/content/__init__.py,sha256=EdJg_A_7loEtCQf4cah3QARQreJx6pdz89Rm96YbMVg,940
22
22
  unique_toolkit/content/constants.py,sha256=1iy4Y67xobl5VTnJB6SxSyuoBWbdLl9244xfVMUZi5o,60
23
- unique_toolkit/content/functions.py,sha256=imNINvUW_-ejPBT8yPKuL9THdDplfjeKvnK9_EuFlqk,17497
24
- unique_toolkit/content/schemas.py,sha256=zks_Pkki2VhxICJJgHZyc-LPmRuj5dLbw3pgcUT7SW8,2362
25
- unique_toolkit/content/service.py,sha256=JDqlCJc-z-VQOmEvCIA8VcWSNOSuVo3lFetJs257H7A,18842
23
+ unique_toolkit/content/functions.py,sha256=Chf2QcnnWvKvXMF4IUmU-_aUN6nTZIfsbM7ds77olcY,18344
24
+ unique_toolkit/content/schemas.py,sha256=28Cj0R9JzJ4s0qR2Sfunr7luwYjMF2I8TepVxt5ZE2o,2446
25
+ unique_toolkit/content/service.py,sha256=27awBOsYHdfSxwHM1UzCQLnHuo-M49ej3jpFwBLRflM,19438
26
26
  unique_toolkit/content/utils.py,sha256=GUVPrkZfMoAj4MRoBs5BD_7vSuLZTZx69hyWzYFrI50,7747
27
27
  unique_toolkit/embedding/__init__.py,sha256=uUyzjonPvuDCYsvXCIt7ErQXopLggpzX-MEQd3_e2kE,250
28
28
  unique_toolkit/embedding/constants.py,sha256=Lj8-Lcy1FvuC31PM9Exq7vaFuxQV4pEI1huUMFX-J2M,52
@@ -44,22 +44,22 @@ unique_toolkit/evaluators/hallucination/service.py,sha256=k8qro5Lw4Ak58m4HYp3G4H
44
44
  unique_toolkit/evaluators/hallucination/utils.py,sha256=gO2AOzDQwVTev2_5vDKgJ9A6A9e0himJyAta_wglVG8,8326
45
45
  unique_toolkit/evaluators/output_parser.py,sha256=eI72qkzK1dZyUvnfP2SOAQCGBj_-PwX5wy_aLPMsJMY,883
46
46
  unique_toolkit/evaluators/schemas.py,sha256=Jaue6Uhx75X1CyHKWj8sT3RE1JZXTqoLtfLt2xQNCX8,2507
47
- unique_toolkit/language_model/__init__.py,sha256=jWko_vQj48wjnpTtlkg8iNdef0SMI3FN2kGywXRTMzg,1880
48
- unique_toolkit/language_model/builder.py,sha256=aIAXWWUoB5G-HONJiAt3MdRGd4jdP8nA-HYX2D2WlSI,3048
47
+ unique_toolkit/language_model/__init__.py,sha256=lRQyLlbwHbNFf4-0foBU13UGb09lwEeodbVsfsSgaCk,1971
48
+ unique_toolkit/language_model/builder.py,sha256=69WCcmkm2rMP2-YEH_EjHiEp6OzwjwCs8VbhjVJaCe0,3168
49
49
  unique_toolkit/language_model/constants.py,sha256=B-topqW0r83dkC_25DeQfnPk3n53qzIHUCBS7YJ0-1U,119
50
- unique_toolkit/language_model/functions.py,sha256=0oSkG4xpbxeaVTJide6g-zunBrsBRuvp7UQlKVbjpSk,7949
50
+ unique_toolkit/language_model/functions.py,sha256=KbCClXmRnrs-Ug8Wi9ehCLTqT2aIr3PQFc1rBuV3Om0,8172
51
51
  unique_toolkit/language_model/infos.py,sha256=qPf4Xlanet8jf0apZ6-qxS_6zmDd6p9D40it2TqmF3w,25910
52
52
  unique_toolkit/language_model/prompt.py,sha256=JSawaLjQg3VR-E2fK8engFyJnNdk21zaO8pPIodzN4Q,3991
53
- unique_toolkit/language_model/schemas.py,sha256=Wc_OeML0AYPTfIC1BObwumsunq23h12qVzi4hVlaZPE,11389
54
- unique_toolkit/language_model/service.py,sha256=FUf-HTKNslrMAh8qFMco_ZpP-N0t_iAFWK3juldoUe8,8343
53
+ unique_toolkit/language_model/schemas.py,sha256=DJD2aoMfs2Irnc4rzOrVuV4Fbt84LQAiDGG5rse1dgk,12770
54
+ unique_toolkit/language_model/service.py,sha256=9LS3ouRNtzqZaKrMFagLZS9gBvNC5e46Ut86YWHBBHY,8470
55
55
  unique_toolkit/language_model/utils.py,sha256=bPQ4l6_YO71w-zaIPanUUmtbXC1_hCvLK0tAFc3VCRc,1902
56
- unique_toolkit/protocols/support.py,sha256=iSSoERUZGLbmY2DGBqGeFTCRtH3ClhzAUutqNxwYgKs,823
56
+ unique_toolkit/protocols/support.py,sha256=SD17M8jgjtzCh0bgDXgKrX96n6DizF1PT2SZIhyt4n8,888
57
57
  unique_toolkit/short_term_memory/__init__.py,sha256=2mI3AUrffgH7Yt-xS57EGqnHf7jnn6xquoKEhJqk3Wg,185
58
58
  unique_toolkit/short_term_memory/constants.py,sha256=698CL6-wjup2MvU19RxSmQk3gX7aqW_OOpZB7sbz_Xg,34
59
59
  unique_toolkit/short_term_memory/functions.py,sha256=3WiK-xatY5nh4Dr5zlDUye1k3E6kr41RiscwtTplw5k,4484
60
60
  unique_toolkit/short_term_memory/schemas.py,sha256=OhfcXyF6ACdwIXW45sKzjtZX_gkcJs8FEZXcgQTNenw,1406
61
61
  unique_toolkit/short_term_memory/service.py,sha256=vEKFxP1SScPrFniso492fVthWR1sosdFibhiNF3zRvI,8081
62
- unique_toolkit-0.7.17.dist-info/LICENSE,sha256=GlN8wHNdh53xwOPg44URnwag6TEolCjoq3YD_KrWgss,193
63
- unique_toolkit-0.7.17.dist-info/METADATA,sha256=1s2gtPKoW7K5BT6aAVwh96O-1K8-2d-uEXKXzaO2fLo,22573
64
- unique_toolkit-0.7.17.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
65
- unique_toolkit-0.7.17.dist-info/RECORD,,
62
+ unique_toolkit-0.7.20.dist-info/LICENSE,sha256=GlN8wHNdh53xwOPg44URnwag6TEolCjoq3YD_KrWgss,193
63
+ unique_toolkit-0.7.20.dist-info/METADATA,sha256=xu8iiH5H160McoUAkZB14EkYWfNqZ7uDhwzu_QmrCu8,22908
64
+ unique_toolkit-0.7.20.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
65
+ unique_toolkit-0.7.20.dist-info/RECORD,,