unique_toolkit 0.8.6__py3-none-any.whl → 0.8.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unique_toolkit/app/__init__.py +3 -3
- unique_toolkit/chat/functions.py +82 -74
- unique_toolkit/chat/service.py +145 -144
- unique_toolkit/framework_utilities/openai/message_builder.py +21 -12
- unique_toolkit/language_model/builder.py +23 -11
- unique_toolkit/language_model/functions.py +109 -10
- unique_toolkit/language_model/schemas.py +19 -7
- {unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/METADATA +8 -1
- {unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/RECORD +11 -11
- {unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/LICENSE +0 -0
- {unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/WHEEL +0 -0
unique_toolkit/framework_utilities/openai/message_builder.py

@@ -2,8 +2,10 @@ from collections.abc import Iterable
 from typing import Self

 from openai.types.chat.chat_completion_assistant_message_param import (
+    Audio,
     ChatCompletionAssistantMessageParam,
     ContentArrayOfContentPart,
+    FunctionCall,
 )
 from openai.types.chat.chat_completion_content_part_param import (
     ChatCompletionContentPartParam,
@@ -18,6 +20,9 @@ from openai.types.chat.chat_completion_function_message_param import (
     ChatCompletionFunctionMessageParam,
 )
 from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam
+from openai.types.chat.chat_completion_message_tool_call_param import (
+    ChatCompletionMessageToolCallParam,
+)
 from openai.types.chat.chat_completion_system_message_param import (
     ChatCompletionSystemMessageParam,
 )
@@ -44,21 +49,20 @@ class OpenAIMessageBuilder:
         builder._messages = messages.copy()
         return builder

-    def
+    def system_message_append(
         self,
         content: str | Iterable[ChatCompletionContentPartTextParam],
-        name: str = "
+        name: str = "system",
     ) -> Self:
         self._messages.append(
             ChatCompletionSystemMessageParam(
                 content=content,
                 role="system",
-                name=name,
             ),
         )
         return self

-    def
+    def user_message_append(
         self,
         content: str | Iterable[ChatCompletionContentPartParam],
         name: str = "user",
@@ -67,26 +71,32 @@ class OpenAIMessageBuilder:
             ChatCompletionUserMessageParam(
                 content=content,
                 role="user",
-                name=name,
             ),
         )
         return self

-    def
+    def assistant_message_append(
         self,
-        content: str | Iterable[ContentArrayOfContentPart],
+        content: str | Iterable[ContentArrayOfContentPart] | None = None,
         name: str = "assistant",
+        audio: Audio | None = None,
+        function_call: FunctionCall | None = None,
+        refusal: str | None = None,
+        tool_calls: Iterable[ChatCompletionMessageToolCallParam] | None = None,
     ) -> Self:
         self._messages.append(
             ChatCompletionAssistantMessageParam(
                 content=content,
                 role="assistant",
-
+                audio=audio,
+                function_call=function_call,
+                refusal=refusal,
+                tool_calls=tool_calls or [],
             ),
         )
         return self

-    def
+    def developper_message_append(
         self,
         content: str | Iterable[ChatCompletionContentPartTextParam],
         name: str = "developer",
@@ -95,12 +105,11 @@ class OpenAIMessageBuilder:
             ChatCompletionDeveloperMessageParam(
                 content=content,
                 role="developer",
-                name=name,
             ),
         )
         return self

-    def
+    def function_message_append(
         self,
         content: str | None,
         name: str = "function",
@@ -114,7 +123,7 @@ class OpenAIMessageBuilder:
         )
         return self

-    def
+    def tool_message_append(
         self,
         content: str | Iterable[ChatCompletionContentPartTextParam],
         tool_call_id: str,
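
The builder methods now follow the `*_message_append` naming, and `assistant_message_append` accepts the extra OpenAI fields. A minimal usage sketch, assuming a no-argument constructor and that the collected messages remain accessible on the builder (neither is shown in this diff):

from unique_toolkit.framework_utilities.openai.message_builder import OpenAIMessageBuilder

# Hypothetical usage; only the method names and parameters above come from the diff.
builder = (
    OpenAIMessageBuilder()
    .system_message_append(content="You are a terse assistant.")
    .user_message_append(content="What changed in 0.8.7?")
    .assistant_message_append(content="The chat service is now OpenAI compatible.")
)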
unique_toolkit/language_model/builder.py

@@ -1,4 +1,4 @@
-from
+from typing import Any, Self

 from unique_toolkit.language_model import (
     LanguageModelAssistantMessage,
@@ -39,23 +39,33 @@ class MessagesBuilder:
         return self  # Return self to allow method chaining

     def image_message_append(
-        self,
+        self,
+        content: str,
+        images: list[str],
+        role: LanguageModelMessageRole = LanguageModelMessageRole.USER,
     ) -> Self:
+        final_content: list[dict[str, Any]] = [{"type": "text", "text": content}]
+        final_content.extend(
+            [
+                {
+                    "type": "image_url",
+                    "imageUrl": {"url": image},
+                }
+                for image in images
+            ],
+        )
+
         message = LanguageModelMessage(
             role=role,
-            content=
-                {"type": "text", "text": content},
-                *[
-                    {"type": "image_url", "imageUrl": {"url": image}}
-                    for image in images
-                ],
-            ],
+            content=final_content,
         )
         self.messages.append(message)
         return self

     def assistant_message_append(
-        self,
+        self,
+        content: str,
+        tool_calls: list[LanguageModelFunction] | None = None,
     ) -> Self:
         """Appends an assistant message to the messages list."""
         message = LanguageModelAssistantMessage(content=content)
@@ -74,7 +84,9 @@ class MessagesBuilder:
     def tool_message_append(self, name: str, tool_call_id: str, content: str) -> Self:
         """Appends a tool message to the messages list."""
         message = LanguageModelToolMessage(
-            name=name,
+            name=name,
+            tool_call_id=tool_call_id,
+            content=content,
         )
         self.messages.append(message)
         return self
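
With these changes, `image_message_append` builds the text-plus-image content list up front and `tool_message_append` passes `tool_call_id` and `content` through. A minimal sketch, assuming `MessagesBuilder()` takes no arguments (not shown in this diff); the accumulated messages live on `.messages`, as in the code above:

from unique_toolkit.language_model.builder import MessagesBuilder

# Hypothetical values; the image URL, tool name, and call id are placeholders.
builder = (
    MessagesBuilder()
    .image_message_append(
        content="Describe this chart.",
        images=["https://example.com/chart.png"],  # each URL becomes an image_url part
    )
    .tool_message_append(name="lookup", tool_call_id="call_123", content='{"status": "ok"}')
)
messages = builder.messages  # the accumulated LanguageModelMessage objects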
unique_toolkit/language_model/functions.py

@@ -1,9 +1,11 @@
 import copy
 import logging
-from datetime import
+from datetime import UTC, datetime
 from typing import Any, cast

+import humps
 import unique_sdk
+from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam
 from pydantic import BaseModel

 from unique_toolkit.chat.schemas import ChatMessage, ChatMessageRole
@@ -33,7 +35,7 @@ logger = logging.getLogger(f"toolkit.{DOMAIN_NAME}.{__name__}")

 def complete(
     company_id: str,
-    messages: LanguageModelMessages,
+    messages: LanguageModelMessages | list[ChatCompletionMessageParam],
     model_name: LanguageModelName | str,
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
@@ -59,7 +61,7 @@ def complete(
         LanguageModelResponse: The response object containing the completed result.

     """
-    options, model, messages_dict, _ =
+    options, model, messages_dict, _ = _prepare_all_completions_params_util(
         messages=messages,
         model_name=model_name,
         temperature=temperature,
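
With the widened `messages` parameter, `complete` can be handed OpenAI-style message dicts directly instead of a `LanguageModelMessages` object. A hedged sketch; the company id and model name are placeholders, and nothing beyond the parameters visible in the signature above is implied:

from unique_toolkit.language_model.functions import complete

# Placeholder ids; only the messages/model_name/company_id parameters come from the diff.
response = complete(
    company_id="company_123",
    messages=[
        {"role": "system", "content": "You are concise."},
        {"role": "user", "content": "Summarize the 0.8.7 release in one line."},
    ],
    model_name="gpt-4o",  # any LanguageModelName member or plain string
)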
@@ -88,7 +90,7 @@ def complete(

 async def complete_async(
     company_id: str,
-    messages: LanguageModelMessages,
+    messages: LanguageModelMessages | list[ChatCompletionMessageParam],
     model_name: LanguageModelName | str,
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
@@ -123,7 +125,7 @@ async def complete_async(
         and logged.

     """
-    options, model, messages_dict, _ =
+    options, model, messages_dict, _ = _prepare_all_completions_params_util(
         messages=messages,
         model_name=model_name,
         temperature=temperature,
@@ -249,6 +251,104 @@ def _prepare_completion_params_util(
     return options, model, messages_dict, search_context


+def _prepare_openai_completion_params_util(
+    model_name: LanguageModelName | str,
+    temperature: float,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
+    other_options: dict | None = None,
+    content_chunks: list[ContentChunk] | None = None,
+    structured_output_model: type[BaseModel] | None = None,
+    structured_output_enforce_schema: bool = False,
+) -> tuple[dict, str, dict | None]:
+    """Prepare common parameters for completion requests.
+
+    Returns
+    -------
+    tuple containing:
+    - options (dict): Combined options including tools and temperature
+    - model (str): Resolved model name
+    - messages_dict (dict): Processed messages
+    - search_context (dict | None): Processed content chunks if provided
+
+    """
+    options = _add_tools_to_options({}, tools)
+
+    if structured_output_model:
+        options = _add_response_format_to_options(
+            options,
+            structured_output_model,
+            structured_output_enforce_schema,
+        )
+    options["temperature"] = temperature
+    if other_options:
+        options.update(other_options)
+
+    model = (
+        model_name.value if isinstance(model_name, LanguageModelName) else model_name
+    )
+
+    search_context = (
+        _to_search_context(content_chunks) if content_chunks is not None else None
+    )
+
+    return options, model, search_context
+
+
+def __camelize_keys(data):
+    """Recursively camelize dictionary keys using humps."""
+    if isinstance(data, dict):
+        return {humps.camelize(k): __camelize_keys(v) for k, v in data.items()}
+    if isinstance(data, list):
+        return [__camelize_keys(item) for item in data]
+    return data
+
+
+def _prepare_all_completions_params_util(
+    messages: LanguageModelMessages | list[ChatCompletionMessageParam],
+    model_name: LanguageModelName | str,
+    temperature: float,
+    tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
+    other_options: dict | None = None,
+    content_chunks: list[ContentChunk] | None = None,
+    structured_output_model: type[BaseModel] | None = None,
+    structured_output_enforce_schema: bool = False,
+) -> tuple[
+    dict,
+    str,
+    list[unique_sdk.Integrated.ChatCompletionRequestMessage],
+    dict | None,
+]:
+    if isinstance(messages, LanguageModelMessages):
+        options, model, messages_dict, search_context = _prepare_completion_params_util(
+            messages=messages,
+            model_name=model_name,
+            temperature=temperature,
+            tools=tools,
+            content_chunks=content_chunks,
+            other_options=other_options,
+            structured_output_model=structured_output_model,
+            structured_output_enforce_schema=structured_output_enforce_schema,
+        )
+    else:
+        options, model, search_context = _prepare_openai_completion_params_util(
+            model_name=model_name,
+            temperature=temperature,
+            tools=tools,
+            content_chunks=content_chunks,
+            other_options=other_options,
+            structured_output_model=structured_output_model,
+            structured_output_enforce_schema=structured_output_enforce_schema,
+        )
+        messages_dict = __camelize_keys(messages.copy())
+
+    integrated_messages = cast(
+        "list[unique_sdk.Integrated.ChatCompletionRequestMessage]",
+        messages_dict,
+    )
+
+    return options, model, integrated_messages, search_context
+
+
 def complete_with_references(
     company_id: str,
     messages: LanguageModelMessages,
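
The private `__camelize_keys` helper recursively converts snake_case keys to camelCase before the OpenAI-style messages are cast to the backend message type. A standalone illustration of the same behaviour, re-implemented here only because the helper above is module-private:

import humps

def camelize_keys(data):
    # Mirrors the recursive key camelization performed by __camelize_keys above.
    if isinstance(data, dict):
        return {humps.camelize(k): camelize_keys(v) for k, v in data.items()}
    if isinstance(data, list):
        return [camelize_keys(item) for item in data]
    return data

print(camelize_keys({"role": "tool", "tool_call_id": "call_123", "content": "done"}))
# {'role': 'tool', 'toolCallId': 'call_123', 'content': 'done'}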
@@ -284,7 +384,7 @@ async def complete_with_references_async(
     messages: LanguageModelMessages,
     model_name: LanguageModelName | str,
     content_chunks: list[ContentChunk] | None = None,
-    debug_dict: dict =
+    debug_dict: dict | None = None,
     temperature: float = DEFAULT_COMPLETE_TEMPERATURE,
     timeout: int = DEFAULT_COMPLETE_TIMEOUT,
     tools: list[LanguageModelTool | LanguageModelToolDescription] | None = None,
@@ -319,16 +419,15 @@ def _create_language_model_stream_response_with_references(

     if content is None:
         raise ValueError("Content is None, which is not supported")
-
+    if isinstance(content, list):
         raise ValueError("Content is a list, which is not supported")
-
-    content = start_text or "" + str(content)
+    content = start_text or "" + str(content)

     message = ChatMessage(
         id="msg_unknown",
         text=copy.deepcopy(content),
         role=ChatMessageRole.ASSISTANT,
-        created_at=datetime.now(
+        created_at=datetime.now(UTC),
         chat_id="chat_unknown",
     )

unique_toolkit/language_model/schemas.py

@@ -5,6 +5,8 @@ from typing import Any, Self
 from uuid import uuid4

 from humps import camelize
+from openai.types.chat.chat_completion_tool_param import ChatCompletionToolParam
+from openai.types.shared_params.function_definition import FunctionDefinition
 from pydantic import (
     BaseModel,
     ConfigDict,
@@ -85,8 +87,7 @@ class LanguageModelFunction(BaseModel):
         seralization["arguments"] = json.dumps(self.arguments)
         return seralization

-
-    def __eq__(self, other:Self) -> bool:
+    def __eq__(self, other: Self) -> bool:
         """
         Compare two tool calls based on name and arguments.
         """
@@ -101,9 +102,10 @@ class LanguageModelFunction(BaseModel):

         if self.arguments != other.arguments:
             return False
-
+
         return True

+
 # This is tailored to the unique backend
 class LanguageModelStreamResponse(BaseModel):
     model_config = model_config
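
The tidied `__eq__` compares tool calls by `name` and `arguments` only. A hedged sketch; constructing `LanguageModelFunction` from just these two fields is an assumption, since the full model definition is not part of this diff:

from unique_toolkit.language_model.schemas import LanguageModelFunction

# Assumption: name and arguments are sufficient to construct the model here.
call_a = LanguageModelFunction(name="get_weather", arguments={"city": "Zurich"})
call_b = LanguageModelFunction(name="get_weather", arguments={"city": "Zurich"})
assert call_a == call_b  # equal when name and arguments match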
@@ -155,7 +157,6 @@ class LanguageModelMessage(BaseModel):


 # Equivalent to
-# from openai.types.chat.chat_completion_system_message_param import ChatCompletionSystemMessageParam
 class LanguageModelSystemMessage(LanguageModelMessage):
     role: LanguageModelMessageRole = LanguageModelMessageRole.SYSTEM

@@ -401,7 +402,7 @@ class LanguageModelTokenLimits(BaseModel):


 @deprecated(
-    "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`"
+    "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`",
 )
 class LanguageModelToolParameterProperty(BaseModel):
     type: str
@@ -413,7 +414,7 @@ class LanguageModelToolParameterProperty(BaseModel):
 # Looks most like
 # from openai.types.shared.function_parameters import FunctionParameters
 @deprecated(
-    "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`"
+    "Deprecated as `LanguageModelTool` is deprecated in favor of `LanguageModelToolDescription`",
 )
 class LanguageModelToolParameters(BaseModel):
     type: str = "object"
@@ -425,7 +426,7 @@ class LanguageModelToolParameters(BaseModel):
 # from openai.types.shared_params.function_definition import FunctionDefinition
 # but returns parameter is not known
 @deprecated(
-    "Deprecated as `LanguageModelTool` use `LanguageModelToolDescription` instead"
+    "Deprecated as `LanguageModelTool` use `LanguageModelToolDescription` instead",
 )
 class LanguageModelTool(BaseModel):
     name: str = Field(
@@ -466,3 +467,14 @@ class LanguageModelToolDescription(BaseModel):
     @field_serializer("parameters")
     def serialize_parameters(self, parameters: type[BaseModel]):
         return parameters.model_json_schema()
+
+    def to_openai(self) -> ChatCompletionToolParam:
+        return ChatCompletionToolParam(
+            function=FunctionDefinition(
+                name=self.name,
+                description=self.description,
+                parameters=self.parameters.model_json_schema(),
+                strict=self.strict,
+            ),
+            type="function",
+        )
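
The new `to_openai` method turns a `LanguageModelToolDescription` into an OpenAI `ChatCompletionToolParam`, serializing the pydantic `parameters` model to a JSON schema. A hedged sketch; passing only `name`, `description`, and `parameters` assumes `strict` has a default, which is not visible in this diff:

from pydantic import BaseModel
from unique_toolkit.language_model.schemas import LanguageModelToolDescription

class WeatherParams(BaseModel):
    city: str

# Assumption: `strict` is optional on LanguageModelToolDescription.
tool = LanguageModelToolDescription(
    name="get_weather",
    description="Look up the current weather for a city.",
    parameters=WeatherParams,
)
openai_tool = tool.to_openai()  # {"type": "function", "function": {...WeatherParams JSON schema...}}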
{unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: unique_toolkit
-Version: 0.8.6
+Version: 0.8.7
 Summary:
 License: Proprietary
 Author: Martin Fadler
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: numpy (>=1.26.4,<2.0.0)
+Requires-Dist: openai (>=1.99.9,<2.0.0)
 Requires-Dist: pydantic (>=2.8.2,<3.0.0)
 Requires-Dist: pydantic-settings (>=2.10.1,<3.0.0)
 Requires-Dist: pyhumps (>=3.8.0,<4.0.0)
@@ -113,6 +114,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+
+## [0.8.7] - 2025-08-11
+- Make chat service openai compatible
+- Fix some bugs
+- Make OpenAIMessageBuilder more congruent to MessageBuilder
+
 ## [0.8.6] - 2025-08-11
 - Add GPT-5, GPT-5_MINI, GPT-5_NANO, GPT-5_CHAT to supported models list

{unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/RECORD

@@ -4,7 +4,7 @@ unique_toolkit/_common/_time_utils.py,sha256=ztmTovTvr-3w71Ns2VwXC65OKUUh-sQlzbH
 unique_toolkit/_common/exception.py,sha256=caQIE1btsQnpKCHqL2cgWUSbHup06enQu_Pt7uGUTTE,727
 unique_toolkit/_common/validate_required_values.py,sha256=Y_M1ub9gIKP9qZ45F6Zq3ZHtuIqhmOjl8Z2Vd3avg8w,588
 unique_toolkit/_common/validators.py,sha256=l7-hWyRTZ3aF_e73oTQFZdz93s06VhNWVpkERbg2a64,1569
-unique_toolkit/app/__init__.py,sha256=
+unique_toolkit/app/__init__.py,sha256=ETxYDpEizg_PKmi4JPX_P76ySq-us-xypfAIdKQ1QZU,1284
 unique_toolkit/app/dev_util.py,sha256=rN-xSg4OGfmwjaToy8m_hQroehcLYyk9-GCmQJ-f6uY,4302
 unique_toolkit/app/init_logging.py,sha256=Sh26SRxOj8i8dzobKhYha2lLrkrMTHfB1V4jR3h23gQ,678
 unique_toolkit/app/init_sdk.py,sha256=5_oDoETr6akwYyBCb0ivTdMNu3SVgPSkrXcDS6ELyY8,2269
@@ -15,9 +15,9 @@ unique_toolkit/app/unique_settings.py,sha256=Gn8qxy_hNraVTTlP4wfZJzgxPU8cU6s84Uw
 unique_toolkit/app/verification.py,sha256=GxFFwcJMy25fCA_Xe89wKW7bgqOu8PAs5y8QpHF0GSc,3861
 unique_toolkit/chat/__init__.py,sha256=LRs2G-JTVuci4lbtHTkVUiNcZcSR6uqqfnAyo7af6nY,619
 unique_toolkit/chat/constants.py,sha256=05kq6zjqUVB2d6_P7s-90nbljpB3ryxwCI-CAz0r2O4,83
-unique_toolkit/chat/functions.py,sha256=
+unique_toolkit/chat/functions.py,sha256=I6r81PpWNu-j9DvHxNFKN3V6T5E5q42FhW9kF8pJJSY,29950
 unique_toolkit/chat/schemas.py,sha256=abPPeDtUIeEyKDnLhIqgIyqTsFANxh3j44EYrITBlHw,2786
-unique_toolkit/chat/service.py,sha256=
+unique_toolkit/chat/service.py,sha256=dB36KCfeowQ9EgiRscK2UK7WOnGhklVNawC_t8MTaZA,37803
 unique_toolkit/chat/state.py,sha256=Cjgwv_2vhDFbV69xxsn7SefhaoIAEqLx3ferdVFCnOg,1445
 unique_toolkit/chat/utils.py,sha256=ihm-wQykBWhB4liR3LnwPVPt_qGW6ETq21Mw4HY0THE,854
 unique_toolkit/content/__init__.py,sha256=EdJg_A_7loEtCQf4cah3QARQreJx6pdz89Rm96YbMVg,940
@@ -49,16 +49,16 @@ unique_toolkit/evaluators/schemas.py,sha256=Jaue6Uhx75X1CyHKWj8sT3RE1JZXTqoLtfLt
 unique_toolkit/framework_utilities/langchain/client.py,sha256=Msfmr7uezwqagyRJ2zjWbQRFqzDExWYK0y5KLEnDNqM,1329
 unique_toolkit/framework_utilities/langchain/history.py,sha256=R9RuCeSFNaUO3OZ0G_LmIC4gmOCIANcl91MfyWLnZ1c,650
 unique_toolkit/framework_utilities/openai/client.py,sha256=IasxPXlVJHIsZdXHin7yq-5tO4RNLUu9cEuhrgb4ghE,1205
-unique_toolkit/framework_utilities/openai/message_builder.py,sha256=
+unique_toolkit/framework_utilities/openai/message_builder.py,sha256=XNtgFcgzVeTHZBjfs-fS4cgMEOWqkGObj6ygG08HdfM,4240
 unique_toolkit/framework_utilities/utils.py,sha256=JK7g2yMfEx3eMprug26769xqNpS5WJcizf8n2zWMBng,789
 unique_toolkit/language_model/__init__.py,sha256=lRQyLlbwHbNFf4-0foBU13UGb09lwEeodbVsfsSgaCk,1971
-unique_toolkit/language_model/builder.py,sha256=
+unique_toolkit/language_model/builder.py,sha256=4OKfwJfj3TrgO1ezc_ewIue6W7BCQ2ZYQXUckWVPPTA,3369
 unique_toolkit/language_model/constants.py,sha256=B-topqW0r83dkC_25DeQfnPk3n53qzIHUCBS7YJ0-1U,119
-unique_toolkit/language_model/functions.py,sha256=
+unique_toolkit/language_model/functions.py,sha256=qi4KGa4hLUXy0KwkkGmtGBM1IuYfJqmTheSQ95R7pNw,15611
 unique_toolkit/language_model/infos.py,sha256=mnUnbjDQNOIuPS2VE1SsgyKOxdRulo-9Z5k7_S1Q8Cw,37631
 unique_toolkit/language_model/prompt.py,sha256=JSawaLjQg3VR-E2fK8engFyJnNdk21zaO8pPIodzN4Q,3991
 unique_toolkit/language_model/reference.py,sha256=nkX2VFz-IrUz8yqyc3G5jUMNwrNpxITBrMEKkbqqYoI,8583
-unique_toolkit/language_model/schemas.py,sha256=
+unique_toolkit/language_model/schemas.py,sha256=q7jZNijgy6xu3tHrr2Eqs1jglI_YkqH1RrBt5bCXn-8,15181
 unique_toolkit/language_model/service.py,sha256=zlvC_t9T1wixwcGDPRxl6yYniaKl2725NxWrbW51jUs,11290
 unique_toolkit/language_model/utils.py,sha256=bPQ4l6_YO71w-zaIPanUUmtbXC1_hCvLK0tAFc3VCRc,1902
 unique_toolkit/protocols/support.py,sha256=V15WEIFKVMyF1QCnR8vIi4GrJy4dfTCB6d6JlqPZ58o,2341
@@ -82,7 +82,7 @@ unique_toolkit/tools/utils/execution/execution.py,sha256=vjG2Y6awsGNtlvyQAGCTthQ
 unique_toolkit/tools/utils/source_handling/schema.py,sha256=pvNhtL2daDLpCVIQpfdn6R35GvKmITVLXjZNLAwpgUE,871
 unique_toolkit/tools/utils/source_handling/source_formatting.py,sha256=C7uayNbdkNVJdEARA5CENnHtNY1SU6etlaqbgHNyxaQ,9152
 unique_toolkit/tools/utils/source_handling/tests/test_source_formatting.py,sha256=zu3AJnYH9CMqZPrxKEH3IgI-fM3nlvIBuspJG6W6B18,6978
-unique_toolkit-0.8.
-unique_toolkit-0.8.
-unique_toolkit-0.8.
-unique_toolkit-0.8.
+unique_toolkit-0.8.7.dist-info/LICENSE,sha256=GlN8wHNdh53xwOPg44URnwag6TEolCjoq3YD_KrWgss,193
+unique_toolkit-0.8.7.dist-info/METADATA,sha256=ybNaWfFpNt6HHOd3aMPvcvUAwust7t6VV23dfZlOk4Q,26524
+unique_toolkit-0.8.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+unique_toolkit-0.8.7.dist-info/RECORD,,

{unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/LICENSE: File without changes
{unique_toolkit-0.8.6.dist-info → unique_toolkit-0.8.7.dist-info}/WHEEL: File without changes