ommlds-0.0.0.dev476-py3-none-any.whl → ommlds-0.0.0.dev478-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ommlds/.omlish-manifests.json +18 -6
- ommlds/backends/groq/protocol.py +69 -4
- ommlds/cli/sessions/chat/chat/ai/rendering.py +3 -3
- ommlds/cli/sessions/chat/chat/ai/services.py +2 -2
- ommlds/cli/sessions/chat/chat/ai/types.py +1 -1
- ommlds/cli/sessions/chat/tools/configs.py +9 -0
- ommlds/cli/sessions/chat/tools/fs/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/fs/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/fs/inject.py +35 -0
- ommlds/cli/sessions/chat/tools/inject.py +11 -64
- ommlds/cli/sessions/chat/tools/injection.py +14 -0
- ommlds/cli/sessions/chat/tools/todo/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/todo/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/todo/inject.py +31 -0
- ommlds/cli/sessions/chat/tools/weather/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/weather/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/weather/inject.py +22 -0
- ommlds/cli/sessions/chat/tools/{weather.py → weather/tools.py} +1 -1
- ommlds/minichain/__init__.py +39 -22
- ommlds/minichain/backends/impls/anthropic/stream.py +11 -11
- ommlds/minichain/backends/impls/dummy/chat.py +7 -7
- ommlds/minichain/backends/impls/google/stream.py +9 -9
- ommlds/minichain/backends/impls/groq/chat.py +11 -5
- ommlds/minichain/backends/impls/groq/names.py +13 -0
- ommlds/minichain/backends/impls/groq/protocol.py +120 -23
- ommlds/minichain/backends/impls/groq/stream.py +14 -10
- ommlds/minichain/backends/impls/llamacpp/stream.py +7 -7
- ommlds/minichain/backends/impls/mlx/chat.py +7 -7
- ommlds/minichain/backends/impls/ollama/chat.py +7 -7
- ommlds/minichain/backends/impls/openai/format.py +10 -10
- ommlds/minichain/backends/impls/openai/names.py +2 -2
- ommlds/minichain/backends/impls/openai/stream.py +8 -8
- ommlds/minichain/backends/impls/tinygrad/chat.py +7 -7
- ommlds/minichain/backends/impls/transformers/transformers.py +7 -7
- ommlds/minichain/chat/choices/stream/__init__.py +0 -0
- ommlds/minichain/chat/{stream → choices/stream}/adapters.py +7 -7
- ommlds/minichain/chat/choices/stream/joining.py +31 -0
- ommlds/minichain/chat/choices/stream/services.py +45 -0
- ommlds/minichain/chat/choices/stream/types.py +43 -0
- ommlds/minichain/chat/stream/_marshal.py +4 -4
- ommlds/minichain/chat/stream/joining.py +32 -43
- ommlds/minichain/chat/stream/services.py +15 -15
- ommlds/minichain/chat/stream/types.py +13 -23
- ommlds/minichain/tools/reflect.py +5 -1
- {ommlds-0.0.0.dev476.dist-info → ommlds-0.0.0.dev478.dist-info}/METADATA +3 -3
- {ommlds-0.0.0.dev476.dist-info → ommlds-0.0.0.dev478.dist-info}/RECORD +50 -37
- {ommlds-0.0.0.dev476.dist-info → ommlds-0.0.0.dev478.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev476.dist-info → ommlds-0.0.0.dev478.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev476.dist-info → ommlds-0.0.0.dev478.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev476.dist-info → ommlds-0.0.0.dev478.dist-info}/top_level.txt +0 -0
ommlds/.omlish-manifests.json
CHANGED
@@ -198,7 +198,7 @@
         "module": ".minichain.backends.impls.groq.chat",
         "attr": null,
         "file": "ommlds/minichain/backends/impls/groq/chat.py",
-        "line":
+        "line": 26,
         "value": {
           "!.minichain.registries.manifests.RegistryManifest": {
             "module": "ommlds.minichain.backends.impls.groq.chat",
@@ -213,7 +213,7 @@
         "module": ".minichain.backends.impls.groq.names",
         "attr": "_BACKEND_STRINGS_MANIFEST",
         "file": "ommlds/minichain/backends/impls/groq/names.py",
-        "line":
+        "line": 40,
         "value": {
           "!.minichain.backends.strings.manifests.BackendStringsManifest": {
             "service_cls_names": [
@@ -237,7 +237,7 @@
         "module": ".minichain.backends.impls.groq.stream",
         "attr": null,
         "file": "ommlds/minichain/backends/impls/groq/stream.py",
-        "line":
+        "line": 33,
         "value": {
           "!.minichain.registries.manifests.RegistryManifest": {
             "module": "ommlds.minichain.backends.impls.groq.stream",
@@ -533,8 +533,8 @@
       "gpt5-chat-latest": "gpt-5-chat-latest",
       "gpt5-mini": "gpt-5-mini",
       "gpt5-nano": "gpt-5-nano",
-      "gpt": "gpt-
-      "gpt-mini": "gpt-
+      "gpt": "gpt-5",
+      "gpt-mini": "gpt-5-mini",
       "o3": null,
       "o3-mini": null,
       "o4-mini": null,
@@ -729,6 +729,18 @@
       }
     }
   },
+  {
+    "module": ".minichain.chat.choices.stream.services",
+    "attr": null,
+    "file": "ommlds/minichain/chat/choices/stream/services.py",
+    "line": 28,
+    "value": {
+      "!.minichain.registries.manifests.RegistryTypeManifest": {
+        "module": "ommlds.minichain.chat.choices.stream.services",
+        "attr": "ChatChoicesStreamService"
+      }
+    }
+  },
   {
     "module": ".minichain.chat.services",
     "attr": null,
@@ -749,7 +761,7 @@
     "value": {
       "!.minichain.registries.manifests.RegistryTypeManifest": {
         "module": "ommlds.minichain.chat.stream.services",
-        "attr": "
+        "attr": "ChatStreamService"
       }
     }
   },
ommlds/backends/groq/protocol.py
CHANGED
@@ -103,13 +103,41 @@ class ChatCompletionRequest(lang.Final):
     stream: bool | None = None
     stream_options: ta.Mapping[str, ta.Any] | None = None
     temperature: float | None = None
-
-
+    tool_choice: str | None = None
+
+    @dc.dataclass(frozen=True, kw_only=True)
+    @_set_class_marshal_options
+    class Tool(lang.Final):
+        @dc.dataclass(frozen=True, kw_only=True)
+        @_set_class_marshal_options
+        class Function(lang.Final):
+            description: str | None = None
+            name: str
+            parameters: ta.Mapping[str, ta.Any] | None = None  # json schema
+            strict: bool | None = None
+
+        function: Function
+        type: ta.Literal['function', 'browser_search', 'code_interpreter'] = 'function'
+
+    tools: ta.Sequence[Tool] | None = None
+
     top_logprobs: int | None = None
     top_p: float | None = None
     user: str | None = None


+@dc.dataclass(frozen=True, kw_only=True)
+@_set_class_marshal_options
+class ExecutedTool(lang.Final):
+    arguments: str
+    index: int
+    type: str
+    browser_results: ta.Sequence[ta.Any] | None = None
+    code_results: ta.Sequence[ta.Any] | None = None
+    output: str | None = None
+    search_results: ta.Any | None = None
+
+
 @dc.dataclass(frozen=True, kw_only=True)
 @_set_class_marshal_options
 class ChatCompletionResponse(lang.Final):
@@ -125,10 +153,27 @@ class ChatCompletionResponse(lang.Final):
     class Message(lang.Final):
         annotations: ta.Sequence[ta.Mapping[str, ta.Any]] | None = None
         content: str | None = None
-
+
+        executed_tools: ta.Sequence[ExecutedTool] | None = None
+
         reasoning: str | None = None
         role: ta.Literal['assistant'] = 'assistant'
-
+
+        @dc.dataclass(frozen=True, kw_only=True)
+        @_set_class_marshal_options
+        class ToolCall(lang.Final):
+            id: str
+
+            @dc.dataclass(frozen=True, kw_only=True)
+            @_set_class_marshal_options
+            class Function(lang.Final):
+                arguments: str
+                name: str
+
+            function: Function
+            type: ta.Literal['function'] = 'function'
+
+        tool_calls: ta.Sequence[ToolCall] | None = None

     message: Message

@@ -167,6 +212,26 @@ class ChatCompletionChunk(lang.Final):
            channel: str | None = None
            reasoning: str | None = None

+            @dc.dataclass(frozen=True, kw_only=True)
+            @_set_class_marshal_options
+            class ToolCall(lang.Final):
+                index: int
+                id: str | None = None
+
+                @dc.dataclass(frozen=True, kw_only=True)
+                @_set_class_marshal_options
+                class Function(lang.Final):
+                    arguments: str | None = None
+                    name: str | None = None
+
+                function: Function | None = None
+
+                type: ta.Literal['function'] = 'function'
+
+            tool_calls: ta.Sequence[ToolCall] | None = None
+
+            executed_tools: ta.Sequence[ExecutedTool] | None = None
+
        delta: Delta
        logprobs: ta.Mapping[str, ta.Any] | None = None
        finish_reason: ta.Literal['stop', 'length', 'tool_calls', 'function_call'] | None = None
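A rough sketch of how the new request-side tool types above might be put together. The `model` and `messages` fields are assumed to already exist on `ChatCompletionRequest` (they are outside this hunk), and the model name is only illustrative:

    # Illustrative only: builds a tool-calling request from the nested
    # dataclasses added in this diff. All of these classes are frozen/kw_only.
    from ommlds.backends.groq.protocol import ChatCompletionRequest

    weather_tool = ChatCompletionRequest.Tool(
        function=ChatCompletionRequest.Tool.Function(
            name='get_weather',
            description='Look up the current weather for a city.',
            parameters={  # json schema, per the field comment above
                'type': 'object',
                'properties': {'city': {'type': 'string'}},
                'required': ['city'],
            },
        ),
        # type defaults to 'function'; 'browser_search' and 'code_interpreter'
        # are the other accepted literals.
    )

    req = ChatCompletionRequest(
        model='llama-3.3-70b-versatile',  # assumed field, not shown in this hunk
        messages=[],                      # assumed field, not shown in this hunk
        tools=[weather_tool],
        tool_choice='auto',
    )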
ommlds/cli/sessions/chat/chat/ai/rendering.py
CHANGED
@@ -57,11 +57,11 @@ class RenderingStreamAiChatGenerator:
     async def get_next_ai_messages_streamed(
         self,
         chat: 'mc.Chat',
-        delta_callback: ta.Callable[['mc.
+        delta_callback: ta.Callable[['mc.AiDelta'], ta.Awaitable[None]] | None = None,
     ) -> mc.Chat:
         async with self._renderer.create_context() as renderer:
-            async def inner(delta: mc.
-                if isinstance(delta, mc.
+            async def inner(delta: mc.AiDelta) -> None:
+                if isinstance(delta, mc.ContentAiDelta):
                     await renderer.render_content(delta.c)

             if delta_callback is not None:
ommlds/cli/sessions/chat/chat/ai/services.py
CHANGED
@@ -59,12 +59,12 @@ class ChatChoicesStreamServiceStreamAiChatGenerator:
     async def get_next_ai_messages_streamed(
         self,
         chat: 'mc.Chat',
-        delta_callback: ta.Callable[['mc.
+        delta_callback: ta.Callable[['mc.AiDelta'], ta.Awaitable[None]] | None = None,
     ) -> mc.AiChat:
         opts = self._options() if self._options is not None else []

         async with self._service_provider.provide_backend() as service:
-            joiner = mc.
+            joiner = mc.AiChoicesDeltaJoiner()

             async with (await service.invoke(mc.ChatChoicesStreamRequest(chat, opts))).v as st_resp:
                 async for o in st_resp:
ommlds/cli/sessions/chat/chat/ai/types.py
CHANGED
@@ -23,6 +23,6 @@ class StreamAiChatGenerator(AiChatGenerator, lang.Abstract):
     def get_next_ai_messages_streamed(
         self,
         chat: 'mc.Chat',
-        delta_callback: ta.Callable[['mc.
+        delta_callback: ta.Callable[['mc.AiDelta'], ta.Awaitable[None]] | None = None,
     ) -> ta.Awaitable['mc.Chat']:
         raise NotImplementedError
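The three `delta_callback` signature changes above all move from the old per-choice delta types to the top-level `mc.AiDelta`. A minimal sketch of a callback conforming to the new signature; the `ommlds.minichain` exports are the ones re-exported later in this diff, and `chat_generator` stands in for any `StreamAiChatGenerator` implementation:

    from ommlds import minichain as mc


    async def print_delta(delta: 'mc.AiDelta') -> None:
        # Print streamed text as it arrives; ignore tool-use deltas.
        if isinstance(delta, mc.ContentAiDelta):
            print(delta.c, end='', flush=True)


    # new_chat = await chat_generator.get_next_ai_messages_streamed(chat, print_delta)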
ommlds/cli/sessions/chat/tools/configs.py
CHANGED
@@ -1,6 +1,7 @@
 import typing as ta

 from omlish import dataclasses as dc
+from omlish import lang


 ##
@@ -11,3 +12,11 @@ class ToolsConfig:
     silent: bool = False
     dangerous_no_confirmation: bool = False
     enabled_tools: ta.Iterable[str] | None = None
+
+
+##
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class ToolSetConfig(lang.Abstract):
+    pass
ommlds/cli/sessions/chat/tools/fs/__init__.py
File without changes
ommlds/cli/sessions/chat/tools/fs/inject.py
ADDED
@@ -0,0 +1,35 @@
+import os
+
+from omlish import inject as inj
+
+from ..injection import ToolSetBinder
+from ..injection import bind_tool_context_provider_to_key
+from ..injection import tool_catalog_entries
+from .configs import FsToolSetConfig
+
+
+##
+
+
+def bind_fs_tools(cfg: FsToolSetConfig) -> inj.Elements:
+    from ......minichain.lib.fs.context import FsContext
+    from ......minichain.lib.fs.tools.ls import ls_tool
+    from ......minichain.lib.fs.tools.read import read_tool
+
+    return inj.as_elements(
+        tool_catalog_entries().bind_item_consts(
+            ls_tool(),
+            read_tool(),
+        ),
+
+        inj.bind(FsContext(
+            root_dir=os.getcwd(),
+        )),
+        bind_tool_context_provider_to_key(FsContext),
+    )
+
+
+##
+
+
+FS_TOOL_SET_BINDER = ToolSetBinder(FsToolSetConfig, bind_fs_tools)
ommlds/cli/sessions/chat/tools/inject.py
CHANGED
@@ -1,13 +1,10 @@
-import os
-import typing as ta
-
 from omlish import check
 from omlish import inject as inj
 from omlish import lang

 from ..... import minichain as mc
 from .configs import ToolsConfig
-from .injection import
+from .injection import ToolSetBinder
 from .injection import tool_catalog_entries
 from .injection import tool_context_providers

@@ -21,65 +18,6 @@ with lang.auto_proxy_import(globals()):
 ##


-_TOOL_BINDERS: dict[str, ta.Callable[[], inj.Elements]] = {}
-
-
-def _tool_binder(name: str) -> ta.Callable[[ta.Callable[[], inj.Elements]], ta.Callable[[], inj.Elements]]:
-    def inner(fn):
-        check.not_in(name, _TOOL_BINDERS)
-        _TOOL_BINDERS[name] = fn
-        return fn
-    return inner
-
-
-#
-
-
-@_tool_binder('weather')
-def _bind_weather_tool() -> inj.Elements:
-    from .weather import WEATHER_TOOL
-
-    return inj.as_elements(
-        tool_catalog_entries().bind_item_consts(WEATHER_TOOL),
-    )
-
-
-@_tool_binder('todo')
-def _bind_todo_tools() -> inj.Elements:
-    from .....minichain.lib.todo.context import TodoContext
-    from .....minichain.lib.todo.tools.read import todo_read_tool
-    from .....minichain.lib.todo.tools.write import todo_write_tool
-
-    return inj.as_elements(
-        tool_catalog_entries().bind_item_consts(
-            todo_read_tool(),
-            todo_write_tool(),
-        ),
-
-        inj.bind(TodoContext()),
-        bind_tool_context_provider_to_key(TodoContext),
-    )
-
-
-@_tool_binder('fs')
-def _bind_fs_tools() -> inj.Elements:
-    from .....minichain.lib.fs.context import FsContext
-    from .....minichain.lib.fs.tools.ls import ls_tool
-    from .....minichain.lib.fs.tools.read import read_tool
-
-    return inj.as_elements(
-        tool_catalog_entries().bind_item_consts(
-            ls_tool(),
-            read_tool(),
-        ),
-
-        inj.bind(FsContext(
-            root_dir=os.getcwd(),
-        )),
-        bind_tool_context_provider_to_key(FsContext),
-    )
-
-
 # if tools_config.enable_unsafe_tools_do_not_use_lol:
 #     from ...minichain.lib.bash import bash_tool
 #     els.append(bind_tool(bash_tool()))
@@ -103,7 +41,16 @@ def bind_tools(cfg: ToolsConfig = ToolsConfig()) -> inj.Elements:
     els.append(tool_catalog_entries().bind_items_provider(singleton=True))

     for etn in check.not_isinstance(cfg.enabled_tools or [], str):
-
+        from .fs.inject import FS_TOOL_SET_BINDER
+        from .todo.inject import TODO_TOOL_SET_BINDER
+        from .weather.inject import WEATHER_TOOL_SET_BINDER
+        ts_binder: ToolSetBinder = {  # type: ignore[assignment]  # FIXME: placeholder obviously lol
+            'fs': FS_TOOL_SET_BINDER,
+            'todo': TODO_TOOL_SET_BINDER,
+            'weather': WEATHER_TOOL_SET_BINDER,
+        }[etn]
+
+        els.append(ts_binder.fn(ts_binder.cfg_cls()))

     #

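A hedged sketch of driving the reworked dispatch above from configuration; `ToolsConfig` comes from the configs.py hunk earlier, and how the returned `inj.Elements` are installed into the CLI injector is assumed rather than shown here:

    from omlish import inject as inj

    from ommlds.cli.sessions.chat.tools.configs import ToolsConfig
    from ommlds.cli.sessions.chat.tools.inject import bind_tools

    # 'fs' and 'todo' are two of the keys in the placeholder dispatch table above.
    els: inj.Elements = bind_tools(ToolsConfig(enabled_tools=['fs', 'todo']))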
ommlds/cli/sessions/chat/tools/injection.py
CHANGED
@@ -1,15 +1,20 @@
 import typing as ta

+from omlish import dataclasses as dc
 from omlish import inject as inj
 from omlish import lang

 from ..... import minichain as mc
+from .configs import ToolSetConfig


 with lang.auto_proxy_import(globals()):
     from . import execution as _execution


+ToolSetConfigT = ta.TypeVar('ToolSetConfigT', bound='ToolSetConfig')
+
+
 ##


@@ -28,3 +33,12 @@ def bind_tool_context_provider_to_key(key: ta.Any) -> inj.Elements:
         lambda v: _execution.ToolContextProvider(lambda: [v]),
         v=key,
     ), singleton=True)
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class ToolSetBinder(lang.Final, ta.Generic[ToolSetConfigT]):
+    cfg_cls: type[ToolSetConfig]
+    fn: ta.Callable[[ToolSetConfigT], inj.Elements]
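The `ToolSetConfig` / `ToolSetBinder` pair above gives each tool set its own config class, bind function, and module-level binder constant (see the fs, todo, and weather modules in this diff). A hypothetical sketch of another tool set following that pattern; `SearchToolSetConfig`, `bind_search_tools`, and the `my_search_tools` module are invented names, and the exact shape of the per-tool-set config subclasses is assumed from the pattern rather than taken from this release:

    from omlish import dataclasses as dc
    from omlish import inject as inj

    from ommlds.cli.sessions.chat.tools.configs import ToolSetConfig
    from ommlds.cli.sessions.chat.tools.injection import ToolSetBinder
    from ommlds.cli.sessions.chat.tools.injection import tool_catalog_entries


    @dc.dataclass(frozen=True, kw_only=True)
    class SearchToolSetConfig(ToolSetConfig):
        pass


    def bind_search_tools(cfg: SearchToolSetConfig) -> inj.Elements:
        from my_search_tools import search_tool  # hypothetical tool factory

        return inj.as_elements(
            tool_catalog_entries().bind_item_consts(search_tool()),
        )


    SEARCH_TOOL_SET_BINDER = ToolSetBinder(SearchToolSetConfig, bind_search_tools)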
ommlds/cli/sessions/chat/tools/todo/__init__.py
File without changes
ommlds/cli/sessions/chat/tools/todo/inject.py
ADDED
@@ -0,0 +1,31 @@
+from omlish import inject as inj
+
+from ..injection import ToolSetBinder
+from ..injection import bind_tool_context_provider_to_key
+from ..injection import tool_catalog_entries
+from .configs import TodoToolSetConfig
+
+
+##
+
+
+def bind_todo_tools(cfg: TodoToolSetConfig) -> inj.Elements:
+    from ......minichain.lib.todo.context import TodoContext
+    from ......minichain.lib.todo.tools.read import todo_read_tool
+    from ......minichain.lib.todo.tools.write import todo_write_tool
+
+    return inj.as_elements(
+        tool_catalog_entries().bind_item_consts(
+            todo_read_tool(),
+            todo_write_tool(),
+        ),
+
+        inj.bind(TodoContext()),
+        bind_tool_context_provider_to_key(TodoContext),
+    )
+
+
+##
+
+
+TODO_TOOL_SET_BINDER = ToolSetBinder(TodoToolSetConfig, bind_todo_tools)
ommlds/cli/sessions/chat/tools/weather/__init__.py
File without changes
ommlds/cli/sessions/chat/tools/weather/inject.py
ADDED
@@ -0,0 +1,22 @@
+from omlish import inject as inj
+
+from ..injection import ToolSetBinder
+from ..injection import tool_catalog_entries
+from .configs import WeatherToolSetConfig
+
+
+##
+
+
+def bind_weather_tools(cfg: WeatherToolSetConfig) -> inj.Elements:
+    from .tools import WEATHER_TOOL
+
+    return inj.as_elements(
+        tool_catalog_entries().bind_item_consts(WEATHER_TOOL),
+    )
+
+
+##
+
+
+WEATHER_TOOL_SET_BINDER = ToolSetBinder(WeatherToolSetConfig, bind_weather_tools)
ommlds/minichain/__init__.py
CHANGED
@@ -65,6 +65,33 @@ with _lang.auto_proxy_init(

     ##

+    from .chat.choices.stream.adapters import (  # noqa
+        ChatChoicesStreamServiceChatChoicesService,
+    )
+
+    from .chat.choices.stream.joining import (  # noqa
+        AiChoicesDeltaJoiner,
+    )
+
+    from .chat.choices.stream.services import (  # noqa
+        ChatChoicesStreamRequest,
+        ChatChoicesStreamResponse,
+        ChatChoicesStreamService,
+        AbstractChatChoicesStreamService,
+        static_check_is_chat_choices_stream_service,
+    )
+
+    from .chat.choices.stream.types import (  # noqa
+        ChatChoicesStreamOption,
+        ChatChoicesStreamOptions,
+
+        ChatChoicesStreamOutput,
+        ChatChoicesStreamOutputs,
+
+        AiChoiceDeltas,
+        AiChoicesDeltas,
+    )
+
     from .chat.choices.adapters import (  # noqa
         ChatChoicesServiceChatService,
     )
@@ -88,37 +115,27 @@
         AiChoices,
     )

-    from .chat.stream.adapters import (  # noqa
-        ChatChoicesStreamServiceChatChoicesService,
-    )
-
     from .chat.stream.joining import (  # noqa
-
+        AiDeltaJoiner,
     )

     from .chat.stream.services import (  # noqa
-
-
-
-
-
+        ChatStreamRequest,
+        ChatStreamResponse,
+        ChatStreamService,
+        AbstractChatStreamService,
+        static_check_is_chat_stream_service,
     )

     from .chat.stream.types import (  # noqa
-
-
+        AiDelta,
+        AiDeltas,

-
-        ChatChoicesStreamOutputs,
-
-        AiChoiceDelta,
-        ContentAiChoiceDelta,
-        AnyToolUseAiChoiceDelta,
-        ToolUseAiChoiceDelta,
-        PartialToolUseAiChoiceDelta,
+        ContentAiDelta,

-
-
+        AnyToolUseAiDelta,
+        ToolUseAiDelta,
+        PartialToolUseAiDelta,
     )

     from .chat.tools.execution import (  # noqa
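The per-choice delta types lose their `Choice` infix here (`ContentAiChoiceDelta` becomes `ContentAiDelta`, and so on), while the choices-stream request/response/joiner machinery moves under `chat.choices.stream` and stays re-exported at the top level. A small sketch of building one streamed chunk under the new names, mirroring the `sink.emit(...)` calls in the backend diffs below:

    from ommlds import minichain as mc

    # One chunk of streamed output: a group of per-choice delta lists, each
    # holding plain deltas under the renamed types.
    chunk = mc.AiChoicesDeltas([
        mc.AiChoiceDeltas([
            mc.ContentAiDelta('Hello, '),
        ]),
    ])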
ommlds/minichain/backends/impls/anthropic/stream.py
CHANGED
@@ -11,13 +11,13 @@ from omlish.io.buffers import DelimitingBuffer
 from .....backends.anthropic.protocol import types as pt
 from .....backends.anthropic.protocol.sse.events import AnthropicSseDecoderEvents
 from ....chat.choices.services import ChatChoicesOutputs
-from ....chat.stream.services import ChatChoicesStreamRequest
-from ....chat.stream.services import ChatChoicesStreamResponse
-from ....chat.stream.services import static_check_is_chat_choices_stream_service
-from ....chat.stream.types import AiChoiceDeltas
-from ....chat.stream.types import AiChoicesDeltas
-from ....chat.stream.types import
-from ....chat.stream.types import
+from ....chat.choices.stream.services import ChatChoicesStreamRequest
+from ....chat.choices.stream.services import ChatChoicesStreamResponse
+from ....chat.choices.stream.services import static_check_is_chat_choices_stream_service
+from ....chat.choices.stream.types import AiChoiceDeltas
+from ....chat.choices.stream.types import AiChoicesDeltas
+from ....chat.stream.types import ContentAiDelta
+from ....chat.stream.types import PartialToolUseAiDelta
 from ....chat.tools.types import Tool
 from ....configs import Config
 from ....resources import UseResources
@@ -131,12 +131,12 @@ class AnthropicChatChoicesStreamService:
             cbk_start = ae

             if isinstance(ae.content_block, AnthropicSseDecoderEvents.ContentBlockStart.Text):  # noqa
-                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([
+                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([ContentAiDelta(
                     ae.content_block.text,
                 )])]))

             elif isinstance(ae.content_block, AnthropicSseDecoderEvents.ContentBlockStart.ToolUse):  # noqa
-                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([
+                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([PartialToolUseAiDelta(  # noqa
                     id=ae.content_block.id,
                     name=ae.content_block.name,
                     raw_args=ae.content_block.input,
@@ -149,12 +149,12 @@ class AnthropicChatChoicesStreamService:
             check.not_none(cbk_start)

             if isinstance(ae.delta, AnthropicSseDecoderEvents.ContentBlockDelta.TextDelta):
-                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([
+                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([ContentAiDelta(
                     ae.delta.text,
                 )])]))

             elif isinstance(ae.delta, AnthropicSseDecoderEvents.ContentBlockDelta.InputJsonDelta):  # noqa
-                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([
+                await sink.emit(AiChoicesDeltas([AiChoiceDeltas([PartialToolUseAiDelta(  # noqa
                     raw_args=ae.delta.partial_json,
                 )])]))

ommlds/minichain/backends/impls/dummy/chat.py
CHANGED
@@ -6,15 +6,15 @@ from omlish.text.lorem import LOREM
 from ....chat.choices.services import ChatChoicesRequest
 from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
+from ....chat.choices.stream.services import ChatChoicesStreamRequest
+from ....chat.choices.stream.services import ChatChoicesStreamResponse
+from ....chat.choices.stream.services import static_check_is_chat_choices_stream_service
+from ....chat.choices.stream.types import AiChoiceDeltas
+from ....chat.choices.stream.types import AiChoicesDeltas
 from ....chat.choices.types import AiChoice
 from ....chat.choices.types import ChatChoicesOutputs
 from ....chat.messages import AiMessage
-from ....chat.stream.
-from ....chat.stream.services import ChatChoicesStreamResponse
-from ....chat.stream.services import static_check_is_chat_choices_stream_service
-from ....chat.stream.types import AiChoiceDeltas
-from ....chat.stream.types import AiChoicesDeltas
-from ....chat.stream.types import ContentAiChoiceDelta
+from ....chat.stream.types import ContentAiDelta
 from ....resources import UseResources
 from ....stream.services import StreamResponseSink
 from ....stream.services import new_stream_response
@@ -48,7 +48,7 @@ class DummyChatChoicesStreamService:
         for s in LOREM:
             await sink.emit(AiChoicesDeltas([
                 AiChoiceDeltas([
-
+                    ContentAiDelta(s),
                 ]),
             ]))
