ommlds 0.0.0.dev464__py3-none-any.whl → 0.0.0.dev466__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ommlds might be problematic.
- ommlds/.omlish-manifests.json +3 -3
- ommlds/backends/google/protocol/types.py +4 -1
- ommlds/cli/inject.py +14 -27
- ommlds/cli/main.py +14 -11
- ommlds/cli/sessions/chat/chat/ai/inject.py +10 -13
- ommlds/cli/sessions/chat/chat/ai/services.py +5 -7
- ommlds/cli/sessions/chat/chat/state/storage.py +1 -1
- ommlds/cli/sessions/chat/inject.py +10 -0
- ommlds/cli/sessions/completion/configs.py +21 -0
- ommlds/cli/sessions/completion/inject.py +28 -0
- ommlds/cli/sessions/completion/{completion.py → session.py} +4 -9
- ommlds/cli/sessions/embedding/configs.py +21 -0
- ommlds/cli/sessions/embedding/inject.py +28 -0
- ommlds/cli/sessions/embedding/{embedding.py → session.py} +4 -9
- ommlds/cli/sessions/inject.py +28 -11
- ommlds/cli/state/__init__.py +0 -0
- ommlds/cli/state/inject.py +28 -0
- ommlds/minichain/__init__.py +6 -0
- ommlds/minichain/backends/impls/anthropic/chat.py +4 -61
- ommlds/minichain/backends/impls/anthropic/protocol.py +109 -0
- ommlds/minichain/backends/impls/anthropic/stream.py +34 -17
- ommlds/minichain/backends/impls/google/stream.py +105 -20
- ommlds/minichain/backends/impls/openai/format.py +19 -14
- ommlds/minichain/chat/stream/adapters.py +5 -50
- ommlds/minichain/chat/stream/joining.py +96 -0
- ommlds/minichain/chat/stream/types.py +17 -4
- {ommlds-0.0.0.dev464.dist-info → ommlds-0.0.0.dev466.dist-info}/METADATA +3 -3
- {ommlds-0.0.0.dev464.dist-info → ommlds-0.0.0.dev466.dist-info}/RECORD +33 -25
- /ommlds/cli/{state.py → state/storage.py} +0 -0
- {ommlds-0.0.0.dev464.dist-info → ommlds-0.0.0.dev466.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev464.dist-info → ommlds-0.0.0.dev466.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev464.dist-info → ommlds-0.0.0.dev466.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev464.dist-info → ommlds-0.0.0.dev466.dist-info}/top_level.txt +0 -0
ommlds/.omlish-manifests.json
CHANGED
@@ -18,7 +18,7 @@
         "module": ".minichain.backends.impls.anthropic.chat",
         "attr": null,
         "file": "ommlds/minichain/backends/impls/anthropic/chat.py",
-        "line":
+        "line": 38,
         "value": {
           "!.minichain.registries.manifests.RegistryManifest": {
             "module": "ommlds.minichain.backends.impls.anthropic.chat",
@@ -63,7 +63,7 @@
         "module": ".minichain.backends.impls.anthropic.stream",
         "attr": null,
         "file": "ommlds/minichain/backends/impls/anthropic/stream.py",
-        "line":
+        "line": 36,
         "value": {
           "!.minichain.registries.manifests.RegistryManifest": {
             "module": "ommlds.minichain.backends.impls.anthropic.stream",
@@ -137,7 +137,7 @@
         "module": ".minichain.backends.impls.google.stream",
         "attr": null,
         "file": "ommlds/minichain/backends/impls/google/stream.py",
-        "line":
+        "line": 41,
         "value": {
           "!.minichain.registries.manifests.RegistryManifest": {
             "module": "ommlds.minichain.backends.impls.google.stream",
ommlds/backends/google/protocol/types.py
CHANGED
@@ -148,11 +148,14 @@ class Part(lang.Final):
     video_metadata: VideoMetadata | None = None
 
 
+ContentRole: ta.TypeAlias = ta.Literal['user', 'model']
+
+
 @dc.dataclass(frozen=True, kw_only=True)
 @_set_class_marshal_options
 class Content(lang.Final):
     parts: ta.Sequence[Part] | None = None
-    role:
+    role: ContentRole | None = None
 
 
 ##
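Note: the role field on Content is now constrained by the ContentRole alias added above, rather than accepting an arbitrary string. A minimal standalone sketch of the effect on type checking (only ContentRole comes from the diff; the function and calls are illustrative):

import typing as ta

ContentRole: ta.TypeAlias = ta.Literal['user', 'model']


def check_role(role: ContentRole | None) -> str:
    # A type checker narrows role to exactly 'user', 'model', or None here.
    return role or 'user'


check_role('model')   # accepted
# check_role('assistant')  # rejected by mypy/pyright: not a valid ContentRole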
ommlds/cli/inject.py
CHANGED
@@ -1,28 +1,13 @@
-import os.path
+import typing as ta
 
-from omdev.home.paths import get_home_paths
 from omlish import inject as inj
 from omlish import lang
 
 
 with lang.auto_proxy_import(globals()):
-    from . import state
-    from .
-    from .
-    from .sessions import inject as sessions_inj
-
-
-##
-
-
-def _provide_state_storage() -> 'state.StateStorage':
-    state_dir = os.path.join(get_home_paths().state_dir, 'minichain', 'cli')
-    if not os.path.exists(state_dir):
-        os.makedirs(state_dir, exist_ok=True)
-        os.chmod(state_dir, 0o770)  # noqa
-
-    state_file = os.path.join(state_dir, 'state.json')
-    return state.JsonFileStateStorage(state_file)
+    from .backends import inject as _backends
+    from .sessions import inject as _sessions
+    from .state import inject as _state
 
 
 ##
@@ -30,20 +15,22 @@ def _provide_state_storage() -> 'state.StateStorage':
 
 def bind_main(
     *,
-    session_cfg:
+    session_cfg: ta.Any,
     enable_backend_strings: bool = False,
 ) -> inj.Elements:
-    els: list[inj.Elemental] = [
-
+    els: list[inj.Elemental] = []
+
+    #
+
+    els.extend([
+        _backends.bind_backends(
             enable_backend_strings=enable_backend_strings,
         ),
 
-
-    ]
-
-    #
+        _sessions.bind_sessions(session_cfg),
 
-
+        _state.bind_state(),
+    ])
 
     #
 
ommlds/cli/main.py
CHANGED
@@ -24,9 +24,9 @@ from omlish.subprocesses.sync import subprocesses
 from .. import minichain as mc
 from .inject import bind_main
 from .sessions.base import Session
-from .sessions.chat.
-from .sessions.completion.
-from .sessions.embedding.
+from .sessions.chat.configs import ChatConfig
+from .sessions.completion.configs import CompletionConfig
+from .sessions.embedding.configs import EmbeddingConfig
 
 
 if ta.TYPE_CHECKING:
@@ -125,30 +125,33 @@ async def _a_main(args: ta.Any = None) -> None:
 
     #
 
-    session_cfg:
+    session_cfg: ta.Any
 
     if args.embed:
-        session_cfg =
+        session_cfg = EmbeddingConfig(
             check.not_none(content),  # noqa
             backend=args.backend,
         )
 
     elif args.completion:
-        session_cfg =
+        session_cfg = CompletionConfig(
             check.not_none(content),  # noqa
             backend=args.backend,
        )
 
     else:
-
+        system_content: mc.Content | None = None
+        if (args.new or args.ephemeral) and args.code:
+            from ..minichain.lib.code.prompts import CODE_AGENT_SYSTEM_PROMPT
+            system_content = CODE_AGENT_SYSTEM_PROMPT
 
-        session_cfg =
+        session_cfg = ChatConfig(
             backend=args.backend,
             model_name=args.model_name,
             state='ephemeral' if args.ephemeral else 'new' if args.new else 'continue',
-            initial_system_content=
+            initial_system_content=system_content,
             initial_user_content=content,  # noqa
-            interactive=args.interactive,
+            interactive=bool(args.interactive),
             markdown=bool(args.markdown),
             stream=bool(args.stream),
             enable_tools=(
@@ -171,7 +174,7 @@ async def _a_main(args: ta.Any = None) -> None:
 
     with inj.create_managed_injector(bind_main(
         session_cfg=session_cfg,
-        enable_backend_strings=isinstance(session_cfg,
+        enable_backend_strings=isinstance(session_cfg, ChatConfig),
     )) as injector:
         await injector[Session].run()
 
ommlds/cli/sessions/chat/chat/ai/inject.py
CHANGED
@@ -36,34 +36,31 @@ def bind_ai(
 
     #
 
+    ai_stack = inj.wrapper_binder_helper(_types.AiChatGenerator)
+
     if stream:
-
+        stream_ai_stack = inj.wrapper_binder_helper(_types.StreamAiChatGenerator)
 
-        els.append(
+        els.append(stream_ai_stack.push_bind(to_ctor=_services.ChatChoicesStreamServiceStreamAiChatGenerator, singleton=True))  # noqa
 
         if not silent:
-            els.append(
-
-        if enable_tools:
-            raise NotImplementedError
+            els.append(stream_ai_stack.push_bind(to_ctor=_rendering.RenderingStreamAiChatGenerator, singleton=True))
 
         els.extend([
-            inj.bind(_types.StreamAiChatGenerator, to_key=
-
+            inj.bind(_types.StreamAiChatGenerator, to_key=stream_ai_stack.top),
+            ai_stack.push_bind(to_key=_types.StreamAiChatGenerator),
         ])
 
     else:
-        ai_stack = inj.wrapper_binder_helper(_types.AiChatGenerator)
-
         els.append(ai_stack.push_bind(to_ctor=_services.ChatChoicesServiceAiChatGenerator, singleton=True))
 
         if not silent:
             els.append(ai_stack.push_bind(to_ctor=_rendering.RenderingAiChatGenerator, singleton=True))
 
-
-
+    if enable_tools:
+        els.append(ai_stack.push_bind(to_ctor=_tools.ToolExecutingAiChatGenerator, singleton=True))
 
-
+    els.append(inj.bind(_types.AiChatGenerator, to_key=ai_stack.top))
 
     #
 
ommlds/cli/sessions/chat/chat/ai/services.py
CHANGED
@@ -63,19 +63,17 @@ class ChatChoicesStreamServiceStreamAiChatGenerator(StreamAiChatGenerator):
     ) -> mc.AiChat:
         opts = self._options() if self._options is not None else []
 
-        lst: list[str] = []
-
         async with self._service_provider.provide_backend() as service:
+            joiner = mc.AiChoiceDeltaJoiner()
+
             async with (await service.invoke(mc.ChatChoicesStreamRequest(chat, opts))).v as st_resp:
                 async for o in st_resp:
+                    joiner.add(o.choices)
+
                     choice = check.single(o.choices)
 
                     for delta in choice.deltas:
                         if delta_callback is not None:
                             await delta_callback(delta)
 
-
-                        if c is not None:
-                            lst.append(check.isinstance(c, str))
-
-        return [mc.AiMessage(''.join(lst))]
+        return check.single(joiner.build())
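The joiner this new code relies on is visible here only through its add/build calls. The following toy sketch illustrates the accumulation pattern with a hypothetical stand-in class (not the real mc.AiChoiceDeltaJoiner), concatenating content deltas per choice as the stream is consumed:

# Illustrative only: a hypothetical stand-in for the delta-joining pattern,
# not the real mc.AiChoiceDeltaJoiner implementation.
import dataclasses as dc


@dc.dataclass(frozen=True)
class ContentDelta:
    s: str


class ToyDeltaJoiner:
    def __init__(self) -> None:
        self._parts_by_choice: dict[int, list[str]] = {}

    def add(self, choices: list[list[ContentDelta]]) -> None:
        # Each stream event carries deltas for one or more choices.
        for i, deltas in enumerate(choices):
            self._parts_by_choice.setdefault(i, []).extend(d.s for d in deltas)

    def build(self) -> list[str]:
        # One joined message per choice, in choice order.
        return [''.join(self._parts_by_choice[i]) for i in sorted(self._parts_by_choice)]


joiner = ToyDeltaJoiner()
joiner.add([[ContentDelta('Hel'), ContentDelta('lo')]])
joiner.add([[ContentDelta(' world')]])
assert joiner.build() == ['Hello world']

Moving the accumulation into a joiner object lets the service keep forwarding deltas to the callback while the final message assembly happens in one place.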
ommlds/cli/sessions/chat/inject.py
CHANGED
@@ -1,12 +1,15 @@
+from omlish import dataclasses as dc
 from omlish import inject as inj
 from omlish import lang
 
+from ..base import Session
 from .configs import DEFAULT_CHAT_MODEL_BACKEND
 from .configs import ChatConfig
 
 
 with lang.auto_proxy_import(globals()):
     from . import driver as _driver
+    from . import session as _session
     from .backends import inject as _backends
     from .chat.ai import inject as _chat_ai
     from .chat.state import inject as _chat_state
@@ -69,4 +72,11 @@ def bind_chat(cfg: ChatConfig) -> inj.Elements:
 
     #
 
+    els.extend([
+        inj.bind(_session.ChatSession.Config(**dc.asdict(cfg))),
+        inj.bind(Session, to_ctor=_session.ChatSession, singleton=True),
+    ])
+
+    #
+
     return inj.as_elements(*els)
ommlds/cli/sessions/completion/configs.py
ADDED
@@ -0,0 +1,21 @@
+import dataclasses as dc
+
+from .... import minichain as mc
+
+
+##
+
+
+DEFAULT_COMPLETION_MODEL_BACKEND = 'openai'
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class CompletionConfig:
+    content: 'mc.Content'
+
+    _: dc.KW_ONLY
+
+    backend: str | None = None
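For reference, the _: dc.KW_ONLY sentinel used in these new config dataclasses makes every field declared after it keyword-only. A minimal standalone stdlib sketch (with str standing in for mc.Content, and an illustrative class name):

import dataclasses as dc


@dc.dataclass(frozen=True)
class ExampleConfig:
    content: str  # positional field; stands in for mc.Content

    _: dc.KW_ONLY  # fields below must be passed by keyword

    backend: str | None = None


cfg = ExampleConfig('hello', backend='openai')
# ExampleConfig('hello', 'openai') would raise TypeError: backend is keyword-only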
ommlds/cli/sessions/completion/inject.py
ADDED
@@ -0,0 +1,28 @@
+from omlish import dataclasses as dc
+from omlish import inject as inj
+from omlish import lang
+
+from ..base import Session
+from .configs import CompletionConfig
+
+
+with lang.auto_proxy_import(globals()):
+    from . import session as _session
+
+
+##
+
+
+def bind_completion(cfg: CompletionConfig) -> inj.Elements:
+    els: list[inj.Elemental] = []
+
+    #
+
+    els.extend([
+        inj.bind(_session.CompletionSession.Config(**dc.asdict(cfg))),
+        inj.bind(Session, to_ctor=_session.CompletionSession, singleton=True),
+    ])
+
+    #
+
+    return inj.as_elements(*els)
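The Config(**dc.asdict(cfg)) idiom above copies field-for-field from the shared config dataclass into the session's Config subclass. A stdlib-dataclasses sketch of the same idiom (the diff uses omlish's dataclasses wrapper; the class names here are illustrative):

import dataclasses as dc


@dc.dataclass(frozen=True)
class BaseConfig:
    backend: str | None = None


@dc.dataclass(frozen=True)
class SessionConfig(BaseConfig):
    pass


cfg = BaseConfig(backend='openai')
session_cfg = SessionConfig(**dc.asdict(cfg))  # field-for-field copy into the subclass
assert session_cfg.backend == 'openai'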
ommlds/cli/sessions/completion/{completion.py → session.py}
RENAMED
@@ -5,22 +5,17 @@ from omlish import lang
 
 from .... import minichain as mc
 from ..base import Session
+from .configs import DEFAULT_COMPLETION_MODEL_BACKEND
+from .configs import CompletionConfig
 
 
 ##
 
 
-DEFAULT_COMPLETION_MODEL_BACKEND = 'openai'
-
-
 class CompletionSession(Session['CompletionSession.Config']):
     @dc.dataclass(frozen=True)
-    class Config(Session.Config):
-
-
-        _: dc.KW_ONLY
-
-        backend: str | None = None
+    class Config(Session.Config, CompletionConfig):
+        pass
 
     def __init__(
         self,
ommlds/cli/sessions/embedding/configs.py
ADDED
@@ -0,0 +1,21 @@
+import dataclasses as dc
+
+from .... import minichain as mc
+
+
+##
+
+
+DEFAULT_EMBEDDING_MODEL_BACKEND = 'openai'
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class EmbeddingConfig:
+    content: 'mc.Content'
+
+    _: dc.KW_ONLY
+
+    backend: str | None = None
ommlds/cli/sessions/embedding/inject.py
ADDED
@@ -0,0 +1,28 @@
+from omlish import dataclasses as dc
+from omlish import inject as inj
+from omlish import lang
+
+from ..base import Session
+from .configs import EmbeddingConfig
+
+
+with lang.auto_proxy_import(globals()):
+    from . import session as _session
+
+
+##
+
+
+def bind_embedding(cfg: EmbeddingConfig) -> inj.Elements:
+    els: list[inj.Elemental] = []
+
+    #
+
+    els.extend([
+        inj.bind(_session.EmbeddingSession.Config(**dc.asdict(cfg))),
+        inj.bind(Session, to_ctor=_session.EmbeddingSession, singleton=True),
+    ])
+
+    #
+
+    return inj.as_elements(*els)
ommlds/cli/sessions/embedding/{embedding.py → session.py}
RENAMED
@@ -5,22 +5,17 @@ from omlish.formats import json
 
 from .... import minichain as mc
 from ..base import Session
+from .configs import DEFAULT_EMBEDDING_MODEL_BACKEND
+from .configs import EmbeddingConfig
 
 
 ##
 
 
-DEFAULT_EMBEDDING_MODEL_BACKEND = 'openai'
-
-
 class EmbeddingSession(Session['EmbeddingSession.Config']):
     @dc.dataclass(frozen=True)
-    class Config(Session.Config):
-
-
-        _: dc.KW_ONLY
-
-        backend: str | None = None
+    class Config(Session.Config, EmbeddingConfig):
+        pass
 
     def __init__(
         self,
ommlds/cli/sessions/inject.py
CHANGED
@@ -1,21 +1,38 @@
+import typing as ta
+
 from omlish import inject as inj
+from omlish import lang
+
 
-
-from .chat
+with lang.auto_proxy_import(globals()):
+    from .chat import configs as _chat_cfgs
+    from .chat import inject as _chat_inj
+    from .completion import configs as _completion_cfgs
+    from .completion import inject as _completion_inj
+    from .embedding import configs as _embedding_cfgs
+    from .embedding import inject as _embedding_inj
 
 
 ##
 
 
-def bind_sessions(
-    els: list[inj.Elemental] = [
-
-
-
-
+def bind_sessions(cfg: ta.Any) -> inj.Elements:
+    els: list[inj.Elemental] = []
+
+    #
+
+    if isinstance(cfg, _chat_cfgs.ChatConfig):
+        els.append(_chat_inj.bind_chat(cfg))
+
+    elif isinstance(cfg, _completion_cfgs.CompletionConfig):
+        els.append(_completion_inj.bind_completion(cfg))
+
+    elif isinstance(cfg, _embedding_cfgs.EmbeddingConfig):
+        els.append(_embedding_inj.bind_embedding(cfg))
+
+    else:
+        raise TypeError(cfg)
 
-
-    from .chat.inject import bind_chat
-    els.append(bind_chat(session_cfg))  # noqa
+    #
 
     return inj.as_elements(*els)
ommlds/cli/state/__init__.py
ADDED
File without changes
ommlds/cli/state/inject.py
ADDED
@@ -0,0 +1,28 @@
+import os.path
+
+from omdev.home.paths import get_home_paths
+from omlish import inject as inj
+from omlish import lang
+
+
+with lang.auto_proxy_import(globals()):
+    from . import storage as _storage
+
+
+##
+
+
+def _provide_state_storage() -> '_storage.StateStorage':
+    state_dir = os.path.join(get_home_paths().state_dir, 'minichain', 'cli')
+    if not os.path.exists(state_dir):
+        os.makedirs(state_dir, exist_ok=True)
+        os.chmod(state_dir, 0o770)  # noqa
+
+    state_file = os.path.join(state_dir, 'state.json')
+    return _storage.JsonFileStateStorage(state_file)
+
+
+def bind_state() -> inj.Elements:
+    return inj.as_elements(
+        inj.bind(_provide_state_storage, singleton=True),
+    )
ommlds/minichain/__init__.py
CHANGED
@@ -92,6 +92,10 @@ with _lang.auto_proxy_init(
         ChatChoicesStreamServiceChatChoicesService,
     )
 
+    from .chat.stream.joining import (  # noqa
+        AiChoiceDeltaJoiner,
+    )
+
     from .chat.stream.services import (  # noqa
         ChatChoicesStreamRequest,
         ChatChoicesStreamResponse,
@@ -109,7 +113,9 @@ with _lang.auto_proxy_init(
 
         AiChoiceDelta,
         ContentAiChoiceDelta,
+        AnyToolUseAiChoiceDelta,
         ToolUseAiChoiceDelta,
+        PartialToolUseAiChoiceDelta,
 
         AiChoiceDeltas,
         AiChoicesDeltas,
ommlds/minichain/backends/impls/anthropic/chat.py
CHANGED
@@ -22,15 +22,14 @@ from ....chat.messages import AnyAiMessage
 from ....chat.messages import Message
 from ....chat.messages import SystemMessage
 from ....chat.messages import ToolUseMessage
-from ....chat.messages import ToolUseResultMessage
 from ....chat.messages import UserMessage
 from ....chat.tools.types import Tool
-from ....content.prepare import prepare_content_str
 from ....models.configs import ModelName
 from ....standard import ApiKey
-from ....tools.jsonschema import build_tool_spec_params_json_schema
 from ....tools.types import ToolUse
 from .names import MODEL_NAMES
+from .protocol import build_protocol_chat_messages
+from .protocol import build_protocol_tool
 
 
 ##
@@ -44,13 +43,6 @@ from .names import MODEL_NAMES
 class AnthropicChatChoicesService:
     DEFAULT_MODEL_NAME: ta.ClassVar[ModelName] = ModelName(check.not_none(MODEL_NAMES.default))
 
-    ROLES_MAP: ta.ClassVar[ta.Mapping[type[Message], str]] = {
-        SystemMessage: 'system',
-        UserMessage: 'user',
-        AiMessage: 'assistant',
-        ToolUseMessage: 'assistant',
-    }
-
     def __init__(
         self,
         *configs: ApiKey | ModelName,
@@ -78,62 +70,13 @@ class AnthropicChatChoicesService:
         *,
         max_tokens: int = 4096,  # FIXME: ChatOption
     ) -> ChatChoicesResponse:
-        messages
-        system: list[pt.Content] | None = None
-        for i, m in enumerate(request.v):
-            if isinstance(m, SystemMessage):
-                if i != 0 or system is not None:
-                    raise Exception('Only supports one system message and must be first')
-                system = [pt.Text(check.not_none(self._get_msg_content(m)))]
-
-            elif isinstance(m, ToolUseResultMessage):
-                messages.append(pt.Message(
-                    role='user',
-                    content=[pt.ToolResult(
-                        tool_use_id=check.not_none(m.tur.id),
-                        content=json.dumps_compact(msh.marshal(m.tur.c)) if not isinstance(m.tur.c, str) else m.tur.c,
-                    )],
-                ))
-
-            elif isinstance(m, AiMessage):
-                # messages.append(pt.Message(
-                #     role=self.ROLES_MAP[type(m)],  # noqa
-                #     content=[pt.Text(check.isinstance(self._get_msg_content(m), str))],
-                # ))
-                messages.append(pt.Message(
-                    role='assistant',
-                    content=[
-                        *([pt.Text(check.isinstance(m.c, str))] if m.c is not None else []),
-                    ],
-                ))
-
-            elif isinstance(m, ToolUseMessage):
-                messages.append(pt.Message(
-                    role='assistant',
-                    content=[
-                        pt.ToolUse(
-                            id=check.not_none(m.tu.id),
-                            name=check.not_none(m.tu.name),
-                            input=m.tu.args,
-                        ),
-                    ],
-                ))
-
-            else:
-                messages.append(pt.Message(
-                    role=self.ROLES_MAP[type(m)],  # type: ignore[arg-type]
-                    content=[pt.Text(check.isinstance(self._get_msg_content(m), str))],
-                ))
+        messages, system = build_protocol_chat_messages(request.v)
 
         tools: list[pt.ToolSpec] = []
         with tv.TypedValues(*request.options).consume() as oc:
             t: Tool
             for t in oc.pop(Tool, []):
-                tools.append(
-                    name=check.not_none(t.spec.name),
-                    description=prepare_content_str(t.spec.desc),
-                    input_schema=build_tool_spec_params_json_schema(t.spec),
-                ))
+                tools.append(build_protocol_tool(t))
 
         a_req = pt.MessagesRequest(
             model=MODEL_NAMES.resolve(self._model_name.v),
|