ommlds 0.0.0.dev436__py3-none-any.whl → 0.0.0.dev480__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- ommlds/.omlish-manifests.json +332 -35
- ommlds/__about__.py +15 -9
- ommlds/_hacks/__init__.py +4 -0
- ommlds/_hacks/funcs.py +110 -0
- ommlds/_hacks/names.py +158 -0
- ommlds/_hacks/params.py +73 -0
- ommlds/_hacks/patches.py +0 -3
- ommlds/backends/anthropic/protocol/_marshal.py +2 -2
- ommlds/backends/anthropic/protocol/sse/_marshal.py +1 -1
- ommlds/backends/anthropic/protocol/sse/assemble.py +23 -7
- ommlds/backends/anthropic/protocol/sse/events.py +13 -0
- ommlds/backends/anthropic/protocol/types.py +30 -9
- ommlds/backends/google/protocol/__init__.py +3 -0
- ommlds/backends/google/protocol/_marshal.py +16 -0
- ommlds/backends/google/protocol/types.py +626 -0
- ommlds/backends/groq/_marshal.py +23 -0
- ommlds/backends/groq/protocol.py +249 -0
- ommlds/backends/mlx/generation.py +1 -1
- ommlds/backends/mlx/loading.py +58 -1
- ommlds/backends/ollama/__init__.py +0 -0
- ommlds/backends/ollama/protocol.py +170 -0
- ommlds/backends/openai/protocol/__init__.py +9 -28
- ommlds/backends/openai/protocol/_common.py +18 -0
- ommlds/backends/openai/protocol/_marshal.py +27 -0
- ommlds/backends/openai/protocol/chatcompletion/chunk.py +58 -31
- ommlds/backends/openai/protocol/chatcompletion/contentpart.py +49 -44
- ommlds/backends/openai/protocol/chatcompletion/message.py +55 -43
- ommlds/backends/openai/protocol/chatcompletion/request.py +114 -66
- ommlds/backends/openai/protocol/chatcompletion/response.py +71 -45
- ommlds/backends/openai/protocol/chatcompletion/responseformat.py +27 -20
- ommlds/backends/openai/protocol/chatcompletion/tokenlogprob.py +16 -7
- ommlds/backends/openai/protocol/completionusage.py +24 -15
- ommlds/backends/tavily/__init__.py +0 -0
- ommlds/backends/tavily/protocol.py +301 -0
- ommlds/backends/tinygrad/models/llama3/__init__.py +22 -14
- ommlds/backends/transformers/__init__.py +0 -0
- ommlds/backends/transformers/filecache.py +109 -0
- ommlds/backends/transformers/streamers.py +73 -0
- ommlds/cli/asyncs.py +30 -0
- ommlds/cli/backends/catalog.py +93 -0
- ommlds/cli/backends/configs.py +9 -0
- ommlds/cli/backends/inject.py +31 -36
- ommlds/cli/backends/injection.py +16 -0
- ommlds/cli/backends/types.py +46 -0
- ommlds/cli/content/__init__.py +0 -0
- ommlds/cli/content/messages.py +34 -0
- ommlds/cli/content/strings.py +42 -0
- ommlds/cli/inject.py +15 -32
- ommlds/cli/inputs/__init__.py +0 -0
- ommlds/cli/inputs/asyncs.py +32 -0
- ommlds/cli/inputs/sync.py +75 -0
- ommlds/cli/main.py +270 -110
- ommlds/cli/rendering/__init__.py +0 -0
- ommlds/cli/rendering/configs.py +9 -0
- ommlds/cli/rendering/inject.py +31 -0
- ommlds/cli/rendering/markdown.py +52 -0
- ommlds/cli/rendering/raw.py +73 -0
- ommlds/cli/rendering/types.py +21 -0
- ommlds/cli/secrets.py +21 -0
- ommlds/cli/sessions/base.py +1 -1
- ommlds/cli/sessions/chat/chat/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/ai/inject.py +74 -0
- ommlds/cli/sessions/chat/chat/ai/injection.py +14 -0
- ommlds/cli/sessions/chat/chat/ai/rendering.py +70 -0
- ommlds/cli/sessions/chat/chat/ai/services.py +79 -0
- ommlds/cli/sessions/chat/chat/ai/tools.py +44 -0
- ommlds/cli/sessions/chat/chat/ai/types.py +28 -0
- ommlds/cli/sessions/chat/chat/state/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/state/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/state/inject.py +36 -0
- ommlds/cli/sessions/chat/chat/state/inmemory.py +33 -0
- ommlds/cli/sessions/chat/chat/state/storage.py +52 -0
- ommlds/cli/sessions/chat/chat/state/types.py +38 -0
- ommlds/cli/sessions/chat/chat/user/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/user/configs.py +17 -0
- ommlds/cli/sessions/chat/chat/user/inject.py +62 -0
- ommlds/cli/sessions/chat/chat/user/interactive.py +31 -0
- ommlds/cli/sessions/chat/chat/user/oneshot.py +25 -0
- ommlds/cli/sessions/chat/chat/user/types.py +15 -0
- ommlds/cli/sessions/chat/configs.py +27 -0
- ommlds/cli/sessions/chat/driver.py +43 -0
- ommlds/cli/sessions/chat/inject.py +33 -65
- ommlds/cli/sessions/chat/phases/__init__.py +0 -0
- ommlds/cli/sessions/chat/phases/inject.py +27 -0
- ommlds/cli/sessions/chat/phases/injection.py +14 -0
- ommlds/cli/sessions/chat/phases/manager.py +29 -0
- ommlds/cli/sessions/chat/phases/types.py +29 -0
- ommlds/cli/sessions/chat/session.py +27 -0
- ommlds/cli/sessions/chat/tools/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/configs.py +22 -0
- ommlds/cli/sessions/chat/tools/confirmation.py +46 -0
- ommlds/cli/sessions/chat/tools/execution.py +66 -0
- ommlds/cli/sessions/chat/tools/fs/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/fs/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/fs/inject.py +35 -0
- ommlds/cli/sessions/chat/tools/inject.py +88 -0
- ommlds/cli/sessions/chat/tools/injection.py +44 -0
- ommlds/cli/sessions/chat/tools/rendering.py +58 -0
- ommlds/cli/sessions/chat/tools/todo/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/todo/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/todo/inject.py +31 -0
- ommlds/cli/sessions/chat/tools/weather/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/weather/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/weather/inject.py +22 -0
- ommlds/cli/{tools/weather.py → sessions/chat/tools/weather/tools.py} +1 -1
- ommlds/cli/sessions/completion/configs.py +21 -0
- ommlds/cli/sessions/completion/inject.py +42 -0
- ommlds/cli/sessions/completion/session.py +35 -0
- ommlds/cli/sessions/embedding/configs.py +21 -0
- ommlds/cli/sessions/embedding/inject.py +42 -0
- ommlds/cli/sessions/embedding/session.py +33 -0
- ommlds/cli/sessions/inject.py +28 -11
- ommlds/cli/state/__init__.py +0 -0
- ommlds/cli/state/inject.py +28 -0
- ommlds/cli/{state.py → state/storage.py} +41 -24
- ommlds/minichain/__init__.py +84 -24
- ommlds/minichain/_marshal.py +49 -9
- ommlds/minichain/_typedvalues.py +2 -4
- ommlds/minichain/backends/catalogs/base.py +20 -1
- ommlds/minichain/backends/catalogs/simple.py +2 -2
- ommlds/minichain/backends/catalogs/strings.py +10 -8
- ommlds/minichain/backends/impls/anthropic/chat.py +65 -27
- ommlds/minichain/backends/impls/anthropic/names.py +10 -8
- ommlds/minichain/backends/impls/anthropic/protocol.py +109 -0
- ommlds/minichain/backends/impls/anthropic/stream.py +111 -43
- ommlds/minichain/backends/impls/duckduckgo/search.py +1 -1
- ommlds/minichain/backends/impls/dummy/__init__.py +0 -0
- ommlds/minichain/backends/impls/dummy/chat.py +69 -0
- ommlds/minichain/backends/impls/google/chat.py +114 -22
- ommlds/minichain/backends/impls/google/search.py +7 -2
- ommlds/minichain/backends/impls/google/stream.py +219 -0
- ommlds/minichain/backends/impls/google/tools.py +149 -0
- ommlds/minichain/backends/impls/groq/__init__.py +0 -0
- ommlds/minichain/backends/impls/groq/chat.py +75 -0
- ommlds/minichain/backends/impls/groq/names.py +48 -0
- ommlds/minichain/backends/impls/groq/protocol.py +143 -0
- ommlds/minichain/backends/impls/groq/stream.py +125 -0
- ommlds/minichain/backends/impls/llamacpp/chat.py +33 -18
- ommlds/minichain/backends/impls/llamacpp/completion.py +1 -1
- ommlds/minichain/backends/impls/llamacpp/format.py +4 -2
- ommlds/minichain/backends/impls/llamacpp/stream.py +37 -20
- ommlds/minichain/backends/impls/mistral.py +20 -5
- ommlds/minichain/backends/impls/mlx/chat.py +96 -22
- ommlds/minichain/backends/impls/ollama/__init__.py +0 -0
- ommlds/minichain/backends/impls/ollama/chat.py +199 -0
- ommlds/minichain/backends/impls/openai/chat.py +18 -8
- ommlds/minichain/backends/impls/openai/completion.py +10 -3
- ommlds/minichain/backends/impls/openai/embedding.py +10 -3
- ommlds/minichain/backends/impls/openai/format.py +131 -106
- ommlds/minichain/backends/impls/openai/names.py +31 -5
- ommlds/minichain/backends/impls/openai/stream.py +43 -25
- ommlds/minichain/backends/impls/tavily.py +66 -0
- ommlds/minichain/backends/impls/tinygrad/chat.py +23 -16
- ommlds/minichain/backends/impls/transformers/sentence.py +1 -1
- ommlds/minichain/backends/impls/transformers/tokens.py +1 -1
- ommlds/minichain/backends/impls/transformers/transformers.py +155 -34
- ommlds/minichain/backends/strings/parsing.py +1 -1
- ommlds/minichain/backends/strings/resolving.py +4 -1
- ommlds/minichain/chat/_marshal.py +16 -9
- ommlds/minichain/chat/choices/adapters.py +4 -4
- ommlds/minichain/chat/choices/services.py +1 -1
- ommlds/minichain/chat/choices/stream/__init__.py +0 -0
- ommlds/minichain/chat/choices/stream/adapters.py +35 -0
- ommlds/minichain/chat/choices/stream/joining.py +31 -0
- ommlds/minichain/chat/choices/stream/services.py +45 -0
- ommlds/minichain/chat/choices/stream/types.py +43 -0
- ommlds/minichain/chat/choices/types.py +2 -2
- ommlds/minichain/chat/history.py +3 -3
- ommlds/minichain/chat/messages.py +55 -19
- ommlds/minichain/chat/services.py +3 -3
- ommlds/minichain/chat/stream/_marshal.py +16 -0
- ommlds/minichain/chat/stream/joining.py +85 -0
- ommlds/minichain/chat/stream/services.py +15 -21
- ommlds/minichain/chat/stream/types.py +32 -19
- ommlds/minichain/chat/tools/execution.py +8 -7
- ommlds/minichain/chat/tools/ids.py +9 -15
- ommlds/minichain/chat/tools/parsing.py +17 -26
- ommlds/minichain/chat/transforms/base.py +29 -38
- ommlds/minichain/chat/transforms/metadata.py +30 -4
- ommlds/minichain/chat/transforms/services.py +9 -11
- ommlds/minichain/content/_marshal.py +44 -20
- ommlds/minichain/content/json.py +13 -0
- ommlds/minichain/content/materialize.py +14 -21
- ommlds/minichain/content/prepare.py +4 -0
- ommlds/minichain/content/transforms/interleave.py +1 -1
- ommlds/minichain/content/transforms/squeeze.py +1 -1
- ommlds/minichain/content/transforms/stringify.py +1 -1
- ommlds/minichain/json.py +20 -0
- ommlds/minichain/lib/code/__init__.py +0 -0
- ommlds/minichain/lib/code/prompts.py +6 -0
- ommlds/minichain/lib/fs/binfiles.py +108 -0
- ommlds/minichain/lib/fs/context.py +126 -0
- ommlds/minichain/lib/fs/errors.py +101 -0
- ommlds/minichain/lib/fs/suggestions.py +36 -0
- ommlds/minichain/lib/fs/tools/__init__.py +0 -0
- ommlds/minichain/lib/fs/tools/edit.py +104 -0
- ommlds/minichain/lib/fs/tools/ls.py +38 -0
- ommlds/minichain/lib/fs/tools/read.py +115 -0
- ommlds/minichain/lib/fs/tools/recursivels/__init__.py +0 -0
- ommlds/minichain/lib/fs/tools/recursivels/execution.py +40 -0
- ommlds/minichain/lib/todo/__init__.py +0 -0
- ommlds/minichain/lib/todo/context.py +54 -0
- ommlds/minichain/lib/todo/tools/__init__.py +0 -0
- ommlds/minichain/lib/todo/tools/read.py +44 -0
- ommlds/minichain/lib/todo/tools/write.py +335 -0
- ommlds/minichain/lib/todo/types.py +60 -0
- ommlds/minichain/llms/_marshal.py +25 -17
- ommlds/minichain/llms/types.py +4 -0
- ommlds/minichain/registries/globals.py +18 -4
- ommlds/minichain/resources.py +66 -43
- ommlds/minichain/search.py +1 -1
- ommlds/minichain/services/_marshal.py +46 -39
- ommlds/minichain/services/facades.py +3 -3
- ommlds/minichain/services/services.py +1 -1
- ommlds/minichain/standard.py +8 -0
- ommlds/minichain/stream/services.py +152 -38
- ommlds/minichain/stream/wrap.py +22 -24
- ommlds/minichain/tools/_marshal.py +1 -1
- ommlds/minichain/tools/execution/catalog.py +2 -1
- ommlds/minichain/tools/execution/context.py +34 -14
- ommlds/minichain/tools/execution/errors.py +15 -0
- ommlds/minichain/tools/execution/executors.py +8 -3
- ommlds/minichain/tools/execution/reflect.py +40 -5
- ommlds/minichain/tools/fns.py +46 -9
- ommlds/minichain/tools/jsonschema.py +14 -5
- ommlds/minichain/tools/reflect.py +54 -18
- ommlds/minichain/tools/types.py +33 -1
- ommlds/minichain/utils.py +27 -0
- ommlds/minichain/vectors/_marshal.py +11 -10
- ommlds/nanochat/LICENSE +21 -0
- ommlds/nanochat/__init__.py +0 -0
- ommlds/nanochat/rustbpe/LICENSE +21 -0
- ommlds/nanochat/tokenizers.py +406 -0
- ommlds/server/server.py +3 -3
- ommlds/specs/__init__.py +0 -0
- ommlds/specs/mcp/__init__.py +0 -0
- ommlds/specs/mcp/_marshal.py +23 -0
- ommlds/specs/mcp/protocol.py +266 -0
- ommlds/tools/git.py +27 -10
- ommlds/tools/ocr.py +8 -9
- ommlds/wiki/analyze.py +2 -2
- ommlds/wiki/text/mfh.py +1 -5
- ommlds/wiki/text/wtp.py +1 -3
- ommlds/wiki/utils/xml.py +5 -5
- {ommlds-0.0.0.dev436.dist-info → ommlds-0.0.0.dev480.dist-info}/METADATA +24 -21
- ommlds-0.0.0.dev480.dist-info/RECORD +427 -0
- ommlds/cli/backends/standard.py +0 -20
- ommlds/cli/sessions/chat/base.py +0 -42
- ommlds/cli/sessions/chat/interactive.py +0 -73
- ommlds/cli/sessions/chat/printing.py +0 -96
- ommlds/cli/sessions/chat/prompt.py +0 -143
- ommlds/cli/sessions/chat/state.py +0 -109
- ommlds/cli/sessions/chat/tools.py +0 -91
- ommlds/cli/sessions/completion/completion.py +0 -44
- ommlds/cli/sessions/embedding/embedding.py +0 -42
- ommlds/cli/tools/config.py +0 -13
- ommlds/cli/tools/inject.py +0 -64
- ommlds/minichain/chat/stream/adapters.py +0 -69
- ommlds/minichain/lib/fs/ls/execution.py +0 -32
- ommlds-0.0.0.dev436.dist-info/RECORD +0 -303
- /ommlds/{cli/tools → backends/google}/__init__.py +0 -0
- /ommlds/{minichain/lib/fs/ls → backends/groq}/__init__.py +0 -0
- /ommlds/{huggingface.py → backends/huggingface.py} +0 -0
- /ommlds/minichain/lib/fs/{ls → tools/recursivels}/rendering.py +0 -0
- /ommlds/minichain/lib/fs/{ls → tools/recursivels}/running.py +0 -0
- {ommlds-0.0.0.dev436.dist-info → ommlds-0.0.0.dev480.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev436.dist-info → ommlds-0.0.0.dev480.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev436.dist-info → ommlds-0.0.0.dev480.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev436.dist-info → ommlds-0.0.0.dev480.dist-info}/top_level.txt +0 -0
ommlds/cli/rendering/types.py
ADDED
@@ -0,0 +1,21 @@
+import abc
+import typing as ta
+
+from omlish import lang
+
+from ... import minichain as mc
+
+
+##
+
+
+class ContentRendering(lang.Abstract):
+    @abc.abstractmethod
+    def render_content(self, content: 'mc.Content') -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+
+class StreamContentRendering(lang.Abstract):
+    @abc.abstractmethod
+    def create_context(self) -> ta.AsyncContextManager[ContentRendering]:
+        raise NotImplementedError
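
The pair of abstracts above splits rendering into a per-message call (`render_content`) and a per-response streaming context (`create_context`), which the concrete renderers added under `ommlds/cli/rendering/` implement. A minimal console sketch of that shape, using plain stand-ins rather than the real `lang.Abstract` bases and `mc.Content` type (those substitutions are assumptions for illustration):

```python
# Illustrative only: a console renderer shaped like ContentRendering /
# StreamContentRendering above, with plain classes standing in for the
# omlish/minichain types.
import asyncio
import contextlib
import typing as ta


class PrintingContentRendering:
    async def render_content(self, content: ta.Any) -> None:
        # The real renderers receive mc.Content; here anything printable works.
        print(content, end='', flush=True)


class PrintingStreamContentRendering:
    @contextlib.asynccontextmanager
    async def create_context(self) -> ta.AsyncIterator[PrintingContentRendering]:
        # One context per streamed AI response: deltas are rendered as they
        # arrive, and a trailing newline is emitted when the stream closes.
        try:
            yield PrintingContentRendering()
        finally:
            print(flush=True)


async def _demo() -> None:
    renderer = PrintingStreamContentRendering()
    async with renderer.create_context() as ctx:
        for chunk in ('hel', 'lo'):
            await ctx.render_content(chunk)


asyncio.run(_demo())
```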
ommlds/cli/secrets.py
ADDED
@@ -0,0 +1,21 @@
+import os
+
+from omdev.home.secrets import load_secrets
+
+
+##
+
+
+def install_secrets() -> None:
+    # FIXME: lol garbage
+    for key in [
+        'ANTHROPIC_API_KEY',
+        'GEMINI_API_KEY',
+        'GROQ_API_KEY',
+        'HUGGINGFACE_TOKEN',
+        'MISTRAL_API_KEY',
+        'OPENAI_API_KEY',
+        'TAVILY_API_KEY',
+    ]:
+        if (sec := load_secrets().try_get(key.lower())) is not None:
+            os.environ[key] = sec.reveal()
ommlds/cli/sessions/base.py
CHANGED
File without changes
ommlds/cli/sessions/chat/chat/ai/inject.py
ADDED
@@ -0,0 +1,74 @@
+from omlish import inject as inj
+from omlish import lang
+
+from ...... import minichain as mc
+from .configs import AiConfig
+from .injection import chat_options_providers
+
+
+with lang.auto_proxy_import(globals()):
+    from . import rendering as _rendering
+    from . import services as _services
+    from . import tools as _tools
+    from . import types as _types
+
+
+##
+
+
+def bind_ai(cfg: AiConfig = AiConfig()) -> inj.Elements:
+    els: list[inj.Elemental] = []
+
+    #
+
+    els.append(chat_options_providers().bind_items_provider(singleton=True))
+
+    def _provide_chat_choices_options_provider(
+            ps: _services.ChatChoicesServiceOptionsProviders,
+    ) -> _services.ChatChoicesServiceOptionsProvider:
+        return _services.ChatChoicesServiceOptionsProvider(lambda: [o for p in ps for o in p()])
+
+    els.append(inj.bind(_provide_chat_choices_options_provider, singleton=True))
+
+    #
+
+    ai_stack = inj.wrapper_binder_helper(_types.AiChatGenerator)
+
+    if cfg.stream:
+        stream_ai_stack = inj.wrapper_binder_helper(_types.StreamAiChatGenerator)
+
+        els.append(stream_ai_stack.push_bind(to_ctor=_services.ChatChoicesStreamServiceStreamAiChatGenerator, singleton=True))  # noqa
+
+        if not cfg.silent:
+            els.append(stream_ai_stack.push_bind(to_ctor=_rendering.RenderingStreamAiChatGenerator, singleton=True))
+
+        els.extend([
+            inj.bind(_types.StreamAiChatGenerator, to_key=stream_ai_stack.top),
+            ai_stack.push_bind(to_key=_types.StreamAiChatGenerator),
+        ])
+
+    else:
+        els.append(ai_stack.push_bind(to_ctor=_services.ChatChoicesServiceAiChatGenerator, singleton=True))
+
+        if not cfg.silent:
+            els.append(ai_stack.push_bind(to_ctor=_rendering.RenderingAiChatGenerator, singleton=True))
+
+    if cfg.enable_tools:
+        els.append(ai_stack.push_bind(to_ctor=_tools.ToolExecutingAiChatGenerator, singleton=True))
+
+    els.append(inj.bind(_types.AiChatGenerator, to_key=ai_stack.top))
+
+    #
+
+    if cfg.enable_tools:
+        def _provide_tools_chat_choices_options_provider(tc: mc.ToolCatalog) -> _services.ChatChoicesServiceOptionsProvider:  # noqa
+            return _services.ChatChoicesServiceOptionsProvider(lambda: [
+                mc.Tool(tce.spec)
+                for tce in tc.by_name.values()
+            ])
+
+        els.append(chat_options_providers().bind_item(to_fn=_provide_tools_chat_choices_options_provider, singleton=True))  # noqa
+
+    #
+
+    return inj.as_elements(*els)
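
`bind_ai()` assembles the AI side of a chat turn as a stack of wrappers around a service-backed generator, pushing rendering and tool execution only when the config asks for them and binding `AiChatGenerator` to whatever ends up on top. Hand-wired, and with hypothetical stand-in classes instead of the injector and the real minichain types, the composition looks roughly like this:

```python
# Illustrative stand-ins for the wrapper stack bind_ai() builds; the real
# classes are ChatChoicesServiceAiChatGenerator, RenderingAiChatGenerator and
# ToolExecutingAiChatGenerator from the sibling modules in this diff.
import asyncio


class ServiceGenerator:
    async def get_next_ai_messages(self, chat: list[str]) -> list[str]:
        return [f'(ai reply to: {chat[-1]})']


class RenderingGenerator:
    def __init__(self, wrapped: ServiceGenerator) -> None:
        self._wrapped = wrapped

    async def get_next_ai_messages(self, chat: list[str]) -> list[str]:
        out = await self._wrapped.get_next_ai_messages(chat)
        for msg in out:
            print(msg)  # the real wrapper hands content to a ContentRendering
        return out


async def _demo() -> None:
    # Roughly: push the service generator, push rendering when not silent,
    # then bind AiChatGenerator to the top of the stack.
    gen = RenderingGenerator(ServiceGenerator())
    await gen.get_next_ai_messages(['hello'])


asyncio.run(_demo())
```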
ommlds/cli/sessions/chat/chat/ai/injection.py
ADDED
@@ -0,0 +1,14 @@
+from omlish import inject as inj
+from omlish import lang
+
+
+with lang.auto_proxy_import(globals()):
+    from . import services as _services
+
+
+##
+
+
+@lang.cached_function
+def chat_options_providers() -> 'inj.ItemsBinderHelper[_services.ChatChoicesServiceOptionsProvider]':
+    return inj.items_binder_helper[_services.ChatChoicesServiceOptionsProvider](_services.ChatChoicesServiceOptionsProviders)  # noqa
ommlds/cli/sessions/chat/chat/ai/rendering.py
ADDED
@@ -0,0 +1,70 @@
+import typing as ta
+
+from ...... import minichain as mc
+from .....content.messages import MessageContentExtractor
+from .....content.messages import MessageContentExtractorImpl
+from .....rendering.types import ContentRendering
+from .....rendering.types import StreamContentRendering
+from .types import AiChatGenerator
+from .types import StreamAiChatGenerator
+
+
+##
+
+
+class RenderingAiChatGenerator(AiChatGenerator):
+    def __init__(
+            self,
+            *,
+            wrapped: AiChatGenerator,
+            extractor: MessageContentExtractor | None = None,
+            renderer: ContentRendering,
+    ) -> None:
+        super().__init__()
+
+        self._wrapped = wrapped
+        if extractor is None:
+            extractor = MessageContentExtractorImpl()
+        self._extractor = extractor
+        self._renderer = renderer
+
+    async def get_next_ai_messages(self, chat: 'mc.Chat') -> 'mc.Chat':
+        out = await self._wrapped.get_next_ai_messages(chat)
+
+        for msg in out:
+            if (c := self._extractor.extract_message_content(msg)) is not None:
+                await self._renderer.render_content(c)
+
+        return out
+
+
+class RenderingStreamAiChatGenerator(StreamAiChatGenerator):
+    def __init__(
+            self,
+            *,
+            wrapped: StreamAiChatGenerator,
+            extractor: MessageContentExtractor | None = None,
+            renderer: StreamContentRendering,
+    ) -> None:
+        super().__init__()
+
+        self._wrapped = wrapped
+        if extractor is None:
+            extractor = MessageContentExtractorImpl()
+        self._extractor = extractor
+        self._renderer = renderer
+
+    async def get_next_ai_messages_streamed(
+            self,
+            chat: 'mc.Chat',
+            delta_callback: ta.Callable[['mc.AiDelta'], ta.Awaitable[None]] | None = None,
+    ) -> mc.Chat:
+        async with self._renderer.create_context() as renderer:
+            async def inner(delta: mc.AiDelta) -> None:
+                if isinstance(delta, mc.ContentAiDelta):
+                    await renderer.render_content(delta.c)
+
+                if delta_callback is not None:
+                    await delta_callback(delta)
+
+            return await self._wrapped.get_next_ai_messages_streamed(chat, delta_callback=inner)
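
The streaming wrapper works by callback chaining: it opens one rendering context per response and substitutes its own `inner` callback, which renders content deltas and then forwards every delta to the caller's callback. A self-contained sketch of that pattern, with plain strings standing in for `mc.AiDelta` (an assumption for illustration):

```python
# Minimal sketch of the delta-callback chaining used above; strings stand in
# for mc.AiDelta and a plain coroutine for the wrapped generator.
import asyncio
import typing as ta

DeltaCallback = ta.Callable[[str], ta.Awaitable[None]]


async def stream_reply(deltas: list[str], delta_callback: DeltaCallback | None = None) -> str:
    # Stands in for the wrapped generator's get_next_ai_messages_streamed().
    for d in deltas:
        if delta_callback is not None:
            await delta_callback(d)
    return ''.join(deltas)


async def rendering_stream_reply(deltas: list[str], delta_callback: DeltaCallback | None = None) -> str:
    async def inner(d: str) -> None:
        print(d, end='', flush=True)   # render the delta
        if delta_callback is not None:
            await delta_callback(d)    # then forward it to the caller

    out = await stream_reply(deltas, delta_callback=inner)
    print(flush=True)
    return out


asyncio.run(rendering_stream_reply(['hel', 'lo']))
```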
ommlds/cli/sessions/chat/chat/ai/services.py
ADDED
@@ -0,0 +1,79 @@
+import typing as ta
+
+from omlish import check
+from omlish import lang
+
+from ...... import minichain as mc
+from .....backends.types import ChatChoicesServiceBackendProvider
+from .....backends.types import ChatChoicesStreamServiceBackendProvider
+from .types import AiChatGenerator
+from .types import StreamAiChatGenerator
+
+
+##
+
+
+class ChatChoicesServiceOptionsProvider(lang.Func0[ta.Sequence['mc.ChatChoicesOptions']]):
+    pass
+
+
+ChatChoicesServiceOptionsProviders = ta.NewType('ChatChoicesServiceOptionsProviders', ta.Sequence[ChatChoicesServiceOptionsProvider])  # noqa
+
+
+##
+
+
+class ChatChoicesServiceAiChatGenerator(AiChatGenerator):
+    def __init__(
+            self,
+            service_provider: ChatChoicesServiceBackendProvider,
+            *,
+            options: ChatChoicesServiceOptionsProvider | None = None,
+    ) -> None:
+        super().__init__()
+
+        self._service_provider = service_provider
+        self._options = options
+
+    async def get_next_ai_messages(self, chat: 'mc.Chat') -> 'mc.Chat':
+        opts = self._options() if self._options is not None else []
+
+        async with self._service_provider.provide_backend() as service:
+            resp = await service.invoke(mc.ChatChoicesRequest(chat, opts))
+
+        return check.single(resp.v).ms
+
+
+class ChatChoicesStreamServiceStreamAiChatGenerator(StreamAiChatGenerator):
+    def __init__(
+            self,
+            service_provider: ChatChoicesStreamServiceBackendProvider,
+            *,
+            options: ChatChoicesServiceOptionsProvider | None = None,
+    ) -> None:
+        super().__init__()
+
+        self._service_provider = service_provider
+        self._options = options
+
+    async def get_next_ai_messages_streamed(
+            self,
+            chat: 'mc.Chat',
+            delta_callback: ta.Callable[['mc.AiDelta'], ta.Awaitable[None]] | None = None,
+    ) -> mc.AiChat:
+        opts = self._options() if self._options is not None else []
+
+        async with self._service_provider.provide_backend() as service:
+            joiner = mc.AiChoicesDeltaJoiner()
+
+            async with (await service.invoke(mc.ChatChoicesStreamRequest(chat, opts))).v as st_resp:
+                async for o in st_resp:
+                    joiner.add(o.choices)
+
+                    choice = check.single(o.choices)
+
+                    for delta in choice.deltas:
+                        if delta_callback is not None:
+                            await delta_callback(delta)
+
+        return check.single(joiner.build())
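
The streaming service generator both accumulates and forwards: each batch of choice deltas is fed to a joiner that builds the final AI messages, while the individual deltas are passed to the optional callback as they arrive. A sketch of that accumulate-while-forwarding loop, with a string concatenator standing in for `mc.AiChoicesDeltaJoiner` (an assumption for illustration):

```python
# Sketch of the accumulate-while-forwarding loop above; a list of string
# parts stands in for the delta joiner and a canned async generator for the
# backend stream.
import asyncio
import typing as ta


async def fake_stream() -> ta.AsyncIterator[str]:
    for d in ('Hello', ', ', 'world'):
        yield d


async def collect(delta_callback: ta.Callable[[str], ta.Awaitable[None]] | None = None) -> str:
    parts: list[str] = []                # the "joiner": builds the final message
    async for delta in fake_stream():
        parts.append(delta)              # accumulate for the returned chat
        if delta_callback is not None:
            await delta_callback(delta)  # forward to the caller as it arrives
    return ''.join(parts)


async def _demo() -> None:
    async def on_delta(d: str) -> None:
        print(f'delta: {d!r}')

    print('final:', repr(await collect(on_delta)))


asyncio.run(_demo())
```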
ommlds/cli/sessions/chat/chat/ai/tools.py
ADDED
@@ -0,0 +1,44 @@
+from ...... import minichain as mc
+from ...tools.execution import ToolUseExecutor
+from .types import AiChatGenerator
+
+
+##
+
+
+class ToolExecutingAiChatGenerator(AiChatGenerator):
+    def __init__(
+            self,
+            *,
+            wrapped: AiChatGenerator,
+            executor: ToolUseExecutor,
+    ) -> None:
+        super().__init__()
+
+        self._wrapped = wrapped
+        self._executor = executor
+
+    async def get_next_ai_messages(self, chat: 'mc.Chat') -> 'mc.Chat':
+        out: list[mc.Message] = []
+
+        while True:
+            new = await self._wrapped.get_next_ai_messages([*chat, *out])
+
+            out.extend(new)
+
+            cont = False
+
+            for msg in new:
+                if isinstance(msg, mc.ToolUseMessage):
+                    trm = await self._executor.execute_tool_use(
+                        msg.tu,
+                        # fs_tool_context,
+                        # todo_tool_context,  # noqa
+                    )
+
+                    out.append(trm)
+
+                    cont = True
+
+            if not cont:
+                return out
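
The wrapper loops: after each round of AI messages it executes any tool-use messages, appends the tool results to the pending output, and asks the wrapped generator again, stopping on the first round with no tool use. A runnable simulation of that control flow with trivial tuple-tagged stand-ins for the message types:

```python
# Simulation of ToolExecutingAiChatGenerator's loop; (kind, text) tuples stand
# in for mc.ToolUseMessage / tool-result messages (illustration only).
import asyncio

Message = tuple[str, str]  # (kind, text)


async def fake_ai(chat: list[Message]) -> list[Message]:
    # First round asks for a tool, second round answers plainly.
    if not any(kind == 'tool_result' for kind, _ in chat):
        return [('tool_use', 'get_weather')]
    return [('ai', 'It is sunny.')]


async def execute_tool(name: str) -> Message:
    return ('tool_result', f'{name} -> 21C')


async def run_turn(chat: list[Message]) -> list[Message]:
    out: list[Message] = []
    while True:
        new = await fake_ai([*chat, *out])
        out.extend(new)
        cont = False
        for kind, text in new:
            if kind == 'tool_use':
                out.append(await execute_tool(text))
                cont = True
        if not cont:
            return out


print(asyncio.run(run_turn([('user', 'Weather?')])))
# [('tool_use', 'get_weather'), ('tool_result', 'get_weather -> 21C'), ('ai', 'It is sunny.')]
```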
ommlds/cli/sessions/chat/chat/ai/types.py
ADDED
@@ -0,0 +1,28 @@
+import abc
+import typing as ta
+
+from omlish import lang
+
+from ...... import minichain as mc
+
+
+##
+
+
+class AiChatGenerator(lang.Abstract):
+    @abc.abstractmethod
+    def get_next_ai_messages(self, chat: 'mc.Chat') -> ta.Awaitable['mc.Chat']:
+        raise NotImplementedError
+
+
+class StreamAiChatGenerator(AiChatGenerator, lang.Abstract):
+    def get_next_ai_messages(self, chat: 'mc.Chat') -> ta.Awaitable['mc.Chat']:
+        return self.get_next_ai_messages_streamed(chat)
+
+    @abc.abstractmethod
+    def get_next_ai_messages_streamed(
+            self,
+            chat: 'mc.Chat',
+            delta_callback: ta.Callable[['mc.AiDelta'], ta.Awaitable[None]] | None = None,
+    ) -> ta.Awaitable['mc.Chat']:
+        raise NotImplementedError
File without changes
ommlds/cli/sessions/chat/chat/state/inject.py
ADDED
@@ -0,0 +1,36 @@
+from omlish import inject as inj
+from omlish import lang
+
+from ...phases.injection import phase_callbacks
+from ...phases.types import ChatPhase
+from ...phases.types import ChatPhaseCallback
+from .configs import StateConfig
+
+
+with lang.auto_proxy_import(globals()):
+    from . import inmemory as _inmemory
+    from . import storage as _storage
+    from . import types as _types
+
+
+##
+
+
+def bind_state(cfg: StateConfig = StateConfig()) -> inj.Elements:
+    els: list[inj.Elemental] = []
+
+    if cfg.state in ('continue', 'new'):
+        els.append(inj.bind(_types.ChatStateManager, to_ctor=_storage.StateStorageChatStateManager, singleton=True))
+
+        if cfg.state == 'new':
+            els.append(phase_callbacks().bind_item(to_fn=lang.typed_lambda(cm=_types.ChatStateManager)(
+                lambda cm: ChatPhaseCallback(ChatPhase.STARTING, cm.clear_state),
+            )))
+
+    elif cfg.state == 'ephemeral':
+        els.append(inj.bind(_types.ChatStateManager, to_ctor=_inmemory.InMemoryChatStateManager, singleton=True))
+
+    else:
+        raise TypeError(cfg.state)
+
+    return inj.as_elements(*els)
ommlds/cli/sessions/chat/chat/state/inmemory.py
ADDED
@@ -0,0 +1,33 @@
+from omlish import dataclasses as dc
+from omlish import lang
+
+from ...... import minichain as mc
+from .types import ChatState
+from .types import ChatStateManager
+
+
+##
+
+
+class InMemoryChatStateManager(ChatStateManager):
+    def __init__(self, initial_state: ChatState | None = None) -> None:
+        super().__init__()
+
+        if initial_state is None:
+            initial_state = ChatState()
+        self._state = initial_state
+
+    async def get_state(self) -> ChatState:
+        return self._state
+
+    async def clear_state(self) -> ChatState:
+        self._state = ChatState()
+        return self._state
+
+    async def extend_chat(self, chat_additions: 'mc.Chat') -> ChatState:
+        self._state = dc.replace(
+            self._state,
+            chat=[*self._state.chat, *chat_additions],
+            updated_at=lang.utcnow(),
+        )
+        return self._state
ommlds/cli/sessions/chat/chat/state/storage.py
ADDED
@@ -0,0 +1,52 @@
+from omlish import check
+from omlish import dataclasses as dc
+from omlish import lang
+
+from ...... import minichain as mc
+from .....state.storage import StateStorage
+from .types import ChatState
+from .types import ChatStateManager
+
+
+##
+
+
+class StateStorageChatStateManager(ChatStateManager):
+    def __init__(
+            self,
+            *,
+            storage: StateStorage,
+            key: str = 'chat',
+    ) -> None:
+        super().__init__()
+
+        self._storage = storage
+        self._key = check.non_empty_str(key)
+
+        self._state: ChatState | None = None
+
+    async def get_state(self) -> ChatState:
+        if self._state is not None:
+            return self._state
+        state: ChatState | None = await self._storage.load_state(self._key, ChatState)
+        if state is None:
+            state = ChatState()
+        self._state = state
+        return state
+
+    async def clear_state(self) -> ChatState:
+        state = ChatState()
+        await self._storage.save_state(self._key, state, ChatState)
+        self._state = state
+        return state
+
+    async def extend_chat(self, chat_additions: 'mc.Chat') -> ChatState:
+        state = await self.get_state()
+        state = dc.replace(
+            state,
+            chat=[*state.chat, *chat_additions],
+            updated_at=lang.utcnow(),
+        )
+        await self._storage.save_state(self._key, state, ChatState)
+        self._state = state
+        return state
ommlds/cli/sessions/chat/chat/state/types.py
ADDED
@@ -0,0 +1,38 @@
+import abc
+import datetime
+import typing as ta
+
+from omlish import dataclasses as dc
+from omlish import lang
+
+from ...... import minichain as mc
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class ChatState:
+    name: str | None = None
+
+    created_at: datetime.datetime = dc.field(default_factory=lang.utcnow)
+    updated_at: datetime.datetime = dc.field(default_factory=lang.utcnow)
+
+    chat: mc.Chat = ()
+
+
+##
+
+
+class ChatStateManager(lang.Abstract):
+    @abc.abstractmethod
+    def get_state(self) -> ta.Awaitable[ChatState]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def clear_state(self) -> ta.Awaitable[ChatState]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def extend_chat(self, chat_additions: 'mc.Chat') -> ta.Awaitable[ChatState]:
+        raise NotImplementedError
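
`ChatState` is an immutable snapshot (the managers above always `dc.replace` it rather than mutate it), and `ChatStateManager` is the async contract the in-memory and storage-backed implementations satisfy. A small usage sketch against a simplified in-memory manager, with standard-library dataclasses standing in for the omlish ones:

```python
# Usage sketch of the ChatStateManager contract with a simplified in-memory
# implementation (plain dataclasses stand in for omlish.dataclasses / lang).
import asyncio
import dataclasses as dc
import datetime


def _utcnow() -> datetime.datetime:
    return datetime.datetime.now(datetime.timezone.utc)


@dc.dataclass(frozen=True)
class ChatState:
    chat: tuple[str, ...] = ()
    updated_at: datetime.datetime = dc.field(default_factory=_utcnow)


class InMemoryManager:
    def __init__(self) -> None:
        self._state = ChatState()

    async def get_state(self) -> ChatState:
        return self._state

    async def extend_chat(self, additions: tuple[str, ...]) -> ChatState:
        # Each extension produces a new snapshot; nothing is mutated in place.
        self._state = dc.replace(
            self._state,
            chat=(*self._state.chat, *additions),
            updated_at=_utcnow(),
        )
        return self._state


async def _demo() -> None:
    mgr = InMemoryManager()
    await mgr.extend_chat(('user: hi',))
    state = await mgr.extend_chat(('ai: hello',))
    print(state.chat)  # ('user: hi', 'ai: hello')


asyncio.run(_demo())
```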
File without changes
ommlds/cli/sessions/chat/chat/user/configs.py
ADDED
@@ -0,0 +1,17 @@
+import typing as ta
+
+from omlish import dataclasses as dc
+
+from ...... import minichain as mc
+
+
+##
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class UserConfig:
+    initial_system_content: ta.Optional['mc.Content'] = None
+    initial_user_content: ta.Optional['mc.Content'] = None
+
+    interactive: bool = False
+    use_readline: bool | ta.Literal['auto'] = 'auto'
ommlds/cli/sessions/chat/chat/user/inject.py
ADDED
@@ -0,0 +1,62 @@
+from omlish import inject as inj
+from omlish import lang
+
+from ...... import minichain as mc
+from ...phases.injection import phase_callbacks
+from ...phases.types import ChatPhase
+from ...phases.types import ChatPhaseCallback
+from .configs import UserConfig
+
+
+with lang.auto_proxy_import(globals()):
+    from .....inputs import asyncs as _inputs_asyncs
+    from .....inputs import sync as _inputs_sync
+    from ..state import types as _state
+    from . import interactive as _interactive
+    from . import oneshot as _oneshot
+    from . import types as _types
+
+
+##
+
+
+def bind_user(cfg: UserConfig = UserConfig()) -> inj.Elements:
+    els: list[inj.Elemental] = []
+
+    # FIXME: barf
+    if cfg.initial_system_content is not None:
+        async def add_initial_system_content(cm: '_state.ChatStateManager') -> None:
+            await cm.extend_chat([mc.SystemMessage(cfg.initial_system_content)])
+
+        els.append(phase_callbacks().bind_item(to_fn=lang.typed_lambda(cm=_state.ChatStateManager)(
+            lambda cm: ChatPhaseCallback(ChatPhase.STARTED, lambda: add_initial_system_content(cm)),
+        )))
+
+    if cfg.interactive:
+        if cfg.initial_user_content is not None:
+            async def add_initial_user_content(cm: '_state.ChatStateManager') -> None:
+                await cm.extend_chat([mc.UserMessage(cfg.initial_user_content)])
+
+            els.append(phase_callbacks().bind_item(to_fn=lang.typed_lambda(cm=_state.ChatStateManager)(
+                lambda cm: ChatPhaseCallback(ChatPhase.STARTED, lambda: add_initial_user_content(cm)),
+            )))
+
+            raise NotImplementedError
+
+        els.append(inj.bind(_types.UserChatInput, to_ctor=_interactive.InteractiveUserChatInput, singleton=True))
+
+        els.extend([
+            inj.bind(_inputs_sync.SyncStringInput, to_const=_inputs_sync.InputSyncStringInput(use_readline=cfg.use_readline)),  # noqa
+            inj.bind(_inputs_asyncs.AsyncStringInput, to_ctor=_inputs_asyncs.ThreadAsyncStringInput, singleton=True),
+        ])
+
+    else:
+        if cfg.initial_user_content is None:
+            raise ValueError('Initial user content is required for non-interactive chat')
+
+        els.extend([
+            inj.bind(_oneshot.OneshotUserChatInputInitialChat, to_const=[mc.UserMessage(cfg.initial_user_content)]),
+            inj.bind(_types.UserChatInput, to_ctor=_oneshot.OneshotUserChatInput, singleton=True),
+        ])
+
+    return inj.as_elements(*els)
ommlds/cli/sessions/chat/chat/user/interactive.py
ADDED
@@ -0,0 +1,31 @@
+import typing as ta
+
+from ...... import minichain as mc
+from .....inputs.asyncs import AsyncStringInput
+from .....inputs.asyncs import SyncAsyncStringInput
+from .....inputs.sync import InputSyncStringInput
+from .types import UserChatInput
+
+
+##
+
+
+class InteractiveUserChatInput(UserChatInput):
+    DEFAULT_STRING_INPUT: ta.ClassVar[AsyncStringInput] = SyncAsyncStringInput(InputSyncStringInput())
+
+    def __init__(
+            self,
+            string_input: AsyncStringInput | None = None,
+    ) -> None:
+        super().__init__()
+
+        if string_input is None:
+            string_input = self.DEFAULT_STRING_INPUT
+        self._string_input = string_input
+
+    async def get_next_user_messages(self) -> 'mc.UserChat':
+        try:
+            s = await self._string_input()
+        except EOFError:
+            return []
+        return [mc.UserMessage(s)]
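
Returning an empty list on `EOFError` is how the interactive input signals that the user is done, so a driver loop can simply stop when no user messages come back. A standalone sketch of that loop, with `input()` run in a thread standing in for the `AsyncStringInput` machinery (an assumption for illustration):

```python
# Sketch of the EOF-terminated prompt loop InteractiveUserChatInput enables;
# input() stands in for the async string-input stack (illustration only).
import asyncio


async def get_next_user_messages() -> list[str]:
    try:
        s = await asyncio.to_thread(input, '> ')
    except EOFError:
        return []          # Ctrl-D: no more user messages
    return [s]


async def chat_loop() -> None:
    while True:
        user_msgs = await get_next_user_messages()
        if not user_msgs:
            break          # the driver stops when the user signals EOF
        print(f'(ai would now reply to {user_msgs[0]!r})')


if __name__ == '__main__':
    asyncio.run(chat_loop())
```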