ommlds-0.0.0.dev456-py3-none-any.whl → ommlds-0.0.0.dev485-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ommlds/.omlish-manifests.json +314 -33
- ommlds/__about__.py +15 -9
- ommlds/_hacks/__init__.py +4 -0
- ommlds/_hacks/funcs.py +110 -0
- ommlds/_hacks/names.py +158 -0
- ommlds/_hacks/params.py +73 -0
- ommlds/_hacks/patches.py +0 -3
- ommlds/backends/anthropic/protocol/__init__.py +13 -1
- ommlds/backends/anthropic/protocol/_dataclasses.py +1625 -0
- ommlds/backends/anthropic/protocol/sse/assemble.py +22 -6
- ommlds/backends/anthropic/protocol/sse/events.py +13 -0
- ommlds/backends/google/protocol/__init__.py +13 -0
- ommlds/backends/google/protocol/_dataclasses.py +5997 -0
- ommlds/backends/google/protocol/types.py +5 -1
- ommlds/backends/groq/__init__.py +7 -0
- ommlds/backends/groq/_dataclasses.py +3901 -0
- ommlds/backends/groq/_marshal.py +23 -0
- ommlds/backends/groq/protocol.py +249 -0
- ommlds/backends/llamacpp/logging.py +4 -1
- ommlds/backends/mlx/caching.py +7 -3
- ommlds/backends/mlx/cli.py +10 -7
- ommlds/backends/mlx/generation.py +18 -16
- ommlds/backends/mlx/limits.py +10 -6
- ommlds/backends/mlx/loading.py +65 -5
- ommlds/backends/ollama/__init__.py +7 -0
- ommlds/backends/ollama/_dataclasses.py +3458 -0
- ommlds/backends/ollama/protocol.py +170 -0
- ommlds/backends/openai/protocol/__init__.py +15 -1
- ommlds/backends/openai/protocol/_dataclasses.py +7708 -0
- ommlds/backends/tavily/__init__.py +7 -0
- ommlds/backends/tavily/_dataclasses.py +1734 -0
- ommlds/backends/tavily/protocol.py +301 -0
- ommlds/backends/tinygrad/models/llama3/__init__.py +22 -14
- ommlds/backends/transformers/__init__.py +14 -0
- ommlds/backends/transformers/filecache.py +109 -0
- ommlds/backends/transformers/streamers.py +73 -0
- ommlds/cli/__init__.py +7 -0
- ommlds/cli/_dataclasses.py +2562 -0
- ommlds/cli/asyncs.py +30 -0
- ommlds/cli/backends/catalog.py +93 -0
- ommlds/cli/backends/configs.py +9 -0
- ommlds/cli/backends/inject.py +31 -36
- ommlds/cli/backends/injection.py +16 -0
- ommlds/cli/backends/types.py +46 -0
- ommlds/cli/content/messages.py +34 -0
- ommlds/cli/content/strings.py +42 -0
- ommlds/cli/inject.py +15 -32
- ommlds/cli/inputs/__init__.py +0 -0
- ommlds/cli/inputs/asyncs.py +32 -0
- ommlds/cli/inputs/sync.py +75 -0
- ommlds/cli/main.py +267 -128
- ommlds/cli/rendering/__init__.py +0 -0
- ommlds/cli/rendering/configs.py +9 -0
- ommlds/cli/rendering/inject.py +31 -0
- ommlds/cli/rendering/markdown.py +52 -0
- ommlds/cli/rendering/raw.py +73 -0
- ommlds/cli/rendering/types.py +21 -0
- ommlds/cli/secrets.py +21 -0
- ommlds/cli/sessions/base.py +1 -1
- ommlds/cli/sessions/chat/chat/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/ai/inject.py +74 -0
- ommlds/cli/sessions/chat/chat/ai/injection.py +14 -0
- ommlds/cli/sessions/chat/chat/ai/rendering.py +70 -0
- ommlds/cli/sessions/chat/chat/ai/services.py +79 -0
- ommlds/cli/sessions/chat/chat/ai/tools.py +44 -0
- ommlds/cli/sessions/chat/chat/ai/types.py +28 -0
- ommlds/cli/sessions/chat/chat/state/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/state/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/state/inject.py +36 -0
- ommlds/cli/sessions/chat/chat/state/inmemory.py +33 -0
- ommlds/cli/sessions/chat/chat/state/storage.py +52 -0
- ommlds/cli/sessions/chat/chat/state/types.py +38 -0
- ommlds/cli/sessions/chat/chat/user/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/user/configs.py +17 -0
- ommlds/cli/sessions/chat/chat/user/inject.py +62 -0
- ommlds/cli/sessions/chat/chat/user/interactive.py +31 -0
- ommlds/cli/sessions/chat/chat/user/oneshot.py +25 -0
- ommlds/cli/sessions/chat/chat/user/types.py +15 -0
- ommlds/cli/sessions/chat/configs.py +27 -0
- ommlds/cli/sessions/chat/driver.py +43 -0
- ommlds/cli/sessions/chat/inject.py +33 -65
- ommlds/cli/sessions/chat/phases/__init__.py +0 -0
- ommlds/cli/sessions/chat/phases/inject.py +27 -0
- ommlds/cli/sessions/chat/phases/injection.py +14 -0
- ommlds/cli/sessions/chat/phases/manager.py +29 -0
- ommlds/cli/sessions/chat/phases/types.py +29 -0
- ommlds/cli/sessions/chat/session.py +27 -0
- ommlds/cli/sessions/chat/tools/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/configs.py +22 -0
- ommlds/cli/sessions/chat/tools/confirmation.py +46 -0
- ommlds/cli/sessions/chat/tools/execution.py +66 -0
- ommlds/cli/sessions/chat/tools/fs/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/fs/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/fs/inject.py +35 -0
- ommlds/cli/sessions/chat/tools/inject.py +88 -0
- ommlds/cli/sessions/chat/tools/injection.py +44 -0
- ommlds/cli/sessions/chat/tools/rendering.py +58 -0
- ommlds/cli/sessions/chat/tools/todo/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/todo/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/todo/inject.py +31 -0
- ommlds/cli/sessions/chat/tools/weather/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/weather/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/weather/inject.py +22 -0
- ommlds/cli/{tools/weather.py → sessions/chat/tools/weather/tools.py} +1 -1
- ommlds/cli/sessions/completion/configs.py +21 -0
- ommlds/cli/sessions/completion/inject.py +42 -0
- ommlds/cli/sessions/completion/session.py +35 -0
- ommlds/cli/sessions/embedding/configs.py +21 -0
- ommlds/cli/sessions/embedding/inject.py +42 -0
- ommlds/cli/sessions/embedding/session.py +33 -0
- ommlds/cli/sessions/inject.py +28 -11
- ommlds/cli/state/__init__.py +0 -0
- ommlds/cli/state/inject.py +28 -0
- ommlds/cli/{state.py → state/storage.py} +41 -24
- ommlds/minichain/__init__.py +46 -17
- ommlds/minichain/_dataclasses.py +15401 -0
- ommlds/minichain/backends/catalogs/base.py +20 -1
- ommlds/minichain/backends/catalogs/simple.py +2 -2
- ommlds/minichain/backends/catalogs/strings.py +10 -8
- ommlds/minichain/backends/impls/anthropic/chat.py +31 -65
- ommlds/minichain/backends/impls/anthropic/names.py +3 -4
- ommlds/minichain/backends/impls/anthropic/protocol.py +109 -0
- ommlds/minichain/backends/impls/anthropic/stream.py +53 -31
- ommlds/minichain/backends/impls/duckduckgo/search.py +5 -1
- ommlds/minichain/backends/impls/dummy/__init__.py +0 -0
- ommlds/minichain/backends/impls/dummy/chat.py +69 -0
- ommlds/minichain/backends/impls/google/chat.py +9 -2
- ommlds/minichain/backends/impls/google/search.py +6 -1
- ommlds/minichain/backends/impls/google/stream.py +122 -32
- ommlds/minichain/backends/impls/groq/__init__.py +0 -0
- ommlds/minichain/backends/impls/groq/chat.py +75 -0
- ommlds/minichain/backends/impls/groq/names.py +48 -0
- ommlds/minichain/backends/impls/groq/protocol.py +143 -0
- ommlds/minichain/backends/impls/groq/stream.py +125 -0
- ommlds/minichain/backends/impls/huggingface/repos.py +1 -5
- ommlds/minichain/backends/impls/llamacpp/chat.py +15 -3
- ommlds/minichain/backends/impls/llamacpp/completion.py +7 -3
- ommlds/minichain/backends/impls/llamacpp/stream.py +38 -19
- ommlds/minichain/backends/impls/mistral.py +9 -2
- ommlds/minichain/backends/impls/mlx/chat.py +100 -23
- ommlds/minichain/backends/impls/ollama/__init__.py +0 -0
- ommlds/minichain/backends/impls/ollama/chat.py +199 -0
- ommlds/minichain/backends/impls/openai/chat.py +14 -7
- ommlds/minichain/backends/impls/openai/completion.py +9 -2
- ommlds/minichain/backends/impls/openai/embedding.py +9 -2
- ommlds/minichain/backends/impls/openai/format.py +115 -109
- ommlds/minichain/backends/impls/openai/names.py +31 -5
- ommlds/minichain/backends/impls/openai/stream.py +33 -27
- ommlds/minichain/backends/impls/sentencepiece/tokens.py +9 -6
- ommlds/minichain/backends/impls/tavily.py +66 -0
- ommlds/minichain/backends/impls/tinygrad/chat.py +17 -14
- ommlds/minichain/backends/impls/tokenizers/tokens.py +9 -6
- ommlds/minichain/backends/impls/transformers/sentence.py +5 -2
- ommlds/minichain/backends/impls/transformers/tokens.py +10 -7
- ommlds/minichain/backends/impls/transformers/transformers.py +139 -20
- ommlds/minichain/backends/strings/parsing.py +1 -1
- ommlds/minichain/backends/strings/resolving.py +4 -1
- ommlds/minichain/chat/choices/stream/__init__.py +0 -0
- ommlds/minichain/chat/choices/stream/adapters.py +35 -0
- ommlds/minichain/chat/choices/stream/joining.py +31 -0
- ommlds/minichain/chat/choices/stream/services.py +45 -0
- ommlds/minichain/chat/choices/stream/types.py +43 -0
- ommlds/minichain/chat/stream/_marshal.py +4 -4
- ommlds/minichain/chat/stream/joining.py +85 -0
- ommlds/minichain/chat/stream/services.py +15 -15
- ommlds/minichain/chat/stream/types.py +24 -18
- ommlds/minichain/llms/types.py +4 -0
- ommlds/minichain/registries/globals.py +18 -4
- ommlds/minichain/resources.py +28 -3
- ommlds/minichain/search.py +1 -1
- ommlds/minichain/standard.py +8 -0
- ommlds/minichain/stream/services.py +19 -16
- ommlds/minichain/tools/reflect.py +5 -1
- ommlds/nanochat/LICENSE +21 -0
- ommlds/nanochat/__init__.py +0 -0
- ommlds/nanochat/rustbpe/LICENSE +21 -0
- ommlds/nanochat/tokenizers.py +406 -0
- ommlds/specs/__init__.py +0 -0
- ommlds/specs/mcp/__init__.py +0 -0
- ommlds/specs/mcp/_marshal.py +23 -0
- ommlds/specs/mcp/clients.py +146 -0
- ommlds/specs/mcp/protocol.py +371 -0
- ommlds/tools/git.py +13 -6
- ommlds/tools/ocr.py +1 -8
- ommlds/wiki/analyze.py +2 -2
- ommlds/wiki/text/mfh.py +1 -5
- ommlds/wiki/text/wtp.py +1 -3
- ommlds/wiki/utils/xml.py +5 -5
- {ommlds-0.0.0.dev456.dist-info → ommlds-0.0.0.dev485.dist-info}/METADATA +22 -19
- {ommlds-0.0.0.dev456.dist-info → ommlds-0.0.0.dev485.dist-info}/RECORD +198 -95
- ommlds/cli/backends/standard.py +0 -20
- ommlds/cli/sessions/chat/base.py +0 -42
- ommlds/cli/sessions/chat/code.py +0 -129
- ommlds/cli/sessions/chat/interactive.py +0 -71
- ommlds/cli/sessions/chat/printing.py +0 -97
- ommlds/cli/sessions/chat/prompt.py +0 -151
- ommlds/cli/sessions/chat/state.py +0 -110
- ommlds/cli/sessions/chat/tools.py +0 -100
- ommlds/cli/sessions/completion/completion.py +0 -44
- ommlds/cli/sessions/embedding/embedding.py +0 -42
- ommlds/cli/tools/config.py +0 -14
- ommlds/cli/tools/inject.py +0 -75
- ommlds/minichain/backends/impls/openai/format2.py +0 -210
- ommlds/minichain/chat/stream/adapters.py +0 -80
- /ommlds/{huggingface.py → backends/huggingface.py} +0 -0
- /ommlds/cli/{tools → content}/__init__.py +0 -0
- {ommlds-0.0.0.dev456.dist-info → ommlds-0.0.0.dev485.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev456.dist-info → ommlds-0.0.0.dev485.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev456.dist-info → ommlds-0.0.0.dev485.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev456.dist-info → ommlds-0.0.0.dev485.dist-info}/top_level.txt +0 -0
ommlds/cli/sessions/chat/code.py
DELETED
@@ -1,129 +0,0 @@
-import dataclasses as dc
-import itertools
-import os.path
-
-from omlish import check
-from omlish import lang
-
-from .... import minichain as mc
-from ....minichain.lib.code.prompts import CODE_AGENT_SYSTEM_PROMPT
-from ...tools.config import ToolsConfig
-from .base import DEFAULT_CHAT_MODEL_BACKEND
-from .base import ChatOptions
-from .base import ChatSession
-from .printing import ChatSessionPrinter
-from .state import ChatStateManager
-from .tools import ToolUseExecutor
-
-
-with lang.auto_proxy_import(globals()):
-    from omdev import ptk
-
-
-##
-
-
-class CodeChatSession(ChatSession['CodeChatSession.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(ChatSession.Config):
-        _: dc.KW_ONLY
-
-        new: bool = False
-
-        backend: str | None = None
-        model_name: str | None = None
-
-        initial_message: mc.Content | None = None
-
-    def __init__(
-            self,
-            config: Config,
-            *,
-            state_manager: ChatStateManager,
-            chat_options: ChatOptions | None = None,
-            printer: ChatSessionPrinter,
-            backend_catalog: mc.BackendCatalog,
-            tool_exec_request_executor: ToolUseExecutor,
-            tools_config: ToolsConfig | None = None,
-    ) -> None:
-        super().__init__(config)
-
-        self._state_manager = state_manager
-        self._chat_options = chat_options
-        self._printer = printer
-        self._backend_catalog = backend_catalog
-        self._tool_exec_request_executor = tool_exec_request_executor
-        self._tools_config = tools_config
-
-    async def run(self) -> None:
-        if self._config.new:
-            self._state_manager.clear_state()
-            state = self._state_manager.extend_chat([
-                mc.SystemMessage(CODE_AGENT_SYSTEM_PROMPT),
-            ])
-
-        else:
-            state = self._state_manager.get_state()
-
-        backend = self._config.backend
-        if backend is None:
-            backend = DEFAULT_CHAT_MODEL_BACKEND
-
-        # FIXME: lol
-        from ....minichain.lib.fs.context import FsContext
-        fs_tool_context = FsContext(
-            root_dir=os.getcwd(),
-            writes_permitted=self._tools_config is not None and self._tools_config.enable_unsafe_tools_do_not_use_lol,
-        )
-
-        from ....minichain.lib.todo.context import TodoContext
-        todo_tool_context = TodoContext()
-
-        mdl: mc.ChatChoicesService
-        async with lang.async_maybe_managing(self._backend_catalog.get_backend(
-                mc.ChatChoicesService,
-                backend,
-                *([mc.ModelName(mn)] if (mn := self._config.model_name) is not None else []),
-        )) as mdl:
-            for i in itertools.count():
-                if not i and self._config.initial_message is not None:
-                    req_msg = mc.UserMessage(self._config.initial_message)
-                else:
-                    try:
-                        prompt = await ptk.prompt('> ')
-                    except EOFError:
-                        break
-                    req_msg = mc.UserMessage(prompt)
-
-                state = self._state_manager.extend_chat([req_msg])
-
-                while True:
-                    response = await mdl.invoke(mc.ChatChoicesRequest(
-                        state.chat,
-                        (self._chat_options or []),
-                    ))
-
-                    tool_resp_lst = []
-                    for resp_msg in check.single(response.v).ms:
-                        state = self._state_manager.extend_chat([resp_msg])
-
-                        if isinstance(resp_msg, mc.AiMessage):
-                            self._printer.print(resp_msg)
-
-                        elif isinstance(resp_msg, mc.ToolUseMessage):
-                            trm = await self._tool_exec_request_executor.execute_tool_use(
-                                resp_msg.tu,
-                                fs_tool_context,
-                                todo_tool_context,
-                            )
-
-                            self._printer.print(trm.tur.c)
-                            tool_resp_lst.append(trm)
-
-                        else:
-                            raise TypeError(resp_msg)
-
-                    if not tool_resp_lst:
-                        break
-
-                    state = self._state_manager.extend_chat(tool_resp_lst)
ommlds/cli/sessions/chat/interactive.py
DELETED
@@ -1,71 +0,0 @@
-import dataclasses as dc
-
-from omlish import check
-from omlish import lang
-
-from .... import minichain as mc
-from .base import DEFAULT_CHAT_MODEL_BACKEND
-from .base import ChatSession
-from .printing import ChatSessionPrinter
-from .state import ChatStateManager
-
-
-with lang.auto_proxy_import(globals()):
-    from omdev import ptk
-
-
-##
-
-
-class InteractiveChatSession(ChatSession['InteractiveChatSession.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(ChatSession.Config):
-        _: dc.KW_ONLY
-
-        new: bool = False
-
-        backend: str | None = None
-        model_name: str | None = None
-
-    def __init__(
-            self,
-            config: Config,
-            *,
-            state_manager: ChatStateManager,
-            printer: ChatSessionPrinter,
-            backend_catalog: mc.BackendCatalog,
-    ) -> None:
-        super().__init__(config)
-
-        self._state_manager = state_manager
-        self._printer = printer
-        self._backend_catalog = backend_catalog
-
-    async def run(self) -> None:
-        if self._config.new:
-            state = self._state_manager.clear_state()
-        else:
-            state = self._state_manager.get_state()
-
-        backend = self._config.backend
-        if backend is None:
-            backend = DEFAULT_CHAT_MODEL_BACKEND
-
-        mdl: mc.ChatChoicesService
-        async with lang.async_maybe_managing(self._backend_catalog.get_backend(
-                mc.ChatChoicesService,
-                backend,
-                *([mc.ModelName(mn)] if (mn := self._config.model_name) is not None else []),
-        )) as mdl:
-            while True:
-                prompt = await ptk.prompt('> ')
-
-                req_msg = mc.UserMessage(prompt)
-
-                response = await mdl.invoke(mc.ChatChoicesRequest([*state.chat, req_msg]))
-
-                resp_msg = check.isinstance(check.single(response.v[0].ms), mc.AiMessage)
-
-                self._printer.print(resp_msg)
-
-                state = self._state_manager.extend_chat([req_msg, resp_msg])
ommlds/cli/sessions/chat/printing.py
DELETED
@@ -1,97 +0,0 @@
-import abc
-import typing as ta
-
-from omlish import check
-from omlish import lang
-from omlish.formats import json
-
-from .... import minichain as mc
-
-
-with lang.auto_proxy_import(globals()):
-    from omdev import ptk
-    from omdev.ptk import markdown as ptk_md
-
-
-##
-
-
-class ChatSessionPrinter(lang.Abstract):
-    @abc.abstractmethod
-    def print(self, obj: mc.Message | mc.Content) -> None:
-        raise NotImplementedError
-
-
-##
-
-
-class StringChatSessionPrinter(ChatSessionPrinter, lang.Abstract):
-    @abc.abstractmethod
-    def _print_str(self, s: str) -> None:
-        raise NotImplementedError
-
-    def print(self, obj: mc.Message | mc.Content) -> None:
-        if obj is None:
-            pass
-
-        elif isinstance(obj, mc.Message):
-            if isinstance(obj, mc.SystemMessage):
-                if obj.c is not None:
-                    self._print_str(check.isinstance(obj.c, str))
-            elif isinstance(obj, mc.UserMessage):
-                if obj.c is not None:
-                    self._print_str(check.isinstance(obj.c, str))
-            elif isinstance(obj, mc.AiMessage):
-                if obj.c is not None:
-                    self._print_str(check.isinstance(obj.c, str))
-            elif isinstance(obj, mc.ToolUseResultMessage):
-                self._print_str(check.isinstance(obj.tur.c, str))
-            else:
-                raise TypeError(obj)
-
-        elif isinstance(obj, mc.JsonContent):
-            self._print_str(json.dumps_pretty(obj.v))
-
-        elif isinstance(obj, str):
-            self._print_str(obj)
-
-        else:
-            raise TypeError(obj)
-
-
-##
-
-
-class SimpleStringChatSessionPrinter(StringChatSessionPrinter):
-    def __init__(
-            self,
-            *,
-            str_printer: ta.Callable[[str], None] | None = None,
-    ) -> None:
-        super().__init__()
-
-        if str_printer is None:
-            str_printer = print
-        self._str_printer = str_printer
-
-    def _print_str(self, s: str) -> None:
-        s = s.strip()
-        if not s:
-            return
-
-        self._str_printer(s)
-
-
-##
-
-
-class MarkdownStringChatSessionPrinter(StringChatSessionPrinter):
-    def _print_str(self, s: str) -> None:
-        s = s.strip()
-        if not s:
-            return
-
-        ptk.print_formatted_text(
-            ptk_md.Markdown(s),
-            style=ptk.Style(list(ptk_md.MARKDOWN_STYLE)),
-        )
ommlds/cli/sessions/chat/prompt.py
DELETED
@@ -1,151 +0,0 @@
-import dataclasses as dc
-import os
-
-from omlish import check
-from omlish import lang
-
-from .... import minichain as mc
-from .base import DEFAULT_CHAT_MODEL_BACKEND
-from .base import ChatOptions
-from .base import ChatSession
-from .printing import ChatSessionPrinter
-from .state import ChatStateManager
-from .tools import ToolUseExecutor
-
-
-##
-
-
-class ToolExecutionRequestDeniedError(Exception):
-    pass
-
-
-class PromptChatSession(ChatSession['PromptChatSession.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(ChatSession.Config):  # noqa
-        content: mc.Content
-
-        _: dc.KW_ONLY
-
-        new: bool = False
-
-        backend: str | None = None
-        model_name: str | None = None
-
-        stream: bool = False
-
-    def __init__(
-            self,
-            config: Config,
-            *,
-            state_manager: ChatStateManager,
-            chat_options: ChatOptions | None = None,
-            printer: ChatSessionPrinter,
-            backend_catalog: mc.BackendCatalog,
-            tool_exec_request_executor: ToolUseExecutor,
-    ) -> None:
-        super().__init__(config)
-
-        self._state_manager = state_manager
-        self._chat_options = chat_options
-        self._printer = printer
-        self._backend_catalog = backend_catalog
-        self._tool_exec_request_executor = tool_exec_request_executor
-
-    async def run(self) -> None:
-        if self._config.stream:
-            await self._run_stream()
-        else:
-            await self._run_immediate()
-
-    async def _run_stream(self) -> None:
-        prompt = check.isinstance(self._config.content, str)
-
-        if self._config.new:
-            state = self._state_manager.clear_state()
-        else:
-            state = self._state_manager.get_state()
-
-        new_chat: list[mc.Message] = [
-            mc.UserMessage(prompt),
-        ]
-
-        mdl: mc.ChatChoicesStreamService
-        async with lang.async_maybe_managing(self._backend_catalog.get_backend(
-                mc.ChatChoicesStreamService,
-                self._config.backend or DEFAULT_CHAT_MODEL_BACKEND,
-                *([mc.ModelName(mn)] if (mn := self._config.model_name) is not None else []),
-        )) as mdl:
-            async with (await mdl.invoke(mc.ChatChoicesStreamRequest(
-                [*state.chat, *new_chat],
-                (self._chat_options or []),
-            ))).v as st_resp:
-                lst: list[str] = []
-                async for o in st_resp:
-                    if o:
-                        c = check.isinstance(check.single(check.single(o.choices).deltas), mc.ContentAiChoiceDelta).c
-                        if c is not None:
-                            print(check.isinstance(c, str), end='', flush=True)
-                            lst.append(check.isinstance(c, str))
-                print()
-
-            resp_m = mc.AiMessage(''.join(lst))
-            new_chat.append(resp_m)
-
-        self._state_manager.extend_chat(new_chat)
-
-    async def _run_immediate(self) -> None:
-        prompt = check.isinstance(self._config.content, str)
-
-        if self._config.new:
-            state = self._state_manager.clear_state()
-        else:
-            state = self._state_manager.get_state()
-
-        new_chat: list[mc.Message] = [
-            mc.UserMessage(prompt),
-        ]
-
-        mdl: mc.ChatChoicesService
-        async with lang.async_maybe_managing(self._backend_catalog.get_backend(
-                mc.ChatChoicesService,
-                self._config.backend or DEFAULT_CHAT_MODEL_BACKEND,
-                *([mc.ModelName(mn)] if (mn := self._config.model_name) is not None else []),
-        )) as mdl:
-            response: mc.ChatChoicesResponse = await mdl.invoke(mc.ChatChoicesRequest(
-                [*state.chat, *new_chat],
-                (self._chat_options or []),
-            ))
-
-            for resp_m in response.v[0].ms:
-                new_chat.append(resp_m)
-
-                if isinstance(resp_m, mc.AiMessage):
-                    self._printer.print(resp_m)
-
-                elif isinstance(resp_m, mc.ToolUseMessage):
-                    tr: mc.ToolUse = resp_m.tu
-
-                    # FIXME: lol
-                    from ....minichain.lib.fs.context import FsContext
-
-                    trm = await self._tool_exec_request_executor.execute_tool_use(
-                        tr,
-                        FsContext(root_dir=os.getcwd()),
-                    )
-
-                    print(trm.tur.c)
-                    new_chat.append(trm)
-
-                    response = await mdl.invoke(mc.ChatChoicesRequest(
-                        [*state.chat, *new_chat],
-                        (self._chat_options or []),
-                    ))
-
-                    resp_m = check.isinstance(check.single(response.v[0].ms), mc.AiMessage)
-                    new_chat.append(resp_m)
-
-                else:
-                    raise TypeError(resp_m)
-
-        self._state_manager.extend_chat(new_chat)
ommlds/cli/sessions/chat/state.py
DELETED
@@ -1,110 +0,0 @@
-import abc
-import dataclasses as dc
-import datetime
-
-from omlish import check
-from omlish import lang
-
-from .... import minichain as mc
-from ...state import StateStorage
-
-
-##
-
-
-@dc.dataclass(frozen=True)
-class ChatState:
-    name: str | None = None
-
-    created_at: datetime.datetime = dc.field(default_factory=lang.utcnow)
-    updated_at: datetime.datetime = dc.field(default_factory=lang.utcnow)
-
-    chat: mc.Chat = ()
-
-
-##
-
-
-class ChatStateManager(lang.Abstract):
-    @abc.abstractmethod
-    def get_state(self) -> ChatState:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def clear_state(self) -> ChatState:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def extend_chat(self, chat_additions: mc.Chat) -> ChatState:
-        raise NotImplementedError
-
-
-##
-
-
-class InMemoryChatStateManager(ChatStateManager):
-    def __init__(self, initial_state: ChatState | None = None) -> None:
-        super().__init__()
-
-        if initial_state is None:
-            initial_state = ChatState()
-        self._state = initial_state
-
-    def get_state(self) -> ChatState:
-        return self._state
-
-    def clear_state(self) -> ChatState:
-        self._state = ChatState()
-        return self._state
-
-    def extend_chat(self, chat_additions: mc.Chat) -> ChatState:
-        self._state = dc.replace(
-            self._state,
-            chat=[*self._state.chat, *chat_additions],
-            updated_at=lang.utcnow(),
-        )
-        return self._state
-
-
-##
-
-
-class StateStorageChatStateManager(ChatStateManager):
-    def __init__(
-            self,
-            *,
-            storage: StateStorage,
-            key: str = 'chat',
-    ) -> None:
-        super().__init__()
-
-        self._storage = storage
-        self._key = check.non_empty_str(key)
-
-        self._state: ChatState | None = None
-
-    def get_state(self) -> ChatState:
-        if self._state is not None:
-            return self._state
-        state: ChatState | None = self._storage.load_state(self._key, ChatState)
-        if state is None:
-            state = ChatState()
-        self._state = state
-        return state
-
-    def clear_state(self) -> ChatState:
-        state = ChatState()
-        self._storage.save_state(self._key, state, ChatState)
-        self._state = state
-        return state
-
-    def extend_chat(self, chat_additions: mc.Chat) -> ChatState:
-        state = self.get_state()
-        state = dc.replace(
-            state,
-            chat=[*state.chat, *chat_additions],
-            updated_at=lang.utcnow(),
-        )
-        self._storage.save_state(self._key, state, ChatState)
-        self._state = state
-        return state
ommlds/cli/sessions/chat/tools.py
DELETED
@@ -1,100 +0,0 @@
-import abc
-import typing as ta
-
-from omlish import check
-from omlish import lang
-from omlish.formats import json
-
-from .... import minichain as mc
-
-
-with lang.auto_proxy_import(globals()):
-    from omdev import ptk
-
-
-##
-
-
-class ToolExecutionRequestDeniedError(Exception):
-    pass
-
-
-class ToolExecutionConfirmation(lang.Abstract):
-    @abc.abstractmethod
-    def confirm_tool_execution_or_raise(
-            self,
-            tr: mc.ToolUse,
-            tce: mc.ToolCatalogEntry,
-    ) -> ta.Awaitable[None]:
-        raise NotImplementedError
-
-
-class NopToolExecutionConfirmation(ToolExecutionConfirmation):
-    async def confirm_tool_execution_or_raise(
-            self,
-            tr: mc.ToolUse,
-            tce: mc.ToolCatalogEntry,
-    ) -> None:
-        pass
-
-
-class AskingToolExecutionConfirmation(ToolExecutionConfirmation):
-    async def confirm_tool_execution_or_raise(
-            self,
-            tr: mc.ToolUse,
-            tce: mc.ToolCatalogEntry,
-    ) -> None:
-        tr_dct = dict(
-            id=tr.id,
-            name=tce.spec.name,
-            args=tr.args,
-            # spec=msh.marshal(tce.spec),
-        )
-        cr = await ptk.strict_confirm(f'Execute requested tool?\n\n{json.dumps_pretty(tr_dct)}\n\n')
-
-        if not cr:
-            raise ToolExecutionRequestDeniedError
-
-
-##
-
-
-class ToolUseExecutor(lang.Abstract):
-    @abc.abstractmethod
-    def execute_tool_use(
-            self,
-            tr: mc.ToolUse,
-            *ctx_items: ta.Any,
-    ) -> ta.Awaitable[mc.ToolUseResultMessage]:
-        raise NotImplementedError
-
-
-class ToolUseExecutorImpl(ToolUseExecutor):
-    def __init__(
-            self,
-            *,
-            catalog: mc.ToolCatalog,
-            confirmation: ToolExecutionConfirmation,
-    ) -> None:
-        super().__init__()
-
-        self._catalog = catalog
-        self._confirmation = confirmation
-
-    async def execute_tool_use(
-            self,
-            tr: mc.ToolUse,
-            *ctx_items: ta.Any,
-    ) -> mc.ToolUseResultMessage:
-        tce = self._catalog.by_name[check.non_empty_str(tr.name)]
-
-        await self._confirmation.confirm_tool_execution_or_raise(tr, tce)
-
-        return await mc.execute_tool_use(
-            mc.ToolContext(
-                tr,
-                *ctx_items,
-            ),
-            tce.executor(),
-            tr,
-        )
ommlds/cli/sessions/completion/completion.py
DELETED
@@ -1,44 +0,0 @@
-import dataclasses as dc
-
-from omlish import check
-from omlish import lang
-
-from .... import minichain as mc
-from ..base import Session
-
-
-##
-
-
-DEFAULT_COMPLETION_MODEL_BACKEND = 'openai'
-
-
-class CompletionSession(Session['CompletionSession.Config']):
-    @dc.dataclass(frozen=True)
-    class Config(Session.Config):
-        content: mc.Content
-
-        _: dc.KW_ONLY
-
-        backend: str | None = None
-
-    def __init__(
-            self,
-            config: Config,
-            *,
-            backend_catalog: mc.BackendCatalog,
-    ) -> None:
-        super().__init__(config)
-
-        self._backend_catalog = backend_catalog
-
-    async def run(self) -> None:
-        prompt = check.isinstance(self._config.content, str)
-
-        mdl: mc.CompletionService
-        async with lang.async_maybe_managing(self._backend_catalog.get_backend(
-                mc.CompletionService,
-                self._config.backend or DEFAULT_COMPLETION_MODEL_BACKEND,
-        )) as mdl:
-            response = await mdl.invoke(mc.CompletionRequest(prompt))
-            print(response.v.strip())