ommlds-0.0.0.dev473-py3-none-any.whl → ommlds-0.0.0.dev475-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ommlds/.omlish-manifests.json +46 -0
- ommlds/__about__.py +2 -2
- ommlds/backends/tinygrad/models/llama3/__init__.py +22 -14
- ommlds/cli/asyncs.py +30 -0
- ommlds/cli/inject.py +7 -0
- ommlds/cli/main.py +2 -14
- ommlds/cli/main2.py +220 -0
- ommlds/cli/sessions/chat/chat/user/inject.py +7 -0
- ommlds/cli/sessions/chat/chat/user/inputs.py +105 -0
- ommlds/cli/sessions/chat/chat/user/interactive.py +7 -5
- ommlds/cli/sessions/chat/configs.py +2 -0
- ommlds/cli/sessions/chat/inject.py +1 -0
- ommlds/minichain/backends/impls/dummy/__init__.py +0 -0
- ommlds/minichain/backends/impls/dummy/chat.py +69 -0
- ommlds/minichain/resources.py +28 -3
- ommlds/minichain/stream/services.py +11 -10
- ommlds/wiki/analyze.py +2 -2
- {ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/METADATA +7 -7
- {ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/RECORD +23 -18
- {ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/top_level.txt +0 -0
ommlds/.omlish-manifests.json
CHANGED
@@ -91,6 +91,52 @@
       }
     }
   },
+  {
+    "module": ".minichain.backends.impls.dummy.chat",
+    "attr": null,
+    "file": "ommlds/minichain/backends/impls/dummy/chat.py",
+    "line": 26,
+    "value": {
+      "!.minichain.registries.manifests.RegistryManifest": {
+        "module": "ommlds.minichain.backends.impls.dummy.chat",
+        "attr": "DummyChatChoicesService",
+        "name": "dummy",
+        "aliases": null,
+        "type": "ChatChoicesService"
+      }
+    }
+  },
+  {
+    "module": ".minichain.backends.impls.dummy.chat",
+    "attr": null,
+    "file": "ommlds/minichain/backends/impls/dummy/chat.py",
+    "line": 39,
+    "value": {
+      "!.minichain.registries.manifests.RegistryManifest": {
+        "module": "ommlds.minichain.backends.impls.dummy.chat",
+        "attr": "DummyChatChoicesStreamService",
+        "name": "dummy",
+        "aliases": null,
+        "type": "ChatChoicesStreamService"
+      }
+    }
+  },
+  {
+    "module": ".minichain.backends.impls.dummy.chat",
+    "attr": null,
+    "file": "ommlds/minichain/backends/impls/dummy/chat.py",
+    "line": 63,
+    "value": {
+      "!.minichain.backends.strings.manifests.BackendStringsManifest": {
+        "service_cls_names": [
+          "ChatChoicesService",
+          "ChatChoicesStreamService"
+        ],
+        "backend_name": "dummy",
+        "model_names": null
+      }
+    }
+  },
   {
     "module": ".minichain.backends.impls.google.chat",
     "attr": null,

ommlds/__about__.py
CHANGED
@@ -38,7 +38,7 @@ class Project(ProjectBase):
 
         'huggingface': [
             'huggingface-hub ~= 0.36',
-            'datasets ~= 4.
+            'datasets ~= 4.4',
         ],
 
         'nanochat': [
@@ -60,7 +60,7 @@ class Project(ProjectBase):
         ],
 
         'search': [
-            'ddgs ~= 9.
+            'ddgs ~= 9.8',
        ],
 
         'wiki': [

ommlds/backends/tinygrad/models/llama3/__init__.py
CHANGED
@@ -4,17 +4,25 @@
 # https://github.com/tinygrad/tinygrad/blob/ef17af85c6d3f84d1e1cc084d6dee8ced3d1a33e/extra/models/llama.py
 
 
-from
-
-
-
-
-
-
-
-
-
-
-
-
-)
+from omlish import lang as _lang
+
+
+with _lang.auto_proxy_init(globals()):
+    from .fetch import (  # noqa
+        fetch_model,
+    )
+
+    from .llm import (  # noqa
+        Llama3Llm,
+        run_llm,
+        run_llm_to_stop,
+        RunLlmToStopResult,
+    )
+
+    from .tokenization import (  # noqa
+        Tokenizer,
+    )
+
+    from .transformer import (  # noqa
+        Transformer,
+    )

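A minimal usage sketch (not part of the diff) that only touches the re-exported names. The deferral behavior described in the comments is an assumption about omlish's auto_proxy_init rather than something verified here.

import ommlds.backends.tinygrad.models.llama3 as llama3

# Merely importing the package should now be cheap; the heavy submodules
# (.llm, .tokenization, .transformer, ...) are presumably loaded only when one
# of the proxied names is first accessed. Nothing is instantiated here.
llm_cls = llama3.Llama3Llm
tokenizer_cls = llama3.Tokenizer
print(llm_cls.__name__, tokenizer_cls.__name__)
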
ommlds/cli/asyncs.py
ADDED
@@ -0,0 +1,30 @@
+import abc
+import functools
+import typing as ta
+
+from omlish import lang
+
+
+with lang.auto_proxy_import(globals()):
+    import anyio
+
+
+T = ta.TypeVar('T')
+P = ta.ParamSpec('P')
+
+
+##
+
+
+class AsyncThreadRunner(lang.Abstract):
+    @abc.abstractmethod
+    def run_in_thread(self, fn: ta.Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> ta.Awaitable[T]:
+        raise NotImplementedError
+
+
+##
+
+
+class AnyioAsyncThreadRunner(AsyncThreadRunner):
+    def run_in_thread(self, fn: ta.Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> ta.Awaitable[T]:
+        return anyio.to_thread.run_sync(functools.partial(fn, *args, **kwargs))

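A minimal usage sketch of the new runner abstraction (not taken from the package); the anyio entrypoint and the blocking helper below are illustrative assumptions.

import anyio

from ommlds.cli.asyncs import AnyioAsyncThreadRunner


def blocking_prompt(text: str) -> str:
    # Stand-in for any blocking call (e.g. input()); hypothetical helper.
    return text.upper()


async def demo() -> None:
    runner = AnyioAsyncThreadRunner()
    # run_in_thread returns an awaitable resolving to the callable's result,
    # with the work pushed onto an anyio worker thread.
    result = await runner.run_in_thread(blocking_prompt, 'hello')
    print(result)


if __name__ == '__main__':
    anyio.run(demo)
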
ommlds/cli/inject.py
CHANGED
@@ -5,6 +5,7 @@ from omlish import lang
 
 
 with lang.auto_proxy_import(globals()):
+    from . import asyncs
     from .backends import inject as _backends
     from .sessions import inject as _sessions
     from .state import inject as _state
@@ -34,4 +35,10 @@ def bind_main(
 
     #
 
+    els.extend([
+        inj.bind(asyncs.AsyncThreadRunner, to_ctor=asyncs.AnyioAsyncThreadRunner),
+    ])
+
+    #
+
     return inj.as_elements(*els)

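A hedged sketch of how the new binding might be resolved through the CLI injector, reusing only calls that appear elsewhere in this diff (the bind_main keywords and create_managed_injector usage from main2.py); it is not a verbatim excerpt, and the isinstance check simply reflects the to_ctor binding above.

from omlish import inject as inj

from ommlds.cli import asyncs
from ommlds.cli.inject import bind_main
from ommlds.cli.sessions.chat.configs import ChatConfig

with inj.create_managed_injector(bind_main(
    session_cfg=ChatConfig(interactive=True),
    enable_backend_strings=True,
)) as injector:
    # The abstract AsyncThreadRunner key should resolve to the anyio-backed impl.
    runner = injector[asyncs.AsyncThreadRunner]
    assert isinstance(runner, asyncs.AnyioAsyncThreadRunner)
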
ommlds/cli/main.py
CHANGED
@@ -4,10 +4,8 @@ See:
  - https://github.com/TheR1D/shell_gpt
  - https://github.com/paul-gauthier/aider
 """
-import argparse
 import functools
 import os.path
-import sys
 import typing as ta
 
 import anyio
@@ -16,7 +14,7 @@ from omdev.home.secrets import load_secrets
 from omlish import check
 from omlish import inject as inj
 from omlish import lang
-from omlish.
+from omlish.argparse import all as ap
 from omlish.logs import all as logs
 from omlish.subprocesses.editor import edit_text_with_user_editor
 from omlish.subprocesses.sync import subprocesses
@@ -39,7 +37,7 @@ else:
 
 
 async def _a_main(args: ta.Any = None) -> None:
-    parser =
+    parser = ap.ArgumentParser()
     parser.add_argument('prompt', nargs='*')
 
     parser.add_argument('-b', '--backend', default='openai')
@@ -105,16 +103,6 @@ async def _a_main(args: ta.Any = None) -> None:
     elif not args.prompt:
         raise ValueError('Must provide prompt')
 
-    else:
-        prompt = ' '.join(args.prompt)
-
-        # FIXME: ptk / maysync
-        if not sys.stdin.isatty() and not pycharm.is_pycharm_hosted():
-            stdin_data = sys.stdin.read()
-            prompt = '\n'.join([prompt, stdin_data])
-
-        content = prompt
-
     #
 
     # FIXME: lol garbage

ommlds/cli/main2.py
ADDED
@@ -0,0 +1,220 @@
+"""
+TODO:
+ - bootstrap lol
+"""
+import abc
+import functools
+import typing as ta
+
+import anyio
+
+from omlish import check
+from omlish import dataclasses as dc
+from omlish import inject as inj
+from omlish import lang
+from omlish.argparse import all as ap
+from omlish.logs import all as logs
+
+from .inject import bind_main
+from .sessions.base import Session
+from .sessions.chat.configs import ChatConfig
+
+
+##
+
+
+MAIN_EXTRA_ARGS: ta.Sequence[ap.Arg] = [
+    ap.arg('-v', '--verbose', action='store_true'),
+]
+
+
+def _process_main_extra_args(args: ap.Namespace) -> None:
+    if args.verbose:
+        logs.configure_standard_logging('DEBUG')
+    else:
+        logs.configure_standard_logging('INFO')
+        logs.silence_noisy_loggers()
+
+
+##
+
+
+class Profile(lang.Abstract):
+    @abc.abstractmethod
+    def run(self, argv: ta.Sequence[str]) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+
+##
+
+
+# class ChatAspect(lang.Abstract):
+#     def get_parser_args(self) -> ta.Sequence[ap.Arg]: ...
+#     def set_args(self, args: ap.Namespace) -> None: ...
+#     def configure(self, cfg: ChatConfig) -> ChatConfig: ...
+
+
+class ChatProfile(Profile):
+    _args: ap.Namespace
+
+    #
+
+    BACKEND_ARGS: ta.ClassVar[ta.Sequence[ap.Arg]] = [
+        ap.arg('-b', '--backend', group='backend'),
+    ]
+
+    def configure_backend(self, cfg: ChatConfig) -> ChatConfig:
+        return dc.replace(
+            cfg,
+            backend=self._args.backend,
+        )
+
+    #
+
+    INPUT_ARGS: ta.ClassVar[ta.Sequence[ap.Arg]] = [
+        ap.arg('message', nargs='*', group='input'),
+        ap.arg('-i', '--interactive', action='store_true', group='input'),
+        ap.arg('-e', '--editor', action='store_true', group='input'),
+    ]
+
+    def configure_input(self, cfg: ChatConfig) -> ChatConfig:
+        if self._args.editor:
+            check.arg(not self._args.interactive)
+            check.arg(not self._args.message)
+            raise NotImplementedError
+
+        elif self._args.interactive:
+            check.arg(not self._args.message)
+            return dc.replace(
+                cfg,
+                interactive=True,
+            )
+
+        elif self._args.message:
+            # TODO: '-' -> stdin
+            return dc.replace(
+                cfg,
+                initial_user_content=' '.join(self._args.message),
+            )
+
+        else:
+            raise ValueError('Must specify input')
+
+    #
+
+    STATE_ARGS: ta.ClassVar[ta.Sequence[ap.Arg]] = [
+        ap.arg('-n', '--new', action='store_true', group='state'),
+        ap.arg('--ephemeral', action='store_true', group='state'),
+    ]
+
+    def configure_state(self, cfg: ChatConfig) -> ChatConfig:
+        return dc.replace(
+            cfg,
+            state='ephemeral' if self._args.ephemeral else 'new' if self._args.new else 'continue',
+        )
+
+    #
+
+    OUTPUT_ARGS: ta.ClassVar[ta.Sequence[ap.Arg]] = [
+        ap.arg('-s', '--stream', action='store_true', group='output'),
+        ap.arg('-M', '--markdown', action='store_true', group='output'),
+    ]
+
+    def configure_output(self, cfg: ChatConfig) -> ChatConfig:
+        return dc.replace(
+            cfg,
+            stream=bool(self._args.stream),
+            markdown=bool(self._args.markdown),
+        )
+
+    #
+
+    async def run(self, argv: ta.Sequence[str]) -> None:
+        parser = ap.ArgumentParser()
+
+        for grp_name, grp_args in [
+            ('backend', self.BACKEND_ARGS),
+            ('input', self.INPUT_ARGS),
+            ('state', self.STATE_ARGS),
+            ('output', self.OUTPUT_ARGS),
+        ]:
+            grp = parser.add_argument_group(grp_name)
+            for a in grp_args:
+                grp.add_argument(*a.args, **a.kwargs)
+
+        self._args = parser.parse_args(argv)
+
+        cfg = ChatConfig()
+        cfg = self.configure_backend(cfg)
+        cfg = self.configure_input(cfg)
+        cfg = self.configure_state(cfg)
+        cfg = self.configure_output(cfg)
+
+        # session_cfg = ChatConfig(
+        #     initial_system_content=system_content,
+        #     enable_tools=(
+        #         args.enable_fs_tools or
+        #         args.enable_todo_tools or
+        #         args.enable_unsafe_tools_do_not_use_lol or
+        #         args.enable_test_weather_tool or
+        #         args.code
+        #     ),
+        #     enabled_tools={  # noqa
+        #         *(['fs'] if args.enable_fs_tools else []),
+        #         *(['todo'] if args.enable_todo_tools else []),
+        #         *(['weather'] if args.enable_test_weather_tool else []),
+        #         # FIXME: enable_unsafe_tools_do_not_use_lol
+        #     },
+        #     dangerous_no_tool_confirmation=bool(args.dangerous_no_tool_confirmation),
+        # )
+
+        with inj.create_managed_injector(bind_main(
+            session_cfg=cfg,
+            enable_backend_strings=True,
+        )) as injector:
+            await injector[Session].run()
+
+
+##
+
+
+PROFILE_TYPES: ta.Mapping[str, type[Profile]] = {
+    'chat': ChatProfile,
+}
+
+
+##
+
+
+MAIN_PROFILE_ARGS: ta.Sequence[ap.Arg] = [
+    ap.arg('profile', nargs='?', default='chat'),
+    ap.arg('args', nargs=ap.REMAINDER),
+]
+
+
+async def _a_main(argv: ta.Any = None) -> None:
+    parser = ap.ArgumentParser()
+
+    for a in [*MAIN_EXTRA_ARGS, *MAIN_PROFILE_ARGS]:
+        parser.add_argument(*a.args, **a.kwargs)
+
+    args, unk_args = parser.parse_known_args(argv)
+
+    _process_main_extra_args(args)
+
+    profile_cls = PROFILE_TYPES[args.profile]
+    profile = profile_cls()
+    await profile.run([*unk_args, *args.args])
+
+
+def _main(args: ta.Any = None) -> None:
+    anyio.run(
+        functools.partial(
+            _a_main,
+            args,
+        ),
+    )  # noqa
+
+
+if __name__ == '__main__':
+    _main()

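A small invocation sketch for the new profile-dispatching entrypoint (illustrative only): the argv values mirror the flags defined above, and actually running it assumes a working backend, registry, and configuration.

from ommlds.cli.main2 import _main

# Dispatches to ChatProfile ('chat' is also the default profile) and forwards
# the remaining arguments to its own parser: '-b dummy' picks a backend,
# '-s' streams the output, and the trailing words become the user message.
_main(['chat', '-b', 'dummy', '-s', 'hello', 'there'])
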
ommlds/cli/sessions/chat/chat/user/inject.py
CHANGED
@@ -11,6 +11,7 @@ from ...phases.types import ChatPhaseCallback
 
 with lang.auto_proxy_import(globals()):
     from ..state import types as _state
+    from . import inputs as _inputs
     from . import interactive as _interactive
     from . import oneshot as _oneshot
     from . import types as _types
@@ -24,6 +25,7 @@ def bind_user(
         initial_system_content: ta.Optional['mc.Content'] = None,
         initial_user_content: ta.Optional['mc.Content'] = None,
         interactive: bool = False,
+        use_readline: bool | ta.Literal['auto'] = False,
 ) -> inj.Elements:
     els: list[inj.Elemental] = []
 
@@ -49,6 +51,11 @@ def bind_user(
 
         els.append(inj.bind(_types.UserChatInput, to_ctor=_interactive.InteractiveUserChatInput, singleton=True))
 
+        els.extend([
+            inj.bind(_inputs.SyncStringInput, to_const=_inputs.InputSyncStringInput(use_readline=use_readline)),  # noqa
+            inj.bind(_inputs.AsyncStringInput, to_ctor=_inputs.ThreadAsyncStringInput, singleton=True),
+        ])
+
     else:
         if initial_user_content is None:
             raise ValueError('Initial user content is required for non-interactive chat')

ommlds/cli/sessions/chat/chat/user/inputs.py
ADDED
@@ -0,0 +1,105 @@
+import sys
+import typing as ta
+
+from omlish import check
+from omlish import lang
+
+from ..... import asyncs
+
+
+with lang.auto_proxy_import(globals()):
+    from omlish.subprocesses import editor
+    from omlish.subprocesses import sync as sync_subprocesses
+
+
+##
+
+
+class SyncStringInput(ta.Protocol):
+    def __call__(self) -> str: ...
+
+
+class InputSyncStringInput:
+    DEFAULT_PROMPT: ta.ClassVar[str] = '> '
+
+    def __init__(
+            self,
+            prompt: str | None = None,
+            *,
+            use_readline: bool | ta.Literal['auto'] = False,
+    ) -> None:
+        super().__init__()
+
+        if prompt is None:
+            prompt = self.DEFAULT_PROMPT
+        self._prompt = prompt
+        self._use_readline = use_readline
+
+        self._handled_readline = False
+
+    def _handle_readline(self) -> None:
+        if self._handled_readline:
+            return
+        self._handled_readline = True
+
+        if not self._use_readline:
+            return
+
+        if self._use_readline == 'auto':
+            if not sys.stdin.isatty():
+                return
+
+        try:
+            import readline  # noqa
+        except ImportError:
+            pass
+
+    def __call__(self) -> str:
+        self._handle_readline()
+        return input(self._prompt)
+
+
+class FileSyncStringInput(InputSyncStringInput):
+    def __init__(self, path: str) -> None:
+        super().__init__()
+
+        self._path = check.non_empty_str(path)
+
+    def __call__(self) -> str:
+        with open(self._path) as f:
+            return f.read()
+
+
+class UserEditorSyncStringInput(InputSyncStringInput):
+    def __call__(self) -> str:
+        if (ec := editor.edit_text_with_user_editor('', sync_subprocesses.subprocesses)) is None:
+            raise EOFError
+        return ec
+
+
+##
+
+
+class AsyncStringInput(ta.Protocol):
+    def __call__(self) -> ta.Awaitable[str]: ...
+
+
+class ThreadAsyncStringInput:
+    def __init__(self, child: SyncStringInput, runner: asyncs.AsyncThreadRunner) -> None:
+        super().__init__()
+
+        self._child = child
+        self._runner = runner
+
+    async def __call__(self) -> str:
+        return await self._runner.run_in_thread(self._child)
+
+
+class SyncAsyncStringInput:
+    def __init__(self, child: SyncStringInput) -> None:
+        super().__init__()
+
+        self._child = child
+
+    async def __call__(self) -> str:
+        return self._child()

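A hedged composition sketch showing how the pieces above are meant to stack, with a sync reader wrapped so it can be awaited off-thread; the anyio.run driver and the prompt text are illustrative assumptions.

import anyio

from ommlds.cli.asyncs import AnyioAsyncThreadRunner
from ommlds.cli.sessions.chat.chat.user.inputs import InputSyncStringInput
from ommlds.cli.sessions.chat.chat.user.inputs import ThreadAsyncStringInput


async def read_one_line() -> str:
    # The blocking input() call runs on a worker thread so the event loop stays free.
    sync_input = InputSyncStringInput('you> ', use_readline='auto')
    async_input = ThreadAsyncStringInput(sync_input, AnyioAsyncThreadRunner())
    return await async_input()


if __name__ == '__main__':
    print(anyio.run(read_one_line))
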
ommlds/cli/sessions/chat/chat/user/interactive.py
CHANGED
@@ -1,9 +1,9 @@
-import functools
 import typing as ta
 
-from omlish import lang
-
 from ...... import minichain as mc
+from .inputs import AsyncStringInput
+from .inputs import InputSyncStringInput
+from .inputs import SyncAsyncStringInput
 from .types import UserChatInput
 
 
@@ -11,14 +11,16 @@ from .types import UserChatInput
 
 
 class InteractiveUserChatInput(UserChatInput):
+    DEFAULT_STRING_INPUT: ta.ClassVar[AsyncStringInput] = SyncAsyncStringInput(InputSyncStringInput())
+
     def __init__(
             self,
-            string_input:
+            string_input: AsyncStringInput | None = None,
     ) -> None:
         super().__init__()
 
         if string_input is None:
-            string_input =
+            string_input = self.DEFAULT_STRING_INPUT
         self._string_input = string_input
 
     async def get_next_user_messages(self) -> 'mc.UserChat':

ommlds/cli/sessions/chat/configs.py
CHANGED
@@ -24,7 +24,9 @@ class ChatConfig:
 
     initial_system_content: ta.Optional['mc.Content'] = None
     initial_user_content: ta.Optional['mc.Content'] = None
+
     interactive: bool = False
+    use_readline: bool | ta.Literal['auto'] = 'auto'
 
     silent: bool = False
     markdown: bool = False

ommlds/minichain/backends/impls/dummy/__init__.py
File without changes

ommlds/minichain/backends/impls/dummy/chat.py
ADDED
@@ -0,0 +1,69 @@
+# ruff: noqa: PERF402
+import typing as ta
+
+from omlish.text.lorem import LOREM
+
+from ....chat.choices.services import ChatChoicesRequest
+from ....chat.choices.services import ChatChoicesResponse
+from ....chat.choices.services import static_check_is_chat_choices_service
+from ....chat.choices.types import AiChoice
+from ....chat.choices.types import ChatChoicesOutputs
+from ....chat.messages import AiMessage
+from ....chat.stream.services import ChatChoicesStreamRequest
+from ....chat.stream.services import ChatChoicesStreamResponse
+from ....chat.stream.services import static_check_is_chat_choices_stream_service
+from ....chat.stream.types import AiChoiceDeltas
+from ....chat.stream.types import AiChoicesDeltas
+from ....chat.stream.types import ContentAiChoiceDelta
+from ....resources import UseResources
+from ....stream.services import StreamResponseSink
+from ....stream.services import new_stream_response
+
+
+##
+
+
+# @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
+#     name='dummy',
+#     type='ChatChoicesService',
+# )
+@static_check_is_chat_choices_service
+class DummyChatChoicesService:
+    async def invoke(self, request: ChatChoicesRequest) -> ChatChoicesResponse:
+        return ChatChoicesResponse([AiChoice([AiMessage(LOREM)])])
+
+
+##
+
+
+# @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
+#     name='dummy',
+#     type='ChatChoicesStreamService',
+# )
+@static_check_is_chat_choices_stream_service
+class DummyChatChoicesStreamService:
+    async def invoke(self, request: ChatChoicesStreamRequest) -> ChatChoicesStreamResponse:
+        async with UseResources.or_new(request.options) as rs:
+            async def inner(sink: StreamResponseSink[AiChoicesDeltas]) -> ta.Sequence[ChatChoicesOutputs]:
+                for s in LOREM:
+                    await sink.emit(AiChoicesDeltas([
+                        AiChoiceDeltas([
+                            ContentAiChoiceDelta(s),
+                        ]),
+                    ]))
+
+                return []
+
+            return await new_stream_response(rs, inner)
+
+
+##
+
+
+# @omlish-manifest $.minichain.backends.strings.manifests.BackendStringsManifest(
+#     [
+#         'ChatChoicesService',
+#         'ChatChoicesStreamService',
+#     ],
+#     'dummy',
+# )

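The dummy backend doubles as a template. Below is a hedged sketch of what another trivial, hard-coded backend could look like using the same magic-comment registration pattern; the 'canned' name and class are hypothetical, the reply is fixed because the request's message structure is not shown in this diff, and the absolute import paths are inferred from the relative imports above.

from ommlds.minichain.chat.choices.services import ChatChoicesRequest
from ommlds.minichain.chat.choices.services import ChatChoicesResponse
from ommlds.minichain.chat.choices.services import static_check_is_chat_choices_service
from ommlds.minichain.chat.choices.types import AiChoice
from ommlds.minichain.chat.messages import AiMessage


# @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
#     name='canned',
#     type='ChatChoicesService',
# )
@static_check_is_chat_choices_service
class CannedChatChoicesService:
    async def invoke(self, request: ChatChoicesRequest) -> ChatChoicesResponse:
        # Always answers with the same text, mirroring DummyChatChoicesService
        # but without depending on LOREM.
        return ChatChoicesResponse([AiChoice([AiMessage('canned reply')])])
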
ommlds/minichain/resources.py
CHANGED
@@ -31,6 +31,7 @@ class ResourcesRefNotRegisteredError(Exception):
     pass
 
 
+@ta.final
 class Resources(lang.Final, lang.NotPicklable):
     def __init__(
             self,
@@ -145,24 +146,48 @@ class Resources(lang.Final, lang.NotPicklable):
 ##
 
 
+@ta.final
 class ResourceManaged(ResourcesRef, lang.Final, lang.NotPicklable, ta.Generic[T]):
+    """
+    A class to 'handoff' a ref to a `Resources`, allowing the `Resources` to temporarily survive being passed from
+    instantiation within a callee to being `__aenter__`'d in the caller.
+
+    The ref to the `Resources` is allocated in the ctor, so the contract is that an instance of this must be immediately
+    `__aenter__`'d before doing anything else with the return value of the call. Failure to do so leaks the `Resources`.
+    """
+
     def __init__(self, v: T, resources: Resources) -> None:
         super().__init__()
 
-        self.
+        self.__v = v
         self.__resources = resources
 
         resources.add_ref(self)
 
+    __state: ta.Literal['new', 'entered', 'exited'] = 'new'
+
     def __repr__(self) -> str:
-        return f'{self.__class__.__name__}<{self.
+        return f'{self.__class__.__name__}<{self.__v!r}, {self.__state}>'
 
     async def __aenter__(self) -> T:
-
+        check.state(self.__state == 'new')
+        self.__state = 'entered'
+        return self.__v
 
     async def __aexit__(self, exc_type, exc_val, exc_tb):
+        check.state(self.__state == 'entered')
+        self.__state = 'exited'
         await self.__resources.remove_ref(self)
 
+    def __del__(self) -> None:
+        if self.__state != 'exited':
+            log.error(
+                f'{__package__}.{self.__class__.__name__}.__del__: '  # noqa
+                f'%r deleted without being entered and exited! '
+                f'resources: %s',
+                repr(self),
+                repr(self.__resources),
+            )
 
 ##
 

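A hedged sketch of the handoff contract the new docstring and state machine enforce: UseResources.or_new() and new_managed() appear elsewhere in this diff, while the produce()/consume() helpers and the options argument (e.g. a request's options) are illustrative assumptions.

from ommlds.minichain.resources import UseResources


async def produce(options):
    # Callee: obtain the resources, tie a value to them, and hand back a
    # ResourceManaged whose ref keeps the Resources alive past this block.
    async with UseResources.or_new(options) as rs:
        value = object()  # stand-in for something whose lifetime is tied to rs
        return rs.new_managed(value)


async def consume(options):
    managed = await produce(options)
    # Caller: enter the handle immediately, per the contract above; skipping
    # this would trip the __del__ warning and leak the underlying Resources.
    async with managed as value:
        print(value)
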
ommlds/minichain/stream/services.py
CHANGED
@@ -197,13 +197,14 @@ async def new_stream_response(
         fn: ta.Callable[[StreamResponseSink[V]], ta.Awaitable[ta.Sequence[OutputT] | None]],
         outputs: ta.Sequence[StreamOutputT] | None = None,
 ) -> StreamResponse[V, OutputT, StreamOutputT]:
-
-
-
-
-
-
-
-
-
-
+    ssr = _StreamServiceResponse(fn)
+
+    v = rs.new_managed(await rs.enter_async_context(ssr))
+    try:
+        return StreamResponse(v, outputs or [])
+    except BaseException:  # noqa
+        # The StreamResponse ctor can raise - for example in `_tv_field_coercer` - in which case we need to clean up the
+        # resources ref we have already allocated before reraising.
+        async with v:
+            pass
+        raise

ommlds/wiki/analyze.py
CHANGED
@@ -213,7 +213,7 @@ class FileAnalyzer:
         self.verbose and print()
         return True
 
-    @logs.
+    @logs.exception_logging(log)
     def run(self, file_name: str) -> None:
         log.info(f'{self._ctx.deathpact=} {file_name}')  # noqa
 
@@ -268,7 +268,7 @@ class FileAnalyzer:
         self._flush_rows()
 
 
-@logs.
+@logs.exception_logging(log)
 def analyze_file(
         file_name: str,
         db_url: str,

{ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ommlds
-Version: 0.0.0.
+Version: 0.0.0.dev475
 Summary: ommlds
 Author: wrmsr
 License-Expression: BSD-3-Clause
@@ -14,8 +14,8 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: omdev==0.0.0.
-Requires-Dist: omlish==0.0.0.
+Requires-Dist: omdev==0.0.0.dev475
+Requires-Dist: omlish==0.0.0.dev475
 Provides-Extra: all
 Requires-Dist: llama-cpp-python~=0.3; extra == "all"
 Requires-Dist: mlx~=0.29; extra == "all"
@@ -27,13 +27,13 @@ Requires-Dist: torch~=2.9; extra == "all"
 Requires-Dist: transformers~=4.57; extra == "all"
 Requires-Dist: sentence-transformers~=5.1; extra == "all"
 Requires-Dist: huggingface-hub~=0.36; extra == "all"
-Requires-Dist: datasets~=4.
+Requires-Dist: datasets~=4.4; extra == "all"
 Requires-Dist: regex>=2025.0; extra == "all"
 Requires-Dist: numpy>=1.26; extra == "all"
 Requires-Dist: pytesseract~=0.3; extra == "all"
 Requires-Dist: rapidocr-onnxruntime~=1.4; extra == "all"
 Requires-Dist: pillow~=12.0; extra == "all"
-Requires-Dist: ddgs~=9.
+Requires-Dist: ddgs~=9.8; extra == "all"
 Requires-Dist: mwparserfromhell~=0.7; extra == "all"
 Requires-Dist: wikitextparser~=0.56; extra == "all"
 Requires-Dist: lxml>=5.3; python_version < "3.13" and extra == "all"
@@ -49,7 +49,7 @@ Requires-Dist: transformers~=4.57; extra == "backends"
 Requires-Dist: sentence-transformers~=5.1; extra == "backends"
 Provides-Extra: huggingface
 Requires-Dist: huggingface-hub~=0.36; extra == "huggingface"
-Requires-Dist: datasets~=4.
+Requires-Dist: datasets~=4.4; extra == "huggingface"
 Provides-Extra: nanochat
 Requires-Dist: regex>=2025.0; extra == "nanochat"
 Provides-Extra: numpy
@@ -60,7 +60,7 @@ Requires-Dist: rapidocr-onnxruntime~=1.4; extra == "ocr"
 Provides-Extra: pillow
 Requires-Dist: pillow~=12.0; extra == "pillow"
 Provides-Extra: search
-Requires-Dist: ddgs~=9.
+Requires-Dist: ddgs~=9.8; extra == "search"
 Provides-Extra: wiki
 Requires-Dist: mwparserfromhell~=0.7; extra == "wiki"
 Requires-Dist: wikitextparser~=0.56; extra == "wiki"

{ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-ommlds/.omlish-manifests.json,sha256=
-ommlds/__about__.py,sha256=
+ommlds/.omlish-manifests.json,sha256=h7jN7c_zVxbxyIrJ5uzUNl2iJ-F2b2F-0OJfnxronEc,22883
+ommlds/__about__.py,sha256=mP2W108hhVEEa53DuCyBxWWFxtG8FB_CDe0erAZr2zA,1839
 ommlds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/_hacks/__init__.py,sha256=ajfw7dMKH8UuloeQ5MSxWwgAmdWf2v8gm-K3uLP9wtY,196
 ommlds/_hacks/funcs.py,sha256=8XseIblP7yolDUD7WQSGn1LP90IQzByVejSzphAPDyM,2861
@@ -59,7 +59,7 @@ ommlds/backends/tinygrad/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
 ommlds/backends/tinygrad/tinygrad.py,sha256=Va2mI11Q76IN9vLCYozMRXbEDVKxjKvRI0iQXJNJtJI,342
 ommlds/backends/tinygrad/models/LICENSE,sha256=2IwCnrbqw67PNQEr5RnaRgXROGYc1nmfgO64MDlfMwI,1058
 ommlds/backends/tinygrad/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ommlds/backends/tinygrad/models/llama3/__init__.py,sha256=
+ommlds/backends/tinygrad/models/llama3/__init__.py,sha256=WIDkWb6ZtGVjBs6fSWpM_jWtGGU0BHMca3L_hMB9CpU,627
 ommlds/backends/tinygrad/models/llama3/__main__.py,sha256=gFhR9DikwDZk0LqgdR3qq_aXQHThUOPllDmHDOfnFAU,67
 ommlds/backends/tinygrad/models/llama3/attention.py,sha256=Du-C6mSs7co3gAxpTSBdxUo-44GwqiinAPsqYjWJA8s,6420
 ommlds/backends/tinygrad/models/llama3/cli.py,sha256=C0KEhAU03jAtLnPEa4nhchequVrJ55neJMrPj_4puSk,2764
@@ -85,8 +85,10 @@ ommlds/backends/transformers/filecache.py,sha256=ycfswt7f4qRrPSTFRhofXZaDBuDPpyp
 ommlds/backends/transformers/streamers.py,sha256=Hu_9lp_kUilKjOfs7Ixqr2NoA5FuRn2eRh8JdvaBDYc,1688
 ommlds/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/cli/__main__.py,sha256=1ffCb0fcUOJMzxROJmJRXQ8PSOVYv7KrcuBtT95cf0c,140
-ommlds/cli/
-ommlds/cli/
+ommlds/cli/asyncs.py,sha256=NAMzzaZq7ORjlbbBB_Y9vcM9qoBpGf4VJNtl_3p_8G4,629
+ommlds/cli/inject.py,sha256=w1_gk26_RL7pJtzS9gSAO7CE24yWwW0DnhBhE80Wnjw,806
+ommlds/cli/main.py,sha256=z87Vu6-_jrzrYuGV7D8eA3ePDHlOvRrl4QUYLYKpdq0,5500
+ommlds/cli/main2.py,sha256=Fl6ljUkb8JxhRGe0mekKfOzGmE4GWqc-pVR0ZKCdor8,5724
 ommlds/cli/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/cli/backends/inject.py,sha256=OVstNsoeVnprM9PBL_zP0N46KkoDg3_Wz90BWcQ7km4,1734
 ommlds/cli/backends/standard.py,sha256=HnammWyAXJHeqXJrAMBdarcT4Nyt2CxudZdD2fW_Y9M,631
@@ -94,9 +96,9 @@ ommlds/cli/sessions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 ommlds/cli/sessions/base.py,sha256=oTqsqZ9jhBWFblANpVWLLIzmRfP8HO9QYtPnZ-GZxS0,452
 ommlds/cli/sessions/inject.py,sha256=9SrtsozIhqok3jZtepKTJwpOxHkU7FrqKw6pc78mEO4,926
 ommlds/cli/sessions/chat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ommlds/cli/sessions/chat/configs.py,sha256=
+ommlds/cli/sessions/chat/configs.py,sha256=C8Q3oZPBAiqOgDiwkNynWA9WLYCGzCFQR6gfCRFlWow,746
 ommlds/cli/sessions/chat/driver.py,sha256=ddnCYTKqWiPxV8U4UbFwb7E3yi81ItjZ9j3AJd3a3Mk,1395
-ommlds/cli/sessions/chat/inject.py,sha256=
+ommlds/cli/sessions/chat/inject.py,sha256=i7XW9MPJfeKfj8fQDwbU9bUz3_E5ThSL1R7hJjFxggw,1971
 ommlds/cli/sessions/chat/session.py,sha256=eqwelLE74JFC-fBpk_hdwMD2nP4pLv3ZPwUn99200B8,521
 ommlds/cli/sessions/chat/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/cli/sessions/chat/backends/catalog.py,sha256=hIY0L1zewuJX0_xxcMcy4gylSLiQENB3YxgYJEoKgrU,2109
@@ -117,8 +119,9 @@ ommlds/cli/sessions/chat/chat/state/inmemory.py,sha256=2lSCWnNEH_vj9RJUVzM8huAHA
 ommlds/cli/sessions/chat/chat/state/storage.py,sha256=Q7OOjZKjvkRbiEWIVFKG66xxrxHqFFzx0IcL5cNJrG4,1436
 ommlds/cli/sessions/chat/chat/state/types.py,sha256=iMovPXnRvZJ8ieM5gPnTBi1X7j-9GUtziiPBZJJf034,794
 ommlds/cli/sessions/chat/chat/user/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ommlds/cli/sessions/chat/chat/user/inject.py,sha256=
-ommlds/cli/sessions/chat/chat/user/
+ommlds/cli/sessions/chat/chat/user/inject.py,sha256=rCk1zVQ1KE9uvJ_y0sQIIjdsFd1QWWfcbg6haQgUN70,2515
+ommlds/cli/sessions/chat/chat/user/inputs.py,sha256=aOzq7kkzvRN-ZezZDiCnViLJAeGtfNP6O-XnqJEGE_k,2372
+ommlds/cli/sessions/chat/chat/user/interactive.py,sha256=kF7g4XIafb-vUHKxtpOUZT8Mt9cYjZDCEvBrnNSnAJ0,831
 ommlds/cli/sessions/chat/chat/user/oneshot.py,sha256=jPrZBBuf-olBfPF7CPTYK7-Dr7EvSriU7L0nORHfbv4,588
 ommlds/cli/sessions/chat/chat/user/types.py,sha256=MNlhMxlLtxVod9rUZlSPvRaippPAXEdX_GHh73QLeSg,262
 ommlds/cli/sessions/chat/content/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -163,7 +166,7 @@ ommlds/minichain/configs.py,sha256=WwrHxfkDAfo_RtuCqUgySthj-2W26lZbpuQoghUyGNw,1
 ommlds/minichain/envs.py,sha256=vE2CSeT6KYxOpPY72VbFLzGUnBERYdhfiEUlvSRHkXE,225
 ommlds/minichain/json.py,sha256=0_5rV5Zi2qPOvXi2CLAc5DF7FN3jK3ABbjoKdjtTuVo,360
 ommlds/minichain/metadata.py,sha256=2jik8gEm_VMnknPuPwqRssTg0MClRFUrXz_IsyEgUt4,878
-ommlds/minichain/resources.py,sha256=
+ommlds/minichain/resources.py,sha256=CcFIUrxPGuxUabG74zL0yByZsyGJISxLVK1nULSZPyo,5488
 ommlds/minichain/search.py,sha256=azRzWcYhcm9IgSHquqLwtbwowtYCRAtPLSm7Gvt9iNo,1262
 ommlds/minichain/standard.py,sha256=cGXaGtC5iM9Q2lCcbhLtvEcPGKhcJUIh3UWyNgOssRM,2580
 ommlds/minichain/types.py,sha256=K6RRjpUi17UEG0cqPrrvbVANU0iRVh3WLiH-y6oEWFI,414
@@ -183,6 +186,8 @@ ommlds/minichain/backends/impls/anthropic/protocol.py,sha256=whPVYuKShKiMCzasHl7
 ommlds/minichain/backends/impls/anthropic/stream.py,sha256=NNBFb0sMId9yWua3fkAMZ-qYhQN9nLrXiO4DViR77YI,8790
 ommlds/minichain/backends/impls/duckduckgo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/minichain/backends/impls/duckduckgo/search.py,sha256=igzeU9P9b1MMiu4KAJVS9H6KLIoPm68wXi4Kx3_DHyQ,940
+ommlds/minichain/backends/impls/dummy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ommlds/minichain/backends/impls/dummy/chat.py,sha256=_FYMplztZqXqjiFW3dkpytDRX1G4kw_zcBvJevkH4zE,2255
 ommlds/minichain/backends/impls/google/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/minichain/backends/impls/google/chat.py,sha256=lGb5blGLlcBlt9xeDZJvbh5SlV7fgfezd5_As_SPBXo,6499
 ommlds/minichain/backends/impls/google/names.py,sha256=HxHJ31HeKZg6aW1C_Anqp-gamCXpq9pOdKj8_yVgE8Y,871
@@ -318,7 +323,7 @@ ommlds/minichain/services/requests.py,sha256=VAfKbYu4T0CZTWVQmZ2LUmYU7DNm6IerYMN
 ommlds/minichain/services/responses.py,sha256=4W6Z4Fx4_GFqKgle27OeLr0zzjVTA0pkZrlsZiFQNdo,1534
 ommlds/minichain/services/services.py,sha256=WjkQNYIp87SflLSReOHMkG2qIVAOem6vsrs_2NxWN_M,325
 ommlds/minichain/stream/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ommlds/minichain/stream/services.py,sha256=
+ommlds/minichain/stream/services.py,sha256=YXfEj3ZXKZ3Svkig6f3hOReHgZnLY2tDn2bgB0RIoRI,5566
 ommlds/minichain/stream/wrap.py,sha256=nQC0aCi49I18nF0Yx8qiiLkhIAECV6s6o4pvOy5Kx98,2041
 ommlds/minichain/text/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/minichain/text/applypatch.py,sha256=YIN5JChJ0FXyK1I6OiAHQmE7BT-exHfaAMM9ay7ylyc,17705
@@ -370,7 +375,7 @@ ommlds/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ommlds/tools/git.py,sha256=ILvsOFXbdDQvAHvGCSbd2fY4fswmDRXaB8yVDQymLY0,8205
 ommlds/tools/ocr.py,sha256=UP2XK4-ELyhK2BnuBr7-DwUbkDIcX9xdvfXVimM19Y8,1839
 ommlds/wiki/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ommlds/wiki/analyze.py,sha256=
+ommlds/wiki/analyze.py,sha256=lRzqxHjguc5WA0tasnbTSiCYCJiYWDU_q27rJn67HHU,9552
 ommlds/wiki/convert.py,sha256=4UqEKMWW03HwrfxYTn0wmXobYVrTSK2x9Lx-2MeJW8M,2531
 ommlds/wiki/models.py,sha256=MV7WqEqJJ_JTwIhaPNbQnRa_w7tO7mJggPu0xitJyLM,2473
 ommlds/wiki/xml.py,sha256=8Xt4roJ9cyOlZMxT4L5NHbkaeMlgXt3wmjiArmiMh28,2925
@@ -381,9 +386,9 @@ ommlds/wiki/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
 ommlds/wiki/utils/io.py,sha256=UKgDJGtmpnWvIqVd2mJc2QNPOqlToEY1GEveNp6_pMo,7088
 ommlds/wiki/utils/progress.py,sha256=EhvKcMFYtsarCQhIahlO6f0SboyAKP3UwUyrnVnP-Vk,3222
 ommlds/wiki/utils/xml.py,sha256=vVV8Ctn13aaRM9eYfs9Wd6rHn5WOCEUzQ44fIhOvJdg,3754
-ommlds-0.0.0.
-ommlds-0.0.0.
-ommlds-0.0.0.
-ommlds-0.0.0.
-ommlds-0.0.0.
-ommlds-0.0.0.
+ommlds-0.0.0.dev475.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+ommlds-0.0.0.dev475.dist-info/METADATA,sha256=yT7pQDVXTP6BklUfJ7Q1bH_tOqNBe_uVpkCIQYv3E7U,3344
+ommlds-0.0.0.dev475.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ommlds-0.0.0.dev475.dist-info/entry_points.txt,sha256=Z5YWtX7ClfiCKdW-dd_CSVvM0h4yQpJPi-2G3q6gNFo,35
+ommlds-0.0.0.dev475.dist-info/top_level.txt,sha256=Rbnk5d5wi58vnAXx13WFZqdQ4VX8hBCS2hEL3WeXOhY,7
+ommlds-0.0.0.dev475.dist-info/RECORD,,

{ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/WHEEL
File without changes

{ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/entry_points.txt
File without changes

{ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/licenses/LICENSE
File without changes

{ommlds-0.0.0.dev473.dist-info → ommlds-0.0.0.dev475.dist-info}/top_level.txt
File without changes