ommlds-0.0.0.dev466-py3-none-any.whl → ommlds-0.0.0.dev468-py3-none-any.whl

This diff shows the changes between these publicly released package versions as they appear in their public registry, and is provided for informational purposes only.

Files changed (26)
  1. ommlds/.omlish-manifests.json +129 -6
  2. ommlds/__about__.py +2 -2
  3. ommlds/backends/ollama/__init__.py +0 -0
  4. ommlds/backends/ollama/protocol.py +170 -0
  5. ommlds/backends/transformers/__init__.py +0 -0
  6. ommlds/backends/transformers/streamers.py +73 -0
  7. ommlds/cli/sessions/chat/backends/catalog.py +1 -1
  8. ommlds/minichain/__init__.py +4 -0
  9. ommlds/minichain/backends/impls/llamacpp/chat.py +9 -0
  10. ommlds/minichain/backends/impls/llamacpp/stream.py +26 -10
  11. ommlds/minichain/backends/impls/mlx/chat.py +95 -21
  12. ommlds/minichain/backends/impls/ollama/__init__.py +0 -0
  13. ommlds/minichain/backends/impls/ollama/chat.py +196 -0
  14. ommlds/minichain/backends/impls/openai/chat.py +2 -2
  15. ommlds/minichain/backends/impls/openai/format.py +106 -107
  16. ommlds/minichain/backends/impls/openai/stream.py +14 -13
  17. ommlds/minichain/backends/impls/transformers/transformers.py +93 -14
  18. ommlds/minichain/chat/stream/types.py +3 -0
  19. ommlds/minichain/standard.py +7 -0
  20. {ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/METADATA +7 -7
  21. {ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/RECORD +25 -20
  22. ommlds/minichain/backends/impls/openai/format2.py +0 -210
  23. {ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/WHEEL +0 -0
  24. {ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/entry_points.txt +0 -0
  25. {ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/licenses/LICENSE +0 -0
  26. {ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/top_level.txt +0 -0
ommlds/minichain/backends/impls/openai/stream.py

@@ -11,7 +11,7 @@ from omlish.http import all as http
  from omlish.http import sse
  from omlish.io.buffers import DelimitingBuffer

- from .....backends.openai.protocol.chatcompletion.chunk import ChatCompletionChunk
+ from .....backends.openai import protocol as pt
  from ....chat.choices.services import ChatChoicesOutputs
  from ....chat.stream.services import ChatChoicesStreamRequest
  from ....chat.stream.services import ChatChoicesStreamResponse
@@ -28,6 +28,7 @@ from ....stream.services import StreamResponseSink
  from ....stream.services import new_stream_response
  from .chat import OpenaiChatChoicesService
  from .format import OpenaiChatRequestHandler
+ from .format import build_mc_ai_choice_delta
  from .names import MODEL_NAMES


@@ -62,13 +63,13 @@ class OpenaiChatChoicesStreamService:
  model=MODEL_NAMES.resolve(self._model_name.v),
  mandatory_kwargs=dict(
  stream=True,
- stream_options=dict(
+ stream_options=pt.ChatCompletionRequest.StreamOptions(
  include_usage=True,
  ),
  ),
  )

- raw_request = rh.raw_request()
+ raw_request = msh.marshal(rh.oai_request())

  http_request = http.HttpRequest(
  'https://api.openai.com/v1/chat/completions',
@@ -105,20 +106,20 @@ class OpenaiChatChoicesStreamService:

  check.state(sj['object'] == 'chat.completion.chunk')

- ccc = msh.unmarshal(sj, ChatCompletionChunk) # noqa
- # print(ccc)
+ ccc = msh.unmarshal(sj, pt.ChatCompletionChunk)

  # FIXME: stop reason
- if not sj['choices']:
+ if not ccc.choices:
  continue

- if any(choice['delta'] for choice in sj['choices']):
- await sink.emit(AiChoicesDeltas([
- AiChoiceDeltas(
- [rh.build_ai_choice_delta(choice['delta'])] if choice['delta'] else [],
- )
- for choice in sj['choices']
- ]))
+ if any(choice.finish_reason for choice in ccc.choices):
+ check.state(all(choice.finish_reason for choice in ccc.choices))
+ break
+
+ await sink.emit(AiChoicesDeltas([
+ AiChoiceDeltas([build_mc_ai_choice_delta(choice.delta)])
+ for choice in ccc.choices
+ ]))

  if not b:
  return []
ommlds/minichain/backends/impls/transformers/transformers.py

@@ -16,23 +16,42 @@ from ....chat.choices.services import ChatChoicesRequest
  from ....chat.choices.services import ChatChoicesResponse
  from ....chat.choices.services import static_check_is_chat_choices_service
  from ....chat.choices.types import AiChoice
+ from ....chat.choices.types import ChatChoicesOutputs
  from ....chat.messages import AiMessage
  from ....chat.messages import Message
  from ....chat.messages import SystemMessage
  from ....chat.messages import ToolUseMessage
  from ....chat.messages import ToolUseResultMessage
  from ....chat.messages import UserMessage
+ from ....chat.stream.services import ChatChoicesStreamRequest
+ from ....chat.stream.services import ChatChoicesStreamResponse
+ from ....chat.stream.services import static_check_is_chat_choices_stream_service
+ from ....chat.stream.types import AiChoiceDeltas # noqa
+ from ....chat.stream.types import AiChoicesDeltas # noqa
+ from ....chat.stream.types import ContentAiChoiceDelta # noqa
  from ....completion import CompletionRequest
  from ....completion import CompletionResponse
  from ....completion import static_check_is_completion_service
  from ....configs import Config
  from ....models.configs import ModelPath
+ from ....resources import UseResources
+ from ....stream.services import StreamResponseSink
+ from ....stream.services import new_stream_response
  from ...impls.huggingface.configs import HuggingfaceHubToken


  ##


+ # @omlish-manifest $.minichain.backends.strings.manifests.BackendStringsManifest(
+ # ['ChatChoicesService', 'ChatChoicesStreamService'],
+ # 'transformers',
+ # )
+
+
+ ##
+
+
  class TransformersPipelineKwargs(Config, tv.ScalarTypedValue[ta.Mapping[str, ta.Any]]):
  pass

@@ -128,13 +147,10 @@ def build_chat_message(m: Message) -> ta.Mapping[str, ta.Any]:
  raise TypeError(m)


- # @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
- # name='transformers',
- # aliases=['tfm'],
- # type='ChatChoicesService',
- # )
- @static_check_is_chat_choices_service
- class TransformersChatChoicesService(lang.ExitStacked):
+ ##
+
+
+ class BaseTransformersChatChoicesService(lang.ExitStacked):
  DEFAULT_MODEL: ta.ClassVar[str] = (
  'meta-llama/Llama-3.2-1B-Instruct'
  )
@@ -166,16 +182,79 @@ class TransformersChatChoicesService(lang.ExitStacked):
  **pkw,
  )

+
+ ##
+
+
+ # @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
+ # name='transformers',
+ # aliases=['tfm'],
+ # type='ChatChoicesService',
+ # )
+ @static_check_is_chat_choices_service
+ class TransformersChatChoicesService(BaseTransformersChatChoicesService):
  async def invoke(self, request: ChatChoicesRequest) -> ChatChoicesResponse:
  check.empty(request.options)

  pipeline = self._load_pipeline()

- output = pipeline(
- [
- build_chat_message(m)
- for m in request.v
- ],
- )
+ inputs = [
+ build_chat_message(m)
+ for m in request.v
+ ]
+
+ outputs = pipeline(inputs)
+
+ gts = check.single(outputs)['generated_text']
+ ugt, agt = gts
+ check.state(ugt['role'] == 'user')
+ check.state(agt['role'] == 'assistant')
+
+ return ChatChoicesResponse([AiChoice([AiMessage(agt['content'])])])
+
+
+ ##
+
+
+ # @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
+ # name='transformers',
+ # type='ChatChoicesStreamService',
+ # )
+ @static_check_is_chat_choices_stream_service
+ class TransformersChatChoicesStreamService(BaseTransformersChatChoicesService):
+ async def invoke(self, request: ChatChoicesStreamRequest) -> ChatChoicesStreamResponse:
+ check.empty(request.options)

- return ChatChoicesResponse([AiChoice([output])])
+ pipeline = self._load_pipeline() # noqa
+
+ inputs = [ # noqa
+ build_chat_message(m)
+ for m in request.v
+ ]
+
+ async with UseResources.or_new(request.options) as rs:
+ async def inner(sink: StreamResponseSink[AiChoicesDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
+ # last_role: ta.Any = None
+ #
+ # for chunk in output:
+ # check.state(chunk['object'] == 'chat.completion.chunk')
+ #
+ # choice = check.single(chunk['choices'])
+ #
+ # if not (delta := choice.get('delta', {})):
+ # continue
+ #
+ # # FIXME: check role is assistant
+ # if (role := delta.get('role')) != last_role:
+ # last_role = role
+ #
+ # # FIXME: stop reason
+ #
+ # if (content := delta.get('content', '')):
+ # await sink.emit(AiChoicesDeltas([AiChoiceDeltas([ContentAiChoiceDelta(content)])]))
+ #
+ # return None
+
+ raise NotImplementedError
+
+ return await new_stream_response(rs, inner)
ommlds/minichain/chat/stream/types.py

@@ -42,6 +42,9 @@ class AiChoiceDelta(lang.Sealed, lang.Abstract):
  pass


+ #
+
+
  @dc.dataclass(frozen=True)
  class ContentAiChoiceDelta(AiChoiceDelta, lang.Final):
  c: Content
ommlds/minichain/standard.py

@@ -25,6 +25,13 @@ class Device(tv.UniqueScalarTypedValue[ta.Any], Config):
  ##


+ class ApiUrl(tv.UniqueScalarTypedValue[str], Config):
+ pass
+
+
+ ##
+
+
  @dc.dataclass(frozen=True)
  class SecretConfig(Config, lang.Abstract):
  v: sec.SecretRefOrStr = dc.field() | sec.secret_field
{ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ommlds
- Version: 0.0.0.dev466
+ Version: 0.0.0.dev468
  Summary: ommlds
  Author: wrmsr
  License-Expression: BSD-3-Clause
@@ -14,8 +14,8 @@ Classifier: Programming Language :: Python :: 3.13
  Requires-Python: >=3.13
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: omdev==0.0.0.dev466
- Requires-Dist: omlish==0.0.0.dev466
+ Requires-Dist: omdev==0.0.0.dev468
+ Requires-Dist: omlish==0.0.0.dev468
  Provides-Extra: all
  Requires-Dist: llama-cpp-python~=0.3; extra == "all"
  Requires-Dist: mlx~=0.29; extra == "all"
@@ -26,8 +26,8 @@ Requires-Dist: tokenizers~=0.22; extra == "all"
  Requires-Dist: torch~=2.9; extra == "all"
  Requires-Dist: transformers~=4.57; extra == "all"
  Requires-Dist: sentence-transformers~=5.1; extra == "all"
- Requires-Dist: huggingface-hub~=0.35; extra == "all"
- Requires-Dist: datasets~=4.2; extra == "all"
+ Requires-Dist: huggingface-hub~=0.36; extra == "all"
+ Requires-Dist: datasets~=4.3; extra == "all"
  Requires-Dist: numpy>=1.26; extra == "all"
  Requires-Dist: pytesseract~=0.3; extra == "all"
  Requires-Dist: rapidocr-onnxruntime~=1.4; extra == "all"
@@ -47,8 +47,8 @@ Requires-Dist: torch~=2.9; extra == "backends"
  Requires-Dist: transformers~=4.57; extra == "backends"
  Requires-Dist: sentence-transformers~=5.1; extra == "backends"
  Provides-Extra: huggingface
- Requires-Dist: huggingface-hub~=0.35; extra == "huggingface"
- Requires-Dist: datasets~=4.2; extra == "huggingface"
+ Requires-Dist: huggingface-hub~=0.36; extra == "huggingface"
+ Requires-Dist: datasets~=4.3; extra == "huggingface"
  Provides-Extra: numpy
  Requires-Dist: numpy>=1.26; extra == "numpy"
  Provides-Extra: ocr
{ommlds-0.0.0.dev466.dist-info → ommlds-0.0.0.dev468.dist-info}/RECORD

@@ -1,5 +1,5 @@
- ommlds/.omlish-manifests.json,sha256=ZrDlaAwG8hoshkjW-up0pk0dMvDI3g5dW1M92uaf5KI,17930
- ommlds/__about__.py,sha256=uAJgr2I_m_oZPlV5P8XLFeYpBlEM-DdzeyF6O5OK_qs,1759
+ ommlds/.omlish-manifests.json,sha256=u1WF90X6xpzZW21a4h5zzPyP4a3T30V08RjQz5HGABM,21555
+ ommlds/__about__.py,sha256=t2rQF0yXpWFcCb2dvgzGR3I35HKGvGSn-EfhaUWVl5s,1759
  ommlds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/huggingface.py,sha256=JfEyfKOxU3-SY_ojtXBJFNeD-NIuKjvMe3GL3e93wNA,1175
  ommlds/_hacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -37,6 +37,8 @@ ommlds/backends/mlx/tokenization/detokenization/base.py,sha256=Tezf8Anh-w7BxpNQs
  ommlds/backends/mlx/tokenization/detokenization/bpe.py,sha256=cIw6-r-cyXTfZdyfGRgohrElMIqeLKfMRb8R1H_56nY,3659
  ommlds/backends/mlx/tokenization/detokenization/naive.py,sha256=6L-SvphzP1z16cmVB4QC9VraF7khE8ZcvKqIwwFqN6U,1779
  ommlds/backends/mlx/tokenization/detokenization/spm.py,sha256=IYSnEm-C0z_o5TKLJE_Rj6P0nNd-prT6psVPKsERWAE,1751
+ ommlds/backends/ollama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ommlds/backends/ollama/protocol.py,sha256=1rBZOIb080MsWMfgU4d59wDQhW5EiyBYKgnFbBnLatg,4437
  ommlds/backends/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/backends/openai/protocol/__init__.py,sha256=gYPUQ94GfoIAgU5TGoaC0OVGkWuplrHg-s83ynT9f-4,1750
  ommlds/backends/openai/protocol/_common.py,sha256=r4EXmw1fBFHjU5vbWTDvlM_fsafdIVg3d3PNw4F9m-Q,313
@@ -75,6 +77,8 @@ ommlds/backends/torch/__init__.py,sha256=Id8dKbxMLlp3ux62ohu9JKoXPSrM0ZXUK0eCDTY
  ommlds/backends/torch/backends.py,sha256=Bo-ZdW1n9NswvptT8bL9CssEOKwusDuBMaXVjRS8zrA,3528
  ommlds/backends/torch/devices.py,sha256=KWkeyArPdUwVqckQTJPkN-4GQdv39cpOgCMv_XfkLkQ,776
  ommlds/backends/torch/purge.py,sha256=sp6XUxNLoVCepxIPKw3tevHn-cQqgorILvIQzixauiI,1834
+ ommlds/backends/transformers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ommlds/backends/transformers/streamers.py,sha256=Hu_9lp_kUilKjOfs7Ixqr2NoA5FuRn2eRh8JdvaBDYc,1688
  ommlds/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/cli/__main__.py,sha256=1ffCb0fcUOJMzxROJmJRXQ8PSOVYv7KrcuBtT95cf0c,140
  ommlds/cli/inject.py,sha256=WhTDabJz9b1NRRHVH-UyVN5nj6UncvIeTvgkGrcE9vc,666
@@ -91,7 +95,7 @@ ommlds/cli/sessions/chat/driver.py,sha256=ddnCYTKqWiPxV8U4UbFwb7E3yi81ItjZ9j3AJd
  ommlds/cli/sessions/chat/inject.py,sha256=7Yg6wUs2Oej4UjNZCAWCJCEsDJZWvT4G8XvkvVUMC7U,1928
  ommlds/cli/sessions/chat/session.py,sha256=eqwelLE74JFC-fBpk_hdwMD2nP4pLv3ZPwUn99200B8,521
  ommlds/cli/sessions/chat/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ommlds/cli/sessions/chat/backends/catalog.py,sha256=GroogxBQf_zlrhEEwTSq18v13HWsuXOP276VcuphPic,1756
+ ommlds/cli/sessions/chat/backends/catalog.py,sha256=gc03dqXEUUSi4WCWJ30HrkHPWlAkZHXkj1pOq7KVakU,1764
  ommlds/cli/sessions/chat/backends/inject.py,sha256=VbZ-Fb679kTItRpAhIYCqSM8vXUFeRDQWssUfrFgGi8,882
  ommlds/cli/sessions/chat/backends/injection.py,sha256=GCn5OvNIEowgB70kQVuU84z3i8lLA4vOVkTZlQG8s0o,327
  ommlds/cli/sessions/chat/backends/types.py,sha256=5eImYHXLKqbC5MDrN443eMGamP9snCmV1n7LtAsqgPk,696
@@ -147,7 +151,7 @@ ommlds/cli/state/storage.py,sha256=tRPmgCANRrw7A5Qr700OaH58F6S96O37I8Ivrbo7_gI,3
  ommlds/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/datasets/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/datasets/lib/movies.py,sha256=LmdfoXsZU9XMM_r-sxCLv_s06BFzwWO4xUj6sc9XVcI,1961
- ommlds/minichain/__init__.py,sha256=EqNJpuMwqkkdlNmipjaoC30yAqH7c8oziszlkCcXBrQ,10982
+ ommlds/minichain/__init__.py,sha256=5S2GfZW4qWF1fin2Ee8YHT4XuD_vEvtCVWTIXHOGrwo,11016
  ommlds/minichain/_marshal.py,sha256=n9PGWrHhvAmGIc7KDOYt3IF9Z6G0ncXskyICTp3Ji6k,1923
  ommlds/minichain/_typedvalues.py,sha256=Vl1Edt5khC0e5RPFBPmPCxn0IzrfVd0NHzAjAN2E6Kc,2183
  ommlds/minichain/completion.py,sha256=lQ0LfCIYZsvDqteHhhDIv16D2_gn_xMfEL0ouywE5Yo,1033
@@ -157,7 +161,7 @@ ommlds/minichain/json.py,sha256=0_5rV5Zi2qPOvXi2CLAc5DF7FN3jK3ABbjoKdjtTuVo,360
  ommlds/minichain/metadata.py,sha256=2jik8gEm_VMnknPuPwqRssTg0MClRFUrXz_IsyEgUt4,878
  ommlds/minichain/resources.py,sha256=HfcydnyFmXVRspYw-32-lvM_OfrZQdPEebAt3ivLev0,4436
  ommlds/minichain/search.py,sha256=azRzWcYhcm9IgSHquqLwtbwowtYCRAtPLSm7Gvt9iNo,1262
- ommlds/minichain/standard.py,sha256=uKXvdUNLxdUu7suCBsVOjJtnYVC2hjD_tmz3Ra7H6Jg,2510
+ ommlds/minichain/standard.py,sha256=cGXaGtC5iM9Q2lCcbhLtvEcPGKhcJUIh3UWyNgOssRM,2580
  ommlds/minichain/types.py,sha256=K6RRjpUi17UEG0cqPrrvbVANU0iRVh3WLiH-y6oEWFI,414
  ommlds/minichain/utils.py,sha256=NTsBu_pSZnLdZc1R1Se70rb_9J-IoB6VRwjhwzh3PwY,490
  ommlds/minichain/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -185,20 +189,21 @@ ommlds/minichain/backends/impls/huggingface/__init__.py,sha256=47DEQpj8HBSa-_TIm
  ommlds/minichain/backends/impls/huggingface/configs.py,sha256=6jsBtPNXOP57PcpxNTVLGWLc-18Iwn_lDbGouwCJTIQ,258
  ommlds/minichain/backends/impls/huggingface/repos.py,sha256=8BDxJmra9elSQL2vzp2nr2p4Hpq56A3zTk7hTTnfJU4,861
  ommlds/minichain/backends/impls/llamacpp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ommlds/minichain/backends/impls/llamacpp/chat.py,sha256=YeBzlA_3gcuF6KF0HIE7abUp28_o1Kil-SujyQNAHyE,5508
+ ommlds/minichain/backends/impls/llamacpp/chat.py,sha256=J6Jslx9atAtWvLdrVtvRboQUBzRX7Z5aHlo0dK5X78A,5649
  ommlds/minichain/backends/impls/llamacpp/completion.py,sha256=oJ2I6wUoIPXYLm9Vc7dwOPgqbevatTjNBZ-jXeM24tQ,2372
  ommlds/minichain/backends/impls/llamacpp/format.py,sha256=fcLMwk7r7FbNrYCH39G3fDRInKvlPIqcoxyLj95CooA,778
- ommlds/minichain/backends/impls/llamacpp/stream.py,sha256=uGog3xPNqCjGgyZjXEjhlxKbIbakWbapjANAEsmW-U4,3378
+ ommlds/minichain/backends/impls/llamacpp/stream.py,sha256=uzrXr2HhshgFe3Z0g8KTPc6Dr2kPsyxZabIy2d6IOBg,3547
  ommlds/minichain/backends/impls/mlx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ommlds/minichain/backends/impls/mlx/chat.py,sha256=kNIDvkvNpoB80LfA1y7UpSzEEm2Z4K2w56HOuMwT9zE,4558
+ ommlds/minichain/backends/impls/mlx/chat.py,sha256=sMlhgiFZrxAC-kKkLSJ6c-2uJn0IHZXH4EiPET_-CKI,7458
+ ommlds/minichain/backends/impls/ollama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ommlds/minichain/backends/impls/ollama/chat.py,sha256=UK19riOph-ptIz9zW7PucGWvVEtWHOHvwp7hoKurDNw,6393
  ommlds/minichain/backends/impls/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ommlds/minichain/backends/impls/openai/chat.py,sha256=3hiKX2WqRo1cwF4AhcnzcCb2fsmwNLW7YPGYpask41A,2660
+ ommlds/minichain/backends/impls/openai/chat.py,sha256=eMRjxPNrzrRjaw83LJuYzP9DGvwGyY2ObJSZub4Z9bY,2658
  ommlds/minichain/backends/impls/openai/completion.py,sha256=0XTC08mZzbW23Y2DNW2xfRR0eDX4nTyejF8CR1BdHZs,1756
  ommlds/minichain/backends/impls/openai/embedding.py,sha256=kkDJ3_0EqwQ_E0eXsSH1TuWXQmRqaijK8zG90fnlf3s,1582
- ommlds/minichain/backends/impls/openai/format.py,sha256=M1AYWDhz1QApazFeae4xTO9ng_59sx4uYs0FKt0GIKM,7275
- ommlds/minichain/backends/impls/openai/format2.py,sha256=OQ3N8VR4uL3PvHxjOQSdgg1bQ4_WiDz_sOV4WhVEXpQ,6611
+ ommlds/minichain/backends/impls/openai/format.py,sha256=teGX8mNU3sXNWP4YWGD8d59M4X9_r75ImSzfTJgtNCM,7351
  ommlds/minichain/backends/impls/openai/names.py,sha256=b74t8FwSbGEveVtVz4SqM5tiRDyTKNlUKlseV6AX3Yo,1211
- ommlds/minichain/backends/impls/openai/stream.py,sha256=X45qIXgwAk7IVe4LL6gzL3uJivdaB-hUGutltHeswTc,5280
+ ommlds/minichain/backends/impls/openai/stream.py,sha256=M7II7kZFsy33j8NQwdM1CCeKet3lw-XLOQdDzrzn-Yo,5297
  ommlds/minichain/backends/impls/sentencepiece/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/minichain/backends/impls/sentencepiece/tokens.py,sha256=tUEBKyBgkTowssS_AdcAuPkyFzfyDfE935x4JG8PXM0,1602
  ommlds/minichain/backends/impls/tinygrad/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -208,7 +213,7 @@ ommlds/minichain/backends/impls/tokenizers/tokens.py,sha256=_8Q49k5YroG5wQI0cuK6
  ommlds/minichain/backends/impls/transformers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/minichain/backends/impls/transformers/sentence.py,sha256=1bFJ-ND3MOkj7mNsPuISrQCpqTs7npmmNmYcc2go-Fk,1393
  ommlds/minichain/backends/impls/transformers/tokens.py,sha256=uS3-IWOJRUMBfPDVRrp3SCaXdE1yzEdKHQcyv0JZQIw,2089
- ommlds/minichain/backends/impls/transformers/transformers.py,sha256=Bb1RnvDlo8bzu24ByhDacDC0sN7R7KYZnPZ9hjbViBg,5287
+ ommlds/minichain/backends/impls/transformers/transformers.py,sha256=U4O-MiVH3dRXf-UNSoKZueZVM8XvAm2mMr30qQUHhFY,8000
  ommlds/minichain/backends/strings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/minichain/backends/strings/manifests.py,sha256=kmlanVUAZqIh0P95Mm8H20e8ib3gEgYHHUlkCXDQGFk,413
  ommlds/minichain/backends/strings/parsing.py,sha256=2wChk9Z8fhqJTk8_91f8QFjKcSZygOQM_rVk-P4NnKw,1772
@@ -231,7 +236,7 @@ ommlds/minichain/chat/stream/_marshal.py,sha256=r6NYBUviV7jIssaFprzv2rVEj8cFEuBl
  ommlds/minichain/chat/stream/adapters.py,sha256=3hKo3-MLtVIB-Nhdlxt17LP9vZESr-2fBZQ3Yr6l_Ps,1077
  ommlds/minichain/chat/stream/joining.py,sha256=oPxLT4qEYWCaxclnZvt54ztQP5md4V6u6Uwn4qd2e9M,2936
  ommlds/minichain/chat/stream/services.py,sha256=TxNEOm85QEFYtKb59q_uP6eSNh75v1fF-IpsJjhY4to,1252
- ommlds/minichain/chat/stream/types.py,sha256=t1udlFSMlSlEyQHRnBEQYI_f-FuE6twRBFGzR66blWQ,1585
+ ommlds/minichain/chat/stream/types.py,sha256=kpHsWLNHk7hmaNPDSCqLH-ECSAiz83lRfr00LhSWb5U,1589
  ommlds/minichain/chat/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ommlds/minichain/chat/tools/execution.py,sha256=tCPsz1kCt5RcoRX7dwfaJRvObniJJv_D2hCwz1Slo_A,573
  ommlds/minichain/chat/tools/ids.py,sha256=DFBKrpeDTCnMcU-P38VbPWX0YBDaz_HzMgx3yXWjFWQ,759
@@ -368,9 +373,9 @@ ommlds/wiki/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
  ommlds/wiki/utils/io.py,sha256=UKgDJGtmpnWvIqVd2mJc2QNPOqlToEY1GEveNp6_pMo,7088
  ommlds/wiki/utils/progress.py,sha256=EhvKcMFYtsarCQhIahlO6f0SboyAKP3UwUyrnVnP-Vk,3222
  ommlds/wiki/utils/xml.py,sha256=vVV8Ctn13aaRM9eYfs9Wd6rHn5WOCEUzQ44fIhOvJdg,3754
- ommlds-0.0.0.dev466.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
- ommlds-0.0.0.dev466.dist-info/METADATA,sha256=cTdtmfR8ON19GS5ay_ImJD5oZ5uXwylxukZIaX7NNUM,3224
- ommlds-0.0.0.dev466.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ommlds-0.0.0.dev466.dist-info/entry_points.txt,sha256=Z5YWtX7ClfiCKdW-dd_CSVvM0h4yQpJPi-2G3q6gNFo,35
- ommlds-0.0.0.dev466.dist-info/top_level.txt,sha256=Rbnk5d5wi58vnAXx13WFZqdQ4VX8hBCS2hEL3WeXOhY,7
- ommlds-0.0.0.dev466.dist-info/RECORD,,
+ ommlds-0.0.0.dev468.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+ ommlds-0.0.0.dev468.dist-info/METADATA,sha256=k1H1yGwCqmZETx1eTEKBsNZ_whn2QWxWap9pdErGIkw,3224
+ ommlds-0.0.0.dev468.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ommlds-0.0.0.dev468.dist-info/entry_points.txt,sha256=Z5YWtX7ClfiCKdW-dd_CSVvM0h4yQpJPi-2G3q6gNFo,35
+ ommlds-0.0.0.dev468.dist-info/top_level.txt,sha256=Rbnk5d5wi58vnAXx13WFZqdQ4VX8hBCS2hEL3WeXOhY,7
+ ommlds-0.0.0.dev468.dist-info/RECORD,,
ommlds/minichain/backends/impls/openai/format2.py (deleted)

@@ -1,210 +0,0 @@
- import typing as ta
-
- from omlish import cached
- from omlish import check
- from omlish import typedvalues as tv
- from omlish.formats import json
-
- from .....backends.openai import protocol as pt
- from ....chat.choices.services import ChatChoicesResponse
- from ....chat.choices.types import AiChoice
- from ....chat.choices.types import AiChoices
- from ....chat.choices.types import ChatChoicesOptions
- from ....chat.messages import AiMessage
- from ....chat.messages import AnyAiMessage
- from ....chat.messages import Chat
- from ....chat.messages import SystemMessage
- from ....chat.messages import ToolUseMessage
- from ....chat.messages import ToolUseResultMessage
- from ....chat.messages import UserMessage
- from ....chat.tools.types import Tool
- from ....content.json import JsonContent
- from ....content.prepare import prepare_content_str
- from ....llms.types import MaxTokens
- from ....llms.types import Temperature
- from ....llms.types import TokenUsage
- from ....llms.types import TokenUsageOutput
- from ....tools.jsonschema import build_tool_spec_params_json_schema
- from ....tools.types import ToolSpec
- from ....tools.types import ToolUse
- from ....types import Option
-
-
- ##
-
-
- def build_oai_request_msgs(mc_chat: Chat) -> ta.Sequence[pt.ChatCompletionMessage]:
- oai_msgs: list[pt.ChatCompletionMessage] = []
-
- for mc_msg in mc_chat:
- if isinstance(mc_msg, SystemMessage):
- oai_msgs.append(pt.SystemChatCompletionMessage(
- content=check.isinstance(mc_msg.c, str),
- ))
-
- elif isinstance(mc_msg, AiMessage):
- oai_msgs.append(pt.AssistantChatCompletionMessage(
- content=check.isinstance(mc_msg.c, (str, None)),
- ))
-
- elif isinstance(mc_msg, ToolUseMessage):
- oai_msgs.append(pt.AssistantChatCompletionMessage(
- tool_calls=[pt.AssistantChatCompletionMessage.ToolCall(
- id=check.not_none(mc_msg.tu.id),
- function=pt.AssistantChatCompletionMessage.ToolCall.Function(
- arguments=check.not_none(mc_msg.tu.raw_args),
- name=mc_msg.tu.name,
- ),
- )],
- ))
-
- elif isinstance(mc_msg, UserMessage):
- oai_msgs.append(pt.UserChatCompletionMessage(
- content=prepare_content_str(mc_msg.c),
- ))
-
- elif isinstance(mc_msg, ToolUseResultMessage):
- tc: str
- if isinstance(mc_msg.tur.c, str):
- tc = mc_msg.tur.c
- elif isinstance(mc_msg.tur.c, JsonContent):
- tc = json.dumps_compact(mc_msg.tur.c)
- else:
- raise TypeError(mc_msg.tur.c)
- oai_msgs.append(pt.ToolChatCompletionMessage(
- tool_call_id=check.not_none(mc_msg.tur.id),
- content=tc,
- ))
-
- else:
- raise TypeError(mc_msg)
-
- return oai_msgs
-
-
- #
-
-
- def build_mc_ai_choice(oai_choice: pt.ChatCompletionResponseChoice) -> AiChoice:
- cur: list[AnyAiMessage] = []
-
- oai_msg = oai_choice.message
-
- if (oai_c := oai_msg.content) is not None:
- cur.append(AiMessage(check.isinstance(oai_c, str)))
-
- for oai_tc in oai_msg.tool_calls or []:
- cur.append(ToolUseMessage(ToolUse(
- id=oai_tc.id,
- name=oai_tc.function.name,
- args=json.loads(oai_tc.function.arguments or '{}'),
- raw_args=oai_tc.function.arguments,
- )))
-
- return AiChoice(cur)
-
-
- def build_mc_ai_choices(oai_resp: pt.ChatCompletionResponse) -> AiChoices:
- return [
- build_mc_ai_choice(oai_choice)
- for oai_choice in oai_resp.choices
- ]
-
-
- def build_mc_choices_response(oai_resp: pt.ChatCompletionResponse) -> ChatChoicesResponse:
- return ChatChoicesResponse(
- build_mc_ai_choices(oai_resp),
-
- tv.TypedValues(
- *([TokenUsageOutput(TokenUsage(
- input=tu.prompt_tokens,
- output=tu.completion_tokens,
- total=tu.total_tokens,
- ))] if (tu := oai_resp.usage) is not None else []),
- ),
- )
-
-
- ##
-
-
- class OpenaiChatRequestHandler:
- def __init__(
- self,
- chat: Chat,
- *options: ChatChoicesOptions,
- model: str,
- mandatory_kwargs: ta.Mapping[str, ta.Any] | None = None,
- ) -> None:
- super().__init__()
-
- self._chat = chat
- self._options = options
- self._model = model
- self._mandatory_kwargs = mandatory_kwargs
-
- DEFAULT_OPTIONS: ta.ClassVar[tv.TypedValues[Option]] = tv.TypedValues[Option](
- Temperature(0.),
- MaxTokens(1024),
- )
-
- _OPTION_KWARG_NAMES_MAP: ta.ClassVar[ta.Mapping[str, type[ChatChoicesOptions]]] = dict(
- temperature=Temperature,
- max_tokens=MaxTokens,
- )
-
- class _ProcessedOptions(ta.NamedTuple):
- kwargs: dict[str, ta.Any]
- tools_by_name: dict[str, ToolSpec]
-
- @cached.function
- def _process_options(self) -> _ProcessedOptions:
- kwargs: dict = dict(
- temperature=0,
- max_tokens=1024,
- )
-
- tools_by_name: dict[str, ToolSpec] = {}
-
- with tv.TypedValues(*self._options).consume() as oc:
- kwargs.update(oc.pop_scalar_kwargs(**self._OPTION_KWARG_NAMES_MAP))
-
- for t in oc.pop(Tool, []):
- if t.spec.name in tools_by_name:
- raise NameError(t.spec.name)
- tools_by_name[check.non_empty_str(t.spec.name)] = t.spec
-
- if (mk := self._mandatory_kwargs):
- for k, v in mk.items():
- check.not_in(k, kwargs)
- kwargs[k] = v
-
- return self._ProcessedOptions(
- kwargs=kwargs,
- tools_by_name=tools_by_name,
- )
-
- @cached.function
- def oai_request(self) -> pt.ChatCompletionRequest:
- po = self._process_options()
-
- tools: list[pt.ChatCompletionRequestTool] = [
- pt.ChatCompletionRequestTool(
- function=pt.ChatCompletionRequestTool.Function(
- name=check.not_none(ts.name),
- description=prepare_content_str(ts.desc),
- parameters=build_tool_spec_params_json_schema(ts),
- ),
- )
- for ts in po.tools_by_name.values()
- ]
-
- return pt.ChatCompletionRequest(
- model=self._model,
- messages=build_oai_request_msgs(self._chat),
- top_p=1,
- tools=tools or None,
- frequency_penalty=0.0,
- presence_penalty=0.0,
- **po.kwargs,
- )