ommlds 0.0.0.dev450__py3-none-any.whl → 0.0.0.dev452__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ommlds might be problematic.
Files changed (94)
  1. ommlds/.omlish-manifests.json +12 -12
  2. ommlds/backends/anthropic/protocol/_marshal.py +2 -2
  3. ommlds/backends/anthropic/protocol/sse/_marshal.py +1 -1
  4. ommlds/backends/anthropic/protocol/sse/assemble.py +1 -1
  5. ommlds/backends/anthropic/protocol/types.py +30 -9
  6. ommlds/backends/google/protocol/_marshal.py +1 -1
  7. ommlds/backends/openai/protocol/_common.py +18 -0
  8. ommlds/backends/openai/protocol/_marshal.py +3 -2
  9. ommlds/backends/openai/protocol/chatcompletion/chunk.py +4 -0
  10. ommlds/backends/openai/protocol/chatcompletion/contentpart.py +15 -7
  11. ommlds/backends/openai/protocol/chatcompletion/message.py +10 -0
  12. ommlds/backends/openai/protocol/chatcompletion/request.py +25 -7
  13. ommlds/backends/openai/protocol/chatcompletion/response.py +10 -0
  14. ommlds/backends/openai/protocol/chatcompletion/responseformat.py +6 -0
  15. ommlds/backends/openai/protocol/chatcompletion/tokenlogprob.py +4 -0
  16. ommlds/backends/openai/protocol/completionusage.py +5 -0
  17. ommlds/cli/main.py +2 -2
  18. ommlds/cli/sessions/chat/code.py +34 -19
  19. ommlds/cli/sessions/chat/inject.py +4 -4
  20. ommlds/cli/sessions/chat/interactive.py +2 -1
  21. ommlds/cli/sessions/chat/printing.py +6 -2
  22. ommlds/cli/sessions/chat/prompt.py +28 -27
  23. ommlds/cli/sessions/chat/tools.py +13 -14
  24. ommlds/cli/tools/config.py +1 -1
  25. ommlds/cli/tools/inject.py +4 -1
  26. ommlds/minichain/__init__.py +32 -8
  27. ommlds/minichain/_marshal.py +39 -0
  28. ommlds/minichain/backends/impls/anthropic/chat.py +82 -10
  29. ommlds/minichain/backends/impls/anthropic/names.py +3 -3
  30. ommlds/minichain/backends/impls/anthropic/stream.py +7 -7
  31. ommlds/minichain/backends/impls/google/chat.py +48 -22
  32. ommlds/minichain/backends/impls/google/stream.py +8 -4
  33. ommlds/minichain/backends/impls/llamacpp/chat.py +23 -17
  34. ommlds/minichain/backends/impls/llamacpp/format.py +4 -2
  35. ommlds/minichain/backends/impls/llamacpp/stream.py +6 -6
  36. ommlds/minichain/backends/impls/mistral.py +1 -1
  37. ommlds/minichain/backends/impls/mlx/chat.py +1 -1
  38. ommlds/minichain/backends/impls/openai/chat.py +6 -3
  39. ommlds/minichain/backends/impls/openai/format.py +80 -61
  40. ommlds/minichain/backends/impls/openai/format2.py +210 -0
  41. ommlds/minichain/backends/impls/openai/stream.py +9 -6
  42. ommlds/minichain/backends/impls/tinygrad/chat.py +10 -5
  43. ommlds/minichain/backends/impls/transformers/transformers.py +20 -16
  44. ommlds/minichain/chat/_marshal.py +16 -9
  45. ommlds/minichain/chat/choices/adapters.py +3 -3
  46. ommlds/minichain/chat/choices/types.py +2 -2
  47. ommlds/minichain/chat/history.py +1 -1
  48. ommlds/minichain/chat/messages.py +55 -19
  49. ommlds/minichain/chat/services.py +2 -2
  50. ommlds/minichain/chat/stream/_marshal.py +16 -0
  51. ommlds/minichain/chat/stream/adapters.py +39 -28
  52. ommlds/minichain/chat/stream/services.py +2 -2
  53. ommlds/minichain/chat/stream/types.py +20 -13
  54. ommlds/minichain/chat/tools/execution.py +8 -7
  55. ommlds/minichain/chat/tools/ids.py +9 -15
  56. ommlds/minichain/chat/tools/parsing.py +17 -26
  57. ommlds/minichain/chat/transforms/base.py +29 -38
  58. ommlds/minichain/chat/transforms/metadata.py +30 -4
  59. ommlds/minichain/chat/transforms/services.py +5 -7
  60. ommlds/minichain/content/_marshal.py +24 -3
  61. ommlds/minichain/content/json.py +13 -0
  62. ommlds/minichain/content/materialize.py +13 -20
  63. ommlds/minichain/content/prepare.py +4 -0
  64. ommlds/minichain/json.py +20 -0
  65. ommlds/minichain/lib/fs/context.py +15 -1
  66. ommlds/minichain/lib/fs/errors.py +6 -0
  67. ommlds/minichain/lib/fs/tools/edit.py +104 -0
  68. ommlds/minichain/lib/fs/tools/ls.py +2 -2
  69. ommlds/minichain/lib/fs/tools/read.py +2 -2
  70. ommlds/minichain/lib/fs/tools/recursivels/execution.py +2 -2
  71. ommlds/minichain/lib/todo/context.py +29 -2
  72. ommlds/minichain/lib/todo/tools/read.py +11 -6
  73. ommlds/minichain/lib/todo/tools/write.py +73 -13
  74. ommlds/minichain/lib/todo/types.py +6 -1
  75. ommlds/minichain/llms/_marshal.py +1 -1
  76. ommlds/minichain/services/_marshal.py +1 -1
  77. ommlds/minichain/tools/_marshal.py +1 -1
  78. ommlds/minichain/tools/execution/catalog.py +2 -1
  79. ommlds/minichain/tools/execution/executors.py +8 -3
  80. ommlds/minichain/tools/execution/reflect.py +43 -5
  81. ommlds/minichain/tools/fns.py +46 -9
  82. ommlds/minichain/tools/jsonschema.py +5 -6
  83. ommlds/minichain/tools/reflect.py +2 -2
  84. ommlds/minichain/tools/types.py +24 -1
  85. ommlds/minichain/vectors/_marshal.py +1 -1
  86. ommlds/server/server.py +1 -1
  87. ommlds/tools/git.py +18 -2
  88. ommlds/tools/ocr.py +7 -1
  89. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev452.dist-info}/METADATA +3 -3
  90. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev452.dist-info}/RECORD +94 -89
  91. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev452.dist-info}/WHEEL +0 -0
  92. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev452.dist-info}/entry_points.txt +0 -0
  93. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev452.dist-info}/licenses/LICENSE +0 -0
  94. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev452.dist-info}/top_level.txt +0 -0

ommlds/minichain/backends/impls/anthropic/chat.py
@@ -1,4 +1,5 @@
 """
+https://docs.claude.com/en/api/messages
 https://github.com/anthropics/anthropic-sdk-python/tree/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/types
 https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/resources/completions.py#L53
 https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/resources/messages.py#L70
@@ -6,21 +7,29 @@ https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d15
 import typing as ta
 
 from omlish import check
-from omlish import lang
+from omlish import marshal as msh
 from omlish import typedvalues as tv
 from omlish.formats import json
 from omlish.http import all as http
 
+from .....backends.anthropic.protocol import types as pt
 from ....chat.choices.services import ChatChoicesRequest
 from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
 from ....chat.choices.types import AiChoice
 from ....chat.messages import AiMessage
+from ....chat.messages import AnyAiMessage
 from ....chat.messages import Message
 from ....chat.messages import SystemMessage
+from ....chat.messages import ToolUseMessage
+from ....chat.messages import ToolUseResultMessage
 from ....chat.messages import UserMessage
+from ....chat.tools.types import Tool
+from ....content.prepare import prepare_content_str
 from ....models.configs import ModelName
 from ....standard import ApiKey
+from ....tools.jsonschema import build_tool_spec_params_json_schema
+from ....tools.types import ToolUse
 from .names import MODEL_NAMES
 
 
@@ -39,6 +48,7 @@ class AnthropicChatChoicesService:
         SystemMessage: 'system',
         UserMessage: 'user',
         AiMessage: 'assistant',
+        ToolUseMessage: 'assistant',
     }
 
     def __init__(
@@ -68,26 +78,73 @@ class AnthropicChatChoicesService:
             *,
            max_tokens: int = 4096,  # FIXME: ChatOption
    ) -> ChatChoicesResponse:
-        messages = []
-        system: str | None = None
+        messages: list[pt.Message] = []
+        system: list[pt.Content] | None = None
         for i, m in enumerate(request.v):
             if isinstance(m, SystemMessage):
                 if i != 0 or system is not None:
                     raise Exception('Only supports one system message and must be first')
-                system = self._get_msg_content(m)
+                system = [pt.Text(check.not_none(self._get_msg_content(m)))]
+
+            elif isinstance(m, ToolUseResultMessage):
+                messages.append(pt.Message(
+                    role='user',
+                    content=[pt.ToolResult(
+                        tool_use_id=check.not_none(m.tur.id),
+                        content=json.dumps_compact(msh.marshal(m.tur.c)) if not isinstance(m.tur.c, str) else m.tur.c,
+                    )],
+                ))
+
+            elif isinstance(m, AiMessage):
+                # messages.append(pt.Message(
+                #     role=self.ROLES_MAP[type(m)],  # noqa
+                #     content=[pt.Text(check.isinstance(self._get_msg_content(m), str))],
+                # ))
+                messages.append(pt.Message(
+                    role='assistant',
+                    content=[
+                        *([pt.Text(check.isinstance(m.c, str))] if m.c is not None else []),
+                    ],
+                ))
+
+            elif isinstance(m, ToolUseMessage):
+                messages.append(pt.Message(
+                    role='assistant',
+                    content=[
+                        pt.ToolUse(
+                            id=check.not_none(m.tu.id),
+                            name=check.not_none(m.tu.name),
+                            input=m.tu.args,
+                        ),
+                    ],
+                ))
+
             else:
-                messages.append(dict(
-                    role=self.ROLES_MAP[type(m)],  # noqa
-                    content=check.isinstance(self._get_msg_content(m), str),
+                messages.append(pt.Message(
+                    role=self.ROLES_MAP[type(m)],  # type: ignore[arg-type]
+                    content=[pt.Text(check.isinstance(self._get_msg_content(m), str))],
                 ))
 
-        raw_request = dict(
+        tools: list[pt.ToolSpec] = []
+        with tv.TypedValues(*request.options).consume() as oc:
+            t: Tool
+            for t in oc.pop(Tool, []):
+                tools.append(pt.ToolSpec(
+                    name=check.not_none(t.spec.name),
+                    description=prepare_content_str(t.spec.desc),
+                    input_schema=build_tool_spec_params_json_schema(t.spec),
+                ))
+
+        a_req = pt.MessagesRequest(
             model=MODEL_NAMES.resolve(self._model_name.v),
-            **lang.opt_kw(system=system),
+            system=system,
             messages=messages,
+            tools=tools or None,
             max_tokens=max_tokens,
         )
 
+        raw_request = msh.marshal(a_req)
+
         raw_response = http.request(
             'https://api.anthropic.com/v1/messages',
             headers={
@@ -100,6 +157,21 @@ class AnthropicChatChoicesService:
 
         response = json.loads(check.not_none(raw_response.data).decode('utf-8'))
 
+        out: list[AnyAiMessage] = []
+        for c in response['content']:
+            if c['type'] == 'text':
+                out.append(AiMessage(
+                    check.not_none(c['text']),
+                ))
+            elif c['type'] == 'tool_use':
+                out.append(ToolUseMessage(ToolUse(
+                    id=c['id'],
+                    name=c['name'],
+                    args=c['input'],
+                )))
+            else:
+                raise TypeError(c['type'])
+
         return ChatChoicesResponse([
-            AiChoice(AiMessage(response['content'][0]['text'])),  # noqa
+            AiChoice(out),
         ])
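
The response side of this change replaces the old single-block assumption (response['content'][0]['text']) with a dispatch over every returned content block, so one assistant turn can yield both text and tool-use messages. Below is a minimal, self-contained sketch of that dispatch pattern; TextOut and ToolUseOut are hypothetical stand-ins for the real AiMessage/ToolUseMessage types, and the block shapes follow the Anthropic Messages API response format:

    import dataclasses as dc
    import typing as ta


    @dc.dataclass(frozen=True)
    class TextOut:  # stand-in for AiMessage
        text: str


    @dc.dataclass(frozen=True)
    class ToolUseOut:  # stand-in for ToolUseMessage wrapping a ToolUse
        id: str
        name: str
        args: ta.Mapping[str, ta.Any]


    def dispatch_content(blocks: ta.Iterable[dict]) -> list[TextOut | ToolUseOut]:
        out: list[TextOut | ToolUseOut] = []
        for c in blocks:
            if c['type'] == 'text':
                out.append(TextOut(c['text']))
            elif c['type'] == 'tool_use':
                out.append(ToolUseOut(c['id'], c['name'], c['input']))
            else:
                raise TypeError(c['type'])  # same failure mode as the new code
        return out


    # An assistant turn mixing prose and a tool call now produces two messages:
    print(dispatch_content([
        {'type': 'text', 'text': 'Let me look at that.'},
        {'type': 'tool_use', 'id': 'tu_1', 'name': 'ls', 'input': {'path': '.'}},
    ]))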

ommlds/minichain/backends/impls/anthropic/names.py
@@ -18,9 +18,9 @@ MODEL_NAMES = ModelNameCollection(
         'claude-opus-4-1': 'claude-opus-4-1-20250805',
         'claude-opus': 'claude-opus-4-1',
 
-        'claude-sonnet-4-20250514': None,
-        'claude-sonnet-4': 'claude-sonnet-4-20250514',
-        'claude-sonnet': 'claude-sonnet-4',
+        'claude-sonnet-4-5-20250929': None,
+        'claude-sonnet-4-5': 'claude-sonnet-4-5-20250929',
+        'claude-sonnet': 'claude-sonnet-4-5',
 
         'claude-3-5-haiku-latest': None,
         'claude-haiku-3-5-latest': 'claude-3-5-haiku-latest',
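
The naming convention here is an alias chain: a key mapped to None is canonical, and every other key points one step closer to it, so 'claude-sonnet' now resolves through 'claude-sonnet-4-5' to the dated snapshot. A hypothetical resolver over a plain dict illustrating the chain (the real ModelNameCollection.resolve may behave differently):

    NAMES: dict[str, str | None] = {
        'claude-sonnet-4-5-20250929': None,
        'claude-sonnet-4-5': 'claude-sonnet-4-5-20250929',
        'claude-sonnet': 'claude-sonnet-4-5',
    }


    def resolve(name: str) -> str:
        # Walk alias links until reaching a canonical (None-valued) entry.
        while (nxt := NAMES[name]) is not None:
            name = nxt
        return name


    assert resolve('claude-sonnet') == 'claude-sonnet-4-5-20250929'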

ommlds/minichain/backends/impls/anthropic/stream.py
@@ -15,9 +15,9 @@ from ....chat.messages import SystemMessage
 from ....chat.stream.services import ChatChoicesStreamRequest
 from ....chat.stream.services import ChatChoicesStreamResponse
 from ....chat.stream.services import static_check_is_chat_choices_stream_service
-from ....chat.stream.types import AiChoiceDelta
 from ....chat.stream.types import AiChoiceDeltas
-from ....chat.stream.types import AiMessageDelta
+from ....chat.stream.types import AiChoicesDeltas
+from ....chat.stream.types import ContentAiChoiceDelta
 from ....configs import Config
 from ....resources import UseResources
 from ....standard import ApiKey
@@ -86,7 +86,7 @@ class AnthropicChatChoicesStreamService:
         http_client = rs.enter_context(http.client())
         http_response = rs.enter_context(http_client.stream_request(http_request))
 
-        async def inner(sink: StreamResponseSink[AiChoiceDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
+        async def inner(sink: StreamResponseSink[AiChoicesDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
             msg_start: AnthropicSseDecoderEvents.MessageStart | None = None
             cbk_start: AnthropicSseDecoderEvents.ContentBlockStart | None = None
             msg_stop: AnthropicSseDecoderEvents.MessageStop | None = None
@@ -124,18 +124,18 @@ class AnthropicChatChoicesStreamService:
                        check.none(cbk_start)
                        cbk_start = ae
                        if isinstance(ae.content_block, AnthropicSseDecoderEvents.ContentBlockStart.Text):  # noqa
-                            await sink.emit([AiChoiceDelta(AiMessageDelta(
+                            await sink.emit(AiChoicesDeltas([AiChoiceDeltas([ContentAiChoiceDelta(
                                ae.content_block.text,
-                            ))])
+                            )])]))
                        else:
                            raise TypeError(ae.content_block)
 
                    case AnthropicSseDecoderEvents.ContentBlockDelta():
                        check.not_none(cbk_start)
                        if isinstance(ae.delta, AnthropicSseDecoderEvents.ContentBlockDelta.TextDelta):
-                            await sink.emit([AiChoiceDelta(AiMessageDelta(
+                            await sink.emit(AiChoicesDeltas([AiChoiceDeltas([ContentAiChoiceDelta(
                                ae.delta.text,
-                            )])
+                            )])]))
                        else:
                            raise TypeError(ae.delta)
 
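Across all the streaming backends in this release, the emitted value changes from a bare list of AiChoiceDelta(AiMessageDelta(...)) to an explicit two-level envelope: AiChoicesDeltas wraps one AiChoiceDeltas per choice, which in turn wraps content deltas. A sketch with hypothetical stand-in dataclasses to make the new nesting concrete (the real types live in ommlds/minichain/chat/stream/types.py):

    import dataclasses as dc


    @dc.dataclass(frozen=True)
    class ContentDelta:  # stand-in for ContentAiChoiceDelta
        text: str


    @dc.dataclass(frozen=True)
    class ChoiceDeltas:  # stand-in for AiChoiceDeltas: the deltas of one choice
        deltas: list[ContentDelta]


    @dc.dataclass(frozen=True)
    class ChoicesDeltas:  # stand-in for AiChoicesDeltas: one event across all choices
        choices: list[ChoiceDeltas]


    # Old shape (roughly): [AiChoiceDelta(AiMessageDelta('hel'))]
    # New shape: the same text, wrapped once per choice and once per event.
    event = ChoicesDeltas([ChoiceDeltas([ContentDelta('hel')])])
    print(event.choices[0].deltas[0].text)  # -> hel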

ommlds/minichain/backends/impls/google/chat.py
@@ -15,14 +15,16 @@ from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
 from ....chat.choices.types import AiChoice
 from ....chat.messages import AiMessage
+from ....chat.messages import AnyAiMessage
 from ....chat.messages import Message
 from ....chat.messages import SystemMessage
-from ....chat.messages import ToolExecResultMessage
+from ....chat.messages import ToolUseMessage
+from ....chat.messages import ToolUseResultMessage
 from ....chat.messages import UserMessage
 from ....chat.tools.types import Tool
 from ....models.configs import ModelName
 from ....standard import ApiKey
-from ....tools.types import ToolExecRequest
+from ....tools.types import ToolUse
 from .names import MODEL_NAMES
 from .tools import build_tool_spec_schema
 
@@ -59,7 +61,8 @@ class GoogleChatChoicesService:
 
     ROLES_MAP: ta.ClassVar[ta.Mapping[type[Message], str]] = {
         UserMessage: 'user',
-        AiMessage: 'assistant',
+        AiMessage: 'model',
+        ToolUseMessage: 'model',
     }
 
     async def invoke(
@@ -79,25 +82,47 @@ class GoogleChatChoicesService:
                        text=check.not_none(self._get_msg_content(m)),
                    )],
                )
-            elif isinstance(m, ToolExecResultMessage):
+
+            elif isinstance(m, ToolUseResultMessage):
                 tr_resp_val: pt.Value
-                if m.c is None:
+                if m.tur.c is None:
                     tr_resp_val = pt.NullValue()  # type: ignore[unreachable]
-                elif isinstance(m.c, str):
-                    tr_resp_val = pt.StringValue(m.c)
+                elif isinstance(m.tur.c, str):
+                    tr_resp_val = pt.StringValue(m.tur.c)
                 else:
-                    raise TypeError(m.c)
+                    raise TypeError(m.tur.c)
                 g_contents.append(pt.Content(
                     parts=[pt.Part(
                         function_response=pt.FunctionResponse(
-                            id=m.id,
-                            name=m.name,
+                            id=m.tur.id,
+                            name=m.tur.name,
                             response={
                                 'value': tr_resp_val,
                             },
                         ),
                     )],
                 ))
+
+            elif isinstance(m, AiMessage):
+                g_contents.append(pt.Content(
+                    parts=[pt.Part(
+                        text=check.not_none(self._get_msg_content(m)),
+                    )],
+                    role='model',
+                ))
+
+            elif isinstance(m, ToolUseMessage):
+                g_contents.append(pt.Content(
+                    parts=[pt.Part(
+                        function_call=pt.FunctionCall(
+                            id=m.tu.id,
+                            name=m.tu.name,
+                            args=m.tu.args,
+                        ),
+                    )],
+                    role='model',
+                ))
+
             else:
                 g_contents.append(pt.Content(
                     parts=[pt.Part(
@@ -137,17 +162,18 @@ class GoogleChatChoicesService:
 
         ai_choices: list[AiChoice] = []
         for c in g_resp.candidates or []:
-            g_resp_part = check.single(check.not_none(check.not_none(c.content).parts))
-            ter: ToolExecRequest | None = None
-            if (g_fc := g_resp_part.function_call) is not None:
-                ter = ToolExecRequest(
-                    id=g_fc.id,
-                    name=g_fc.name,
-                    args=g_fc.args or {},
-                )
-            ai_choices.append(AiChoice(AiMessage(
-                c=g_resp_part.text,
-                tool_exec_requests=[ter] if ter is not None else None,
-            )))
+            out: list[AnyAiMessage] = []
+            for g_resp_part in check.not_none(check.not_none(c.content).parts):
+                if (g_txt := g_resp_part.text) is not None:
+                    out.append(AiMessage(g_txt))
+                elif (g_fc := g_resp_part.function_call) is not None:
+                    out.append(ToolUseMessage(ToolUse(
+                        id=g_fc.id,
+                        name=g_fc.name,
+                        args=g_fc.args or {},
+                    )))
+                else:
+                    raise TypeError(g_resp_part)
+            ai_choices.append(AiChoice(out))
 
         return ChatChoicesResponse(ai_choices)
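
The Google response loop previously asserted exactly one part per candidate (check.single) and attached any function call to the same AiMessage; it now fans each part out into its own message. A self-contained sketch of that per-part fan-out over a dict-shaped candidate (keys mirror the snake_case protocol types used in the diff, not the camelCase wire format):

    import typing as ta


    def fan_out_parts(candidate: dict) -> list[tuple[str, ta.Any]]:
        out: list[tuple[str, ta.Any]] = []
        for part in candidate['content']['parts']:
            if (text := part.get('text')) is not None:
                out.append(('text', text))
            elif (fc := part.get('function_call')) is not None:
                out.append(('tool_use', (fc.get('id'), fc['name'], fc.get('args') or {})))
            else:
                raise TypeError(part)
        return out


    # A candidate mixing narration with a function call yields two messages:
    print(fan_out_parts({'content': {'parts': [
        {'text': 'Checking the file...'},
        {'function_call': {'name': 'read', 'args': {'path': 'a.txt'}}},
    ]}}))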

ommlds/minichain/backends/impls/google/stream.py
@@ -19,9 +19,9 @@ from ....chat.messages import UserMessage
 from ....chat.stream.services import ChatChoicesStreamRequest
 from ....chat.stream.services import ChatChoicesStreamResponse
 from ....chat.stream.services import static_check_is_chat_choices_stream_service
-from ....chat.stream.types import AiChoiceDelta
 from ....chat.stream.types import AiChoiceDeltas
-from ....chat.stream.types import AiMessageDelta
+from ....chat.stream.types import AiChoicesDeltas
+from ....chat.stream.types import ContentAiChoiceDelta
 from ....models.configs import ModelName
 from ....resources import UseResources
 from ....standard import ApiKey
@@ -101,7 +101,7 @@ class GoogleChatChoicesStreamService:
         http_client = rs.enter_context(http.client())
         http_response = rs.enter_context(http_client.stream_request(http_request))
 
-        async def inner(sink: StreamResponseSink[AiChoiceDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
+        async def inner(sink: StreamResponseSink[AiChoicesDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
             db = DelimitingBuffer([b'\r', b'\n', b'\r\n'])
             while True:
                 # FIXME: read1 not on response stream protocol
@@ -117,7 +117,11 @@
                     gcr = msh.unmarshal(json.loads(l[6:]), pt.GenerateContentResponse)  # noqa
                     cnd = check.single(check.not_none(gcr.candidates))
                     for p in check.not_none(cnd.content).parts or []:
-                        await sink.emit([AiChoiceDelta(AiMessageDelta(check.not_none(p.text)))])
+                        await sink.emit(AiChoicesDeltas([
+                            AiChoiceDeltas([
+                                ContentAiChoiceDelta(check.not_none(p.text)),
+                            ]),
+                        ]))
 
                 if not b:
                     return []
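
The streaming loop consumes the endpoint's SSE-style frames: each payload line starts with the 6-byte prefix 'data: ', which is what the l[6:] slice strips before unmarshaling. A stdlib-only sketch of that framing (the JSON shape is illustrative):

    import json

    line = b'data: {"candidates": [{"content": {"parts": [{"text": "hi"}]}}]}'

    if line.startswith(b'data: '):
        payload = json.loads(line[6:])  # strip the 6-byte 'data: ' prefix, as in l[6:]
        for cand in payload.get('candidates', []):
            for part in cand.get('content', {}).get('parts', []):
                if (text := part.get('text')) is not None:
                    print(text)  # -> hi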

ommlds/minichain/backends/impls/llamacpp/chat.py
@@ -15,7 +15,8 @@ from ....chat.choices.services import static_check_is_chat_choices_service
 from ....chat.choices.types import AiChoice
 from ....chat.choices.types import ChatChoicesOptions
 from ....chat.messages import AiMessage
-from ....chat.messages import ToolExecResultMessage
+from ....chat.messages import ToolUseMessage
+from ....chat.messages import ToolUseResultMessage
 from ....chat.tools.types import Tool
 from ....configs import Config
 from ....llms.types import MaxTokens
@@ -100,29 +101,34 @@ class LlamacppChatChoicesService:
 
         ims: list = []
         for rm in request.v:
-            if isinstance(rm, ToolExecResultMessage):
+            if isinstance(rm, ToolUseResultMessage):
                 ims.append(dict(
                     role='tool',
-                    **(dict(id=rm.id) if rm.id is not None else {}),
-                    name=rm.name,
-                    content=check.isinstance(rm.c, str),
+                    **(dict(id=rm.tur.id) if rm.tur.id is not None else {}),
+                    name=rm.tur.name,
+                    content=check.isinstance(rm.tur.c, str),
                 ))
+
             elif isinstance(rm, AiMessage):
-                tcs: list[dict] = []
-                for ter in rm.tool_exec_requests or []:
-                    tcs.append(dict(
-                        id=check.not_none(ter.id),
-                        type='function',
-                        function=dict(
-                            name=ter.name,
-                            arguments=check.isinstance(ter.raw_args, str),
-                        ),
-                    ))
                 ims.append(dict(
                     role=ROLES_MAP[type(rm)],
                     **(dict(content=mc) if (mc := get_msg_content(rm)) is not None else {}),
-                    **(dict(tool_calls=tcs) if tcs else {}),
                 ))
+
+            elif isinstance(rm, ToolUseMessage):
+                ims.append(dict(
+                    role=ROLES_MAP[type(rm)],
+                    content='',
+                    tool_calls=[dict(
+                        id=check.not_none(rm.tu.id),
+                        type='function',
+                        function=dict(
+                            name=rm.tu.name,
+                            arguments=check.isinstance(rm.tu.raw_args, str),
+                        ),
+                    )],
+                ))
+
             else:
                 ims.append(dict(
                     role=ROLES_MAP[type(rm)],
@@ -137,6 +143,6 @@ class LlamacppChatChoicesService:
         out: list[AiChoice] = []
         for c in ta.cast(ta.Any, output)['choices']:
             m = c['message']
-            out.append(AiChoice(AiMessage(m['content'])))
+            out.append(AiChoice([AiMessage(m['content'])]))
 
         return ChatChoicesResponse(out)
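
With tool calls split out of AiMessage, a ToolUseMessage now becomes its own assistant turn with empty content and a single tool_calls entry in the OpenAI-compatible shape llama.cpp accepts. A small sketch of just that dict construction (names are illustrative):

    def tool_use_chat_message(tu_id: str, name: str, raw_args: str) -> dict:
        # Assistant turn carrying only a tool call; 'arguments' stays a raw
        # JSON-encoded string, matching rm.tu.raw_args above.
        return dict(
            role='assistant',
            content='',
            tool_calls=[dict(
                id=tu_id,
                type='function',
                function=dict(
                    name=name,
                    arguments=raw_args,
                ),
            )],
        )


    print(tool_use_chat_message('call_1', 'ls', '{"path": "."}'))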

ommlds/minichain/backends/impls/llamacpp/format.py
@@ -5,7 +5,8 @@ from omlish import check
 from ....chat.messages import AiMessage
 from ....chat.messages import Message
 from ....chat.messages import SystemMessage
-from ....chat.messages import ToolExecResultMessage
+from ....chat.messages import ToolUseMessage
+from ....chat.messages import ToolUseResultMessage
 from ....chat.messages import UserMessage
 
 
@@ -16,7 +17,8 @@ ROLES_MAP: ta.Mapping[type[Message], str] = {
     SystemMessage: 'system',
     UserMessage: 'user',
     AiMessage: 'assistant',
-    ToolExecResultMessage: 'tool',
+    ToolUseMessage: 'assistant',
+    ToolUseResultMessage: 'tool',
 }
 
 
ommlds/minichain/backends/impls/llamacpp/stream.py
@@ -13,9 +13,9 @@ from ....chat.choices.services import ChatChoicesOutputs
 from ....chat.stream.services import ChatChoicesStreamRequest
 from ....chat.stream.services import ChatChoicesStreamResponse
 from ....chat.stream.services import static_check_is_chat_choices_stream_service
-from ....chat.stream.types import AiChoiceDelta
 from ....chat.stream.types import AiChoiceDeltas
-from ....chat.stream.types import AiMessageDelta
+from ....chat.stream.types import AiChoicesDeltas
+from ....chat.stream.types import ContentAiChoiceDelta
 from ....configs import Config
 from ....models.configs import ModelPath
 from ....resources import UseResources
@@ -75,10 +75,10 @@ class LlamacppChatChoicesStreamService(lang.ExitStacked):
 
         rs.enter_context(lang.defer(close_output))
 
-        async def inner(sink: StreamResponseSink[AiChoiceDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
+        async def inner(sink: StreamResponseSink[AiChoicesDeltas]) -> ta.Sequence[ChatChoicesOutputs] | None:
             for chunk in output:
                 check.state(chunk['object'] == 'chat.completion.chunk')
-                l: list[AiChoiceDelta] = []
+                l: list[AiChoiceDeltas] = []
                 for choice in chunk['choices']:
                     # FIXME: check role is assistant
                     # FIXME: stop reason
@@ -86,8 +86,8 @@ class LlamacppChatChoicesStreamService(lang.ExitStacked):
                        continue
                    if not (content := delta.get('content', '')):
                        continue
-                    l.append(AiChoiceDelta(AiMessageDelta(content)))
-                await sink.emit(l)
+                    l.append(AiChoiceDeltas([ContentAiChoiceDelta(content)]))
+                await sink.emit(AiChoicesDeltas(l))
             return None
 
         return await new_stream_response(rs, inner)

ommlds/minichain/backends/impls/mistral.py
@@ -90,6 +90,6 @@ class MistralChatChoicesService:
         resp_dct = json.loads(check.not_none(resp.data).decode('utf-8'))
 
         return ChatChoicesResponse([
-            AiChoice(AiMessage(c['message']['content']))
+            AiChoice([AiMessage(c['message']['content'])])
             for c in resp_dct['choices']
         ])

ommlds/minichain/backends/impls/mlx/chat.py
@@ -137,5 +137,5 @@ class MlxChatChoicesService(lang.ExitStacked):
         )
 
         return ChatChoicesResponse([
-            AiChoice(AiMessage(response))  # noqa
+            AiChoice([AiMessage(response)])  # noqa
         ])

ommlds/minichain/backends/impls/openai/chat.py
@@ -14,17 +14,20 @@ TODO:
 import typing as ta
 
 from omlish import check
+from omlish import marshal as msh
 from omlish import typedvalues as tv
 from omlish.formats import json
 from omlish.http import all as http
 
+from .....backends.openai import protocol as pt
 from ....chat.choices.services import ChatChoicesRequest
 from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
 from ....models.configs import ModelName
 from ....standard import ApiKey
 from ....standard import DefaultOptions
-from .format import OpenaiChatRequestHandler
+from .format2 import OpenaiChatRequestHandler
+from .format2 import build_mc_choices_response
 from .names import MODEL_NAMES
 
 
@@ -63,7 +66,7 @@ class OpenaiChatChoicesService:
            ),
        )
 
-        raw_request = rh.raw_request()
+        raw_request = msh.marshal(rh.oai_request())
 
         http_response = http.request(
             'https://api.openai.com/v1/chat/completions',
@@ -76,4 +79,4 @@ class OpenaiChatChoicesService:
 
         raw_response = json.loads(check.not_none(http_response.data).decode('utf-8'))
 
-        return rh.build_response(raw_response)
+        return build_mc_choices_response(msh.unmarshal(raw_response, pt.ChatCompletionResponse))
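
The OpenAI backend now follows the same discipline as the Anthropic one: build a typed request object, marshal it to JSON for the wire, and unmarshal the raw response into pt.ChatCompletionResponse before mapping back to minichain types. A stdlib-only sketch of that round-trip pattern, with dataclasses.asdict standing in for msh.marshal and a plain constructor for msh.unmarshal:

    import dataclasses as dc
    import json


    @dc.dataclass(frozen=True)
    class Req:  # stand-in for a typed request such as the one rh.oai_request() builds
        model: str
        messages: list


    @dc.dataclass(frozen=True)
    class Resp:  # stand-in for pt.ChatCompletionResponse
        id: str
        choices: list


    req = Req('gpt-4o', [{'role': 'user', 'content': 'hi'}])
    wire = json.dumps(dc.asdict(req))  # ~ msh.marshal(rh.oai_request())

    raw = json.loads('{"id": "x", "choices": []}')
    resp = Resp(**raw)  # ~ msh.unmarshal(raw, pt.ChatCompletionResponse)
    print(wire, resp)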