ommlds-0.0.0.dev450-py3-none-any.whl → ommlds-0.0.0.dev451-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (49)
  1. ommlds/.omlish-manifests.json +3 -3
  2. ommlds/backends/anthropic/protocol/_marshal.py +1 -1
  3. ommlds/backends/anthropic/protocol/sse/_marshal.py +1 -1
  4. ommlds/backends/anthropic/protocol/sse/assemble.py +1 -1
  5. ommlds/backends/anthropic/protocol/types.py +30 -9
  6. ommlds/backends/google/protocol/_marshal.py +1 -1
  7. ommlds/backends/openai/protocol/_marshal.py +1 -1
  8. ommlds/cli/main.py +2 -2
  9. ommlds/cli/sessions/chat/code.py +12 -2
  10. ommlds/cli/sessions/chat/printing.py +4 -0
  11. ommlds/cli/sessions/chat/tools.py +1 -2
  12. ommlds/cli/tools/config.py +1 -1
  13. ommlds/cli/tools/inject.py +4 -1
  14. ommlds/minichain/__init__.py +12 -0
  15. ommlds/minichain/_marshal.py +39 -0
  16. ommlds/minichain/backends/impls/anthropic/chat.py +78 -10
  17. ommlds/minichain/backends/impls/google/chat.py +39 -11
  18. ommlds/minichain/chat/_marshal.py +1 -1
  19. ommlds/minichain/content/_marshal.py +24 -3
  20. ommlds/minichain/content/json.py +13 -0
  21. ommlds/minichain/content/materialize.py +13 -20
  22. ommlds/minichain/content/prepare.py +4 -0
  23. ommlds/minichain/json.py +20 -0
  24. ommlds/minichain/lib/fs/context.py +15 -1
  25. ommlds/minichain/lib/fs/errors.py +6 -0
  26. ommlds/minichain/lib/fs/tools/edit.py +104 -0
  27. ommlds/minichain/lib/fs/tools/ls.py +2 -2
  28. ommlds/minichain/lib/fs/tools/read.py +2 -2
  29. ommlds/minichain/lib/fs/tools/recursivels/execution.py +2 -2
  30. ommlds/minichain/lib/todo/context.py +29 -2
  31. ommlds/minichain/lib/todo/tools/read.py +11 -6
  32. ommlds/minichain/lib/todo/tools/write.py +73 -13
  33. ommlds/minichain/lib/todo/types.py +6 -1
  34. ommlds/minichain/llms/_marshal.py +1 -1
  35. ommlds/minichain/services/_marshal.py +1 -1
  36. ommlds/minichain/tools/_marshal.py +1 -1
  37. ommlds/minichain/tools/execution/catalog.py +2 -1
  38. ommlds/minichain/tools/execution/executors.py +8 -3
  39. ommlds/minichain/tools/execution/reflect.py +43 -5
  40. ommlds/minichain/tools/fns.py +46 -9
  41. ommlds/minichain/tools/reflect.py +2 -2
  42. ommlds/minichain/vectors/_marshal.py +1 -1
  43. ommlds/tools/ocr.py +7 -1
  44. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev451.dist-info}/METADATA +3 -3
  45. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev451.dist-info}/RECORD +49 -47
  46. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev451.dist-info}/WHEEL +0 -0
  47. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev451.dist-info}/entry_points.txt +0 -0
  48. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev451.dist-info}/licenses/LICENSE +0 -0
  49. {ommlds-0.0.0.dev450.dist-info → ommlds-0.0.0.dev451.dist-info}/top_level.txt +0 -0

ommlds/.omlish-manifests.json CHANGED
@@ -18,7 +18,7 @@
   "module": ".minichain.backends.impls.anthropic.chat",
   "attr": null,
   "file": "ommlds/minichain/backends/impls/anthropic/chat.py",
-  "line": 30,
+  "line": 37,
   "value": {
     "!.minichain.registries.manifests.RegistryManifest": {
       "module": "ommlds.minichain.backends.impls.anthropic.chat",
@@ -96,7 +96,7 @@
   "module": ".minichain.backends.impls.google.chat",
   "attr": null,
   "file": "ommlds/minichain/backends/impls/google/chat.py",
-  "line": 33,
+  "line": 34,
   "value": {
     "!.minichain.registries.manifests.RegistryManifest": {
       "module": "ommlds.minichain.backends.impls.google.chat",
@@ -606,7 +606,7 @@
   "module": ".tools.ocr",
   "attr": "_CLI_MODULE",
   "file": "ommlds/tools/ocr.py",
-  "line": 83,
+  "line": 89,
   "value": {
     "!omdev.cli.types.CliModule": {
       "name": "ocr",

ommlds/backends/anthropic/protocol/_marshal.py CHANGED
@@ -8,7 +8,7 @@ from .types import Content
 
 
 @lang.static_init
-def _install_standard_marshalling() -> None:
+def _install_standard_marshaling() -> None:
     for root_cls in [
         Content,
         Content.CacheControl,

ommlds/backends/anthropic/protocol/sse/_marshal.py CHANGED
@@ -8,7 +8,7 @@ from .events import AnthropicSseDecoderEvents
 
 
 @lang.static_init
-def _install_standard_marshalling() -> None:
+def _install_standard_marshaling() -> None:
     for root_cls in [
         AnthropicSseDecoderEvents.Event,
         AnthropicSseDecoderEvents.ContentBlockStart.ContentBlock,

ommlds/backends/anthropic/protocol/sse/assemble.py CHANGED
@@ -76,7 +76,7 @@ class AnthropicSseMessageAssembler(
         elif isinstance(ae, AnthropicSseDecoderEvents.MessageStop):
             yield [Message(
                 id=ms.message.id,
-                role=ms.message.role,
+                role=ms.message.role,  # type: ignore[arg-type]
                 model=ms.message.model,
                 content=content,
                 stop_reason=dct['stop_reason'],

ommlds/backends/anthropic/protocol/types.py CHANGED
@@ -1,3 +1,6 @@
+"""
+https://docs.claude.com/en/api/messages
+"""
 import typing as ta
 
 from omlish import dataclasses as dc
@@ -8,6 +11,22 @@ from omlish import marshal as msh
 ##
 
 
+def _set_class_marshal_options(cls):
+    msh.update_object_metadata(
+        cls,
+        field_defaults=msh.FieldMetadata(
+            options=msh.FieldOptions(
+                omit_if=lang.is_none,
+            ),
+        ),
+    )
+
+    return cls
+
+
+##
+
+
 class Content(lang.Abstract, lang.Sealed):
     class CacheControl(lang.Abstract, lang.Sealed):
         """https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching"""
@@ -18,7 +37,7 @@ class Content(lang.Abstract, lang.Sealed):
 
 
 @dc.dataclass(frozen=True)
-@msh.update_fields_metadata(['cache_control'], omit_if=lang.is_none)
+@_set_class_marshal_options
 class Text(Content):
     text: str
 
@@ -28,7 +47,7 @@ class Text(Content):
 
 
 @dc.dataclass(frozen=True)
-@msh.update_fields_metadata(['cache_control'], omit_if=lang.is_none)
+@_set_class_marshal_options
 class ToolUse(Content):
     id: str
     name: str
@@ -40,6 +59,7 @@ class ToolUse(Content):
 
 
 @dc.dataclass(frozen=True)
+@_set_class_marshal_options
 class ToolResult(Content):
     tool_use_id: str
     content: str
@@ -49,7 +69,7 @@ class ToolResult(Content):
 
 
 @dc.dataclass(frozen=True, kw_only=True)
-@msh.update_object_metadata(field_defaults=msh.FieldMetadata(options=msh.FieldOptions(omit_if=lang.is_none)))
+@_set_class_marshal_options
 class CacheCreation:
     ephemeral_5m_input_tokens: int | None = None
     ephemeral_1h_input_tokens: int | None = None
@@ -59,7 +79,7 @@ class CacheCreation:
 
 
 @dc.dataclass(frozen=True, kw_only=True)
-@msh.update_object_metadata(field_defaults=msh.FieldMetadata(options=msh.FieldOptions(omit_if=lang.is_none)))
+@_set_class_marshal_options
 class Usage:
     input_tokens: int | None = None
     output_tokens: int | None = None
@@ -75,15 +95,15 @@ class Usage:
 
 
 @dc.dataclass(frozen=True, kw_only=True)
-@msh.update_object_metadata(field_defaults=msh.FieldMetadata(options=msh.FieldOptions(omit_if=lang.is_none)))
+@_set_class_marshal_options
 class Message:
     id: str | None = None
 
-    role: str | None = None
+    role: ta.Literal['user', 'assistant']
 
     model: str | None = None
 
-    content: ta.Sequence[Content] | None = None
+    content: str | ta.Sequence[Content] | None = None
 
     stop_reason: str | None = None
     stop_sequence: str | None = None
@@ -95,6 +115,7 @@ class Message:
 
 
 @dc.dataclass(frozen=True)
+@_set_class_marshal_options
 class ToolSpec:
     name: str
     description: str
@@ -105,7 +126,7 @@ class ToolSpec:
 
 
 @dc.dataclass(frozen=True)
-@msh.update_object_metadata(field_defaults=msh.FieldMetadata(options=msh.FieldOptions(omit_if=lang.is_none)))
+@_set_class_marshal_options
 class MessagesRequest:
     model: str
 
@@ -113,7 +134,7 @@ class MessagesRequest:
 
     _: dc.KW_ONLY
 
-    system: ta.Sequence[Content] | None = None
+    system: str | ta.Sequence[Content] | None = None
 
     tools: ta.Sequence[ToolSpec] | None = None
 

ommlds/backends/google/protocol/_marshal.py CHANGED
@@ -8,7 +8,7 @@ from .types import Value
 
 
 @lang.static_init
-def _install_standard_marshalling() -> None:
+def _install_standard_marshaling() -> None:
     msh.install_standard_factories(
         *msh.standard_polymorphism_factories(
             msh.polymorphism_from_subclasses(Value),

ommlds/backends/openai/protocol/_marshal.py CHANGED
@@ -10,7 +10,7 @@ from .chatcompletion.responseformat import ChatCompletionResponseFormat
 
 
 @lang.static_init
-def _install_standard_marshalling() -> None:
+def _install_standard_marshaling() -> None:
     for root_cls, tag_field in [
         (ChatCompletionContentPart, 'type'),
         (ChatCompletionMessage, 'role'),

ommlds/cli/main.py CHANGED
@@ -68,7 +68,7 @@ async def _a_main(args: ta.Any = None) -> None:
 
     parser.add_argument('--enable-fs-tools', action='store_true')
     parser.add_argument('--enable-todo-tools', action='store_true')
-    parser.add_argument('--enable-unsafe-bash-tool', action='store_true')
+    parser.add_argument('--enable-unsafe-tools-do-not-use-lol', action='store_true')
     parser.add_argument('--enable-test-weather-tool', action='store_true')
     parser.add_argument('--dangerous-no-tool-confirmation', action='store_true')
 
@@ -174,7 +174,7 @@ async def _a_main(args: ta.Any = None) -> None:
     tools_config = ToolsConfig(
         enable_fs_tools=args.enable_fs_tools or args.code,
         enable_todo_tools=args.enable_todo_tools or args.code,
-        enable_unsafe_bash_tool=args.enable_unsafe_bash_tool,
+        enable_unsafe_tools_do_not_use_lol=args.enable_unsafe_tools_do_not_use_lol,
         enable_test_weather_tool=args.enable_test_weather_tool,
     )
 

ommlds/cli/sessions/chat/code.py CHANGED
@@ -7,6 +7,7 @@ from omlish import lang
 
 from .... import minichain as mc
 from ....minichain.lib.code.prompts import CODE_AGENT_SYSTEM_PROMPT
+from ...tools.config import ToolsConfig
 from .base import DEFAULT_CHAT_MODEL_BACKEND
 from .base import ChatOptions
 from .base import ChatSession
@@ -43,6 +44,7 @@ class CodeChatSession(ChatSession['CodeChatSession.Config']):
             printer: ChatSessionPrinter,
             backend_catalog: mc.BackendCatalog,
             tool_exec_request_executor: ToolExecRequestExecutor,
+            tools_config: ToolsConfig | None = None,
     ) -> None:
         super().__init__(config)
 
@@ -51,6 +53,7 @@ class CodeChatSession(ChatSession['CodeChatSession.Config']):
         self._printer = printer
         self._backend_catalog = backend_catalog
         self._tool_exec_request_executor = tool_exec_request_executor
+        self._tools_config = tools_config
 
     async def run(self) -> None:
         if self._config.new:
@@ -68,7 +71,11 @@ class CodeChatSession(ChatSession['CodeChatSession.Config']):
 
         # FIXME: lol
         from ....minichain.lib.fs.context import FsContext
-        fs_tool_context = FsContext(root_dir=os.getcwd())
+        fs_tool_context = FsContext(
+            root_dir=os.getcwd(),
+            writes_permitted=self._tools_config is not None and self._tools_config.enable_unsafe_tools_do_not_use_lol,
+        )
+
         from ....minichain.lib.todo.context import TodoContext
         todo_tool_context = TodoContext()
 
@@ -82,7 +89,10 @@ class CodeChatSession(ChatSession['CodeChatSession.Config']):
             if not i and self._config.initial_message is not None:
                 req_msg = mc.UserMessage(self._config.initial_message)
             else:
-                prompt = await ptk.prompt('> ')
+                try:
+                    prompt = await ptk.prompt('> ')
+                except EOFError:
+                    break
                 req_msg = mc.UserMessage(prompt)
 
             state = self._state_manager.extend_chat([req_msg])

ommlds/cli/sessions/chat/printing.py CHANGED
@@ -3,6 +3,7 @@ import typing as ta
 
 from omlish import check
 from omlish import lang
+from omlish.formats import json
 
 from .... import minichain as mc
 
@@ -48,6 +49,9 @@ class StringChatSessionPrinter(ChatSessionPrinter, lang.Abstract):
             else:
                 raise TypeError(obj)
 
+        elif isinstance(obj, mc.JsonContent):
+            self._print_str(json.dumps_pretty(obj.v))
+
         elif isinstance(obj, str):
             self._print_str(obj)
 

ommlds/cli/sessions/chat/tools.py CHANGED
@@ -3,7 +3,6 @@ import typing as ta
 
 from omlish import check
 from omlish import lang
-from omlish import marshal as msh
 from omlish.formats import json
 
 from .... import minichain as mc
@@ -49,7 +48,7 @@ class AskingToolExecutionConfirmation(ToolExecutionConfirmation):
             id=tr.id,
             name=tce.spec.name,
             args=tr.args,
-            spec=msh.marshal(tce.spec),
+            # spec=msh.marshal(tce.spec),
         )
         cr = await ptk.strict_confirm(f'Execute requested tool?\n\n{json.dumps_pretty(tr_dct)}\n\n')
 

ommlds/cli/tools/config.py CHANGED
@@ -9,6 +9,6 @@ class ToolsConfig:
     enable_fs_tools: bool = False
     enable_todo_tools: bool = False
 
-    enable_unsafe_bash_tool: bool = False
+    enable_unsafe_tools_do_not_use_lol: bool = False
 
     enable_test_weather_tool: bool = False

ommlds/cli/tools/inject.py CHANGED
@@ -60,10 +60,13 @@ def bind_tools(tools_config: ToolsConfig) -> inj.Elements:
         from ...minichain.lib.todo.tools.write import todo_write_tool
         els.append(bind_tool(todo_write_tool()))
 
-    if tools_config.enable_unsafe_bash_tool:
+    if tools_config.enable_unsafe_tools_do_not_use_lol:
         from ...minichain.lib.bash import bash_tool
         els.append(bind_tool(bash_tool()))
 
+        from ...minichain.lib.fs.tools.edit import edit_tool
+        els.append(bind_tool(edit_tool()))
+
     if tools_config.enable_test_weather_tool:
         els.append(bind_tool(WEATHER_TOOL))
 

ommlds/minichain/__init__.py CHANGED
@@ -197,8 +197,14 @@ with _lang.auto_proxy_init(
         ImageContent,
     )
 
+    from .content.json import (  # noqa
+        JsonContent,
+    )
+
     from .content.materialize import (  # noqa
         CanContent,
+
+        materialize_content,
     )
 
     from .content.metadata import (  # noqa
@@ -493,6 +499,12 @@ with _lang.auto_proxy_init(
         EnvKey,
     )
 
+    from .json import (  # noqa
+        JsonSchema,
+
+        JsonValue,
+    )
+
     from .metadata import (  # noqa
         Metadata,
 

ommlds/minichain/_marshal.py CHANGED
@@ -1,10 +1,13 @@
 from omlish import dataclasses as dc
+from omlish import lang
 from omlish import marshal as msh
 from omlish import reflect as rfl
 from omlish.funcs import match as mfs
 from omlish.typedvalues.marshal import build_typed_values_marshaler
 from omlish.typedvalues.marshal import build_typed_values_unmarshaler
 
+from .json import JsonValue
+
 
 ##
 
@@ -25,3 +28,39 @@ class _TypedValuesFieldUnmarshalerFactory(msh.UnmarshalerFactoryMatchClass):
     @mfs.simple(lambda _, ctx, rty: True)
     def _build(self, ctx: msh.UnmarshalContext, rty: rfl.Type) -> msh.Unmarshaler:
         return build_typed_values_unmarshaler(ctx, self.tvs_rty)
+
+
+##
+
+
+class MarshalJsonValue(lang.NotInstantiable, lang.Final):
+    pass
+
+
+class _JsonValueMarshalerFactory(msh.MarshalerFactoryMatchClass):
+    @mfs.simple(lambda _, ctx, rty: rty is MarshalJsonValue)
+    def _build(self, ctx: msh.MarshalContext, rty: rfl.Type) -> msh.Marshaler:
+        return msh.NopMarshalerUnmarshaler()
+
+
+class _JsonValueUnmarshalerFactory(msh.UnmarshalerFactoryMatchClass):
+    @mfs.simple(lambda _, ctx, rty: rty is MarshalJsonValue)
+    def _build(self, ctx: msh.UnmarshalContext, rty: rfl.Type) -> msh.Unmarshaler:
+        return msh.NopMarshalerUnmarshaler()
+
+
+##
+
+
+@lang.static_init
+def _install_standard_marshaling() -> None:
+    msh.register_global_config(
+        JsonValue,
+        msh.ReflectOverride(MarshalJsonValue),
+        identity=True,
+    )
+
+    msh.install_standard_factories(
+        _JsonValueMarshalerFactory(),
+        _JsonValueUnmarshalerFactory(),
+    )
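
The added `MarshalJsonValue` factories make `JsonValue` an identity type for marshaling: a value that is already JSON-compatible passes through unchanged in both directions (`msh.NopMarshalerUnmarshaler` plus the `identity=True` registration). A standalone sketch of the idea using only the standard library; the names below are hypothetical and not the minichain API:

```python
import typing as ta

# Values that are already representable as JSON and need no conversion.
JsonValue: ta.TypeAlias = ta.Union[
    None, bool, int, float, str,
    ta.Sequence['JsonValue'],
    ta.Mapping[str, 'JsonValue'],
]


class NopMarshaler:
    """Identity transform: marshal and unmarshal both return the value as given."""

    def marshal(self, v: JsonValue) -> JsonValue:
        return v

    def unmarshal(self, v: JsonValue) -> JsonValue:
        return v
```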

ommlds/minichain/backends/impls/anthropic/chat.py CHANGED
@@ -1,4 +1,5 @@
 """
+https://docs.claude.com/en/api/messages
 https://github.com/anthropics/anthropic-sdk-python/tree/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/types
 https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/resources/completions.py#L53
 https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/resources/messages.py#L70
@@ -6,11 +7,12 @@ https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d15
 import typing as ta
 
 from omlish import check
-from omlish import lang
+from omlish import marshal as msh
 from omlish import typedvalues as tv
 from omlish.formats import json
 from omlish.http import all as http
 
+from .....backends.anthropic.protocol import types as pt
 from ....chat.choices.services import ChatChoicesRequest
 from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
@@ -18,9 +20,14 @@ from ....chat.choices.types import AiChoice
 from ....chat.messages import AiMessage
 from ....chat.messages import Message
 from ....chat.messages import SystemMessage
+from ....chat.messages import ToolExecResultMessage
 from ....chat.messages import UserMessage
+from ....chat.tools.types import Tool
+from ....content.prepare import prepare_content_str
 from ....models.configs import ModelName
 from ....standard import ApiKey
+from ....tools.jsonschema import build_tool_spec_params_json_schema
+from ....tools.types import ToolExecRequest
 from .names import MODEL_NAMES
 
 
@@ -68,26 +75,69 @@ class AnthropicChatChoicesService:
             *,
             max_tokens: int = 4096,  # FIXME: ChatOption
     ) -> ChatChoicesResponse:
-        messages = []
-        system: str | None = None
+        messages: list[pt.Message] = []
+        system: list[pt.Content] | None = None
         for i, m in enumerate(request.v):
             if isinstance(m, SystemMessage):
                 if i != 0 or system is not None:
                     raise Exception('Only supports one system message and must be first')
-                system = self._get_msg_content(m)
+                system = [pt.Text(check.not_none(self._get_msg_content(m)))]
+
+            elif isinstance(m, ToolExecResultMessage):
+                messages.append(pt.Message(
+                    role='user',
+                    content=[pt.ToolResult(
+                        tool_use_id=check.not_none(m.id),
+                        content=json.dumps_compact(msh.marshal(m.c)) if not isinstance(m.c, str) else m.c,
+                    )],
+                ))
+
+            elif isinstance(m, AiMessage):
+                # messages.append(pt.Message(
+                #     role=self.ROLES_MAP[type(m)],  # noqa
+                #     content=[pt.Text(check.isinstance(self._get_msg_content(m), str))],
+                # ))
+                a_tus: list[pt.ToolUse] = []
+                for tr in m.tool_exec_requests or []:
+                    a_tus.append(pt.ToolUse(
+                        id=check.not_none(tr.id),
+                        name=check.not_none(tr.name),
+                        input=tr.args,
+                    ))
+                messages.append(pt.Message(
+                    role='assistant',
+                    content=[
+                        *([pt.Text(check.isinstance(m.c, str))] if m.c is not None else []),
+                        *a_tus,
+                    ],
+                ))
+
             else:
-                messages.append(dict(
-                    role=self.ROLES_MAP[type(m)],  # noqa
-                    content=check.isinstance(self._get_msg_content(m), str),
+                messages.append(pt.Message(
+                    role=self.ROLES_MAP[type(m)],  # type: ignore[arg-type]
+                    content=[pt.Text(check.isinstance(self._get_msg_content(m), str))],
+                ))
+
+        tools: list[pt.ToolSpec] = []
+        with tv.TypedValues(*request.options).consume() as oc:
+            t: Tool
+            for t in oc.pop(Tool, []):
+                tools.append(pt.ToolSpec(
+                    name=check.not_none(t.spec.name),
+                    description=prepare_content_str(t.spec.desc),
+                    input_schema=build_tool_spec_params_json_schema(t.spec),
                 ))
 
-        raw_request = dict(
+        a_req = pt.MessagesRequest(
             model=MODEL_NAMES.resolve(self._model_name.v),
-            **lang.opt_kw(system=system),
+            system=system,
             messages=messages,
+            tools=tools or None,
            max_tokens=max_tokens,
         )
 
+        raw_request = msh.marshal(a_req)
+
         raw_response = http.request(
             'https://api.anthropic.com/v1/messages',
             headers={
@@ -100,6 +150,24 @@ class AnthropicChatChoicesService:
 
         response = json.loads(check.not_none(raw_response.data).decode('utf-8'))
 
+        resp_c: ta.Any = None
+        ters: list[ToolExecRequest] = []
+        for c in response['content']:
+            if c['type'] == 'text':
+                check.none(resp_c)
+                resp_c = check.not_none(c['text'])
+            elif c['type'] == 'tool_use':
+                ters.append(ToolExecRequest(
+                    id=c['id'],
+                    name=c['name'],
+                    args=c['input'],
+                ))
+            else:
+                raise TypeError(c['type'])
+
         return ChatChoicesResponse([
-            AiChoice(AiMessage(response['content'][0]['text'])),  # noqa
+            AiChoice(AiMessage(
+                resp_c,
+                tool_exec_requests=ters if ters else None,
+            )),
         ])
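
The rewritten response handling above walks every block in the Anthropic `content` array instead of taking only the first one, so a single reply can carry text alongside tool calls. A rough illustration of the shape the loop consumes; the block types follow the Messages API, the values are invented:

```python
# Abridged example of a raw /v1/messages response body (values are made up).
response = {
    'content': [
        {'type': 'text', 'text': 'Let me look that up.'},
        {'type': 'tool_use', 'id': 'toolu_01', 'name': 'read_file', 'input': {'path': 'README.md'}},
    ],
}

# Mirrors the dispatch in the diff: at most one text block, any number of tool calls.
resp_text = None
tool_calls = []
for block in response['content']:
    if block['type'] == 'text':
        resp_text = block['text']
    elif block['type'] == 'tool_use':
        tool_calls.append((block['id'], block['name'], block['input']))
```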

ommlds/minichain/backends/impls/google/chat.py CHANGED
@@ -20,6 +20,7 @@ from ....chat.messages import SystemMessage
 from ....chat.messages import ToolExecResultMessage
 from ....chat.messages import UserMessage
 from ....chat.tools.types import Tool
+from ....content.types import Content
 from ....models.configs import ModelName
 from ....standard import ApiKey
 from ....tools.types import ToolExecRequest
@@ -59,7 +60,7 @@ class GoogleChatChoicesService:
 
     ROLES_MAP: ta.ClassVar[ta.Mapping[type[Message], str]] = {
         UserMessage: 'user',
-        AiMessage: 'assistant',
+        AiMessage: 'model',
     }
 
     async def invoke(
@@ -79,6 +80,7 @@ class GoogleChatChoicesService:
                         text=check.not_none(self._get_msg_content(m)),
                     )],
                 )
+
             elif isinstance(m, ToolExecResultMessage):
                 tr_resp_val: pt.Value
                 if m.c is None:
@@ -98,6 +100,26 @@ class GoogleChatChoicesService:
                         ),
                     )],
                 ))
+
+            elif isinstance(m, AiMessage):
+                ai_parts: list[pt.Part] = []
+                if m.c is not None:
+                    ai_parts.append(pt.Part(
+                        text=check.not_none(self._get_msg_content(m)),
+                    ))
+                for teq in m.tool_exec_requests or []:
+                    ai_parts.append(pt.Part(
+                        function_call=pt.FunctionCall(
+                            id=teq.id,
+                            name=teq.name,
+                            args=teq.args,
+                        ),
+                    ))
+                g_contents.append(pt.Content(
+                    parts=ai_parts,
+                    role='model',
+                ))
+
             else:
                 g_contents.append(pt.Content(
                     parts=[pt.Part(
@@ -137,17 +159,23 @@ class GoogleChatChoicesService:
 
         ai_choices: list[AiChoice] = []
         for c in g_resp.candidates or []:
-            g_resp_part = check.single(check.not_none(check.not_none(c.content).parts))
-            ter: ToolExecRequest | None = None
-            if (g_fc := g_resp_part.function_call) is not None:
-                ter = ToolExecRequest(
-                    id=g_fc.id,
-                    name=g_fc.name,
-                    args=g_fc.args or {},
-                )
+            ai_c: Content | None = None
+            ters: list[ToolExecRequest] = []
+            for g_resp_part in check.not_none(check.not_none(c.content).parts):
+                if (g_txt := g_resp_part.text) is not None:
+                    check.none(ai_c)
+                    ai_c = g_txt
+                elif (g_fc := g_resp_part.function_call) is not None:
+                    ters.append(ToolExecRequest(
+                        id=g_fc.id,
+                        name=g_fc.name,
+                        args=g_fc.args or {},
+                    ))
+                else:
+                    raise TypeError(g_resp_part)
             ai_choices.append(AiChoice(AiMessage(
-                c=g_resp_part.text,
-                tool_exec_requests=[ter] if ter is not None else None,
+                c=ai_c,
+                tool_exec_requests=ters if ters else None,
             )))
 
         return ChatChoicesResponse(ai_choices)
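
The Google backend change is analogous: every part of a candidate is now inspected, so replies that mix text with one or more function calls are preserved, and assistant turns are replayed with the `model` role. A rough sketch of the candidate shape being handled; field names follow the Gemini REST response, values are made up:

```python
# Hypothetical candidate content with both a text part and a function-call part.
candidate_content = {
    'role': 'model',
    'parts': [
        {'text': 'Checking the forecast now.'},
        {'functionCall': {'name': 'get_weather', 'args': {'city': 'Paris'}}},
    ],
}

text = None
calls = []
for part in candidate_content['parts']:
    if 'text' in part:
        text = part['text']
    elif 'functionCall' in part:
        fc = part['functionCall']
        calls.append((fc['name'], fc['args']))
```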

ommlds/minichain/chat/_marshal.py CHANGED
@@ -12,7 +12,7 @@ from .messages import Message
 
 
 @lang.static_init
-def _install_standard_marshalling() -> None:
+def _install_standard_marshaling() -> None:
     msgs_poly = msh.polymorphism_from_subclasses(
         Message,
         naming=msh.Naming.SNAKE,