ommlds-0.0.0.dev426-py3-none-any.whl → ommlds-0.0.0.dev485-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
- ommlds/.omlish-manifests.json +336 -39
- ommlds/__about__.py +16 -10
- ommlds/_hacks/__init__.py +4 -0
- ommlds/_hacks/funcs.py +110 -0
- ommlds/_hacks/names.py +158 -0
- ommlds/_hacks/params.py +73 -0
- ommlds/_hacks/patches.py +0 -3
- ommlds/backends/anthropic/protocol/__init__.py +13 -1
- ommlds/backends/anthropic/protocol/_dataclasses.py +1625 -0
- ommlds/backends/anthropic/protocol/_marshal.py +2 -2
- ommlds/backends/anthropic/protocol/sse/_marshal.py +1 -1
- ommlds/backends/anthropic/protocol/sse/assemble.py +23 -7
- ommlds/backends/anthropic/protocol/sse/events.py +13 -0
- ommlds/backends/anthropic/protocol/types.py +40 -8
- ommlds/backends/google/protocol/__init__.py +16 -0
- ommlds/backends/google/protocol/_dataclasses.py +5997 -0
- ommlds/backends/google/protocol/_marshal.py +16 -0
- ommlds/backends/google/protocol/types.py +626 -0
- ommlds/backends/groq/__init__.py +7 -0
- ommlds/backends/groq/_dataclasses.py +3901 -0
- ommlds/backends/groq/_marshal.py +23 -0
- ommlds/backends/groq/protocol.py +249 -0
- ommlds/backends/llamacpp/logging.py +4 -1
- ommlds/backends/mlx/caching.py +7 -3
- ommlds/backends/mlx/cli.py +10 -7
- ommlds/backends/mlx/generation.py +19 -17
- ommlds/backends/mlx/limits.py +10 -6
- ommlds/backends/mlx/loading.py +65 -5
- ommlds/backends/ollama/__init__.py +7 -0
- ommlds/backends/ollama/_dataclasses.py +3458 -0
- ommlds/backends/ollama/protocol.py +170 -0
- ommlds/backends/openai/protocol/__init__.py +24 -29
- ommlds/backends/openai/protocol/_common.py +18 -0
- ommlds/backends/openai/protocol/_dataclasses.py +7708 -0
- ommlds/backends/openai/protocol/_marshal.py +27 -0
- ommlds/backends/openai/protocol/chatcompletion/chunk.py +58 -31
- ommlds/backends/openai/protocol/chatcompletion/contentpart.py +49 -44
- ommlds/backends/openai/protocol/chatcompletion/message.py +55 -43
- ommlds/backends/openai/protocol/chatcompletion/request.py +114 -66
- ommlds/backends/openai/protocol/chatcompletion/response.py +71 -45
- ommlds/backends/openai/protocol/chatcompletion/responseformat.py +27 -20
- ommlds/backends/openai/protocol/chatcompletion/tokenlogprob.py +16 -7
- ommlds/backends/openai/protocol/completionusage.py +24 -15
- ommlds/backends/tavily/__init__.py +7 -0
- ommlds/backends/tavily/_dataclasses.py +1734 -0
- ommlds/backends/tavily/protocol.py +301 -0
- ommlds/backends/tinygrad/models/llama3/__init__.py +22 -14
- ommlds/backends/transformers/__init__.py +14 -0
- ommlds/backends/transformers/filecache.py +109 -0
- ommlds/backends/transformers/streamers.py +73 -0
- ommlds/cli/__init__.py +7 -0
- ommlds/cli/_dataclasses.py +2562 -0
- ommlds/cli/asyncs.py +30 -0
- ommlds/cli/backends/catalog.py +93 -0
- ommlds/cli/backends/configs.py +9 -0
- ommlds/cli/backends/inject.py +31 -36
- ommlds/cli/backends/injection.py +16 -0
- ommlds/cli/backends/types.py +46 -0
- ommlds/cli/content/messages.py +34 -0
- ommlds/cli/content/strings.py +42 -0
- ommlds/cli/inject.py +17 -32
- ommlds/cli/inputs/__init__.py +0 -0
- ommlds/cli/inputs/asyncs.py +32 -0
- ommlds/cli/inputs/sync.py +75 -0
- ommlds/cli/main.py +270 -110
- ommlds/cli/rendering/__init__.py +0 -0
- ommlds/cli/rendering/configs.py +9 -0
- ommlds/cli/rendering/inject.py +31 -0
- ommlds/cli/rendering/markdown.py +52 -0
- ommlds/cli/rendering/raw.py +73 -0
- ommlds/cli/rendering/types.py +21 -0
- ommlds/cli/secrets.py +21 -0
- ommlds/cli/sessions/base.py +1 -1
- ommlds/cli/sessions/chat/chat/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/ai/inject.py +74 -0
- ommlds/cli/sessions/chat/chat/ai/injection.py +14 -0
- ommlds/cli/sessions/chat/chat/ai/rendering.py +70 -0
- ommlds/cli/sessions/chat/chat/ai/services.py +79 -0
- ommlds/cli/sessions/chat/chat/ai/tools.py +44 -0
- ommlds/cli/sessions/chat/chat/ai/types.py +28 -0
- ommlds/cli/sessions/chat/chat/state/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/state/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/state/inject.py +36 -0
- ommlds/cli/sessions/chat/chat/state/inmemory.py +33 -0
- ommlds/cli/sessions/chat/chat/state/storage.py +52 -0
- ommlds/cli/sessions/chat/chat/state/types.py +38 -0
- ommlds/cli/sessions/chat/chat/user/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/user/configs.py +17 -0
- ommlds/cli/sessions/chat/chat/user/inject.py +62 -0
- ommlds/cli/sessions/chat/chat/user/interactive.py +31 -0
- ommlds/cli/sessions/chat/chat/user/oneshot.py +25 -0
- ommlds/cli/sessions/chat/chat/user/types.py +15 -0
- ommlds/cli/sessions/chat/configs.py +27 -0
- ommlds/cli/sessions/chat/driver.py +43 -0
- ommlds/cli/sessions/chat/inject.py +33 -65
- ommlds/cli/sessions/chat/phases/__init__.py +0 -0
- ommlds/cli/sessions/chat/phases/inject.py +27 -0
- ommlds/cli/sessions/chat/phases/injection.py +14 -0
- ommlds/cli/sessions/chat/phases/manager.py +29 -0
- ommlds/cli/sessions/chat/phases/types.py +29 -0
- ommlds/cli/sessions/chat/session.py +27 -0
- ommlds/cli/sessions/chat/tools/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/configs.py +22 -0
- ommlds/cli/sessions/chat/tools/confirmation.py +46 -0
- ommlds/cli/sessions/chat/tools/execution.py +66 -0
- ommlds/cli/sessions/chat/tools/fs/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/fs/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/fs/inject.py +35 -0
- ommlds/cli/sessions/chat/tools/inject.py +88 -0
- ommlds/cli/sessions/chat/tools/injection.py +44 -0
- ommlds/cli/sessions/chat/tools/rendering.py +58 -0
- ommlds/cli/sessions/chat/tools/todo/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/todo/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/todo/inject.py +31 -0
- ommlds/cli/sessions/chat/tools/weather/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/weather/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/weather/inject.py +22 -0
- ommlds/cli/{tools/weather.py → sessions/chat/tools/weather/tools.py} +1 -1
- ommlds/cli/sessions/completion/configs.py +21 -0
- ommlds/cli/sessions/completion/inject.py +42 -0
- ommlds/cli/sessions/completion/session.py +35 -0
- ommlds/cli/sessions/embedding/configs.py +21 -0
- ommlds/cli/sessions/embedding/inject.py +42 -0
- ommlds/cli/sessions/embedding/session.py +33 -0
- ommlds/cli/sessions/inject.py +28 -11
- ommlds/cli/state/__init__.py +0 -0
- ommlds/cli/state/inject.py +28 -0
- ommlds/cli/{state.py → state/storage.py} +41 -24
- ommlds/minichain/__init__.py +84 -24
- ommlds/minichain/_dataclasses.py +15401 -0
- ommlds/minichain/_marshal.py +49 -9
- ommlds/minichain/_typedvalues.py +2 -4
- ommlds/minichain/backends/catalogs/base.py +20 -1
- ommlds/minichain/backends/catalogs/simple.py +2 -2
- ommlds/minichain/backends/catalogs/strings.py +10 -8
- ommlds/minichain/backends/impls/anthropic/chat.py +65 -27
- ommlds/minichain/backends/impls/anthropic/names.py +10 -8
- ommlds/minichain/backends/impls/anthropic/protocol.py +109 -0
- ommlds/minichain/backends/impls/anthropic/stream.py +111 -43
- ommlds/minichain/backends/impls/duckduckgo/search.py +6 -2
- ommlds/minichain/backends/impls/dummy/__init__.py +0 -0
- ommlds/minichain/backends/impls/dummy/chat.py +69 -0
- ommlds/minichain/backends/impls/google/chat.py +114 -22
- ommlds/minichain/backends/impls/google/search.py +7 -2
- ommlds/minichain/backends/impls/google/stream.py +219 -0
- ommlds/minichain/backends/impls/google/tools.py +149 -0
- ommlds/minichain/backends/impls/groq/__init__.py +0 -0
- ommlds/minichain/backends/impls/groq/chat.py +75 -0
- ommlds/minichain/backends/impls/groq/names.py +48 -0
- ommlds/minichain/backends/impls/groq/protocol.py +143 -0
- ommlds/minichain/backends/impls/groq/stream.py +125 -0
- ommlds/minichain/backends/impls/huggingface/repos.py +1 -5
- ommlds/minichain/backends/impls/llamacpp/chat.py +40 -22
- ommlds/minichain/backends/impls/llamacpp/completion.py +9 -5
- ommlds/minichain/backends/impls/llamacpp/format.py +4 -2
- ommlds/minichain/backends/impls/llamacpp/stream.py +43 -23
- ommlds/minichain/backends/impls/mistral.py +20 -5
- ommlds/minichain/backends/impls/mlx/chat.py +101 -24
- ommlds/minichain/backends/impls/ollama/__init__.py +0 -0
- ommlds/minichain/backends/impls/ollama/chat.py +199 -0
- ommlds/minichain/backends/impls/openai/chat.py +18 -8
- ommlds/minichain/backends/impls/openai/completion.py +10 -3
- ommlds/minichain/backends/impls/openai/embedding.py +10 -3
- ommlds/minichain/backends/impls/openai/format.py +131 -106
- ommlds/minichain/backends/impls/openai/names.py +31 -5
- ommlds/minichain/backends/impls/openai/stream.py +43 -25
- ommlds/minichain/backends/impls/sentencepiece/tokens.py +9 -6
- ommlds/minichain/backends/impls/tavily.py +66 -0
- ommlds/minichain/backends/impls/tinygrad/chat.py +30 -20
- ommlds/minichain/backends/impls/tokenizers/tokens.py +9 -6
- ommlds/minichain/backends/impls/transformers/sentence.py +6 -3
- ommlds/minichain/backends/impls/transformers/tokens.py +10 -7
- ommlds/minichain/backends/impls/transformers/transformers.py +160 -37
- ommlds/minichain/backends/strings/parsing.py +1 -1
- ommlds/minichain/backends/strings/resolving.py +4 -1
- ommlds/minichain/chat/_marshal.py +16 -9
- ommlds/minichain/chat/choices/adapters.py +4 -4
- ommlds/minichain/chat/choices/services.py +1 -1
- ommlds/minichain/chat/choices/stream/__init__.py +0 -0
- ommlds/minichain/chat/choices/stream/adapters.py +35 -0
- ommlds/minichain/chat/choices/stream/joining.py +31 -0
- ommlds/minichain/chat/choices/stream/services.py +45 -0
- ommlds/minichain/chat/choices/stream/types.py +43 -0
- ommlds/minichain/chat/choices/types.py +2 -2
- ommlds/minichain/chat/history.py +3 -3
- ommlds/minichain/chat/messages.py +55 -19
- ommlds/minichain/chat/services.py +3 -3
- ommlds/minichain/chat/stream/_marshal.py +16 -0
- ommlds/minichain/chat/stream/joining.py +85 -0
- ommlds/minichain/chat/stream/services.py +15 -21
- ommlds/minichain/chat/stream/types.py +32 -19
- ommlds/minichain/chat/tools/execution.py +8 -7
- ommlds/minichain/chat/tools/ids.py +9 -15
- ommlds/minichain/chat/tools/parsing.py +17 -26
- ommlds/minichain/chat/transforms/base.py +29 -38
- ommlds/minichain/chat/transforms/metadata.py +30 -4
- ommlds/minichain/chat/transforms/services.py +9 -11
- ommlds/minichain/content/_marshal.py +44 -20
- ommlds/minichain/content/json.py +13 -0
- ommlds/minichain/content/materialize.py +14 -21
- ommlds/minichain/content/prepare.py +4 -0
- ommlds/minichain/content/transforms/interleave.py +1 -1
- ommlds/minichain/content/transforms/squeeze.py +1 -1
- ommlds/minichain/content/transforms/stringify.py +1 -1
- ommlds/minichain/json.py +20 -0
- ommlds/minichain/lib/code/__init__.py +0 -0
- ommlds/minichain/lib/code/prompts.py +6 -0
- ommlds/minichain/lib/fs/binfiles.py +108 -0
- ommlds/minichain/lib/fs/context.py +126 -0
- ommlds/minichain/lib/fs/errors.py +101 -0
- ommlds/minichain/lib/fs/suggestions.py +36 -0
- ommlds/minichain/lib/fs/tools/__init__.py +0 -0
- ommlds/minichain/lib/fs/tools/edit.py +104 -0
- ommlds/minichain/lib/fs/tools/ls.py +38 -0
- ommlds/minichain/lib/fs/tools/read.py +115 -0
- ommlds/minichain/lib/fs/tools/recursivels/__init__.py +0 -0
- ommlds/minichain/lib/fs/tools/recursivels/execution.py +40 -0
- ommlds/minichain/lib/todo/__init__.py +0 -0
- ommlds/minichain/lib/todo/context.py +54 -0
- ommlds/minichain/lib/todo/tools/__init__.py +0 -0
- ommlds/minichain/lib/todo/tools/read.py +44 -0
- ommlds/minichain/lib/todo/tools/write.py +335 -0
- ommlds/minichain/lib/todo/types.py +60 -0
- ommlds/minichain/llms/_marshal.py +25 -17
- ommlds/minichain/llms/types.py +4 -0
- ommlds/minichain/registries/globals.py +18 -4
- ommlds/minichain/resources.py +68 -45
- ommlds/minichain/search.py +1 -1
- ommlds/minichain/services/_marshal.py +46 -39
- ommlds/minichain/services/facades.py +3 -3
- ommlds/minichain/services/services.py +1 -1
- ommlds/minichain/standard.py +8 -0
- ommlds/minichain/stream/services.py +152 -38
- ommlds/minichain/stream/wrap.py +22 -24
- ommlds/minichain/text/toolparsing/llamacpp/hermes2.py +3 -2
- ommlds/minichain/text/toolparsing/llamacpp/llama31.py +3 -2
- ommlds/minichain/text/toolparsing/llamacpp/utils.py +3 -2
- ommlds/minichain/tools/_marshal.py +1 -1
- ommlds/minichain/tools/execution/catalog.py +2 -1
- ommlds/minichain/tools/execution/context.py +34 -14
- ommlds/minichain/tools/execution/errors.py +15 -0
- ommlds/minichain/tools/execution/executors.py +8 -3
- ommlds/minichain/tools/execution/reflect.py +40 -5
- ommlds/minichain/tools/fns.py +46 -9
- ommlds/minichain/tools/jsonschema.py +14 -5
- ommlds/minichain/tools/reflect.py +54 -18
- ommlds/minichain/tools/types.py +33 -1
- ommlds/minichain/utils.py +27 -0
- ommlds/minichain/vectors/_marshal.py +11 -10
- ommlds/minichain/vectors/types.py +1 -1
- ommlds/nanochat/LICENSE +21 -0
- ommlds/nanochat/__init__.py +0 -0
- ommlds/nanochat/rustbpe/LICENSE +21 -0
- ommlds/nanochat/tokenizers.py +406 -0
- ommlds/server/cli.py +1 -2
- ommlds/server/server.py +5 -5
- ommlds/server/service.py +1 -1
- ommlds/specs/__init__.py +0 -0
- ommlds/specs/mcp/__init__.py +0 -0
- ommlds/specs/mcp/_marshal.py +23 -0
- ommlds/specs/mcp/clients.py +146 -0
- ommlds/specs/mcp/protocol.py +371 -0
- ommlds/tools/git.py +35 -12
- ommlds/tools/ocr.py +8 -9
- ommlds/wiki/analyze.py +6 -7
- ommlds/wiki/text/mfh.py +1 -5
- ommlds/wiki/text/wtp.py +1 -3
- ommlds/wiki/utils/xml.py +5 -5
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/METADATA +24 -21
- ommlds-0.0.0.dev485.dist-info/RECORD +436 -0
- ommlds/cli/backends/standard.py +0 -20
- ommlds/cli/sessions/chat/base.py +0 -42
- ommlds/cli/sessions/chat/interactive.py +0 -73
- ommlds/cli/sessions/chat/printing.py +0 -96
- ommlds/cli/sessions/chat/prompt.py +0 -143
- ommlds/cli/sessions/chat/state.py +0 -109
- ommlds/cli/sessions/chat/tools.py +0 -91
- ommlds/cli/sessions/completion/completion.py +0 -44
- ommlds/cli/sessions/embedding/embedding.py +0 -42
- ommlds/cli/tools/config.py +0 -13
- ommlds/cli/tools/inject.py +0 -64
- ommlds/minichain/chat/stream/adapters.py +0 -69
- ommlds/minichain/lib/fs/ls/execution.py +0 -32
- ommlds-0.0.0.dev426.dist-info/RECORD +0 -303
- /ommlds/{cli/tools → backends/google}/__init__.py +0 -0
- /ommlds/{huggingface.py → backends/huggingface.py} +0 -0
- /ommlds/{minichain/lib/fs/ls → cli/content}/__init__.py +0 -0
- /ommlds/minichain/lib/fs/{ls → tools/recursivels}/rendering.py +0 -0
- /ommlds/minichain/lib/fs/{ls → tools/recursivels}/running.py +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/top_level.txt +0 -0
ommlds/minichain/_marshal.py
CHANGED
@@ -1,27 +1,67 @@
+import typing as ta
+
 from omlish import dataclasses as dc
+from omlish import lang
 from omlish import marshal as msh
 from omlish import reflect as rfl
-from omlish.funcs import match as mfs
 from omlish.typedvalues.marshal import build_typed_values_marshaler
 from omlish.typedvalues.marshal import build_typed_values_unmarshaler

+from .json import JsonValue
+

 ##


 @dc.dataclass()
-class _TypedValuesFieldMarshalerFactory(msh.
+class _TypedValuesFieldMarshalerFactory(msh.MarshalerFactory):
     tvs_rty: rfl.Type

-
-
-        return build_typed_values_marshaler(ctx, self.tvs_rty)
+    def make_marshaler(self, ctx: msh.MarshalFactoryContext, rty: rfl.Type) -> ta.Callable[[], msh.Marshaler] | None:
+        return lambda: build_typed_values_marshaler(ctx, self.tvs_rty)


 @dc.dataclass()
-class _TypedValuesFieldUnmarshalerFactory(msh.
+class _TypedValuesFieldUnmarshalerFactory(msh.UnmarshalerFactory):
     tvs_rty: rfl.Type

-
-
-
+    def make_unmarshaler(self, ctx: msh.UnmarshalFactoryContext, rty: rfl.Type) -> ta.Callable[[], msh.Unmarshaler] | None:  # noqa
+        return lambda: build_typed_values_unmarshaler(ctx, self.tvs_rty)
+
+
+##
+
+
+class MarshalJsonValue(lang.NotInstantiable, lang.Final):
+    pass
+
+
+class _JsonValueMarshalerFactory(msh.MarshalerFactory):
+    def make_marshaler(self, ctx: msh.MarshalFactoryContext, rty: rfl.Type) -> ta.Callable[[], msh.Marshaler] | None:
+        if rty is not MarshalJsonValue:
+            return None
+        return lambda: msh.NopMarshalerUnmarshaler()
+
+
+class _JsonValueUnmarshalerFactory(msh.UnmarshalerFactory):
+    def make_unmarshaler(self, ctx: msh.UnmarshalFactoryContext, rty: rfl.Type) -> ta.Callable[[], msh.Unmarshaler] | None:  # noqa
+        if rty is not MarshalJsonValue:
+            return None
+        return lambda: msh.NopMarshalerUnmarshaler()
+
+
+##
+
+
+@lang.static_init
+def _install_standard_marshaling() -> None:
+    msh.register_global_config(
+        JsonValue,
+        msh.ReflectOverride(MarshalJsonValue),
+        identity=True,
+    )
+
+    msh.install_standard_factories(
+        _JsonValueMarshalerFactory(),
+        _JsonValueUnmarshalerFactory(),
+    )
ommlds/minichain/_typedvalues.py
CHANGED
@@ -74,12 +74,10 @@ def _tv_field_metadata(
                 omit_if=operator.not_,
             ),
             marshaler_factory=msh.FuncMarshalerFactory(
-                lambda ctx, rty:
-                lambda ctx, rty: _marshal._TypedValuesFieldMarshalerFactory(tvs_rty)(ctx, rty),  # noqa
+                lambda ctx, rty: _marshal._TypedValuesFieldMarshalerFactory(tvs_rty).make_marshaler(ctx, rty),  # noqa
             ),
             unmarshaler_factory=msh.FuncUnmarshalerFactory(
-                lambda ctx, rty:
-                lambda ctx, rty: _marshal._TypedValuesFieldUnmarshalerFactory(tvs_rty)(ctx, rty),  # noqa
+                lambda ctx, rty: _marshal._TypedValuesFieldUnmarshalerFactory(tvs_rty).make_unmarshaler(ctx, rty),  # noqa
             ),
         ),
     }
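The `_marshal.py` and `_typedvalues.py` changes above track an omlish marshal API shift: factory classes now subclass `msh.MarshalerFactory` / `msh.UnmarshalerFactory` and implement `make_marshaler` / `make_unmarshaler`, which either decline a type by returning `None` or return a zero-argument thunk that builds the (un)marshaler, and callers now invoke those methods explicitly. A minimal sketch of that shape, using only names that appear in this diff (the `_Sentinel` type and factory class are illustrative, not part of the package):

```python
import typing as ta

from omlish import marshal as msh
from omlish import reflect as rfl


class _Sentinel:  # illustrative marker type, standing in for MarshalJsonValue above
    pass


class _SentinelMarshalerFactory(msh.MarshalerFactory):
    # Decline types this factory does not handle by returning None; otherwise
    # return a zero-arg callable that lazily builds the Marshaler.
    def make_marshaler(self, ctx: msh.MarshalFactoryContext, rty: rfl.Type) -> ta.Callable[[], msh.Marshaler] | None:
        if rty is not _Sentinel:
            return None
        return lambda: msh.NopMarshalerUnmarshaler()


# Registered the same way _install_standard_marshaling() does above.
msh.install_standard_factories(_SentinelMarshalerFactory())
```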
ommlds/minichain/backends/catalogs/base.py
CHANGED
@@ -3,15 +3,34 @@ import typing as ta

 from omlish import lang

+from ...configs import Config
+
+
+T = ta.TypeVar('T')
+

 ##


 class BackendCatalog(lang.Abstract):
+    class Backend(ta.NamedTuple):
+        factory: ta.Callable[..., ta.Any]
+        configs: ta.Sequence[Config] | None
+
     @abc.abstractmethod
-    def get_backend(self, service_cls:
+    def get_backend(self, service_cls: type[T], name: str) -> Backend:
         raise NotImplementedError

+    def new_backend(
+            self,
+            service_cls: ta.Any,
+            name: str,
+            *args: ta.Any,
+            **kwargs: ta.Any,
+    ) -> ta.Any:
+        be = self.get_backend(service_cls, name)
+        return be.factory(*be.configs or [], *args, **kwargs)
+
     # #
     #
     # class Bound(lang.Final, ta.Generic[T]):
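`get_backend()` now returns a `BackendCatalog.Backend` pair of factory plus optional default configs, and the new `new_backend()` helper splats those configs ahead of any caller-supplied arguments. A hedged sketch of that calling convention against the interface shown above (the `'echo'` entry and lambda factory are made up for illustration):

```python
import typing as ta

from ommlds.minichain.backends.catalogs.base import BackendCatalog


class _StaticCatalog(BackendCatalog):
    # A toy catalog with a single hard-coded entry and no default configs.
    def get_backend(self, service_cls: ta.Any, name: str) -> BackendCatalog.Backend:
        if name != 'echo':
            raise KeyError(name)
        return BackendCatalog.Backend(lambda *args, **kwargs: (args, kwargs), None)


# new_backend() resolves the entry, then calls factory(*configs or [], *args, **kwargs).
backend = _StaticCatalog().new_backend(object, 'echo', 'extra-arg', flag=True)
```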
ommlds/minichain/backends/catalogs/simple.py
CHANGED
@@ -40,9 +40,9 @@ class SimpleBackendCatalog(BackendCatalog):
             sc_dct[e.name] = e
         self._dct = dct

-    def get_backend(self, service_cls: ta.Any, name: str, *args: ta.Any, **kwargs: ta.Any) ->
+    def get_backend(self, service_cls: ta.Any, name: str, *args: ta.Any, **kwargs: ta.Any) -> BackendCatalog.Backend:
         e = self._dct[service_cls][name]
-        return e.factory_fn
+        return BackendCatalog.Backend(e.factory_fn, None)


 ##
ommlds/minichain/backends/catalogs/strings.py
CHANGED
@@ -5,7 +5,7 @@ from omlish import check
 from ...models.configs import ModelPath
 from ...models.configs import ModelRepo
 from ...models.repos.resolving import ModelRepoResolver
-from ...registries.globals import
+from ...registries.globals import get_registry_cls
 from ..strings.parsing import parse_backend_string
 from ..strings.resolving import BackendStringResolver
 from ..strings.resolving import ResolveBackendStringArgs
@@ -30,26 +30,28 @@ class BackendStringBackendCatalog(BackendCatalog):
         self._string_resolver = string_resolver
         self._model_repo_resolver = model_repo_resolver

-    def get_backend(self, service_cls: ta.Any, name: str, *args: ta.Any, **kwargs: ta.Any) ->
+    def get_backend(self, service_cls: ta.Any, name: str, *args: ta.Any, **kwargs: ta.Any) -> BackendCatalog.Backend:
         ps = parse_backend_string(name)
         rs = check.not_none(self._string_resolver.resolve_backend_string(ResolveBackendStringArgs(
             service_cls,
             ps,
         )))

-        al = list(rs.args or [])
+        al: list = list(rs.args or [])

-        # FIXME: lol
+        # FIXME: lol - move *into* local model classes as an injected dep?
         if al and isinstance(al[0], ModelRepo):
             [mr] = al
             mrr = check.not_none(self._model_repo_resolver)
             mrp = check.not_none(mrr.resolve(mr))
             al = [ModelPath(mrp.path), *al[1:]]

-
+        cls = get_registry_cls(
             service_cls,
             rs.name,
-
-
-
+        )
+
+        return BackendCatalog.Backend(
+            cls,
+            al,
         )
ommlds/minichain/backends/impls/anthropic/chat.py
CHANGED
@@ -1,4 +1,5 @@
 """
+https://docs.claude.com/en/api/messages
 https://github.com/anthropics/anthropic-sdk-python/tree/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/types
 https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/resources/completions.py#L53
 https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d155da31b898a4c6ee5/src/anthropic/resources/messages.py#L70
@@ -6,22 +7,33 @@ https://github.com/anthropics/anthropic-sdk-python/blob/cd80d46f7a223a5493565d15
 import typing as ta

 from omlish import check
-from omlish import
+from omlish import marshal as msh
 from omlish import typedvalues as tv
 from omlish.formats import json
 from omlish.http import all as http

+from .....backends.anthropic.protocol import types as pt
 from ....chat.choices.services import ChatChoicesRequest
 from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
 from ....chat.choices.types import AiChoice
+from ....chat.choices.types import ChatChoicesOptions
 from ....chat.messages import AiMessage
+from ....chat.messages import AnyAiMessage
 from ....chat.messages import Message
 from ....chat.messages import SystemMessage
+from ....chat.messages import ToolUseMessage
 from ....chat.messages import UserMessage
+from ....chat.tools.types import Tool
+from ....llms.types import MaxTokens
+from ....llms.types import Temperature
 from ....models.configs import ModelName
 from ....standard import ApiKey
+from ....tools.types import ToolUse
+from ....types import Option
 from .names import MODEL_NAMES
+from .protocol import build_protocol_chat_messages
+from .protocol import build_protocol_tool


 ##
@@ -35,18 +47,15 @@ from .names import MODEL_NAMES
 class AnthropicChatChoicesService:
     DEFAULT_MODEL_NAME: ta.ClassVar[ModelName] = ModelName(check.not_none(MODEL_NAMES.default))

-    ROLES_MAP: ta.ClassVar[ta.Mapping[type[Message], str]] = {
-        SystemMessage: 'system',
-        UserMessage: 'user',
-        AiMessage: 'assistant',
-    }
-
     def __init__(
             self,
             *configs: ApiKey | ModelName,
+            http_client: http.AsyncHttpClient | None = None,
     ) -> None:
         super().__init__()

+        self._http_client = http_client
+
         with tv.consume(*configs) as cc:
             self._api_key = check.not_none(ApiKey.pop_secret(cc, env='ANTHROPIC_API_KEY'))
             self._model_name = cc.pop(self.DEFAULT_MODEL_NAME)
@@ -62,33 +71,46 @@ class AnthropicChatChoicesService:
         else:
             raise TypeError(m)

-
+    DEFAULT_OPTIONS: ta.ClassVar[tv.TypedValues[Option]] = tv.TypedValues[Option](
+        MaxTokens(4096),
+    )
+
+    _OPTION_KWARG_NAMES_MAP: ta.ClassVar[ta.Mapping[str, type[ChatChoicesOptions]]] = dict(
+        temperature=Temperature,
+        max_tokens=MaxTokens,
+    )
+
+    async def invoke(
         self,
         request: ChatChoicesRequest,
-        *,
-        max_tokens: int = 4096,  # FIXME: ChatOption
     ) -> ChatChoicesResponse:
-        messages =
-
-
-            if isinstance(m, SystemMessage):
-                if i != 0 or system is not None:
-                    raise Exception('Only supports one system message and must be first')
-                system = self._get_msg_content(m)
-            else:
-                messages.append(dict(
-                    role=self.ROLES_MAP[type(m)],  # noqa
-                    content=check.isinstance(self._get_msg_content(m), str),
-                ))
+        messages, system = build_protocol_chat_messages(request.v)
+
+        kwargs: dict = dict()

-
+        tools: list[pt.ToolSpec] = []
+        with tv.TypedValues(
+            *self.DEFAULT_OPTIONS,
+            *request.options,
+            override=True,
+        ).consume() as oc:
+            kwargs.update(oc.pop_scalar_kwargs(**self._OPTION_KWARG_NAMES_MAP))
+
+            t: Tool
+            for t in oc.pop(Tool, []):
+                tools.append(build_protocol_tool(t))
+
+        a_req = pt.MessagesRequest(
             model=MODEL_NAMES.resolve(self._model_name.v),
-
+            system=system,
             messages=messages,
-
+            tools=tools or None,
+            **kwargs,
         )

-
+        raw_request = msh.marshal(a_req)
+
+        raw_response = await http.async_request(
             'https://api.anthropic.com/v1/messages',
             headers={
                 http.consts.HEADER_CONTENT_TYPE: http.consts.CONTENT_TYPE_JSON,
@@ -96,10 +118,26 @@ class AnthropicChatChoicesService:
                 b'anthropic-version': b'2023-06-01',
             },
             data=json.dumps(raw_request).encode('utf-8'),
+            client=self._http_client,
         )

         response = json.loads(check.not_none(raw_response.data).decode('utf-8'))

+        out: list[AnyAiMessage] = []
+        for c in response['content']:
+            if c['type'] == 'text':
+                out.append(AiMessage(
+                    check.not_none(c['text']),
+                ))
+            elif c['type'] == 'tool_use':
+                out.append(ToolUseMessage(ToolUse(
+                    id=c['id'],
+                    name=c['name'],
+                    args=c['input'],
+                )))
+            else:
+                raise TypeError(c['type'])
+
         return ChatChoicesResponse([
-            AiChoice(
+            AiChoice(out),
         ])
ommlds/minichain/backends/impls/anthropic/names.py
CHANGED
@@ -18,14 +18,13 @@ MODEL_NAMES = ModelNameCollection(
         'claude-opus-4-1': 'claude-opus-4-1-20250805',
         'claude-opus': 'claude-opus-4-1',

-        'claude-sonnet-4-
-        'claude-sonnet-4': 'claude-sonnet-4-
-        'claude-sonnet': 'claude-sonnet-4',
+        'claude-sonnet-4-5-20250929': None,
+        'claude-sonnet-4-5': 'claude-sonnet-4-5-20250929',
+        'claude-sonnet': 'claude-sonnet-4-5',

-        'claude-
-        'claude-haiku-
-        'claude-haiku
-        'claude-haiku': 'claude-haiku-3-5',
+        'claude-haiku-4-5-20251001': None,
+        'claude-haiku-4-5': 'claude-haiku-4-5-20251001',
+        'claude-haiku': 'claude-haiku-4-5',

         'claude': 'claude-haiku',
     },
@@ -34,7 +33,10 @@ MODEL_NAMES = ModelNameCollection(

 # @omlish-manifest
 _BACKEND_STRINGS_MANIFEST = BackendStringsManifest(
-    [
+    [
+        'ChatChoicesService',
+        'ChatChoicesStreamService',
+    ],
     'anthropic',
     model_names=MODEL_NAMES,
 )
ommlds/minichain/backends/impls/anthropic/protocol.py
ADDED
@@ -0,0 +1,109 @@
+import typing as ta
+
+from omlish import check
+from omlish import marshal as msh
+from omlish.formats import json
+
+from .....backends.anthropic.protocol import types as pt
+from ....chat.messages import AiMessage
+from ....chat.messages import Message
+from ....chat.messages import SystemMessage
+from ....chat.messages import ToolUseMessage
+from ....chat.messages import ToolUseResultMessage
+from ....chat.messages import UserMessage
+from ....chat.tools.types import Tool
+from ....content.prepare import prepare_content_str
+from ....tools.jsonschema import build_tool_spec_params_json_schema
+
+
+##
+
+
+def get_message_content(m: Message) -> str | None:
+    if isinstance(m, AiMessage):
+        return check.isinstance(m.c, str)
+
+    elif isinstance(m, (UserMessage, SystemMessage)):
+        return check.isinstance(m.c, str)
+
+    else:
+        raise TypeError(m)
+
+
+#
+
+
+class BuiltChatMessages(ta.NamedTuple):
+    messages: list[pt.Message]
+    system: list[pt.Content] | None
+
+
+ROLES_MAP: ta.Mapping[type[Message], str] = {
+    SystemMessage: 'system',
+    UserMessage: 'user',
+    AiMessage: 'assistant',
+    ToolUseMessage: 'assistant',
+}
+
+
+def build_protocol_chat_messages(msgs: ta.Iterable[Message]) -> BuiltChatMessages:
+    messages: list[pt.Message] = []
+    system: list[pt.Content] | None = None
+
+    for i, m in enumerate(msgs):
+        if isinstance(m, SystemMessage):
+            if i or system is not None:
+                raise Exception('Only supports one system message and must be first')
+            system = [pt.Text(check.not_none(get_message_content(m)))]
+
+        elif isinstance(m, ToolUseResultMessage):
+            messages.append(pt.Message(
+                role='user',
+                content=[pt.ToolResult(
+                    tool_use_id=check.not_none(m.tur.id),
+                    content=json.dumps_compact(msh.marshal(m.tur.c)) if not isinstance(m.tur.c, str) else m.tur.c,
+                )],
+            ))
+
+        elif isinstance(m, AiMessage):
+            # messages.append(pt.Message(
+            #     role=ROLES_MAP[type(m)],  # noqa
+            #     content=[pt.Text(check.isinstance(get_message_content(m), str))],
+            # ))
+            messages.append(pt.Message(
+                role='assistant',
+                content=[
+                    *([pt.Text(check.isinstance(m.c, str))] if m.c is not None else []),
+                ],
+            ))
+
+        elif isinstance(m, ToolUseMessage):
+            messages.append(pt.Message(
+                role='assistant',
+                content=[
+                    pt.ToolUse(
+                        id=check.not_none(m.tu.id),
+                        name=check.not_none(m.tu.name),
+                        input=m.tu.args,
+                    ),
+                ],
+            ))
+
+        else:
+            messages.append(pt.Message(
+                role=ROLES_MAP[type(m)],  # type: ignore[arg-type]
+                content=[pt.Text(check.isinstance(get_message_content(m), str))],
+            ))
+
+    return BuiltChatMessages(messages, system)
+
+
+##
+
+
+def build_protocol_tool(t: Tool) -> pt.ToolSpec:
+    return pt.ToolSpec(
+        name=check.not_none(t.spec.name),
+        description=prepare_content_str(t.spec.desc),
+        input_schema=build_tool_spec_params_json_schema(t.spec),
+    )