ommlds 0.0.0.dev451__py3-none-any.whl → 0.0.0.dev452__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ommlds/.omlish-manifests.json +11 -11
- ommlds/backends/anthropic/protocol/_marshal.py +1 -1
- ommlds/backends/openai/protocol/_common.py +18 -0
- ommlds/backends/openai/protocol/_marshal.py +2 -1
- ommlds/backends/openai/protocol/chatcompletion/chunk.py +4 -0
- ommlds/backends/openai/protocol/chatcompletion/contentpart.py +15 -7
- ommlds/backends/openai/protocol/chatcompletion/message.py +10 -0
- ommlds/backends/openai/protocol/chatcompletion/request.py +25 -7
- ommlds/backends/openai/protocol/chatcompletion/response.py +10 -0
- ommlds/backends/openai/protocol/chatcompletion/responseformat.py +6 -0
- ommlds/backends/openai/protocol/chatcompletion/tokenlogprob.py +4 -0
- ommlds/backends/openai/protocol/completionusage.py +5 -0
- ommlds/cli/sessions/chat/code.py +22 -17
- ommlds/cli/sessions/chat/inject.py +4 -4
- ommlds/cli/sessions/chat/interactive.py +2 -1
- ommlds/cli/sessions/chat/printing.py +2 -2
- ommlds/cli/sessions/chat/prompt.py +28 -27
- ommlds/cli/sessions/chat/tools.py +12 -12
- ommlds/minichain/__init__.py +20 -8
- ommlds/minichain/backends/impls/anthropic/chat.py +27 -23
- ommlds/minichain/backends/impls/anthropic/names.py +3 -3
- ommlds/minichain/backends/impls/anthropic/stream.py +7 -7
- ommlds/minichain/backends/impls/google/chat.py +30 -32
- ommlds/minichain/backends/impls/google/stream.py +8 -4
- ommlds/minichain/backends/impls/llamacpp/chat.py +23 -17
- ommlds/minichain/backends/impls/llamacpp/format.py +4 -2
- ommlds/minichain/backends/impls/llamacpp/stream.py +6 -6
- ommlds/minichain/backends/impls/mistral.py +1 -1
- ommlds/minichain/backends/impls/mlx/chat.py +1 -1
- ommlds/minichain/backends/impls/openai/chat.py +6 -3
- ommlds/minichain/backends/impls/openai/format.py +80 -61
- ommlds/minichain/backends/impls/openai/format2.py +210 -0
- ommlds/minichain/backends/impls/openai/stream.py +9 -6
- ommlds/minichain/backends/impls/tinygrad/chat.py +10 -5
- ommlds/minichain/backends/impls/transformers/transformers.py +20 -16
- ommlds/minichain/chat/_marshal.py +15 -8
- ommlds/minichain/chat/choices/adapters.py +3 -3
- ommlds/minichain/chat/choices/types.py +2 -2
- ommlds/minichain/chat/history.py +1 -1
- ommlds/minichain/chat/messages.py +55 -19
- ommlds/minichain/chat/services.py +2 -2
- ommlds/minichain/chat/stream/_marshal.py +16 -0
- ommlds/minichain/chat/stream/adapters.py +39 -28
- ommlds/minichain/chat/stream/services.py +2 -2
- ommlds/minichain/chat/stream/types.py +20 -13
- ommlds/minichain/chat/tools/execution.py +8 -7
- ommlds/minichain/chat/tools/ids.py +9 -15
- ommlds/minichain/chat/tools/parsing.py +17 -26
- ommlds/minichain/chat/transforms/base.py +29 -38
- ommlds/minichain/chat/transforms/metadata.py +30 -4
- ommlds/minichain/chat/transforms/services.py +5 -7
- ommlds/minichain/tools/jsonschema.py +5 -6
- ommlds/minichain/tools/types.py +24 -1
- ommlds/server/server.py +1 -1
- ommlds/tools/git.py +18 -2
- {ommlds-0.0.0.dev451.dist-info → ommlds-0.0.0.dev452.dist-info}/METADATA +3 -3
- {ommlds-0.0.0.dev451.dist-info → ommlds-0.0.0.dev452.dist-info}/RECORD +61 -58
- {ommlds-0.0.0.dev451.dist-info → ommlds-0.0.0.dev452.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev451.dist-info → ommlds-0.0.0.dev452.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev451.dist-info → ommlds-0.0.0.dev452.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev451.dist-info → ommlds-0.0.0.dev452.dist-info}/top_level.txt +0 -0
ommlds/.omlish-manifests.json
CHANGED

@@ -18,7 +18,7 @@
  "module": ".minichain.backends.impls.anthropic.chat",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/anthropic/chat.py",
- "line":
+ "line": 39,
  "value": {
  "!.minichain.registries.manifests.RegistryManifest": {
  "module": "ommlds.minichain.backends.impls.anthropic.chat",

@@ -47,9 +47,9 @@
  "claude-opus-4-1-20250805": null,
  "claude-opus-4-1": "claude-opus-4-1-20250805",
  "claude-opus": "claude-opus-4-1",
- "claude-sonnet-4-
- "claude-sonnet-4": "claude-sonnet-4-
- "claude-sonnet": "claude-sonnet-4",
+ "claude-sonnet-4-5-20250929": null,
+ "claude-sonnet-4-5": "claude-sonnet-4-5-20250929",
+ "claude-sonnet": "claude-sonnet-4-5",
  "claude-3-5-haiku-latest": null,
  "claude-haiku-3-5-latest": "claude-3-5-haiku-latest",
  "claude-haiku-3-5": "claude-haiku-3-5-latest",

@@ -96,7 +96,7 @@
  "module": ".minichain.backends.impls.google.chat",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/google/chat.py",
- "line":
+ "line": 35,
  "value": {
  "!.minichain.registries.manifests.RegistryManifest": {
  "module": "ommlds.minichain.backends.impls.google.chat",

@@ -170,7 +170,7 @@
  "module": ".minichain.backends.impls.llamacpp.chat",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/llamacpp/chat.py",
- "line":
+ "line": 33,
  "value": {
  "!.minichain.registries.manifests.RegistryManifest": {
  "module": "ommlds.minichain.backends.impls.llamacpp.chat",

@@ -260,7 +260,7 @@
  "module": ".minichain.backends.impls.openai.chat",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/openai/chat.py",
- "line":
+ "line": 37,
  "value": {
  "!.minichain.registries.manifests.RegistryManifest": {
  "module": "ommlds.minichain.backends.impls.openai.chat",

@@ -402,7 +402,7 @@
  "module": ".minichain.backends.impls.tinygrad.chat",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/tinygrad/chat.py",
- "line":
+ "line": 166,
  "value": {
  "!.minichain.backends.strings.manifests.BackendStringsManifest": {
  "service_cls_names": [

@@ -435,7 +435,7 @@
  "module": ".minichain.backends.impls.transformers.transformers",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/transformers/transformers.py",
- "line":
+ "line": 43,
  "value": {
  "!.minichain.registries.manifests.RegistryManifest": {
  "module": "ommlds.minichain.backends.impls.transformers.transformers",

@@ -452,7 +452,7 @@
  "module": ".minichain.backends.impls.transformers.transformers",
  "attr": null,
  "file": "ommlds/minichain/backends/impls/transformers/transformers.py",
- "line":
+ "line": 131,
  "value": {
  "!.minichain.registries.manifests.RegistryManifest": {
  "module": "ommlds.minichain.backends.impls.transformers.transformers",

@@ -592,7 +592,7 @@
  "module": ".tools.git",
  "attr": null,
  "file": "ommlds/tools/git.py",
- "line":
+ "line": 188,
  "value": {
  "!omdev.tools.git.messages.GitMessageGeneratorManifest": {
  "module": "ommlds.tools.git",

ommlds/backends/openai/protocol/_common.py
ADDED

@@ -0,0 +1,18 @@
+ from omlish import lang
+ from omlish import marshal as msh
+
+
+ ##
+
+
+ def _set_class_marshal_options(cls):
+     msh.update_object_metadata(
+         cls,
+         field_defaults=msh.FieldMetadata(
+             options=msh.FieldOptions(
+                 omit_if=lang.is_none,
+             ),
+         ),
+     )
+
+     return cls
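
The new `_common.py` helper attaches omlish marshal metadata to each protocol dataclass so that fields left as `None` are omitted from serialized payloads. Below is a minimal sketch of the pattern with a hypothetical class; the decorator stacking order mirrors the diff, while the top-level `msh.marshal()` entry point is an assumption about omlish's marshal module rather than something shown here.

from omlish import dataclasses as dc
from omlish import lang
from omlish import marshal as msh

from ommlds.backends.openai.protocol._common import _set_class_marshal_options


# Hypothetical class, mirroring how the decorator is stacked beneath @dc.dataclass
# throughout the protocol package.
@dc.dataclass(frozen=True, kw_only=True)
@_set_class_marshal_options
class ExampleOptions(lang.Final):
    name: str
    description: str | None = None


# Assumption: omlish exposes msh.marshal() for object-to-primitive conversion. With
# omit_if=lang.is_none set as the field default, the unset 'description' field should
# be dropped, yielding something like {'name': 'x'} rather than
# {'name': 'x', 'description': None}.
print(msh.marshal(ExampleOptions(name='x')))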

ommlds/backends/openai/protocol/chatcompletion/chunk.py
CHANGED

@@ -3,6 +3,7 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
  from ..completionusage import CompletionUsage
  from .tokenlogprob import ChatCompletionTokenLogprob

@@ -11,6 +12,7 @@ from .tokenlogprob import ChatCompletionTokenLogprob


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionChunkChoiceDelta(lang.Final):
      content: str | None = None

@@ -46,6 +48,7 @@ class ChatCompletionChunkChoiceDelta(lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionChunkChoice(lang.Final):
      delta: ChatCompletionChunkChoiceDelta

@@ -70,6 +73,7 @@ class ChatCompletionChunkChoice(lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionChunk(lang.Final):
      id: str


ommlds/backends/openai/protocol/chatcompletion/contentpart.py
CHANGED

@@ -3,6 +3,8 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
+

  ##

@@ -14,7 +16,8 @@ class ChatCompletionContentPart(lang.Abstract):
  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class TextChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
      text: str

@@ -22,9 +25,10 @@ class TextChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class ImageUrlChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
-     @dc.dataclass(frozen=True
+     @dc.dataclass(frozen=True)
      class ImageUrl(lang.Final):
          url: str
          detail: ta.Literal[

@@ -39,7 +43,8 @@ class ImageUrlChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class FileChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
      @dc.dataclass(frozen=True, kw_only=True)
      class File(lang.Final):

@@ -53,9 +58,11 @@ class FileChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class InputAudioChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
-     @dc.dataclass(frozen=True
+     @dc.dataclass(frozen=True)
+     @_set_class_marshal_options
      class InputAudio(lang.Final):
          data: str
          format: ta.Literal[

@@ -69,6 +76,7 @@ class InputAudioChatCompletionContentPart(ChatCompletionContentPart, lang.Final)
  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class RefusalChatCompletionContentPart(ChatCompletionContentPart, lang.Final):
      refusal: str

ommlds/backends/openai/protocol/chatcompletion/message.py
CHANGED

@@ -3,6 +3,7 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
  from .contentpart import ChatCompletionContentPart
  from .contentpart import RefusalChatCompletionContentPart
  from .contentpart import TextChatCompletionContentPart

@@ -19,6 +20,7 @@ class ChatCompletionMessage(lang.Abstract, lang.Sealed):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class DeveloperChatCompletionMessage(ChatCompletionMessage, lang.Final):
      content: str | ta.Iterable[TextChatCompletionContentPart]
      name: str | None = None

@@ -28,6 +30,7 @@ class DeveloperChatCompletionMessage(ChatCompletionMessage, lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class SystemChatCompletionMessage(ChatCompletionMessage, lang.Final):
      content: str | ta.Iterable[TextChatCompletionContentPart]
      name: str | None = None

@@ -37,6 +40,7 @@ class SystemChatCompletionMessage(ChatCompletionMessage, lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class UserChatCompletionMessage(ChatCompletionMessage, lang.Final):
      content: str | ta.Iterable[ChatCompletionContentPart]
      name: str | None = None

@@ -46,8 +50,10 @@ class UserChatCompletionMessage(ChatCompletionMessage, lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class AssistantChatCompletionMessage(ChatCompletionMessage, lang.Final):
      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class Audio(lang.Final):
          id: str

@@ -60,10 +66,12 @@ class AssistantChatCompletionMessage(ChatCompletionMessage, lang.Final):
      refusal: str | None = None

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class ToolCall(lang.Final):
          id: str

          @dc.dataclass(frozen=True, kw_only=True)
+         @_set_class_marshal_options
          class Function(lang.Final):
              arguments: str
              name: str

@@ -79,6 +87,7 @@ class AssistantChatCompletionMessage(ChatCompletionMessage, lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ToolChatCompletionMessage(ChatCompletionMessage, lang.Final):
      content: str | ta.Iterable[TextChatCompletionContentPart]
      tool_call_id: str

@@ -88,6 +97,7 @@ class ToolChatCompletionMessage(ChatCompletionMessage, lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class FunctionChatCompletionMessage(ChatCompletionMessage, lang.Final):
      content: str | None
      name: str

ommlds/backends/openai/protocol/chatcompletion/request.py
CHANGED

@@ -4,6 +4,7 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
  from .contentpart import TextChatCompletionContentPart
  from .message import ChatCompletionMessage
  from .responseformat import ChatCompletionResponseFormat

@@ -13,6 +14,7 @@ from .responseformat import ChatCompletionResponseFormat


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionRequestWebSearchOptions(lang.Final):
      search_context_size: ta.Literal[
          'low',

@@ -21,8 +23,10 @@ class ChatCompletionRequestWebSearchOptions(lang.Final):
      ] | None = None

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class UserLocation(lang.Final):
          @dc.dataclass(frozen=True, kw_only=True)
+         @_set_class_marshal_options
          class Approximate(lang.Final):
              city: str | None = None
              country: str | None = None

@@ -30,7 +34,8 @@ class ChatCompletionRequestWebSearchOptions(lang.Final):
              timezone: str | None = None

          approximate: Approximate
-
+
+         type: ta.Literal['approximate'] = dc.xfield('approximate', repr=False, kw_only=True)

      user_location: UserLocation | None = None

@@ -39,17 +44,21 @@ class ChatCompletionRequestWebSearchOptions(lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionRequestPrediction(lang.Final):
      content: str | ta.Iterable[TextChatCompletionContentPart]
-
+
+     type: ta.Literal['content'] = dc.xfield('content', repr=False, kw_only=True)


  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class ChatCompletionRequestTool(lang.Final):
      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class Function(lang.Final):
          name: str
          description: str | None = None

@@ -57,26 +66,31 @@ class ChatCompletionRequestTool(lang.Final):
          strict: bool | None = None

      function: Function
-
+
+     type: ta.Literal['function'] = dc.xfield('function', repr=False, kw_only=True)


  #


- @dc.dataclass(frozen=True
+ @dc.dataclass(frozen=True)
+ @_set_class_marshal_options
  class ChatCompletionRequestNamedToolChoice(lang.Final):
-     @dc.dataclass(frozen=True
+     @dc.dataclass(frozen=True)
+     @_set_class_marshal_options
      class Function(lang.Final):
          name: str

      function: Function
-
+
+     type: ta.Literal['function'] = dc.xfield('function', repr=False, kw_only=True)


  #


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionRequestAudio(lang.Final):
      format: ta.Literal[
          'wav',

@@ -94,6 +108,7 @@ class ChatCompletionRequestAudio(lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionRequest(lang.Final):
      messages: ta.Iterable[ChatCompletionMessage]

@@ -143,7 +158,10 @@ class ChatCompletionRequest(lang.Final):

      store: bool | None = None

+     stream: bool | None = None
+
      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class StreamOptions(lang.Final):
          include_usage: bool | None = None

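
Several of the request classes above also gain a fixed `type` discriminator supplied as a default through `dc.xfield(..., repr=False, kw_only=True)`. A small illustrative sketch of that pattern follows; the class is hypothetical, and the assumption is that `dc.xfield` accepts a positional default the way the calls in this diff do.

import typing as ta

from omlish import dataclasses as dc
from omlish import lang


# Hypothetical class showing the discriminator pattern used by ChatCompletionRequestTool
# and friends: 'type' always carries a fixed literal value, is keyword-only, and is
# hidden from repr so it does not clutter debug output.
@dc.dataclass(frozen=True)
class ExampleTool(lang.Final):
    name: str

    type: ta.Literal['function'] = dc.xfield('function', repr=False, kw_only=True)


t = ExampleTool('search')
print(t)       # repr omits the constant 'type' field
print(t.type)  # 'function'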

ommlds/backends/openai/protocol/chatcompletion/response.py
CHANGED

@@ -3,6 +3,7 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
  from ..completionusage import CompletionUsage
  from .tokenlogprob import ChatCompletionTokenLogprob

@@ -11,16 +12,19 @@ from .tokenlogprob import ChatCompletionTokenLogprob


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionResponseMessage(lang.Final):
      content: str | None = None
      refusal: str | None = None
      role: ta.Literal['assistant'] = dc.xfield('assistant', repr=False)

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class Annotation(lang.Final):
          type: ta.Literal['url_citation'] = dc.xfield('url_citation', repr=False)

          @dc.dataclass(frozen=True, kw_only=True)
+         @_set_class_marshal_options
          class UrlCitation(lang.Final):
              end_index: int
              start_index: int

@@ -32,6 +36,7 @@ class ChatCompletionResponseMessage(lang.Final):
      annotations: ta.Sequence[Annotation] | None = None

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class Audio(lang.Final):
          id: str
          data: str

@@ -41,10 +46,12 @@ class ChatCompletionResponseMessage(lang.Final):
      audio: Audio | None = None

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class ToolCall(lang.Final):
          id: str

          @dc.dataclass(frozen=True, kw_only=True)
+         @_set_class_marshal_options
          class Function(lang.Final):
              arguments: str
              name: str

@@ -60,6 +67,7 @@ class ChatCompletionResponseMessage(lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionResponseChoice(lang.Final):
      finish_reason: ta.Literal[
          'stop',

@@ -71,6 +79,7 @@ class ChatCompletionResponseChoice(lang.Final):
      index: int

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class Logprobs(lang.Final):
          content: ta.Sequence[ChatCompletionTokenLogprob] | None = None
          refusal: ta.Sequence[ChatCompletionTokenLogprob] | None = None

@@ -84,6 +93,7 @@ class ChatCompletionResponseChoice(lang.Final):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionResponse(lang.Final):
      id: str


ommlds/backends/openai/protocol/chatcompletion/responseformat.py
CHANGED

@@ -3,6 +3,8 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
+

  ##

@@ -15,6 +17,7 @@ class ChatCompletionResponseFormat(lang.Abstract, lang.Sealed):


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class TextChatCompletionResponseFormat(ChatCompletionResponseFormat, lang.Final):
      pass

@@ -23,8 +26,10 @@ class TextChatCompletionResponseFormat(ChatCompletionResponseFormat, lang.Final)


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class JsonSchemaChatCompletionResponseFormat(ChatCompletionResponseFormat, lang.Final):
      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class JsonSchema(lang.Final):
          name: str
          description: str | None = None

@@ -38,5 +43,6 @@ class JsonSchemaChatCompletionResponseFormat(ChatCompletionResponseFormat, lang.


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class JsonObjectChatCompletionResponseFormat(ChatCompletionResponseFormat, lang.Final):
      pass

ommlds/backends/openai/protocol/chatcompletion/tokenlogprob.py
CHANGED

@@ -3,17 +3,21 @@ import typing as ta
  from omlish import dataclasses as dc
  from omlish import lang

+ from .._common import _set_class_marshal_options
+

  ##


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class ChatCompletionTokenLogprob(lang.Final):
      token: str
      bytes: ta.Sequence[int] | None = None
      logprob: float

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class TopLogprob(lang.Final):
          token: str
          bytes: ta.Sequence[int] | None = None

ommlds/backends/openai/protocol/completionusage.py
CHANGED

@@ -1,17 +1,21 @@
  from omlish import dataclasses as dc
  from omlish import lang

+ from ._common import _set_class_marshal_options
+

  ##


  @dc.dataclass(frozen=True, kw_only=True)
+ @_set_class_marshal_options
  class CompletionUsage(lang.Final):
      completion_tokens: int
      prompt_tokens: int
      total_tokens: int

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class CompletionTokensDetails(lang.Final):
          accepted_prediction_tokens: int | None = None
          audio_tokens: int | None = None

@@ -21,6 +25,7 @@ class CompletionUsage(lang.Final):
      completion_tokens_details: CompletionTokensDetails | None = None

      @dc.dataclass(frozen=True, kw_only=True)
+     @_set_class_marshal_options
      class PromptTokensDetails(lang.Final):
          audio_tokens: int | None = None
          cached_tokens: int | None = None
ommlds/cli/sessions/chat/code.py
CHANGED

@@ -13,7 +13,7 @@ from .base import ChatOptions
  from .base import ChatSession
  from .printing import ChatSessionPrinter
  from .state import ChatStateManager
- from .tools import
+ from .tools import ToolUseExecutor


  with lang.auto_proxy_import(globals()):

@@ -43,7 +43,7 @@ class CodeChatSession(ChatSession['CodeChatSession.Config']):
              chat_options: ChatOptions | None = None,
              printer: ChatSessionPrinter,
              backend_catalog: mc.BackendCatalog,
-             tool_exec_request_executor:
+             tool_exec_request_executor: ToolUseExecutor,
              tools_config: ToolsConfig | None = None,
      ) -> None:
          super().__init__(config)

@@ -102,23 +102,28 @@ class CodeChatSession(ChatSession['CodeChatSession.Config']):
                  state.chat,
                  (self._chat_options or []),
              ))
-             resp_msg = check.single(response.v).m

-
-
+             tool_resp_lst = []
+             for resp_msg in check.single(response.v).ms:
+                 state = self._state_manager.extend_chat([resp_msg])

-
-
+                 if isinstance(resp_msg, mc.AiMessage):
+                     self._printer.print(resp_msg)

-
-
-
-
-
-
-
-
-
-
+                 elif isinstance(resp_msg, mc.ToolUseMessage):
+                     trm = await self._tool_exec_request_executor.execute_tool_use(
+                         resp_msg.tu,
+                         fs_tool_context,
+                         todo_tool_context,
+                     )
+
+                     self._printer.print(trm.tur.c)
+                     tool_resp_lst.append(trm)
+
+                 else:
+                     raise TypeError(resp_msg)
+
+             if not tool_resp_lst:
+                 break

              state = self._state_manager.extend_chat(tool_resp_lst)
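
The session loop above now iterates every message in the single returned choice (`check.single(response.v).ms`) and dispatches on message type instead of assuming one text reply. A hedged sketch of consuming that multi-message choice shape outside the session class; the helper name is hypothetical, while `mc.AiMessage`, `mc.ToolUseMessage`, and the `.ms` attribute are taken from the diff.

from ommlds import minichain as mc


def split_choice_messages(choice) -> tuple[list, list]:
    # Hypothetical helper: separate plain assistant text messages from tool-use messages
    # carried by one chat choice, mirroring the dispatch in CodeChatSession above.
    ai_msgs: list = []
    tool_msgs: list = []
    for m in choice.ms:
        if isinstance(m, mc.AiMessage):
            ai_msgs.append(m)
        elif isinstance(m, mc.ToolUseMessage):
            tool_msgs.append(m)
        else:
            raise TypeError(m)
    return ai_msgs, tool_msgs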

ommlds/cli/sessions/chat/inject.py
CHANGED

@@ -14,9 +14,9 @@ from .state import ChatStateManager
  from .state import StateStorageChatStateManager
  from .tools import AskingToolExecutionConfirmation
  from .tools import NopToolExecutionConfirmation
- from .tools import ToolExecRequestExecutor
- from .tools import ToolExecRequestExecutorImpl
  from .tools import ToolExecutionConfirmation
+ from .tools import ToolUseExecutor
+ from .tools import ToolUseExecutorImpl


  ##

@@ -89,8 +89,8 @@ def bind_chat_session(cfg: ChatSession.Config) -> inj.Elements:
      #

      els.extend([
-         inj.bind(
-         inj.bind(
+         inj.bind(ToolUseExecutorImpl, singleton=True),
+         inj.bind(ToolUseExecutor, to_key=ToolUseExecutorImpl),
      ])

      #

ommlds/cli/sessions/chat/interactive.py
CHANGED

@@ -1,5 +1,6 @@
  import dataclasses as dc

+ from omlish import check
  from omlish import lang

  from .... import minichain as mc

@@ -63,7 +64,7 @@ class InteractiveChatSession(ChatSession['InteractiveChatSession.Config']):

          response = await mdl.invoke(mc.ChatChoicesRequest([*state.chat, req_msg]))

-         resp_msg = response.v[0].
+         resp_msg = check.isinstance(check.single(response.v[0].ms), mc.AiMessage)

          self._printer.print(resp_msg)


ommlds/cli/sessions/chat/printing.py
CHANGED

@@ -44,8 +44,8 @@ class StringChatSessionPrinter(ChatSessionPrinter, lang.Abstract):
          elif isinstance(obj, mc.AiMessage):
              if obj.c is not None:
                  self._print_str(check.isinstance(obj.c, str))
-         elif isinstance(obj, mc.
-             self._print_str(check.isinstance(obj.c, str))
+         elif isinstance(obj, mc.ToolUseResultMessage):
+             self._print_str(check.isinstance(obj.tur.c, str))
          else:
              raise TypeError(obj)
