ommlds 0.0.0.dev426__py3-none-any.whl → 0.0.0.dev485__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ommlds/.omlish-manifests.json +336 -39
- ommlds/__about__.py +16 -10
- ommlds/_hacks/__init__.py +4 -0
- ommlds/_hacks/funcs.py +110 -0
- ommlds/_hacks/names.py +158 -0
- ommlds/_hacks/params.py +73 -0
- ommlds/_hacks/patches.py +0 -3
- ommlds/backends/anthropic/protocol/__init__.py +13 -1
- ommlds/backends/anthropic/protocol/_dataclasses.py +1625 -0
- ommlds/backends/anthropic/protocol/_marshal.py +2 -2
- ommlds/backends/anthropic/protocol/sse/_marshal.py +1 -1
- ommlds/backends/anthropic/protocol/sse/assemble.py +23 -7
- ommlds/backends/anthropic/protocol/sse/events.py +13 -0
- ommlds/backends/anthropic/protocol/types.py +40 -8
- ommlds/backends/google/protocol/__init__.py +16 -0
- ommlds/backends/google/protocol/_dataclasses.py +5997 -0
- ommlds/backends/google/protocol/_marshal.py +16 -0
- ommlds/backends/google/protocol/types.py +626 -0
- ommlds/backends/groq/__init__.py +7 -0
- ommlds/backends/groq/_dataclasses.py +3901 -0
- ommlds/backends/groq/_marshal.py +23 -0
- ommlds/backends/groq/protocol.py +249 -0
- ommlds/backends/llamacpp/logging.py +4 -1
- ommlds/backends/mlx/caching.py +7 -3
- ommlds/backends/mlx/cli.py +10 -7
- ommlds/backends/mlx/generation.py +19 -17
- ommlds/backends/mlx/limits.py +10 -6
- ommlds/backends/mlx/loading.py +65 -5
- ommlds/backends/ollama/__init__.py +7 -0
- ommlds/backends/ollama/_dataclasses.py +3458 -0
- ommlds/backends/ollama/protocol.py +170 -0
- ommlds/backends/openai/protocol/__init__.py +24 -29
- ommlds/backends/openai/protocol/_common.py +18 -0
- ommlds/backends/openai/protocol/_dataclasses.py +7708 -0
- ommlds/backends/openai/protocol/_marshal.py +27 -0
- ommlds/backends/openai/protocol/chatcompletion/chunk.py +58 -31
- ommlds/backends/openai/protocol/chatcompletion/contentpart.py +49 -44
- ommlds/backends/openai/protocol/chatcompletion/message.py +55 -43
- ommlds/backends/openai/protocol/chatcompletion/request.py +114 -66
- ommlds/backends/openai/protocol/chatcompletion/response.py +71 -45
- ommlds/backends/openai/protocol/chatcompletion/responseformat.py +27 -20
- ommlds/backends/openai/protocol/chatcompletion/tokenlogprob.py +16 -7
- ommlds/backends/openai/protocol/completionusage.py +24 -15
- ommlds/backends/tavily/__init__.py +7 -0
- ommlds/backends/tavily/_dataclasses.py +1734 -0
- ommlds/backends/tavily/protocol.py +301 -0
- ommlds/backends/tinygrad/models/llama3/__init__.py +22 -14
- ommlds/backends/transformers/__init__.py +14 -0
- ommlds/backends/transformers/filecache.py +109 -0
- ommlds/backends/transformers/streamers.py +73 -0
- ommlds/cli/__init__.py +7 -0
- ommlds/cli/_dataclasses.py +2562 -0
- ommlds/cli/asyncs.py +30 -0
- ommlds/cli/backends/catalog.py +93 -0
- ommlds/cli/backends/configs.py +9 -0
- ommlds/cli/backends/inject.py +31 -36
- ommlds/cli/backends/injection.py +16 -0
- ommlds/cli/backends/types.py +46 -0
- ommlds/cli/content/messages.py +34 -0
- ommlds/cli/content/strings.py +42 -0
- ommlds/cli/inject.py +17 -32
- ommlds/cli/inputs/__init__.py +0 -0
- ommlds/cli/inputs/asyncs.py +32 -0
- ommlds/cli/inputs/sync.py +75 -0
- ommlds/cli/main.py +270 -110
- ommlds/cli/rendering/__init__.py +0 -0
- ommlds/cli/rendering/configs.py +9 -0
- ommlds/cli/rendering/inject.py +31 -0
- ommlds/cli/rendering/markdown.py +52 -0
- ommlds/cli/rendering/raw.py +73 -0
- ommlds/cli/rendering/types.py +21 -0
- ommlds/cli/secrets.py +21 -0
- ommlds/cli/sessions/base.py +1 -1
- ommlds/cli/sessions/chat/chat/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/ai/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/ai/inject.py +74 -0
- ommlds/cli/sessions/chat/chat/ai/injection.py +14 -0
- ommlds/cli/sessions/chat/chat/ai/rendering.py +70 -0
- ommlds/cli/sessions/chat/chat/ai/services.py +79 -0
- ommlds/cli/sessions/chat/chat/ai/tools.py +44 -0
- ommlds/cli/sessions/chat/chat/ai/types.py +28 -0
- ommlds/cli/sessions/chat/chat/state/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/state/configs.py +11 -0
- ommlds/cli/sessions/chat/chat/state/inject.py +36 -0
- ommlds/cli/sessions/chat/chat/state/inmemory.py +33 -0
- ommlds/cli/sessions/chat/chat/state/storage.py +52 -0
- ommlds/cli/sessions/chat/chat/state/types.py +38 -0
- ommlds/cli/sessions/chat/chat/user/__init__.py +0 -0
- ommlds/cli/sessions/chat/chat/user/configs.py +17 -0
- ommlds/cli/sessions/chat/chat/user/inject.py +62 -0
- ommlds/cli/sessions/chat/chat/user/interactive.py +31 -0
- ommlds/cli/sessions/chat/chat/user/oneshot.py +25 -0
- ommlds/cli/sessions/chat/chat/user/types.py +15 -0
- ommlds/cli/sessions/chat/configs.py +27 -0
- ommlds/cli/sessions/chat/driver.py +43 -0
- ommlds/cli/sessions/chat/inject.py +33 -65
- ommlds/cli/sessions/chat/phases/__init__.py +0 -0
- ommlds/cli/sessions/chat/phases/inject.py +27 -0
- ommlds/cli/sessions/chat/phases/injection.py +14 -0
- ommlds/cli/sessions/chat/phases/manager.py +29 -0
- ommlds/cli/sessions/chat/phases/types.py +29 -0
- ommlds/cli/sessions/chat/session.py +27 -0
- ommlds/cli/sessions/chat/tools/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/configs.py +22 -0
- ommlds/cli/sessions/chat/tools/confirmation.py +46 -0
- ommlds/cli/sessions/chat/tools/execution.py +66 -0
- ommlds/cli/sessions/chat/tools/fs/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/fs/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/fs/inject.py +35 -0
- ommlds/cli/sessions/chat/tools/inject.py +88 -0
- ommlds/cli/sessions/chat/tools/injection.py +44 -0
- ommlds/cli/sessions/chat/tools/rendering.py +58 -0
- ommlds/cli/sessions/chat/tools/todo/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/todo/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/todo/inject.py +31 -0
- ommlds/cli/sessions/chat/tools/weather/__init__.py +0 -0
- ommlds/cli/sessions/chat/tools/weather/configs.py +12 -0
- ommlds/cli/sessions/chat/tools/weather/inject.py +22 -0
- ommlds/cli/{tools/weather.py → sessions/chat/tools/weather/tools.py} +1 -1
- ommlds/cli/sessions/completion/configs.py +21 -0
- ommlds/cli/sessions/completion/inject.py +42 -0
- ommlds/cli/sessions/completion/session.py +35 -0
- ommlds/cli/sessions/embedding/configs.py +21 -0
- ommlds/cli/sessions/embedding/inject.py +42 -0
- ommlds/cli/sessions/embedding/session.py +33 -0
- ommlds/cli/sessions/inject.py +28 -11
- ommlds/cli/state/__init__.py +0 -0
- ommlds/cli/state/inject.py +28 -0
- ommlds/cli/{state.py → state/storage.py} +41 -24
- ommlds/minichain/__init__.py +84 -24
- ommlds/minichain/_dataclasses.py +15401 -0
- ommlds/minichain/_marshal.py +49 -9
- ommlds/minichain/_typedvalues.py +2 -4
- ommlds/minichain/backends/catalogs/base.py +20 -1
- ommlds/minichain/backends/catalogs/simple.py +2 -2
- ommlds/minichain/backends/catalogs/strings.py +10 -8
- ommlds/minichain/backends/impls/anthropic/chat.py +65 -27
- ommlds/minichain/backends/impls/anthropic/names.py +10 -8
- ommlds/minichain/backends/impls/anthropic/protocol.py +109 -0
- ommlds/minichain/backends/impls/anthropic/stream.py +111 -43
- ommlds/minichain/backends/impls/duckduckgo/search.py +6 -2
- ommlds/minichain/backends/impls/dummy/__init__.py +0 -0
- ommlds/minichain/backends/impls/dummy/chat.py +69 -0
- ommlds/minichain/backends/impls/google/chat.py +114 -22
- ommlds/minichain/backends/impls/google/search.py +7 -2
- ommlds/minichain/backends/impls/google/stream.py +219 -0
- ommlds/minichain/backends/impls/google/tools.py +149 -0
- ommlds/minichain/backends/impls/groq/__init__.py +0 -0
- ommlds/minichain/backends/impls/groq/chat.py +75 -0
- ommlds/minichain/backends/impls/groq/names.py +48 -0
- ommlds/minichain/backends/impls/groq/protocol.py +143 -0
- ommlds/minichain/backends/impls/groq/stream.py +125 -0
- ommlds/minichain/backends/impls/huggingface/repos.py +1 -5
- ommlds/minichain/backends/impls/llamacpp/chat.py +40 -22
- ommlds/minichain/backends/impls/llamacpp/completion.py +9 -5
- ommlds/minichain/backends/impls/llamacpp/format.py +4 -2
- ommlds/minichain/backends/impls/llamacpp/stream.py +43 -23
- ommlds/minichain/backends/impls/mistral.py +20 -5
- ommlds/minichain/backends/impls/mlx/chat.py +101 -24
- ommlds/minichain/backends/impls/ollama/__init__.py +0 -0
- ommlds/minichain/backends/impls/ollama/chat.py +199 -0
- ommlds/minichain/backends/impls/openai/chat.py +18 -8
- ommlds/minichain/backends/impls/openai/completion.py +10 -3
- ommlds/minichain/backends/impls/openai/embedding.py +10 -3
- ommlds/minichain/backends/impls/openai/format.py +131 -106
- ommlds/minichain/backends/impls/openai/names.py +31 -5
- ommlds/minichain/backends/impls/openai/stream.py +43 -25
- ommlds/minichain/backends/impls/sentencepiece/tokens.py +9 -6
- ommlds/minichain/backends/impls/tavily.py +66 -0
- ommlds/minichain/backends/impls/tinygrad/chat.py +30 -20
- ommlds/minichain/backends/impls/tokenizers/tokens.py +9 -6
- ommlds/minichain/backends/impls/transformers/sentence.py +6 -3
- ommlds/minichain/backends/impls/transformers/tokens.py +10 -7
- ommlds/minichain/backends/impls/transformers/transformers.py +160 -37
- ommlds/minichain/backends/strings/parsing.py +1 -1
- ommlds/minichain/backends/strings/resolving.py +4 -1
- ommlds/minichain/chat/_marshal.py +16 -9
- ommlds/minichain/chat/choices/adapters.py +4 -4
- ommlds/minichain/chat/choices/services.py +1 -1
- ommlds/minichain/chat/choices/stream/__init__.py +0 -0
- ommlds/minichain/chat/choices/stream/adapters.py +35 -0
- ommlds/minichain/chat/choices/stream/joining.py +31 -0
- ommlds/minichain/chat/choices/stream/services.py +45 -0
- ommlds/minichain/chat/choices/stream/types.py +43 -0
- ommlds/minichain/chat/choices/types.py +2 -2
- ommlds/minichain/chat/history.py +3 -3
- ommlds/minichain/chat/messages.py +55 -19
- ommlds/minichain/chat/services.py +3 -3
- ommlds/minichain/chat/stream/_marshal.py +16 -0
- ommlds/minichain/chat/stream/joining.py +85 -0
- ommlds/minichain/chat/stream/services.py +15 -21
- ommlds/minichain/chat/stream/types.py +32 -19
- ommlds/minichain/chat/tools/execution.py +8 -7
- ommlds/minichain/chat/tools/ids.py +9 -15
- ommlds/minichain/chat/tools/parsing.py +17 -26
- ommlds/minichain/chat/transforms/base.py +29 -38
- ommlds/minichain/chat/transforms/metadata.py +30 -4
- ommlds/minichain/chat/transforms/services.py +9 -11
- ommlds/minichain/content/_marshal.py +44 -20
- ommlds/minichain/content/json.py +13 -0
- ommlds/minichain/content/materialize.py +14 -21
- ommlds/minichain/content/prepare.py +4 -0
- ommlds/minichain/content/transforms/interleave.py +1 -1
- ommlds/minichain/content/transforms/squeeze.py +1 -1
- ommlds/minichain/content/transforms/stringify.py +1 -1
- ommlds/minichain/json.py +20 -0
- ommlds/minichain/lib/code/__init__.py +0 -0
- ommlds/minichain/lib/code/prompts.py +6 -0
- ommlds/minichain/lib/fs/binfiles.py +108 -0
- ommlds/minichain/lib/fs/context.py +126 -0
- ommlds/minichain/lib/fs/errors.py +101 -0
- ommlds/minichain/lib/fs/suggestions.py +36 -0
- ommlds/minichain/lib/fs/tools/__init__.py +0 -0
- ommlds/minichain/lib/fs/tools/edit.py +104 -0
- ommlds/minichain/lib/fs/tools/ls.py +38 -0
- ommlds/minichain/lib/fs/tools/read.py +115 -0
- ommlds/minichain/lib/fs/tools/recursivels/__init__.py +0 -0
- ommlds/minichain/lib/fs/tools/recursivels/execution.py +40 -0
- ommlds/minichain/lib/todo/__init__.py +0 -0
- ommlds/minichain/lib/todo/context.py +54 -0
- ommlds/minichain/lib/todo/tools/__init__.py +0 -0
- ommlds/minichain/lib/todo/tools/read.py +44 -0
- ommlds/minichain/lib/todo/tools/write.py +335 -0
- ommlds/minichain/lib/todo/types.py +60 -0
- ommlds/minichain/llms/_marshal.py +25 -17
- ommlds/minichain/llms/types.py +4 -0
- ommlds/minichain/registries/globals.py +18 -4
- ommlds/minichain/resources.py +68 -45
- ommlds/minichain/search.py +1 -1
- ommlds/minichain/services/_marshal.py +46 -39
- ommlds/minichain/services/facades.py +3 -3
- ommlds/minichain/services/services.py +1 -1
- ommlds/minichain/standard.py +8 -0
- ommlds/minichain/stream/services.py +152 -38
- ommlds/minichain/stream/wrap.py +22 -24
- ommlds/minichain/text/toolparsing/llamacpp/hermes2.py +3 -2
- ommlds/minichain/text/toolparsing/llamacpp/llama31.py +3 -2
- ommlds/minichain/text/toolparsing/llamacpp/utils.py +3 -2
- ommlds/minichain/tools/_marshal.py +1 -1
- ommlds/minichain/tools/execution/catalog.py +2 -1
- ommlds/minichain/tools/execution/context.py +34 -14
- ommlds/minichain/tools/execution/errors.py +15 -0
- ommlds/minichain/tools/execution/executors.py +8 -3
- ommlds/minichain/tools/execution/reflect.py +40 -5
- ommlds/minichain/tools/fns.py +46 -9
- ommlds/minichain/tools/jsonschema.py +14 -5
- ommlds/minichain/tools/reflect.py +54 -18
- ommlds/minichain/tools/types.py +33 -1
- ommlds/minichain/utils.py +27 -0
- ommlds/minichain/vectors/_marshal.py +11 -10
- ommlds/minichain/vectors/types.py +1 -1
- ommlds/nanochat/LICENSE +21 -0
- ommlds/nanochat/__init__.py +0 -0
- ommlds/nanochat/rustbpe/LICENSE +21 -0
- ommlds/nanochat/tokenizers.py +406 -0
- ommlds/server/cli.py +1 -2
- ommlds/server/server.py +5 -5
- ommlds/server/service.py +1 -1
- ommlds/specs/__init__.py +0 -0
- ommlds/specs/mcp/__init__.py +0 -0
- ommlds/specs/mcp/_marshal.py +23 -0
- ommlds/specs/mcp/clients.py +146 -0
- ommlds/specs/mcp/protocol.py +371 -0
- ommlds/tools/git.py +35 -12
- ommlds/tools/ocr.py +8 -9
- ommlds/wiki/analyze.py +6 -7
- ommlds/wiki/text/mfh.py +1 -5
- ommlds/wiki/text/wtp.py +1 -3
- ommlds/wiki/utils/xml.py +5 -5
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/METADATA +24 -21
- ommlds-0.0.0.dev485.dist-info/RECORD +436 -0
- ommlds/cli/backends/standard.py +0 -20
- ommlds/cli/sessions/chat/base.py +0 -42
- ommlds/cli/sessions/chat/interactive.py +0 -73
- ommlds/cli/sessions/chat/printing.py +0 -96
- ommlds/cli/sessions/chat/prompt.py +0 -143
- ommlds/cli/sessions/chat/state.py +0 -109
- ommlds/cli/sessions/chat/tools.py +0 -91
- ommlds/cli/sessions/completion/completion.py +0 -44
- ommlds/cli/sessions/embedding/embedding.py +0 -42
- ommlds/cli/tools/config.py +0 -13
- ommlds/cli/tools/inject.py +0 -64
- ommlds/minichain/chat/stream/adapters.py +0 -69
- ommlds/minichain/lib/fs/ls/execution.py +0 -32
- ommlds-0.0.0.dev426.dist-info/RECORD +0 -303
- /ommlds/{cli/tools → backends/google}/__init__.py +0 -0
- /ommlds/{huggingface.py → backends/huggingface.py} +0 -0
- /ommlds/{minichain/lib/fs/ls → cli/content}/__init__.py +0 -0
- /ommlds/minichain/lib/fs/{ls → tools/recursivels}/rendering.py +0 -0
- /ommlds/minichain/lib/fs/{ls → tools/recursivels}/running.py +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/WHEEL +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/entry_points.txt +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/licenses/LICENSE +0 -0
- {ommlds-0.0.0.dev426.dist-info → ommlds-0.0.0.dev485.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from omlish import lang
|
|
2
|
+
from omlish import marshal as msh
|
|
3
|
+
|
|
4
|
+
from .types import Value
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
##
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@lang.static_init
|
|
11
|
+
def _install_standard_marshaling() -> None:
|
|
12
|
+
msh.install_standard_factories(
|
|
13
|
+
*msh.standard_polymorphism_factories(
|
|
14
|
+
msh.polymorphism_from_subclasses(Value),
|
|
15
|
+
),
|
|
16
|
+
)
|
|
@@ -0,0 +1,626 @@
|
|
|
1
|
+
"""
|
|
2
|
+
https://ai.google.dev/api/generate-content
|
|
3
|
+
"""
|
|
4
|
+
import typing as ta
|
|
5
|
+
|
|
6
|
+
from omlish import dataclasses as dc
|
|
7
|
+
from omlish import lang
|
|
8
|
+
from omlish import marshal as msh
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
##
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _set_class_marshal_options(cls):
|
|
15
|
+
msh.update_object_metadata(
|
|
16
|
+
cls,
|
|
17
|
+
field_naming=msh.Naming.LOW_CAMEL,
|
|
18
|
+
field_defaults=msh.FieldMetadata(
|
|
19
|
+
options=msh.FieldOptions(
|
|
20
|
+
omit_if=lang.is_none,
|
|
21
|
+
),
|
|
22
|
+
),
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
return cls
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
29
|
+
@_set_class_marshal_options
|
|
30
|
+
@msh.update_fields_metadata(
|
|
31
|
+
['data'],
|
|
32
|
+
marshaler=msh.Base64MarshalerUnmarshaler(bytes),
|
|
33
|
+
unmarshaler=msh.Base64MarshalerUnmarshaler(bytes),
|
|
34
|
+
)
|
|
35
|
+
class Blob(lang.Final):
|
|
36
|
+
mine_type: str
|
|
37
|
+
data: bytes
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
41
|
+
@_set_class_marshal_options
|
|
42
|
+
class FunctionCall(lang.Final):
|
|
43
|
+
id: str | None = None
|
|
44
|
+
name: str
|
|
45
|
+
args: ta.Mapping[str, ta.Any] | None = None
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
Scheduling: ta.TypeAlias = ta.Literal[
|
|
49
|
+
# This value is unused.
|
|
50
|
+
'SCHEDULING_UNSPECIFIED',
|
|
51
|
+
|
|
52
|
+
# Only add the result to the conversation context, do not interrupt or trigger generation.
|
|
53
|
+
'SILENT',
|
|
54
|
+
|
|
55
|
+
# Add the result to the conversation context, and prompt to generate output without interrupting ongoing
|
|
56
|
+
# generation.
|
|
57
|
+
'WHEN_IDLE',
|
|
58
|
+
|
|
59
|
+
# Add the result to the conversation context, interrupt ongoing generation and prompt to generate output.
|
|
60
|
+
'INTERRUPT',
|
|
61
|
+
]
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
65
|
+
@_set_class_marshal_options
|
|
66
|
+
class FunctionResponse(lang.Final):
|
|
67
|
+
id: str | None = None
|
|
68
|
+
name: str
|
|
69
|
+
response: ta.Mapping[str, ta.Any] | None = None
|
|
70
|
+
will_continue: bool | None = None
|
|
71
|
+
scheduling: Scheduling | None = None
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
75
|
+
@_set_class_marshal_options
|
|
76
|
+
class FileData(lang.Final):
|
|
77
|
+
mime_type: str
|
|
78
|
+
file_uri: str
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
Language: ta.TypeAlias = ta.Literal[
|
|
82
|
+
# Unspecified language. This value should not be used.
|
|
83
|
+
'LANGUAGE_UNSPECIFIED',
|
|
84
|
+
|
|
85
|
+
# Python >= 3.10, with numpy and simpy available.
|
|
86
|
+
'PYTHON',
|
|
87
|
+
]
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
91
|
+
@_set_class_marshal_options
|
|
92
|
+
class ExecutableCode(lang.Final):
|
|
93
|
+
language: Language
|
|
94
|
+
code: str
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
Outcome: ta.TypeAlias = ta.Literal[
|
|
98
|
+
# Unspecified status. This value should not be used.
|
|
99
|
+
'OUTCOME_UNSPECIFIED',
|
|
100
|
+
|
|
101
|
+
# Code execution completed successfully.
|
|
102
|
+
'OUTCOME_OK',
|
|
103
|
+
|
|
104
|
+
# Code execution finished but with a failure. stderr should contain the reason.
|
|
105
|
+
'OUTCOME_FAILED',
|
|
106
|
+
|
|
107
|
+
# Code execution ran for too long, and was cancelled. There may or may not be a partial output present.
|
|
108
|
+
'OUTCOME_DEADLINE_EXCEEDED',
|
|
109
|
+
]
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
113
|
+
@_set_class_marshal_options
|
|
114
|
+
class CodeExecutionResult(lang.Final):
|
|
115
|
+
outcome: Outcome
|
|
116
|
+
output: str
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
120
|
+
@_set_class_marshal_options
|
|
121
|
+
class VideoMetadata(lang.Final):
|
|
122
|
+
start_offset: str # Duration
|
|
123
|
+
end_offset: str # Duration
|
|
124
|
+
fps: float
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
128
|
+
@msh.update_fields_metadata(
|
|
129
|
+
['thought_signature'],
|
|
130
|
+
marshaler=msh.OptionalMarshaler(msh.Base64MarshalerUnmarshaler(bytes)),
|
|
131
|
+
unmarshaler=msh.OptionalUnmarshaler(msh.Base64MarshalerUnmarshaler(bytes)),
|
|
132
|
+
)
|
|
133
|
+
@_set_class_marshal_options
|
|
134
|
+
class Part(lang.Final):
|
|
135
|
+
# TODO: data: msh.oneof ...
|
|
136
|
+
text: str | None = None
|
|
137
|
+
inline_data: Blob | None = None
|
|
138
|
+
function_call: FunctionCall | None = None
|
|
139
|
+
function_response: FunctionResponse | None = None
|
|
140
|
+
file_data: FileData | None = None
|
|
141
|
+
executable_code: ExecutableCode | None = None
|
|
142
|
+
code_execution_result: CodeExecutionResult | None = None
|
|
143
|
+
|
|
144
|
+
thought: bool | None = None
|
|
145
|
+
thought_signature: bytes | None = None
|
|
146
|
+
|
|
147
|
+
# TODO: metadata: msh.oneof ...
|
|
148
|
+
video_metadata: VideoMetadata | None = None
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
ContentRole: ta.TypeAlias = ta.Literal['user', 'model']
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
155
|
+
@_set_class_marshal_options
|
|
156
|
+
class Content(lang.Final):
|
|
157
|
+
parts: ta.Sequence[Part] | None = None
|
|
158
|
+
role: ContentRole | None = None
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
##
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
Type: ta.TypeAlias = ta.Literal[
|
|
165
|
+
# Not specified, should not be used.
|
|
166
|
+
'TYPE_UNSPECIFIED',
|
|
167
|
+
|
|
168
|
+
# String type.
|
|
169
|
+
'STRING',
|
|
170
|
+
|
|
171
|
+
# Number type.
|
|
172
|
+
'NUMBER',
|
|
173
|
+
|
|
174
|
+
# Integer type.
|
|
175
|
+
'INTEGER',
|
|
176
|
+
|
|
177
|
+
# Boolean type.
|
|
178
|
+
'BOOLEAN',
|
|
179
|
+
|
|
180
|
+
# Array type.
|
|
181
|
+
'ARRAY',
|
|
182
|
+
|
|
183
|
+
# Object type.
|
|
184
|
+
'OBJECT',
|
|
185
|
+
|
|
186
|
+
# Null type.
|
|
187
|
+
'NULL',
|
|
188
|
+
]
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
Struct: ta.TypeAlias = ta.Mapping[str, 'Value']
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
@dc.dataclass(frozen=True)
|
|
195
|
+
class Value(lang.Abstract, lang.Sealed):
|
|
196
|
+
"""https://protobuf.dev/reference/protobuf/google.protobuf/#value"""
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
@dc.dataclass(frozen=True)
|
|
200
|
+
@msh.update_object_metadata(field_naming=msh.Naming.LOW_CAMEL)
|
|
201
|
+
class NullValue(Value, lang.Final):
|
|
202
|
+
null_value: None = None
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
@dc.dataclass(frozen=True)
|
|
206
|
+
@_set_class_marshal_options
|
|
207
|
+
class NumberValue(Value, lang.Final):
|
|
208
|
+
number_value: float
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
@dc.dataclass(frozen=True)
|
|
212
|
+
@_set_class_marshal_options
|
|
213
|
+
class StringValue(Value, lang.Final):
|
|
214
|
+
string_value: str
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
@dc.dataclass(frozen=True)
|
|
218
|
+
@_set_class_marshal_options
|
|
219
|
+
class BoolValue(Value, lang.Final):
|
|
220
|
+
bool_value: bool
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
@dc.dataclass(frozen=True)
|
|
224
|
+
@_set_class_marshal_options
|
|
225
|
+
class StructValue(Value, lang.Final):
|
|
226
|
+
struct_value: Struct
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
@dc.dataclass(frozen=True)
|
|
230
|
+
@_set_class_marshal_options
|
|
231
|
+
class ListValue(Value, lang.Final):
|
|
232
|
+
list_value: ta.Sequence[Value]
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
236
|
+
@_set_class_marshal_options
|
|
237
|
+
class Schema(lang.Final):
|
|
238
|
+
type: Type | None = None # FIXME: required
|
|
239
|
+
format: str | None = None
|
|
240
|
+
title: str | None = None
|
|
241
|
+
description: str | None = None
|
|
242
|
+
nullable: bool | None = None
|
|
243
|
+
enum: ta.Sequence[str] | None = None
|
|
244
|
+
max_items: str | None = None # int64
|
|
245
|
+
min_items: str | None = None # int64
|
|
246
|
+
properties: ta.Mapping[str, 'Schema'] | None = None
|
|
247
|
+
required: ta.Sequence[str] | None = None
|
|
248
|
+
min_properties: str | None = None # int64
|
|
249
|
+
max_properties: str | None = None # int64
|
|
250
|
+
min_length: str | None = None # int64
|
|
251
|
+
max_length: str | None = None # int64
|
|
252
|
+
pattern: str | None = None
|
|
253
|
+
example: Value | None = None
|
|
254
|
+
any_of: ta.Sequence['Schema'] | None = None
|
|
255
|
+
property_ordering: ta.Sequence[str] | None = None
|
|
256
|
+
default: Value | None = None
|
|
257
|
+
items: ta.Optional['Schema'] = None
|
|
258
|
+
minimum: float | None = None
|
|
259
|
+
maximum: float | None = None
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
FunctionBehavior: ta.TypeAlias = ta.Literal[
|
|
263
|
+
#This value is unused.
|
|
264
|
+
'UNSPECIFIED',
|
|
265
|
+
|
|
266
|
+
# If set, the system will wait to receive the function response before continuing the conversation.
|
|
267
|
+
'BLOCKING',
|
|
268
|
+
|
|
269
|
+
# If set, the system will not wait to receive the function response. Instead, it will attempt to handle function
|
|
270
|
+
# responses as they become available while maintaining the conversation between the user and the model.
|
|
271
|
+
'NON_BLOCKING',
|
|
272
|
+
]
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
276
|
+
@_set_class_marshal_options
|
|
277
|
+
class FunctionDeclaration(lang.Final):
|
|
278
|
+
name: str
|
|
279
|
+
description: str
|
|
280
|
+
|
|
281
|
+
behavior: FunctionBehavior | None = None
|
|
282
|
+
|
|
283
|
+
parameters: Schema | None = None
|
|
284
|
+
parameters_json_schema: Value | None = None
|
|
285
|
+
|
|
286
|
+
response: Schema | None = None
|
|
287
|
+
response_json_schema: Value | None = None
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
DynamicRetrievalMode: ta.TypeAlias = ta.Literal[
|
|
291
|
+
# Always trigger retrieval.
|
|
292
|
+
'MODE_UNSPECIFIED',
|
|
293
|
+
|
|
294
|
+
# Run retrieval only when system decides it is necessary.
|
|
295
|
+
'MODE_DYNAMIC',
|
|
296
|
+
]
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
300
|
+
@_set_class_marshal_options
|
|
301
|
+
class DynamicRetrievalConfig(lang.Final):
|
|
302
|
+
mode: DynamicRetrievalMode | None = None
|
|
303
|
+
|
|
304
|
+
dynamic_threshold: int | float | None = None
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
308
|
+
@_set_class_marshal_options
|
|
309
|
+
class GoogleSearchRetrieval(lang.Final):
|
|
310
|
+
dynamic_retrieval_config: DynamicRetrievalConfig
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
314
|
+
@_set_class_marshal_options
|
|
315
|
+
class CodeExecution(lang.Final):
|
|
316
|
+
pass
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
320
|
+
@_set_class_marshal_options
|
|
321
|
+
class Interval(lang.Final):
|
|
322
|
+
start_time: str # Timestamp
|
|
323
|
+
end_time: str # Timestamp
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
327
|
+
@_set_class_marshal_options
|
|
328
|
+
class GoogleSearch(lang.Final):
|
|
329
|
+
time_range_filter: Interval | None = None
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
333
|
+
@_set_class_marshal_options
|
|
334
|
+
class UrlContext(lang.Final):
|
|
335
|
+
pass
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
339
|
+
@_set_class_marshal_options
|
|
340
|
+
class Tool(lang.Final):
|
|
341
|
+
function_declarations: ta.Sequence[FunctionDeclaration] | None = None
|
|
342
|
+
google_search_retrieval: GoogleSearchRetrieval | None = None
|
|
343
|
+
code_execution: CodeExecution | None = None
|
|
344
|
+
google_search: GoogleSearch | None = None
|
|
345
|
+
url_context: UrlContext | None = None
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
FunctionCallingMode: ta.TypeAlias = ta.Literal[
|
|
349
|
+
# Unspecified function calling mode. This value should not be used.
|
|
350
|
+
'MODE_UNSPECIFIED',
|
|
351
|
+
|
|
352
|
+
# Default model behavior, model decides to predict either a function call or a natural language response.
|
|
353
|
+
'AUTO',
|
|
354
|
+
|
|
355
|
+
# Model is constrained to always predicting a function call only. If "allowedFunctionNames" are set, the predicted
|
|
356
|
+
# function call will be limited to any one of "allowedFunctionNames", else the predicted function call will be any
|
|
357
|
+
# one of the provided "functionDeclarations".
|
|
358
|
+
'ANY',
|
|
359
|
+
|
|
360
|
+
# Model will not predict any function call. Model behavior is same as when not passing any function declarations.
|
|
361
|
+
'NONE',
|
|
362
|
+
|
|
363
|
+
# Model decides to predict either a function call or a natural language response, but will validate function calls
|
|
364
|
+
# with constrained decoding. If "allowedFunctionNames" are set, the predicted function call will be limited to any
|
|
365
|
+
# one of "allowedFunctionNames", else the predicted function call will be any one of the provided
|
|
366
|
+
# "functionDeclarations".
|
|
367
|
+
'VALIDATED',
|
|
368
|
+
]
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
372
|
+
@_set_class_marshal_options
|
|
373
|
+
class FunctionCallingConfig(lang.Final):
|
|
374
|
+
mode: FunctionCallingMode | None = None
|
|
375
|
+
allowed_function_names: ta.Sequence[str] | None = None
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
379
|
+
@_set_class_marshal_options
|
|
380
|
+
class ToolConfig(lang.Final):
|
|
381
|
+
function_calling_config: FunctionCallingConfig | None = None
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
HarmCategory: ta.TypeAlias = ta.Literal[
|
|
385
|
+
# Category is unspecified.
|
|
386
|
+
'HARM_CATEGORY_UNSPECIFIED',
|
|
387
|
+
|
|
388
|
+
# PaLM - Negative or harmful comments targeting identity and/or protected attribute.
|
|
389
|
+
'HARM_CATEGORY_DEROGATORY',
|
|
390
|
+
|
|
391
|
+
# PaLM - Content that is rude, disrespectful, or profane.
|
|
392
|
+
'HARM_CATEGORY_TOXICITY',
|
|
393
|
+
|
|
394
|
+
# PaLM - Describes scenarios depicting violence against an individual or group, or general descriptions of gore.
|
|
395
|
+
'HARM_CATEGORY_VIOLENCE',
|
|
396
|
+
|
|
397
|
+
# PaLM - Contains references to sexual acts or other lewd content.
|
|
398
|
+
'HARM_CATEGORY_SEXUAL',
|
|
399
|
+
|
|
400
|
+
# PaLM - Promotes unchecked medical advice.
|
|
401
|
+
'HARM_CATEGORY_MEDICAL',
|
|
402
|
+
|
|
403
|
+
# PaLM - Dangerous content that promotes, facilitates, or encourages harmful acts.
|
|
404
|
+
'HARM_CATEGORY_DANGEROUS',
|
|
405
|
+
|
|
406
|
+
# Gemini - Harassment content.
|
|
407
|
+
'HARM_CATEGORY_HARASSMENT',
|
|
408
|
+
|
|
409
|
+
# Gemini - Hate speech and content.
|
|
410
|
+
'HARM_CATEGORY_HATE_SPEECH',
|
|
411
|
+
|
|
412
|
+
# Gemini - Sexually explicit content.
|
|
413
|
+
'HARM_CATEGORY_SEXUALLY_EXPLICIT',
|
|
414
|
+
|
|
415
|
+
# Gemini - Dangerous content.
|
|
416
|
+
'HARM_CATEGORY_DANGEROUS_CONTENT',
|
|
417
|
+
|
|
418
|
+
# Gemini - Content that may be used to harm civic integrity. DEPRECATED: use enableEnhancedCivicAnswers instead.
|
|
419
|
+
'HARM_CATEGORY_CIVIC_INTEGRITY',
|
|
420
|
+
]
|
|
421
|
+
|
|
422
|
+
|
|
423
|
+
HarmBlockThreshold: ta.TypeAlias = ta.Literal[
|
|
424
|
+
# Threshold is unspecified.
|
|
425
|
+
'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
|
|
426
|
+
|
|
427
|
+
# Content with NEGLIGIBLE will be allowed.
|
|
428
|
+
'BLOCK_LOW_AND_ABOVE',
|
|
429
|
+
|
|
430
|
+
# Content with NEGLIGIBLE and LOW will be allowed.
|
|
431
|
+
'BLOCK_MEDIUM_AND_ABOVE',
|
|
432
|
+
|
|
433
|
+
# Content with NEGLIGIBLE, LOW, and MEDIUM will be allowed.
|
|
434
|
+
'BLOCK_ONLY_HIGH',
|
|
435
|
+
|
|
436
|
+
# All content will be allowed.
|
|
437
|
+
'BLOCK_NONE',
|
|
438
|
+
|
|
439
|
+
# Turn off the safety filter.
|
|
440
|
+
'OFF',
|
|
441
|
+
]
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
@dc.dataclass(frozen=True, kw_only=True)
|
|
445
|
+
@_set_class_marshal_options
|
|
446
|
+
class SafetySetting(lang.Final):
|
|
447
|
+
category: HarmCategory
|
|
448
|
+
threshold: HarmBlockThreshold
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
@dc.dataclass(frozen=True, kw_only=True)
@_set_class_marshal_options
class ThinkingConfig(lang.Final):
    """Configuration for the model's 'thinking' behavior."""

    # Whether thought content should be included in the response — presumably thought summaries; confirm against the
    # generate-content API docs.
    include_thoughts: bool | None = None
    # Presumably the maximum number of tokens the model may spend thinking — verify against the API docs.
    thinking_budget: int | None = None
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
Modality: ta.TypeAlias = ta.Literal[
|
|
459
|
+
# Default value.
|
|
460
|
+
'MODALITY_UNSPECIFIED',
|
|
461
|
+
|
|
462
|
+
# Indicates the model should return text.
|
|
463
|
+
'TEXT',
|
|
464
|
+
|
|
465
|
+
# Indicates the model should return images.
|
|
466
|
+
'IMAGE',
|
|
467
|
+
|
|
468
|
+
# Indicates the model should return audio.
|
|
469
|
+
'AUDIO',
|
|
470
|
+
]
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
MediaResolution: ta.TypeAlias = ta.Literal[
|
|
474
|
+
# Media resolution has not been set.
|
|
475
|
+
'MEDIA_RESOLUTION_UNSPECIFIED',
|
|
476
|
+
|
|
477
|
+
# Media resolution set to low (64 tokens).
|
|
478
|
+
'MEDIA_RESOLUTION_LOW',
|
|
479
|
+
|
|
480
|
+
# Media resolution set to medium (256 tokens).
|
|
481
|
+
'MEDIA_RESOLUTION_MEDIUM',
|
|
482
|
+
|
|
483
|
+
# Media resolution set to high (zoomed reframing with 256 tokens).
|
|
484
|
+
'MEDIA_RESOLUTION_HIGH',
|
|
485
|
+
]
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
@dc.dataclass(frozen=True, kw_only=True)
@_set_class_marshal_options
class GenerationConfig(lang.Final):
    """Per-request options controlling how the model generates candidates."""

    # Sequences that stop generation when produced — TODO confirm whether the sequences themselves are included in
    # the output.
    stop_sequences: ta.Sequence[str] | None = None

    # Output format controls.
    response_mime_type: str | None = None
    response_schema: Schema | None = None
    response_json_schema: Value | None = None
    response_modalities: ta.Sequence[Modality] | None = None

    # Sampling controls.
    candidate_count: int | None = None
    max_output_tokens: int | None = None
    temperature: float | None = None
    top_p: float | None = None
    top_k: int | None = None
    seed: int | None = None
    presence_penalty: float | None = None
    frequency_penalty: float | None = None

    # Log-probability reporting: whether to return logprobs, and how many top candidates per step.
    response_logprobs: bool | None = None
    logprobs: int | None = None

    enable_enhanced_civic_answers: bool | None = None

    # Not yet modeled here:
    # speech_config: SpeechConfig | None = None

    thinking_config: ThinkingConfig | None = None

    media_resolution: MediaResolution | None = None
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
@dc.dataclass(frozen=True, kw_only=True)
@_set_class_marshal_options
class GenerateContentRequest(lang.Final):
    """https://ai.google.dev/api/generate-content#request-body"""

    # The conversation contents sent to the model.
    contents: ta.Sequence[Content] | None = None
    # Tools the model may call, and how it may use them.
    tools: ta.Sequence[Tool] | None = None
    tool_config: ToolConfig | None = None
    # Per-category safety thresholds for this request.
    safety_settings: ta.Sequence[SafetySetting] | None = None
    # System-level instruction content.
    system_instruction: Content | None = None
    generation_config: GenerationConfig | None = None
    # Presumably the resource name of previously cached content to reuse — confirm expected format against the API
    # docs.
    cached_content: str | None = None
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
FinishReason: ta.TypeAlias = ta.Literal[
|
|
534
|
+
# Default value. This value is unused.
|
|
535
|
+
'FINISH_REASON_UNSPECIFIED',
|
|
536
|
+
|
|
537
|
+
# Natural stop point of the model or provided stop sequence.
|
|
538
|
+
'STOP',
|
|
539
|
+
|
|
540
|
+
# The maximum number of tokens as specified in the request was reached.
|
|
541
|
+
'MAX_TOKENS',
|
|
542
|
+
|
|
543
|
+
# The response candidate content was flagged for safety reasons.
|
|
544
|
+
'SAFETY',
|
|
545
|
+
|
|
546
|
+
# The response candidate content was flagged for recitation reasons.
|
|
547
|
+
'RECITATION',
|
|
548
|
+
|
|
549
|
+
# The response candidate content was flagged for using an unsupported language.
|
|
550
|
+
'LANGUAGE',
|
|
551
|
+
|
|
552
|
+
# Unknown reason.
|
|
553
|
+
'OTHER',
|
|
554
|
+
|
|
555
|
+
# Token generation stopped because the content contains forbidden terms.
|
|
556
|
+
'BLOCKLIST',
|
|
557
|
+
|
|
558
|
+
# Token generation stopped for potentially containing prohibited content.
|
|
559
|
+
'PROHIBITED_CONTENT',
|
|
560
|
+
|
|
561
|
+
# Token generation stopped because the content potentially contains Sensitive Personally Identifiable Information
|
|
562
|
+
# (SPII).
|
|
563
|
+
'SPII',
|
|
564
|
+
|
|
565
|
+
# The function call generated by the model is invalid.
|
|
566
|
+
'MALFORMED_FUNCTION_CALL',
|
|
567
|
+
|
|
568
|
+
# Token generation stopped because generated images contain safety violations.
|
|
569
|
+
'IMAGE_SAFETY',
|
|
570
|
+
|
|
571
|
+
# Model generated a tool call but no tools were enabled in the request.
|
|
572
|
+
'UNEXPECTED_TOOL_CALL',
|
|
573
|
+
|
|
574
|
+
# Model called too many tools consecutively, thus the system exited execution.
|
|
575
|
+
'TOO_MANY_TOOL_CALLS',
|
|
576
|
+
]
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
@dc.dataclass(frozen=True, kw_only=True)
@_set_class_marshal_options
class GenerateContentResponse(lang.Final):
    """https://ai.google.dev/api/generate-content#v1beta.GenerateContentResponse"""

    @dc.dataclass(frozen=True, kw_only=True)
    @_set_class_marshal_options
    class Candidate(lang.Final):
        # The generated content for this candidate.
        content: Content | None = None
        # Why generation stopped, and an optional human-readable elaboration.
        finish_reason: FinishReason | None = None
        finish_message: str | None = None
        # Not yet modeled here:
        # safety_ratings: ta.Sequence[SafetyRating] | None = None
        # citation_metadata: CitationMetadata | None = None
        token_count: int | None = None
        # grounding_attributions: ta.Sequence[GroundingAttribution] | None = None
        # grounding_metadata: GroundingMetadata | None = None
        avg_logprobs: float | None = None
        # logprobs_result: LogprobsResult | None = None
        # url_context_metadata: UrlContextMetadata | None = None
        # Position of this candidate within the response's candidate list.
        index: int | None = None

    candidates: ta.Sequence[Candidate] | None = None

    @dc.dataclass(frozen=True, kw_only=True)
    @_set_class_marshal_options
    class UsageMetadata(lang.Final):
        # Token counts for the prompt, cached content, candidates, thinking, and their total.
        prompt_token_count: int | None = None
        cached_content_token_count: int | None = None
        candidates_token_count: int | None = None
        total_token_count: int | None = None
        thoughts_token_count: int | None = None

        @dc.dataclass(frozen=True, kw_only=True)
        @_set_class_marshal_options
        # NOTE(review): every sibling dataclass here subclasses lang.Final — confirm whether its omission on this one
        # is deliberate.
        class ModalityTokenCount:
            modality: str | None = None
            token_count: int | None = None

        # Per-modality breakdowns of the counts above.
        prompt_tokens_details: ta.Sequence[ModalityTokenCount] | None = None
        cache_tokens_details: ta.Sequence[ModalityTokenCount] | None = None
        candidates_tokens_details: ta.Sequence[ModalityTokenCount] | None = None
        tool_use_prompt_tokens_details: ta.Sequence[ModalityTokenCount] | None = None

    usage_metadata: UsageMetadata | None = None

    model_version: str | None = None

    response_id: str | None = None
|