chatlas-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- chatlas/__init__.py +38 -0
- chatlas/_anthropic.py +643 -0
- chatlas/_chat.py +1279 -0
- chatlas/_content.py +242 -0
- chatlas/_content_image.py +272 -0
- chatlas/_display.py +139 -0
- chatlas/_github.py +147 -0
- chatlas/_google.py +456 -0
- chatlas/_groq.py +143 -0
- chatlas/_interpolate.py +133 -0
- chatlas/_logging.py +61 -0
- chatlas/_merge.py +103 -0
- chatlas/_ollama.py +125 -0
- chatlas/_openai.py +654 -0
- chatlas/_perplexity.py +148 -0
- chatlas/_provider.py +143 -0
- chatlas/_tokens.py +87 -0
- chatlas/_tokens_old.py +148 -0
- chatlas/_tools.py +134 -0
- chatlas/_turn.py +147 -0
- chatlas/_typing_extensions.py +26 -0
- chatlas/_utils.py +106 -0
- chatlas/types/__init__.py +32 -0
- chatlas/types/anthropic/__init__.py +14 -0
- chatlas/types/anthropic/_client.py +29 -0
- chatlas/types/anthropic/_client_bedrock.py +23 -0
- chatlas/types/anthropic/_submit.py +57 -0
- chatlas/types/google/__init__.py +12 -0
- chatlas/types/google/_client.py +101 -0
- chatlas/types/google/_submit.py +113 -0
- chatlas/types/openai/__init__.py +14 -0
- chatlas/types/openai/_client.py +22 -0
- chatlas/types/openai/_client_azure.py +25 -0
- chatlas/types/openai/_submit.py +135 -0
- chatlas-0.2.0.dist-info/METADATA +319 -0
- chatlas-0.2.0.dist-info/RECORD +37 -0
- chatlas-0.2.0.dist-info/WHEEL +4 -0
chatlas/_turn.py
ADDED
@@ -0,0 +1,147 @@
from __future__ import annotations

from typing import Any, Generic, Literal, Optional, Sequence, TypeVar

from ._content import Content, ContentText

__all__ = ("Turn",)

CompletionT = TypeVar("CompletionT")


class Turn(Generic[CompletionT]):
    """
    A user or assistant turn

    Every conversation with a chatbot consists of pairs of user and assistant
    turns, corresponding to an HTTP request and response. These turns are
    represented by the `Turn` object, which contains a list of
    [](`~chatlas.types.Content`)s representing the individual messages within the
    turn. These might be text, images, tool requests (assistant only), or tool
    responses (user only).

    Note that a call to `.chat()` and related functions may result in multiple
    user-assistant turn cycles. For example, if you have registered tools, chatlas
    will automatically handle the tool calling loop, which may result in any
    number of additional cycles.

    Examples
    --------

    ```python
    from chatlas import Turn, ChatOpenAI, ChatAnthropic

    chat = ChatOpenAI()
    str(chat.chat("What is the capital of France?"))
    turns = chat.get_turns()
    assert len(turns) == 2
    assert isinstance(turns[0], Turn)
    assert turns[0].role == "user"
    assert turns[1].role == "assistant"

    # Load context into a new chat instance
    chat2 = ChatAnthropic(turns=turns)
    turns2 = chat2.get_turns()
    assert turns == turns2
    ```

    Parameters
    ----------
    role
        Either "user", "assistant", or "system".
    contents
        A list of [](`~chatlas.types.Content`) objects.
    tokens
        A numeric vector of length 2 representing the number of input and output
        tokens (respectively) used in this turn. Currently only recorded for
        assistant turns.
    finish_reason
        A string indicating the reason why the conversation ended. This is only
        relevant for assistant turns.
    completion
        The completion object returned by the provider. This is useful if there's
        information returned by the provider that chatlas doesn't otherwise expose.
        This is only relevant for assistant turns.
    """

    def __init__(
        self,
        role: Literal["user", "assistant", "system"],
        contents: str | Sequence[Content | str],
        *,
        tokens: Optional[tuple[int, int]] = None,
        finish_reason: Optional[str] = None,
        completion: Optional[CompletionT] = None,
    ):
        self.role = role

        if isinstance(contents, str):
            contents = [ContentText(contents)]

        contents2: list[Content] = []
        for x in contents:
            if isinstance(x, Content):
                contents2.append(x)
            elif isinstance(x, str):
                contents2.append(ContentText(x))
            else:
                raise ValueError("All contents must be Content objects or str.")

        self.contents = contents2
        self.text = "".join(x.text for x in self.contents if isinstance(x, ContentText))
        self.tokens = tokens
        self.finish_reason = finish_reason
        self.completion = completion

    def __str__(self) -> str:
        return self.text

    def __repr__(self, indent: int = 0) -> str:
        res = " " * indent + f"<Turn role='{self.role}'"
        if self.tokens:
            res += f" tokens={self.tokens}"
        if self.finish_reason:
            res += f" finish_reason='{self.finish_reason}'"
        if self.completion:
            res += f" completion={self.completion}"
        res += ">"
        for content in self.contents:
            res += "\n" + content.__repr__(indent=indent + 2)
        return res + "\n"

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Turn):
            return False
        res = (
            self.role == other.role
            and self.contents == other.contents
            and self.tokens == other.tokens
            and self.finish_reason == other.finish_reason
            and self.completion == other.completion
        )
        return res


def user_turn(*args: Content | str) -> Turn:
    if len(args) == 0:
        raise ValueError("Must supply at least one input.")

    return Turn("user", args)


def normalize_turns(turns: list[Turn], system_prompt: str | None = None) -> list[Turn]:
    if system_prompt is not None:
        system_turn = Turn("system", system_prompt)

        if not turns:
            turns = [system_turn]
        elif turns[0].role != "system":
            turns = [system_turn] + turns
        elif turns[0] == system_turn:
            pass  # Duplicate system prompt; don't need to do anything
        else:
            raise ValueError(
                "system_prompt and turns[0] can't contain conflicting system prompts."
            )

    return turns
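The `Turn` class above is small enough to exercise directly. Below is a minimal sketch, assuming only the code in this file plus the `Turn` re-export from the package root shown in its docstring; it constructs a pair of turns by hand and checks the `__str__`/`__eq__` behavior defined above.

```python
# Minimal sketch of the Turn API defined above; assumes `chatlas` is installed
# and that `Turn` is re-exported from the package root (as in the docstring).
from chatlas import Turn

turns = [
    Turn("user", "What is 2 + 2?"),
    Turn("assistant", "4", tokens=(12, 1), finish_reason="stop"),
]

assert str(turns[1]) == "4"   # __str__ concatenates the ContentText pieces
assert turns[0] != turns[1]   # __eq__ compares role, contents, tokens, ...
```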
chatlas/_typing_extensions.py
ADDED
@@ -0,0 +1,26 @@
from __future__ import annotations

# # Within file flags to ignore unused imports
# flake8: noqa: F401
# pyright: reportUnusedImport=false
import sys

if sys.version_info >= (3, 10):
    from typing import ParamSpec, TypeGuard, is_typeddict
else:
    from typing_extensions import ParamSpec, TypeGuard, is_typeddict

# Even though TypedDict is available in Python 3.8, because it's used with NotRequired,
# they should both come from the same typing module.
# https://peps.python.org/pep-0655/#usage-in-python-3-11
if sys.version_info >= (3, 11):
    from typing import Required, TypedDict
else:
    from typing_extensions import Required, TypedDict


# The only purpose of the following line is so that pyright will put all of the
# conditional imports into the .pyi file when generating type stubs. Without this line,
# pyright will not include the above imports in the generated .pyi file, and it will
# result in a lot of red squiggles in user code.
_: "ParamSpec | TypeGuard | is_typeddict | Required | TypedDict"  # type: ignore
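This shim exists so the rest of the package can import newer typing features from a single place. A hypothetical consumer would import from the shim rather than from `typing`/`typing_extensions` directly; the `ToolSpec` TypedDict below is made up for illustration and is not part of chatlas.

```python
# Hypothetical consumer of the shim above (ToolSpec is illustrative only).
from chatlas._typing_extensions import Required, TypedDict


class ToolSpec(TypedDict, total=False):
    name: Required[str]  # always required, even though total=False
    description: str     # optional
```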
chatlas/_utils.py
ADDED
@@ -0,0 +1,106 @@
from __future__ import annotations

import functools
import inspect
import os
import re
from typing import Awaitable, Callable, TypeVar, cast

from ._typing_extensions import ParamSpec, TypeGuard

# --------------------------------------------------------------------
# wrap_async() and is_async_callable() was copied from shiny/_utils.py
# --------------------------------------------------------------------

R = TypeVar("R")  # Return type
P = ParamSpec("P")


def wrap_async(
    fn: Callable[P, R] | Callable[P, Awaitable[R]],
) -> Callable[P, Awaitable[R]]:
    """
    Given a synchronous function that returns R, return an async function that wraps the
    original function. If the input function is already async, then return it unchanged.
    """

    if is_async_callable(fn):
        return fn

    fn = cast(Callable[P, R], fn)

    @functools.wraps(fn)
    async def fn_async(*args: P.args, **kwargs: P.kwargs) -> R:
        return fn(*args, **kwargs)

    return fn_async


def is_async_callable(
    obj: Callable[P, R] | Callable[P, Awaitable[R]],
) -> TypeGuard[Callable[P, Awaitable[R]]]:
    """
    Determine if an object is an async function.

    This is a more general version of `inspect.iscoroutinefunction()`, which only works
    on functions. This function works on any object that has a `__call__` method, such
    as a class instance.

    Returns
    -------
    :
        Returns True if `obj` is an `async def` function, or if it's an object with a
        `__call__` method which is an `async def` function.
    """
    if inspect.iscoroutinefunction(obj):
        return True
    if hasattr(obj, "__call__"):  # noqa: B004
        if inspect.iscoroutinefunction(obj.__call__):  # type: ignore
            return True

    return False


# https://docs.pytest.org/en/latest/example/simple.html#pytest-current-test-environment-variable
def is_testing():
    return os.environ.get("PYTEST_CURRENT_TEST", None) is not None


class MISSING_TYPE:
    """
    A singleton representing a missing value.
    """

    pass


MISSING = MISSING_TYPE()


# --------------------------------------------------------------------
# html_escape was copied from htmltools/_utils.py
# --------------------------------------------------------------------


HTML_ESCAPE_TABLE = {
    "&": "&amp;",
    ">": "&gt;",
    "<": "&lt;",
}

HTML_ATTRS_ESCAPE_TABLE = {
    **HTML_ESCAPE_TABLE,
    '"': "&quot;",
    "'": "&apos;",
    "\r": "&#13;",
    "\n": "&#10;",
}


def html_escape(text: str, attr: bool = True) -> str:
    table = HTML_ATTRS_ESCAPE_TABLE if attr else HTML_ESCAPE_TABLE
    if not re.search("|".join(table), text):
        return text
    for key, value in table.items():
        text = text.replace(key, value)
    return text
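A short usage sketch of the helpers above (illustrative only; the imports go through the private `chatlas._utils` module):

```python
import asyncio

from chatlas._utils import html_escape, is_async_callable, wrap_async


def add(a: int, b: int) -> int:
    return a + b


async def main() -> None:
    add_async = wrap_async(add)  # a plain function becomes awaitable
    assert await add_async(1, 2) == 3
    assert is_async_callable(add_async)
    assert not is_async_callable(add)


asyncio.run(main())

print(html_escape("<b>&</b>"))  # &lt;b&gt;&amp;&lt;/b&gt;
```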
chatlas/types/__init__.py
ADDED
@@ -0,0 +1,32 @@
from .._chat import ChatResponse, ChatResponseAsync, SubmitInputArgsT
from .._content import (
    Content,
    ContentImage,
    ContentImageInline,
    ContentImageRemote,
    ContentJson,
    ContentText,
    ContentToolRequest,
    ContentToolResult,
    ImageContentTypes,
)
from .._tokens import TokenUsage
from .._utils import MISSING, MISSING_TYPE

__all__ = (
    "Content",
    "ContentImage",
    "ContentImageInline",
    "ContentImageRemote",
    "ContentJson",
    "ContentText",
    "ContentToolRequest",
    "ContentToolResult",
    "ChatResponse",
    "ChatResponseAsync",
    "ImageContentTypes",
    "SubmitInputArgsT",
    "TokenUsage",
    "MISSING_TYPE",
    "MISSING",
)
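Taken together, `chatlas.types` gives user code a public import path for the content classes, so annotations don't have to reach into private modules. A small illustrative helper (the function itself is made up):

```python
# Illustrative helper using the public chatlas.types namespace re-exported above.
from chatlas.types import Content, ContentText


def visible_text(contents: list[Content]) -> str:
    """Concatenate just the plain-text pieces of a turn's contents."""
    return "".join(c.text for c in contents if isinstance(c, ContentText))
```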
chatlas/types/anthropic/__init__.py
ADDED
@@ -0,0 +1,14 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from ._client import ChatClientArgs
from ._client_bedrock import ChatBedrockClientArgs
from ._submit import SubmitInputArgs

__all__ = (
    "ChatClientArgs",
    "ChatBedrockClientArgs",
    "SubmitInputArgs",
)
chatlas/types/anthropic/_client.py
ADDED
@@ -0,0 +1,29 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from typing import Mapping, Optional, TypedDict, Union

import anthropic
import httpx


class ChatClientArgs(TypedDict, total=False):
    api_key: str | None
    auth_token: str | None
    base_url: str | httpx.URL | None
    timeout: Union[float, anthropic.Timeout, None, anthropic.NotGiven]
    max_retries: int
    default_headers: Optional[Mapping[str, str]]
    default_query: Optional[Mapping[str, object]]
    http_client: httpx.AsyncClient
    transport: httpx.AsyncBaseTransport
    proxies: Union[
        str,
        httpx.Proxy,
        dict[str | httpx.URL, Union[None, str, httpx.URL, httpx.Proxy]],
        None,
    ]
    connection_pool_limits: httpx.Limits
    _strict_response_validation: bool
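Because `ChatClientArgs` is declared with `total=False`, any subset of the Anthropic client options can be supplied. A hedged sketch follows; how chatlas forwards the dict to the provider's client constructor is not shown in this diff.

```python
# Illustrative: build a partial set of Anthropic client options.
from chatlas.types.anthropic import ChatClientArgs

client_args: ChatClientArgs = {
    "max_retries": 3,
    "timeout": 30.0,
}
```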
chatlas/types/anthropic/_client_bedrock.py
ADDED
@@ -0,0 +1,23 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------

from typing import Mapping, Optional, TypedDict

import anthropic
import httpx


class ChatBedrockClientArgs(TypedDict, total=False):
    aws_secret_key: str | None
    aws_access_key: str | None
    aws_region: str | None
    aws_profile: str | None
    aws_session_token: str | None
    base_url: str | httpx.URL | None
    timeout: float | anthropic.Timeout | None | anthropic.NotGiven
    max_retries: int
    default_headers: Optional[Mapping[str, str]]
    default_query: Optional[Mapping[str, object]]
    http_client: httpx.AsyncClient
    _strict_response_validation: bool
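The Bedrock variant swaps the API-key fields for AWS credential/region options; the same partial-dict pattern applies (values below are placeholders):

```python
# Illustrative: Bedrock client options use AWS fields instead of an API key.
from chatlas.types.anthropic import ChatBedrockClientArgs

bedrock_args: ChatBedrockClientArgs = {
    "aws_region": "us-east-1",
    "aws_profile": "default",
    "max_retries": 2,
}
```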
chatlas/types/anthropic/_submit.py
ADDED
@@ -0,0 +1,57 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from typing import Iterable, Literal, Mapping, Optional, TypedDict, Union

import anthropic
import anthropic._types
import anthropic.types.message_param
import anthropic.types.text_block_param
import anthropic.types.tool_choice_any_param
import anthropic.types.tool_choice_auto_param
import anthropic.types.tool_choice_tool_param
import anthropic.types.tool_param


class SubmitInputArgs(TypedDict, total=False):
    max_tokens: int
    messages: Iterable[anthropic.types.message_param.MessageParam]
    model: Union[
        str,
        Literal[
            "claude-3-5-haiku-latest",
            "claude-3-5-haiku-20241022",
            "claude-3-5-sonnet-latest",
            "claude-3-5-sonnet-20241022",
            "claude-3-5-sonnet-20240620",
            "claude-3-opus-latest",
            "claude-3-opus-20240229",
            "claude-3-sonnet-20240229",
            "claude-3-haiku-20240307",
            "claude-2.1",
            "claude-2.0",
        ],
    ]
    stop_sequences: Union[list[str], anthropic.NotGiven]
    stream: Union[Literal[False], Literal[True], anthropic.NotGiven]
    system: Union[
        str,
        Iterable[anthropic.types.text_block_param.TextBlockParam],
        anthropic.NotGiven,
    ]
    temperature: float | anthropic.NotGiven
    tool_choice: Union[
        anthropic.types.tool_choice_auto_param.ToolChoiceAutoParam,
        anthropic.types.tool_choice_any_param.ToolChoiceAnyParam,
        anthropic.types.tool_choice_tool_param.ToolChoiceToolParam,
        anthropic.NotGiven,
    ]
    tools: Union[Iterable[anthropic.types.tool_param.ToolParam], anthropic.NotGiven]
    top_k: int | anthropic.NotGiven
    top_p: float | anthropic.NotGiven
    extra_headers: Optional[Mapping[str, Union[str, anthropic._types.Omit]]]
    extra_query: Optional[Mapping[str, object]]
    extra_body: object | None
    timeout: float | anthropic.Timeout | None | anthropic.NotGiven
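`SubmitInputArgs` appears to mirror the keyword arguments of the Anthropic Messages API, so per-request overrides can be expressed as a typed dict. A hedged example:

```python
# Illustrative per-request options; the model name comes from the Literal above.
from chatlas.types.anthropic import SubmitInputArgs

submit_args: SubmitInputArgs = {
    "model": "claude-3-5-sonnet-latest",
    "max_tokens": 1024,
    "temperature": 0.2,
}
```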
chatlas/types/google/__init__.py
ADDED
@@ -0,0 +1,12 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from ._client import ChatClientArgs
from ._submit import SubmitInputArgs

__all__ = (
    "ChatClientArgs",
    "SubmitInputArgs",
)
chatlas/types/google/_client.py
ADDED
@@ -0,0 +1,101 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from typing import Any, Callable, Iterable, TypedDict, Union

import google.ai.generativelanguage_v1beta.types.content
import google.ai.generativelanguage_v1beta.types.file
import google.ai.generativelanguage_v1beta.types.generative_service
import google.generativeai.types.content_types
import google.generativeai.types.file_types
import google.generativeai.types.generation_types


class ChatClientArgs(TypedDict, total=False):
    model_name: str
    generation_config: Union[
        google.ai.generativelanguage_v1beta.types.generative_service.GenerationConfig,
        google.generativeai.types.generation_types.GenerationConfigDict,
        google.generativeai.types.generation_types.GenerationConfig,
        None,
    ]
    tools: Union[
        google.generativeai.types.content_types.FunctionLibrary,
        Iterable[
            Union[
                str,
                google.generativeai.types.content_types.Tool,
                google.ai.generativelanguage_v1beta.types.content.Tool,
                google.generativeai.types.content_types.ToolDict,
                Iterable[
                    Union[
                        google.generativeai.types.content_types.FunctionDeclaration,
                        google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
                        dict[str, Any],
                        Callable[..., Any],
                    ]
                ],
                google.generativeai.types.content_types.FunctionDeclaration,
                google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
                dict[str, Any],
                Callable[..., Any],
            ]
        ],
        str,
        google.generativeai.types.content_types.Tool,
        google.ai.generativelanguage_v1beta.types.content.Tool,
        google.generativeai.types.content_types.ToolDict,
        Iterable[
            Union[
                google.generativeai.types.content_types.FunctionDeclaration,
                google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
                dict[str, Any],
                Callable[..., Any],
            ]
        ],
        google.generativeai.types.content_types.FunctionDeclaration,
        google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
        dict[str, Any],
        Callable[..., Any],
        None,
    ]
    tool_config: Union[
        google.generativeai.types.content_types.ToolConfigDict,
        google.ai.generativelanguage_v1beta.types.content.ToolConfig,
        None,
    ]
    system_instruction: Union[
        google.ai.generativelanguage_v1beta.types.content.Content,
        google.generativeai.types.content_types.ContentDict,
        Iterable[
            Union[
                google.ai.generativelanguage_v1beta.types.content.Part,
                google.generativeai.types.content_types.PartDict,
                google.ai.generativelanguage_v1beta.types.content.Blob,
                google.generativeai.types.content_types.BlobDict,
                Any,
                str,
                google.ai.generativelanguage_v1beta.types.content.FunctionCall,
                google.ai.generativelanguage_v1beta.types.content.FunctionResponse,
                google.generativeai.types.file_types.FileDataDict,
                google.ai.generativelanguage_v1beta.types.content.FileData,
                google.ai.generativelanguage_v1beta.types.file.File,
                google.generativeai.types.file_types.File,
            ]
        ],
        google.ai.generativelanguage_v1beta.types.content.Part,
        google.generativeai.types.content_types.PartDict,
        google.ai.generativelanguage_v1beta.types.content.Blob,
        google.generativeai.types.content_types.BlobDict,
        Any,
        str,
        google.ai.generativelanguage_v1beta.types.content.FunctionCall,
        google.ai.generativelanguage_v1beta.types.content.FunctionResponse,
        google.generativeai.types.file_types.FileDataDict,
        google.ai.generativelanguage_v1beta.types.content.FileData,
        google.ai.generativelanguage_v1beta.types.file.File,
        google.generativeai.types.file_types.File,
        None,
    ]
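The Google variant follows the same pattern; the deeply nested `Union`s simply enumerate every input form `google-generativeai` accepts. A hedged sketch supplying just two fields (the model name and config values are placeholders):

```python
# Illustrative: only the fields you need are supplied, since total=False.
# The dict form of generation_config corresponds to GenerationConfigDict above.
from chatlas.types.google import ChatClientArgs

google_args: ChatClientArgs = {
    "model_name": "gemini-1.5-flash",
    "generation_config": {"temperature": 0.7, "max_output_tokens": 1024},
}
```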
chatlas/types/google/_submit.py
ADDED
@@ -0,0 +1,113 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from typing import Any, Callable, Iterable, TypedDict, Union

import google.ai.generativelanguage_v1beta.types.content
import google.ai.generativelanguage_v1beta.types.file
import google.ai.generativelanguage_v1beta.types.generative_service
import google.generativeai.types.content_types
import google.generativeai.types.file_types
import google.generativeai.types.generation_types
import google.generativeai.types.helper_types


class SubmitInputArgs(TypedDict, total=False):
    contents: Union[
        google.ai.generativelanguage_v1beta.types.content.Content,
        google.generativeai.types.content_types.ContentDict,
        Iterable[
            Union[
                google.ai.generativelanguage_v1beta.types.content.Part,
                google.generativeai.types.content_types.PartDict,
                google.ai.generativelanguage_v1beta.types.content.Blob,
                google.generativeai.types.content_types.BlobDict,
                Any,
                str,
                google.ai.generativelanguage_v1beta.types.content.FunctionCall,
                google.ai.generativelanguage_v1beta.types.content.FunctionResponse,
                google.generativeai.types.file_types.FileDataDict,
                google.ai.generativelanguage_v1beta.types.content.FileData,
                google.ai.generativelanguage_v1beta.types.file.File,
                google.generativeai.types.file_types.File,
            ]
        ],
        google.ai.generativelanguage_v1beta.types.content.Part,
        google.generativeai.types.content_types.PartDict,
        google.ai.generativelanguage_v1beta.types.content.Blob,
        google.generativeai.types.content_types.BlobDict,
        Any,
        str,
        google.ai.generativelanguage_v1beta.types.content.FunctionCall,
        google.ai.generativelanguage_v1beta.types.content.FunctionResponse,
        google.generativeai.types.file_types.FileDataDict,
        google.ai.generativelanguage_v1beta.types.content.FileData,
        google.ai.generativelanguage_v1beta.types.file.File,
        google.generativeai.types.file_types.File,
        Iterable[
            Union[
                google.ai.generativelanguage_v1beta.types.content.Content,
                google.generativeai.types.content_types.ContentDict,
            ]
        ],
        None,
    ]
    generation_config: Union[
        google.ai.generativelanguage_v1beta.types.generative_service.GenerationConfig,
        google.generativeai.types.generation_types.GenerationConfigDict,
        google.generativeai.types.generation_types.GenerationConfig,
        None,
    ]
    stream: bool
    tools: Union[
        google.generativeai.types.content_types.FunctionLibrary,
        Iterable[
            Union[
                str,
                google.generativeai.types.content_types.Tool,
                google.ai.generativelanguage_v1beta.types.content.Tool,
                google.generativeai.types.content_types.ToolDict,
                Iterable[
                    Union[
                        google.generativeai.types.content_types.FunctionDeclaration,
                        google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
                        dict[str, Any],
                        Callable[..., Any],
                    ]
                ],
                google.generativeai.types.content_types.FunctionDeclaration,
                google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
                dict[str, Any],
                Callable[..., Any],
            ]
        ],
        str,
        google.generativeai.types.content_types.Tool,
        google.ai.generativelanguage_v1beta.types.content.Tool,
        google.generativeai.types.content_types.ToolDict,
        Iterable[
            Union[
                google.generativeai.types.content_types.FunctionDeclaration,
                google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
                dict[str, Any],
                Callable[..., Any],
            ]
        ],
        google.generativeai.types.content_types.FunctionDeclaration,
        google.ai.generativelanguage_v1beta.types.content.FunctionDeclaration,
        dict[str, Any],
        Callable[..., Any],
        None,
    ]
    tool_config: Union[
        google.generativeai.types.content_types.ToolConfigDict,
        google.ai.generativelanguage_v1beta.types.content.ToolConfig,
        None,
    ]
    request_options: Union[
        google.generativeai.types.helper_types.RequestOptions,
        google.generativeai.types.helper_types.RequestOptionsDict,
        None,
    ]
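For Google, `SubmitInputArgs` likewise appears to mirror `GenerativeModel.generate_content()` arguments; `stream` and `generation_config` are the most obvious per-request overrides. A hedged sketch:

```python
# Illustrative per-request overrides for the Google provider.
from chatlas.types.google import SubmitInputArgs

submit_args: SubmitInputArgs = {
    "stream": True,
    "generation_config": {"temperature": 0.0},
}
```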
chatlas/types/openai/__init__.py
ADDED
@@ -0,0 +1,14 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------


from ._client import ChatClientArgs
from ._client_azure import ChatAzureClientArgs
from ._submit import SubmitInputArgs

__all__ = (
    "ChatClientArgs",
    "ChatAzureClientArgs",
    "SubmitInputArgs",
)