chatlas 0.11.0__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chatlas might be problematic. Click here for more details.

chatlas/_auto.py CHANGED
@@ -1,32 +1,46 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import os
4
+ import warnings
4
5
  from typing import Callable, Literal, Optional
5
6
 
6
7
  import orjson
7
8
 
8
9
  from ._chat import Chat
9
10
  from ._provider_anthropic import ChatAnthropic, ChatBedrockAnthropic
11
+ from ._provider_cloudflare import ChatCloudflare
10
12
  from ._provider_databricks import ChatDatabricks
13
+ from ._provider_deepseek import ChatDeepSeek
11
14
  from ._provider_github import ChatGithub
12
15
  from ._provider_google import ChatGoogle, ChatVertex
13
16
  from ._provider_groq import ChatGroq
17
+ from ._provider_huggingface import ChatHuggingFace
18
+ from ._provider_mistral import ChatMistral
14
19
  from ._provider_ollama import ChatOllama
15
20
  from ._provider_openai import ChatAzureOpenAI, ChatOpenAI
21
+ from ._provider_openrouter import ChatOpenRouter
16
22
  from ._provider_perplexity import ChatPerplexity
23
+ from ._provider_portkey import ChatPortkey
17
24
  from ._provider_snowflake import ChatSnowflake
25
+ from ._utils import MISSING_TYPE as DEPRECATED_TYPE
18
26
 
19
27
  AutoProviders = Literal[
20
28
  "anthropic",
21
29
  "bedrock-anthropic",
30
+ "cloudflare",
22
31
  "databricks",
32
+ "deep-seek",
23
33
  "github",
24
34
  "google",
25
35
  "groq",
36
+ "hugging-face",
37
+ "mistral",
26
38
  "ollama",
27
39
  "openai",
28
40
  "azure-openai",
41
+ "open-router",
29
42
  "perplexity",
43
+ "portkey",
30
44
  "snowflake",
31
45
  "vertex",
32
46
  ]
@@ -34,41 +48,40 @@ AutoProviders = Literal[
34
48
  _provider_chat_model_map: dict[AutoProviders, Callable[..., Chat]] = {
35
49
  "anthropic": ChatAnthropic,
36
50
  "bedrock-anthropic": ChatBedrockAnthropic,
51
+ "cloudflare": ChatCloudflare,
37
52
  "databricks": ChatDatabricks,
53
+ "deep-seek": ChatDeepSeek,
38
54
  "github": ChatGithub,
39
55
  "google": ChatGoogle,
40
56
  "groq": ChatGroq,
57
+ "hugging-face": ChatHuggingFace,
58
+ "mistral": ChatMistral,
41
59
  "ollama": ChatOllama,
42
60
  "openai": ChatOpenAI,
43
61
  "azure-openai": ChatAzureOpenAI,
62
+ "open-router": ChatOpenRouter,
44
63
  "perplexity": ChatPerplexity,
64
+ "portkey": ChatPortkey,
45
65
  "snowflake": ChatSnowflake,
46
66
  "vertex": ChatVertex,
47
67
  }
48
68
 
69
+ DEPRECATED = DEPRECATED_TYPE()
70
+
49
71
 
50
72
  def ChatAuto(
51
- system_prompt: Optional[str] = None,
73
+ provider_model: Optional[str] = None,
52
74
  *,
53
- provider: Optional[AutoProviders] = None,
54
- model: Optional[str] = None,
75
+ system_prompt: Optional[str] = None,
76
+ provider: AutoProviders | DEPRECATED_TYPE = DEPRECATED,
77
+ model: str | DEPRECATED_TYPE = DEPRECATED,
55
78
  **kwargs,
56
79
  ) -> Chat:
57
80
  """
58
- Use environment variables (env vars) to configure the Chat provider and model.
81
+ Chat with any provider.
59
82
 
60
- Creates a :class:`~chatlas.Chat` instance based on the specified provider.
61
- The provider may be specified through the `provider` parameter and/or the
62
- `CHATLAS_CHAT_PROVIDER` env var. If both are set, the env var takes
63
- precedence. Similarly, the provider's model may be specified through the
64
- `model` parameter and/or the `CHATLAS_CHAT_MODEL` env var. Also, additional
65
- configuration may be provided through the `kwargs` parameter and/or the
66
- `CHATLAS_CHAT_ARGS` env var (as a JSON string). In this case, when both are
67
- set, they are merged, with the env var arguments taking precedence.
68
-
69
- As a result, `ChatAuto()` provides a convenient way to set a default
70
- provider and model in your Python code, while allowing you to override
71
- these settings through env vars (i.e., without modifying your code).
83
+ This is a generic interface to all the other `Chat*()` functions, allowing
84
+ you to pick the provider (and model) with a simple string.
72
85
 
73
86
  Prerequisites
74
87
  -------------
@@ -86,55 +99,101 @@ def ChatAuto(
86
99
  Python packages.
87
100
  :::
88
101
 
89
-
90
102
  Examples
91
103
  --------
92
- First, set the environment variables for the provider, arguments, and API key:
93
104
 
94
- ```bash
95
- export CHATLAS_CHAT_PROVIDER=anthropic
96
- export CHATLAS_CHAT_MODEL=claude-3-haiku-20240229
97
- export CHATLAS_CHAT_ARGS='{"kwargs": {"max_retries": 3}}'
98
- export ANTHROPIC_API_KEY=your_api_key
105
+ `ChatAuto()` makes it easy to switch between different chat providers and models.
106
+
107
+ ```python
108
+ import pandas as pd
109
+ from chatlas import ChatAuto
110
+
111
+ # Default provider (OpenAI) & model
112
+ chat = ChatAuto()
113
+ print(chat.provider.name)
114
+ print(chat.provider.model)
115
+
116
+ # Different provider (Anthropic) & default model
117
+ chat = ChatAuto("anthropic")
118
+
119
+ # List models available through the provider
120
+ models = chat.list_models()
121
+ print(pd.DataFrame(models))
122
+
123
+ # Choose specific provider/model (Claude Sonnet 4)
124
+ chat = ChatAuto("anthropic/claude-sonnet-4-0")
99
125
  ```
100
126
 
101
- Then, you can use the `ChatAuto` function to create a Chat instance:
127
+ The default provider/model can also be controlled through an environment variable:
128
+
129
+ ```bash
130
+ export CHATLAS_CHAT_PROVIDER_MODEL="anthropic/claude-sonnet-4-0"
131
+ ```
102
132
 
103
133
  ```python
104
134
  from chatlas import ChatAuto
105
135
 
106
136
  chat = ChatAuto()
107
- chat.chat("What is the capital of France?")
137
+ print(chat.provider.name) # anthropic
138
+ print(chat.provider.model) # claude-sonnet-4-0
139
+ ```
140
+
141
+ For application-specific configurations, consider defining your own environment variables:
142
+
143
+ ```bash
144
+ export MYAPP_PROVIDER_MODEL="google/gemini-2.5-flash"
145
+ ```
146
+
147
+ And passing them to `ChatAuto()` as an alternative way to configure the provider/model:
148
+
149
+ ```python
150
+ import os
151
+ from chatlas import ChatAuto
152
+
153
+ chat = ChatAuto(os.getenv("MYAPP_PROVIDER_MODEL"))
154
+ print(chat.provider.name) # google
155
+ print(chat.provider.model) # gemini-2.5-flash
108
156
  ```
109
157
 
110
158
  Parameters
111
159
  ----------
160
+ provider_model
161
+ The name of the provider and model to use in the format
162
+ `"{provider}/{model}"`. Providers are strings formatted in kebab-case,
163
+ e.g. to use `ChatBedrockAnthropic` set `provider="bedrock-anthropic"`,
164
+ and models are the provider-specific model names, e.g.
165
+ `"claude-3-7-sonnet-20250219"`. The `/{model}` portion may also be
166
+ omitted, in which case, the default model for that provider will be
167
+ used.
168
+
169
+ If no value is provided, the `CHATLAS_CHAT_PROVIDER_MODEL` environment
170
+ variable will be consulted for a fallback value. If this variable is also
171
+ not set, a default value of `"openai"` is used.
112
172
  system_prompt
113
173
  A system prompt to set the behavior of the assistant.
114
174
  provider
115
- The name of the default chat provider to use. Providers are strings
116
- formatted in kebab-case, e.g. to use `ChatBedrockAnthropic` set
117
- `provider="bedrock-anthropic"`.
118
-
119
- This value can also be provided via the `CHATLAS_CHAT_PROVIDER`
120
- environment variable, which takes precedence over `provider`
121
- when set.
175
+ Deprecated; use `provider_model` instead.
122
176
  model
123
- The name of the default model to use. This value can also be provided
124
- via the `CHATLAS_CHAT_MODEL` environment variable, which takes
125
- precedence over `model` when set.
177
+ Deprecated; use `provider_model` instead.
126
178
  **kwargs
127
- Additional keyword arguments to pass to the Chat constructor. See the
179
+ Additional keyword arguments to pass to the `Chat` constructor. See the
128
180
  documentation for each provider for more details on the available
129
181
  options.
130
182
 
131
183
  These arguments can also be provided via the `CHATLAS_CHAT_ARGS`
132
- environment variable as a JSON string. When provided, the options
133
- in the `CHATLAS_CHAT_ARGS` envvar take precedence over the options
134
- passed to `kwargs`.
184
+ environment variable as a JSON string. When any additional arguments are
185
+ provided to `ChatAuto()`, the env var is ignored.
186
+
187
+ Note that `system_prompt` and `turns` can't be set via environment variables.
188
+ They must be provided/set directly to/on `ChatAuto()`.
135
189
 
136
- Note that `system_prompt` and `turns` in `kwargs` or in
137
- `CHATLAS_CHAT_ARGS` are ignored.
190
+ Note
191
+ ----
192
+ If you want to work with a specific provider, but don't know what models are
193
+ available (or the exact model name), use
194
+ `ChatAuto('provider_name').list_models()` to list available models. Another
195
+ option is to use the provider more directly (e.g., `ChatAnthropic()`). There,
196
+ the `model` parameter may have type hints for available models.
138
197
 
139
198
  Returns
140
199
  -------
@@ -147,32 +206,85 @@ def ChatAuto(
147
206
  If no valid provider is specified either through parameters or
148
207
  environment variables.
149
208
  """
150
- the_provider = os.environ.get("CHATLAS_CHAT_PROVIDER", provider)
209
+ if provider is not DEPRECATED:
210
+ warn_deprecated_param("provider")
211
+
212
+ if model is not DEPRECATED:
213
+ if provider is DEPRECATED:
214
+ raise ValueError(
215
+ "The `model` parameter is deprecated and cannot be used without the `provider` parameter. "
216
+ "Use `provider_model` instead."
217
+ )
218
+ warn_deprecated_param("model")
219
+
220
+ if provider_model is None:
221
+ provider_model = os.environ.get("CHATLAS_CHAT_PROVIDER_MODEL")
222
+
223
+ # Backwards compatibility: construct from old env vars as a fallback
224
+ if provider_model is None:
225
+ env_provider = get_legacy_env_var("CHATLAS_CHAT_PROVIDER", provider)
226
+ env_model = get_legacy_env_var("CHATLAS_CHAT_MODEL", model)
227
+
228
+ if env_provider:
229
+ provider_model = env_provider
230
+ if env_model:
231
+ provider_model += f"/{env_model}"
232
+
233
+ # Fall back to OpenAI if nothing is specified
234
+ if provider_model is None:
235
+ provider_model = "openai"
236
+
237
+ if "/" in provider_model:
238
+ the_provider, the_model = provider_model.split("/", 1)
239
+ else:
240
+ the_provider, the_model = provider_model, None
151
241
 
152
- if the_provider is None:
153
- raise ValueError(
154
- "Provider name is required as parameter or `CHATLAS_CHAT_PROVIDER` must be set."
155
- )
156
242
  if the_provider not in _provider_chat_model_map:
157
243
  raise ValueError(
158
244
  f"Provider name '{the_provider}' is not a known chatlas provider: "
159
245
  f"{', '.join(_provider_chat_model_map.keys())}"
160
246
  )
161
247
 
162
- # `system_prompt` and `turns` always come from `ChatAuto()`
163
- base_args = {"system_prompt": system_prompt}
164
-
165
- if env_model := os.environ.get("CHATLAS_CHAT_MODEL"):
166
- model = env_model
167
-
168
- if model:
169
- base_args["model"] = model
248
+ # `system_prompt`, `turns` and `model` always come from `ChatAuto()`
249
+ base_args = {
250
+ "system_prompt": system_prompt,
251
+ "turns": None,
252
+ "model": the_model,
253
+ }
170
254
 
255
+ # Environment kwargs, used only if no kwargs provided
171
256
  env_kwargs = {}
172
- if env_kwargs_str := os.environ.get("CHATLAS_CHAT_ARGS"):
173
- env_kwargs = orjson.loads(env_kwargs_str)
174
-
175
- kwargs = {**kwargs, **env_kwargs, **base_args}
176
- kwargs = {k: v for k, v in kwargs.items() if v is not None}
177
-
178
- return _provider_chat_model_map[the_provider](**kwargs)
257
+ if not kwargs:
258
+ env_kwargs = orjson.loads(os.environ.get("CHATLAS_CHAT_ARGS", "{}"))
259
+
260
+ final_kwargs = {**env_kwargs, **kwargs, **base_args}
261
+ final_kwargs = {k: v for k, v in final_kwargs.items() if v is not None}
262
+
263
+ return _provider_chat_model_map[the_provider](**final_kwargs)
264
+
265
+
266
+ def get_legacy_env_var(
267
+ env_var_name: str,
268
+ default: str | DEPRECATED_TYPE,
269
+ ) -> str | None:
270
+ env_value = os.environ.get(env_var_name)
271
+ if env_value:
272
+ warnings.warn(
273
+ f"The '{env_var_name}' environment variable is deprecated. "
274
+ "Use 'CHATLAS_CHAT_PROVIDER_MODEL' instead.",
275
+ DeprecationWarning,
276
+ stacklevel=3,
277
+ )
278
+ return env_value
279
+ elif isinstance(default, DEPRECATED_TYPE):
280
+ return None
281
+ else:
282
+ return default
283
+
284
+
285
+ def warn_deprecated_param(param_name: str, stacklevel: int = 3) -> None:
286
+ warnings.warn(
287
+ f"The '{param_name}' parameter is deprecated. Use 'provider_model' instead.",
288
+ DeprecationWarning,
289
+ stacklevel=stacklevel,
290
+ )
chatlas/_chat.py CHANGED
@@ -34,6 +34,7 @@ from ._content import (
34
34
  ContentText,
35
35
  ContentToolRequest,
36
36
  ContentToolResult,
37
+ ToolInfo,
37
38
  )
38
39
  from ._display import (
39
40
  EchoDisplayOptions,
@@ -52,7 +53,7 @@ from ._typing_extensions import TypedDict, TypeGuard
52
53
  from ._utils import MISSING, MISSING_TYPE, html_escape, wrap_async
53
54
 
54
55
  if TYPE_CHECKING:
55
- from mcp.types import ToolAnnotations
56
+ from ._content import ToolAnnotations
56
57
 
57
58
 
58
59
  class TokensDict(TypedDict):
@@ -1534,9 +1535,10 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
1534
1535
 
1535
1536
  def register_tool(
1536
1537
  self,
1537
- func: Callable[..., Any] | Callable[..., Awaitable[Any]],
1538
+ func: Callable[..., Any] | Callable[..., Awaitable[Any]] | Tool,
1538
1539
  *,
1539
1540
  force: bool = False,
1541
+ name: Optional[str] = None,
1540
1542
  model: Optional[type[BaseModel]] = None,
1541
1543
  annotations: "Optional[ToolAnnotations]" = None,
1542
1544
  ):
@@ -1610,6 +1612,9 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
1610
1612
  force
1611
1613
  If `True`, overwrite any existing tool with the same name. If `False`
1612
1614
  (the default), raise an error if a tool with the same name already exists.
1615
+ name
1616
+ The name of the tool. If not provided, the name will be inferred from the
1617
+ `func`'s name (or the `model`'s name, if provided).
1613
1618
  model
1614
1619
  A Pydantic model that describes the input parameters for the function.
1615
1620
  If not provided, the model will be inferred from the function's type hints.
@@ -1618,14 +1623,22 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
1618
1623
  name and docstring of the function.
1619
1624
  annotations
1620
1625
  Additional properties that describe the tool and its behavior.
1621
- Should be a `from mcp.types import ToolAnnotations` instance.
1622
1626
 
1623
1627
  Raises
1624
1628
  ------
1625
1629
  ValueError
1626
1630
  If a tool with the same name already exists and `force` is `False`.
1627
1631
  """
1628
- tool = Tool.from_func(func, model=model, annotations=annotations)
1632
+ if isinstance(func, Tool):
1633
+ name = name or func.name
1634
+ annotations = annotations or func.annotations
1635
+ if model is not None:
1636
+ func = Tool.from_func(
1637
+ func.func, name=name, model=model, annotations=annotations
1638
+ )
1639
+ func = func.func
1640
+
1641
+ tool = Tool.from_func(func, name=name, model=model, annotations=annotations)
1629
1642
  if tool.name in self._tools and not force:
1630
1643
  raise ValueError(
1631
1644
  f"Tool with name '{tool.name}' is already registered. "
@@ -1933,7 +1946,9 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
1933
1946
  all_results: list[ContentToolResult] = []
1934
1947
  for x in turn.contents:
1935
1948
  if isinstance(x, ContentToolRequest):
1936
- x.tool = self._tools.get(x.name)
1949
+ tool = self._tools.get(x.name)
1950
+ if tool is not None:
1951
+ x.tool = ToolInfo.from_tool(tool)
1937
1952
  if echo == "output":
1938
1953
  self._echo_content(f"\n\n{x}\n\n")
1939
1954
  if content == "all":
@@ -1994,7 +2009,9 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
1994
2009
  all_results: list[ContentToolResult] = []
1995
2010
  for x in turn.contents:
1996
2011
  if isinstance(x, ContentToolRequest):
1997
- x.tool = self._tools.get(x.name)
2012
+ tool = self._tools.get(x.name)
2013
+ if tool is not None:
2014
+ x.tool = ToolInfo.from_tool(tool)
1998
2015
  if echo == "output":
1999
2016
  self._echo_content(f"\n\n{x}\n\n")
2000
2017
  if content == "all":
@@ -2152,7 +2169,7 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
2152
2169
  self._turns.extend([user_turn, turn])
2153
2170
 
2154
2171
  def _invoke_tool(self, request: ContentToolRequest):
2155
- tool = request.tool
2172
+ tool = self._tools.get(request.name)
2156
2173
  func = tool.func if tool is not None else None
2157
2174
 
2158
2175
  if func is None:
@@ -2200,7 +2217,7 @@ class Chat(Generic[SubmitInputArgsT, CompletionT]):
2200
2217
  yield self._handle_tool_error_result(request, e)
2201
2218
 
2202
2219
  async def _invoke_tool_async(self, request: ContentToolRequest):
2203
- tool = request.tool
2220
+ tool = self._tools.get(request.name)
2204
2221
 
2205
2222
  if tool is None:
2206
2223
  yield self._handle_tool_error_result(
chatlas/_content.py CHANGED
@@ -6,9 +6,64 @@ from typing import TYPE_CHECKING, Any, Literal, Optional, Union
6
6
  import orjson
7
7
  from pydantic import BaseModel, ConfigDict
8
8
 
9
+ from ._typing_extensions import TypedDict
10
+
9
11
  if TYPE_CHECKING:
10
12
  from ._tools import Tool
11
13
 
14
+
15
+ class ToolAnnotations(TypedDict, total=False):
16
+ """
17
+ Additional properties describing a Tool to clients.
18
+
19
+ NOTE: all properties in ToolAnnotations are **hints**.
20
+ They are not guaranteed to provide a faithful description of
21
+ tool behavior (including descriptive properties like `title`).
22
+
23
+ Clients should never make tool use decisions based on ToolAnnotations
24
+ received from untrusted servers.
25
+ """
26
+
27
+ title: str
28
+ """A human-readable title for the tool."""
29
+
30
+ readOnlyHint: bool
31
+ """
32
+ If true, the tool does not modify its environment.
33
+ Default: false
34
+ """
35
+
36
+ destructiveHint: bool
37
+ """
38
+ If true, the tool may perform destructive updates to its environment.
39
+ If false, the tool performs only additive updates.
40
+ (This property is meaningful only when `readOnlyHint == false`)
41
+ Default: true
42
+ """
43
+
44
+ idempotentHint: bool
45
+ """
46
+ If true, calling the tool repeatedly with the same arguments
47
+ will have no additional effect on its environment.
48
+ (This property is meaningful only when `readOnlyHint == false`)
49
+ Default: false
50
+ """
51
+
52
+ openWorldHint: bool
53
+ """
54
+ If true, this tool may interact with an "open world" of external
55
+ entities. If false, the tool's domain of interaction is closed.
56
+ For example, the world of a web search tool is open, whereas that
57
+ of a memory tool is not.
58
+ Default: true
59
+ """
60
+
61
+ extra: dict[str, Any]
62
+ """
63
+ Additional metadata about the tool.
64
+ """
65
+
66
+
12
67
  ImageContentTypes = Literal[
13
68
  "image/png",
14
69
  "image/jpeg",
@@ -19,6 +74,45 @@ ImageContentTypes = Literal[
19
74
  Allowable content types for images.
20
75
  """
21
76
 
77
+
78
+ class ToolInfo(BaseModel):
79
+ """
80
+ Serializable tool information
81
+
82
+ This contains only the serializable parts of a Tool that are needed
83
+ for ContentToolRequest to be JSON-serializable. This allows tool
84
+ metadata to be preserved without including the non-serializable
85
+ function reference.
86
+
87
+ Parameters
88
+ ----------
89
+ name
90
+ The name of the tool.
91
+ description
92
+ A description of what the tool does.
93
+ parameters
94
+ A dictionary describing the input parameters and their types.
95
+ annotations
96
+ Additional properties that describe the tool and its behavior.
97
+ """
98
+
99
+ name: str
100
+ description: str
101
+ parameters: dict[str, Any]
102
+ annotations: Optional[ToolAnnotations] = None
103
+
104
+ @classmethod
105
+ def from_tool(cls, tool: "Tool") -> "ToolInfo":
106
+ """Create a ToolInfo from a Tool instance."""
107
+ func_schema = tool.schema["function"]
108
+ return cls(
109
+ name=tool.name,
110
+ description=func_schema.get("description", ""),
111
+ parameters=func_schema.get("parameters", {}),
112
+ annotations=tool.annotations,
113
+ )
114
+
115
+
22
116
  ContentTypeEnum = Literal[
23
117
  "text",
24
118
  "image_remote",
@@ -175,14 +269,15 @@ class ContentToolRequest(Content):
175
269
  arguments
176
270
  The arguments to pass to the tool/function.
177
271
  tool
178
- The tool/function to be called. This is set internally by chatlas's tool
179
- calling loop.
272
+ Serializable information about the tool. This is set internally by
273
+ chatlas's tool calling loop and contains only the metadata needed
274
+ for serialization (name, description, parameters, annotations).
180
275
  """
181
276
 
182
277
  id: str
183
278
  name: str
184
279
  arguments: object
185
- tool: Optional["Tool"] = None
280
+ tool: Optional[ToolInfo] = None
186
281
 
187
282
  content_type: ContentTypeEnum = "tool_request"
188
283
 
chatlas/_tools.py CHANGED
@@ -2,7 +2,15 @@ from __future__ import annotations
2
2
 
3
3
  import inspect
4
4
  import warnings
5
- from typing import TYPE_CHECKING, Any, AsyncGenerator, Awaitable, Callable, Optional
5
+ from typing import (
6
+ TYPE_CHECKING,
7
+ Any,
8
+ AsyncGenerator,
9
+ Awaitable,
10
+ Callable,
11
+ Optional,
12
+ cast,
13
+ )
6
14
 
7
15
  import openai
8
16
  from pydantic import BaseModel, Field, create_model
@@ -12,6 +20,7 @@ from ._content import (
12
20
  ContentToolResult,
13
21
  ContentToolResultImage,
14
22
  ContentToolResultResource,
23
+ ToolAnnotations,
15
24
  )
16
25
 
17
26
  __all__ = (
@@ -22,7 +31,6 @@ __all__ = (
22
31
  if TYPE_CHECKING:
23
32
  from mcp import ClientSession as MCPClientSession
24
33
  from mcp import Tool as MCPTool
25
- from mcp.types import ToolAnnotations
26
34
  from openai.types.chat import ChatCompletionToolParam
27
35
 
28
36
 
@@ -44,8 +52,7 @@ class Tool:
44
52
  parameters
45
53
  A dictionary describing the input parameters and their types.
46
54
  annotations
47
- Additional properties that describe the tool and its behavior. Should be
48
- a `from mcp.types import ToolAnnotations` instance.
55
+ Additional properties that describe the tool and its behavior.
49
56
  """
50
57
 
51
58
  func: Callable[..., Any] | Callable[..., Awaitable[Any]]
@@ -77,6 +84,7 @@ class Tool:
77
84
  cls: type["Tool"],
78
85
  func: Callable[..., Any] | Callable[..., Awaitable[Any]],
79
86
  *,
87
+ name: Optional[str] = None,
80
88
  model: Optional[type[BaseModel]] = None,
81
89
  annotations: "Optional[ToolAnnotations]" = None,
82
90
  ) -> "Tool":
@@ -87,6 +95,9 @@ class Tool:
87
95
  ----------
88
96
  func
89
97
  The function to wrap as a tool.
98
+ name
99
+ The name of the tool. If not provided, the name will be inferred from the
100
+ function's name.
90
101
  model
91
102
  A Pydantic model that describes the input parameters for the function.
92
103
  If not provided, the model will be inferred from the function's type hints.
@@ -94,8 +105,7 @@ class Tool:
94
105
  Note that the name and docstring of the model takes precedence over the
95
106
  name and docstring of the function.
96
107
  annotations
97
- Additional properties that describe the tool and its behavior. Should be
98
- a `from mcp.types import ToolAnnotations` instance.
108
+ Additional properties that describe the tool and its behavior.
99
109
 
100
110
  Returns
101
111
  -------
@@ -114,7 +124,8 @@ class Tool:
114
124
  # Throw if there is a mismatch between the model and the function parameters
115
125
  params = inspect.signature(func).parameters
116
126
  fields = model.model_fields
117
- diff = set(params) ^ set(fields)
127
+ fields_alias = [val.alias if val.alias else key for key, val in fields.items()]
128
+ diff = set(params) ^ set(fields_alias)
118
129
  if diff:
119
130
  raise ValueError(
120
131
  f"`model` fields must match tool function parameters exactly. "
@@ -125,7 +136,7 @@ class Tool:
125
136
 
126
137
  return cls(
127
138
  func=func,
128
- name=model.__name__ or func.__name__,
139
+ name=name or model.__name__ or func.__name__,
129
140
  description=model.__doc__ or func.__doc__ or "",
130
141
  parameters=params,
131
142
  annotations=annotations,
@@ -203,12 +214,17 @@ class Tool:
203
214
 
204
215
  params = mcp_tool_input_schema_to_param_schema(mcp_tool.inputSchema)
205
216
 
217
+ # Convert MCP ToolAnnotations to our TypedDict format
218
+ annotations = None
219
+ if mcp_tool.annotations:
220
+ annotations = cast(ToolAnnotations, mcp_tool.annotations.model_dump())
221
+
206
222
  return cls(
207
223
  func=_utils.wrap_async(_call),
208
224
  name=mcp_tool.name,
209
225
  description=mcp_tool.description or "",
210
226
  parameters=params,
211
- annotations=mcp_tool.annotations,
227
+ annotations=annotations,
212
228
  )
213
229
 
214
230
 
@@ -310,13 +326,21 @@ def func_to_basemodel(func: Callable) -> type[BaseModel]:
310
326
  )
311
327
  annotation = Any
312
328
 
329
+ # create_model() will error if the field name starts with `_` (since Pydantic
330
+ # uses this to indicate private fields). We can work around this by using an alias.
331
+ alias = None
332
+ if name.startswith("_"):
333
+ field_name, alias = (name.lstrip("_"), name)
334
+ else:
335
+ field_name, alias = (name, None)
336
+
313
337
  if param.default != inspect.Parameter.empty:
314
- field = Field(default=param.default)
338
+ field = Field(default=param.default, alias=alias)
315
339
  else:
316
- field = Field()
340
+ field = Field(alias=alias)
317
341
 
318
342
  # Add the field to our fields dict
319
- fields[name] = (annotation, field)
343
+ fields[field_name] = (annotation, field)
320
344
 
321
345
  return create_model(func.__name__, **fields)
322
346
 
@@ -13,7 +13,7 @@ else:
13
13
  # Even though TypedDict is available in Python 3.8, because it's used with NotRequired,
14
14
  # they should both come from the same typing module.
15
15
  # https://peps.python.org/pep-0655/#usage-in-python-3-11
16
- if sys.version_info >= (3, 11):
16
+ if sys.version_info >= (3, 12):
17
17
  from typing import NotRequired, Required, TypedDict
18
18
  else:
19
19
  from typing_extensions import NotRequired, Required, TypedDict
chatlas/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.11.0'
32
- __version_tuple__ = version_tuple = (0, 11, 0)
31
+ __version__ = version = '0.12.0'
32
+ __version_tuple__ = version_tuple = (0, 12, 0)
33
33
 
34
34
  __commit_id__ = commit_id = None
chatlas/types/__init__.py CHANGED
@@ -13,6 +13,8 @@ from .._content import (
13
13
  ContentToolRequest,
14
14
  ContentToolResult,
15
15
  ImageContentTypes,
16
+ ToolAnnotations,
17
+ ToolInfo,
16
18
  )
17
19
  from .._provider import ModelInfo
18
20
  from .._tokens import TokenUsage
@@ -32,6 +34,8 @@ __all__ = (
32
34
  "ImageContentTypes",
33
35
  "SubmitInputArgsT",
34
36
  "TokenUsage",
37
+ "ToolAnnotations",
38
+ "ToolInfo",
35
39
  "MISSING_TYPE",
36
40
  "MISSING",
37
41
  "ModelInfo",
@@ -3,7 +3,7 @@
3
3
  # ---------------------------------------------------------
4
4
 
5
5
 
6
- from typing import Iterable, Literal, Mapping, Optional, TypedDict, Union
6
+ from typing import Iterable, Literal, Mapping, Optional, Sequence, TypedDict, Union
7
7
 
8
8
  import anthropic
9
9
  import anthropic.types.message_param
@@ -48,7 +48,7 @@ class SubmitInputArgs(TypedDict, total=False):
48
48
  str,
49
49
  ]
50
50
  service_tier: Union[Literal["auto", "standard_only"], anthropic.NotGiven]
51
- stop_sequences: Union[list[str], anthropic.NotGiven]
51
+ stop_sequences: Union[Sequence[str], anthropic.NotGiven]
52
52
  stream: Union[Literal[False], Literal[True], anthropic.NotGiven]
53
53
  system: Union[
54
54
  str,
@@ -3,14 +3,14 @@
3
3
  # ---------------------------------------------------------
4
4
 
5
5
 
6
- from typing import Mapping, Optional, TypedDict, Union
6
+ from typing import Awaitable, Callable, Mapping, Optional, TypedDict, Union
7
7
 
8
8
  import httpx
9
9
  import openai
10
10
 
11
11
 
12
12
  class ChatClientArgs(TypedDict, total=False):
13
- api_key: str | None
13
+ api_key: Union[str, Callable[[], Awaitable[str]], None]
14
14
  organization: str | None
15
15
  project: str | None
16
16
  webhook_secret: str | None
@@ -2,7 +2,7 @@
2
2
  # Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
3
3
  # ---------------------------------------------------------
4
4
 
5
- from typing import Mapping, Optional, TypedDict
5
+ from typing import Awaitable, Callable, Mapping, Optional, TypedDict, Union
6
6
 
7
7
  import httpx
8
8
  import openai
@@ -12,7 +12,7 @@ class ChatAzureClientArgs(TypedDict, total=False):
12
12
  azure_endpoint: str | None
13
13
  azure_deployment: str | None
14
14
  api_version: str | None
15
- api_key: str | None
15
+ api_key: Union[str, Callable[[], Awaitable[str]], None]
16
16
  azure_ad_token: str | None
17
17
  organization: str | None
18
18
  project: str | None
@@ -3,7 +3,7 @@
3
3
  # ---------------------------------------------------------
4
4
 
5
5
 
6
- from typing import Iterable, Literal, Mapping, Optional, TypedDict, Union
6
+ from typing import Iterable, Literal, Mapping, Optional, Sequence, TypedDict, Union
7
7
 
8
8
  import openai
9
9
  import openai.types.chat.chat_completion_allowed_tool_choice_param
@@ -148,7 +148,7 @@ class SubmitInputArgs(TypedDict, total=False):
148
148
  service_tier: Union[
149
149
  Literal["auto", "default", "flex", "scale", "priority"], None, openai.NotGiven
150
150
  ]
151
- stop: Union[str, None, list[str], openai.NotGiven]
151
+ stop: Union[str, None, Sequence[str], openai.NotGiven]
152
152
  store: Union[bool, None, openai.NotGiven]
153
153
  stream: Union[Literal[False], None, Literal[True], openai.NotGiven]
154
154
  stream_options: Union[
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: chatlas
3
- Version: 0.11.0
3
+ Version: 0.12.0
4
4
  Summary: A simple and consistent interface for chatting with LLMs
5
5
  Project-URL: Homepage, https://posit-dev.github.io/chatlas
6
6
  Project-URL: Documentation, https://posit-dev.github.io/chatlas
@@ -1,8 +1,8 @@
1
1
  chatlas/__init__.py,sha256=CyViGMiz50clcVu3vpZgOq_qP4hmoYGOlcHKlRPcLJo,2416
2
- chatlas/_auto.py,sha256=-s7XGzsKLX4RipWtk4WOE8iKbOBhXPUPtI0-63PpXCY,5660
2
+ chatlas/_auto.py,sha256=aeMN2_EM-xK-Yx5JaCuwYRZZ29eqn_0oM7QR5zayrec,8912
3
3
  chatlas/_callbacks.py,sha256=3RpPaOQonTqScjXbaShgKJ1Rc-YxzWerxKRBjVssFnc,1838
4
- chatlas/_chat.py,sha256=HDNH_UA604sfyda-bVBs05GGs8-ISBwU4c2nM5bOd40,84997
5
- chatlas/_content.py,sha256=Hg4IQwoOXC72MaL3H-zpuN0JcBluEmsp2vstfSoBn_k,19984
4
+ chatlas/_chat.py,sha256=pZOmlg0rqrzn7bi5QECAraG3ZA1MJPS0Ovk96wvgAGg,85712
5
+ chatlas/_content.py,sha256=xi00PQgYi0hC2mTROOGVZiKfTkvlG9dud213iRQcwnY,22761
6
6
  chatlas/_content_image.py,sha256=EUK6wAint-JatLsiwvaPDu4D3W-NcIsDCkzABkXgfDg,8304
7
7
  chatlas/_content_pdf.py,sha256=cffeuJxzhUDukQ-Srkmpy62M8X12skYpU_FVq-Wvya4,2420
8
8
  chatlas/_display.py,sha256=wyQzSc6z1VqrJfkTLkw1wQcti9s1Pr4qT8UxFJESn4U,4664
@@ -29,26 +29,26 @@ chatlas/_provider_portkey.py,sha256=6wKrLZmKVxOqyO6P3HBgWqPe7y1N8une_1wp0aJq7pU,
29
29
  chatlas/_provider_snowflake.py,sha256=G66tG_zs_cIlrHaHY96GvBreTNHHk1O5012Y0BtYRqI,24578
30
30
  chatlas/_tokens.py,sha256=QUsBLNJPgXk8vovcG5JdQU8NarCv7FRpOVBdgFkBgHs,5388
31
31
  chatlas/_tokens_old.py,sha256=L9d9oafrXvEx2u4nIn_Jjn7adnQyLBnYBuPwJUE8Pl8,5005
32
- chatlas/_tools.py,sha256=SCmGP9bLHvVxQPznWfagG7GdzQamnyrPwYwDJ6EaWpw,11692
32
+ chatlas/_tools.py,sha256=8rhGOsEviBJXk5Qb-a1RRb_C-DE2T3DOeN6IhblkxqI,12408
33
33
  chatlas/_turn.py,sha256=yK7alUxeP8d2iBc7amyz20BtEqcpvX6BCwWZsnlQ5R4,4515
34
- chatlas/_typing_extensions.py,sha256=MB9vWMWlm-IF8uOQfrTcfb66MV6gYXn3zgnbdwAC7BQ,1076
34
+ chatlas/_typing_extensions.py,sha256=BXmbhywjm5ssmyVLGwyP_5TWZMAobzrrgZLYkB6_etE,1076
35
35
  chatlas/_utils.py,sha256=Kku2fa1mvTYCr5D28VxE6-fwfy2e2doCi-eKQkLEg4Y,4686
36
- chatlas/_version.py,sha256=9eKRDJ72C44i2IPiti-C7phzF429SwV2Nogzt0etpr0,706
36
+ chatlas/_version.py,sha256=VDAgmYWykomGcTucvPJWNS0ePk26QojGHvaE9chtgGc,706
37
37
  chatlas/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
38
38
  chatlas/data/prices.json,sha256=X6qALp-dWc4nfus9lIqHoKzk3PZDPHTLoxxcN2m6fXc,62645
39
- chatlas/types/__init__.py,sha256=4KWksOfX87xtkPWqTEV0qkCFij0UPJM39VNc00baiLc,776
39
+ chatlas/types/__init__.py,sha256=1n0xrJ7TRIKsZ2z06FLFgGqfKMFtXSIxxPvJ2j0hvPw,850
40
40
  chatlas/types/anthropic/__init__.py,sha256=OwubA-DPHYpYo0XyRyAFwftOI0mOxtHzAyhUSLcDx54,417
41
41
  chatlas/types/anthropic/_client.py,sha256=t_tnOzzsW1xWNADkNoAuZJYoE9QJ8ie7DQNnFO1pvoM,697
42
42
  chatlas/types/anthropic/_client_bedrock.py,sha256=2J6U1QcSx1KwiiHfXs3i4YEXDXw11sp-x3iLOuESrgQ,792
43
- chatlas/types/anthropic/_submit.py,sha256=o5bpKEne6lqBz4YBLoYwRLKCVmBIdzRetiMCoOdUfb0,3661
43
+ chatlas/types/anthropic/_submit.py,sha256=X7ER31k7bTZGJ9X9u8Mx-X4enJC13W0VQzt8Wz-mLeQ,3675
44
44
  chatlas/types/google/__init__.py,sha256=ZJhi8Kwvio2zp8T1TQqmvdHqkS-Khb6BGESPjREADgo,337
45
45
  chatlas/types/google/_client.py,sha256=t7aKbxYq_xOA1Z3RnWcjewifdQFSHi7vKEj6MyKMCJk,729
46
46
  chatlas/types/google/_submit.py,sha256=19Ji4fAo1lTCbNSpR6Yi0i64RJwMGBdiZKQcnoDNRwY,1796
47
47
  chatlas/types/openai/__init__.py,sha256=Q2RAr1bSH1nHsxICK05nAmKmxdhKmhbBkWD_XHiVSrI,411
48
- chatlas/types/openai/_client.py,sha256=SttisELwAd52_Je_5q3RfWGdX5wbg2CoGbxhS8ThS0A,792
49
- chatlas/types/openai/_client_azure.py,sha256=b8Hr7iKYA5-sq9r7uEqbBFv9yo3itppmHIgkEGvChMs,896
50
- chatlas/types/openai/_submit.py,sha256=rhft1h7zy6eSlSBLkt7ZAySFh-8WnR5UEG-BXaFTxag,7815
51
- chatlas-0.11.0.dist-info/METADATA,sha256=E10nf0f9IXxzwtqdX2cOyY4iygNcpmC5met5iL7ng5k,5594
52
- chatlas-0.11.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
53
- chatlas-0.11.0.dist-info/licenses/LICENSE,sha256=zyuGzPOC7CcbOaBHsQ3UEyKYRO56KDUkor0OA4LqqDg,1081
54
- chatlas-0.11.0.dist-info/RECORD,,
48
+ chatlas/types/openai/_client.py,sha256=mAoQftcJIp0ssIhS8q3TIW9u6zTRNtYDmpZJO8L0mC0,849
49
+ chatlas/types/openai/_client_azure.py,sha256=Tf_PFRl0QAj4Nk5CD0ZNIO-SRsT39bVkEJlUTry1fb8,960
50
+ chatlas/types/openai/_submit.py,sha256=EDtIUFcNIJ5QAt0wVyBXvUshK8FA9e86wcZDQ_HUOYs,7829
51
+ chatlas-0.12.0.dist-info/METADATA,sha256=Xd9L5THh_2C6ckT6Khvn2B6js3JhT8WyCjlAp1JzOOc,5594
52
+ chatlas-0.12.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
53
+ chatlas-0.12.0.dist-info/licenses/LICENSE,sha256=zyuGzPOC7CcbOaBHsQ3UEyKYRO56KDUkor0OA4LqqDg,1081
54
+ chatlas-0.12.0.dist-info/RECORD,,