chatlas-0.4.0-py3-none-any.whl → chatlas-0.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of chatlas has been flagged as potentially problematic.

chatlas/_snowflake.py ADDED
@@ -0,0 +1,321 @@
+ from typing import TYPE_CHECKING, Literal, Optional, TypedDict, overload
+
+ from pydantic import BaseModel
+
+ from ._chat import Chat
+ from ._content import Content
+ from ._logging import log_model_default
+ from ._provider import Provider
+ from ._tools import Tool
+ from ._turn import Turn, normalize_turns
+ from ._utils import drop_none
+
+ if TYPE_CHECKING:
+     from snowflake.snowpark import Column
+
+     # Types inferred from the return type of the `snowflake.cortex.complete` function
+     Completion = str | Column
+     CompletionChunk = str
+
+     from .types.snowflake import SubmitInputArgs
+
+
+ # The main prompt input type for Snowflake
+ # This was copy-pasted from `snowflake.cortex._complete.ConversationMessage`
+ class ConversationMessage(TypedDict):
+     role: str
+     content: str
+
+
+ def ChatSnowflake(
+     *,
+     system_prompt: Optional[str] = None,
+     model: Optional[str] = None,
+     turns: Optional[list[Turn]] = None,
+     connection_name: Optional[str] = None,
+     account: Optional[str] = None,
+     user: Optional[str] = None,
+     password: Optional[str] = None,
+     private_key_file: Optional[str] = None,
+     private_key_file_pwd: Optional[str] = None,
+     kwargs: Optional[dict[str, "str | int"]] = None,
+ ) -> Chat["SubmitInputArgs", "Completion"]:
+     """
+     Chat with a Snowflake Cortex LLM
+
+     https://docs.snowflake.com/en/user-guide/snowflake-cortex/llm-functions
+
+     Prerequisites
+     -------------
+
+     ::: {.callout-note}
+     ## Python requirements
+
+     `ChatSnowflake` requires the `snowflake-ml-python` package:
+     `pip install "chatlas[snowflake]"`.
+     :::
+
+     ::: {.callout-note}
+     ## Snowflake credentials
+
+     Snowflake provides a handful of ways to authenticate, but it's recommended
+     to use [key-pair
+     auth](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#label-python-connection-toml)
+     to generate a `private_key_file`. It's also recommended to place your
+     credentials in a [`connections.toml`
+     file](https://docs.snowflake.com/en/developer-guide/snowpark/python/creating-session#connect-by-using-the-connections-toml-file).
+
+     This way, once your credentials are in the `connections.toml` file, you can
+     simply call `ChatSnowflake(connection_name="my_connection")` to
+     authenticate. If you don't want to use a `connections.toml` file, you can
+     specify the connection parameters directly (with `account`, `user`,
+     `password`, etc.).
+     :::
+
+
+     Parameters
+     ----------
+     system_prompt
+         A system prompt to set the behavior of the assistant.
+     model
+         The model to use for the chat. The default, None, will pick a reasonable
+         default, and warn you about it. We strongly recommend explicitly
+         choosing a model for all but the most casual use.
+     turns
+         A list of turns to start the chat with (i.e., continuing a previous
+         conversation). If not provided, the conversation begins from scratch. Do
+         not provide non-None values for both `turns` and `system_prompt`. Each
+         message in the list should be a dictionary with at least `role` (usually
+         `system`, `user`, or `assistant`, but `tool` is also possible). Normally
+         there is also a `content` field, which is a string.
+     connection_name
+         The name of the connection (i.e., section) within the connections.toml file.
+         This is useful if you want to keep your credentials in a connections.toml file
+         rather than specifying them directly in the arguments.
+         https://docs.snowflake.com/en/developer-guide/snowpark/python/creating-session#connect-by-using-the-connections-toml-file
+     account
+         Your Snowflake account identifier. Required if `connection_name` is not provided.
+         https://docs.snowflake.com/en/user-guide/admin-account-identifier
+     user
+         Your Snowflake user name. Required if `connection_name` is not provided.
+     password
+         Your Snowflake password. Required if doing password authentication and
+         `connection_name` is not provided.
+     private_key_file
+         The path to your private key file. Required if you are using key pair authentication.
+         https://docs.snowflake.com/en/user-guide/key-pair-auth
+     private_key_file_pwd
+         The password for your private key file. Required if you are using key pair authentication.
+         https://docs.snowflake.com/en/user-guide/key-pair-auth
+     kwargs
+         Additional keyword arguments passed along to the Snowflake connection builder. These can
+         include any parameters supported by the `snowflake-ml-python` package.
+         https://docs.snowflake.com/en/developer-guide/snowpark/python/creating-session#connect-by-specifying-connection-parameters
+     """
+
+     if model is None:
+         model = log_model_default("llama3.1-70b")
+
+     return Chat(
+         provider=SnowflakeProvider(
+             model=model,
+             connection_name=connection_name,
+             account=account,
+             user=user,
+             password=password,
+             private_key_file=private_key_file,
+             private_key_file_pwd=private_key_file_pwd,
+             kwargs=kwargs,
+         ),
+         turns=normalize_turns(
+             turns or [],
+             system_prompt,
+         ),
+     )
+
+
+ class SnowflakeProvider(Provider["Completion", "CompletionChunk", "CompletionChunk"]):
+     def __init__(
+         self,
+         *,
+         model: str,
+         connection_name: Optional[str],
+         account: Optional[str],
+         user: Optional[str],
+         password: Optional[str],
+         private_key_file: Optional[str],
+         private_key_file_pwd: Optional[str],
+         kwargs: Optional[dict[str, "str | int"]],
+     ):
+         try:
+             from snowflake.snowpark import Session
+         except ImportError:
+             raise ImportError(
+                 "`ChatSnowflake()` requires the `snowflake-ml-python` package. "
+                 "Please install it via `pip install snowflake-ml-python`."
+             )
+
+         configs: dict[str, str | int] = drop_none(
+             {
+                 "connection_name": connection_name,
+                 "account": account,
+                 "user": user,
+                 "password": password,
+                 "private_key_file": private_key_file,
+                 "private_key_file_pwd": private_key_file_pwd,
+                 **(kwargs or {}),
+             }
+         )
+
+         self._model = model
+         self._session = Session.builder.configs(configs).create()
+
+     @overload
+     def chat_perform(
+         self,
+         *,
+         stream: Literal[False],
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ): ...
+
+     @overload
+     def chat_perform(
+         self,
+         *,
+         stream: Literal[True],
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ): ...
+
+     def chat_perform(
+         self,
+         *,
+         stream: bool,
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ):
+         from snowflake.cortex import complete
+
+         kwargs = self._chat_perform_args(stream, turns, tools, data_model, kwargs)
+         return complete(**kwargs)
+
+     @overload
+     async def chat_perform_async(
+         self,
+         *,
+         stream: Literal[False],
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ): ...
+
+     @overload
+     async def chat_perform_async(
+         self,
+         *,
+         stream: Literal[True],
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ): ...
+
+     async def chat_perform_async(
+         self,
+         *,
+         stream: bool,
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ):
+         raise NotImplementedError(
+             "Snowflake does not currently support async completions."
+         )
+
+     def _chat_perform_args(
+         self,
+         stream: bool,
+         turns: list[Turn],
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]] = None,
+         kwargs: Optional["SubmitInputArgs"] = None,
+     ):
+         # Cortex doesn't seem to support tools
+         if tools:
+             raise ValueError("Snowflake does not currently support tools.")
+
+         # TODO: implement data_model when this PR makes it into snowflake-ml-python
+         # https://github.com/snowflakedb/snowflake-ml-python/pull/141
+         # https://docs.snowflake.com/en/user-guide/snowflake-cortex/cortex-llm-rest-api#structured-output-example
+         if data_model:
+             raise NotImplementedError(
+                 "The snowflake-ml-python package currently doesn't support structured output. "
+                 "Upvote this PR to help prioritize it: "
+                 "https://github.com/snowflakedb/snowflake-ml-python/pull/141"
+             )
+
+         kwargs_full: "SubmitInputArgs" = {
+             "stream": stream,
+             "prompt": self._as_prompt_input(turns),
+             "model": self._model,
+             **(kwargs or {}),
+         }
+
+         return kwargs_full
+
+     def stream_text(self, chunk):
+         return chunk
+
+     def stream_merge_chunks(self, completion, chunk):
+         if completion is None:
+             return chunk
+         return completion + chunk
+
+     def stream_turn(self, completion, has_data_model) -> Turn:
+         return self._as_turn(completion, has_data_model)
+
+     def value_turn(self, completion, has_data_model) -> Turn:
+         return self._as_turn(completion, has_data_model)
+
+     def token_count(
+         self,
+         *args: "Content | str",
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]],
+     ) -> int:
+         raise NotImplementedError(
+             "Snowflake does not currently support token counting."
+         )
+
+     async def token_count_async(
+         self,
+         *args: "Content | str",
+         tools: dict[str, Tool],
+         data_model: Optional[type[BaseModel]],
+     ) -> int:
+         raise NotImplementedError(
+             "Snowflake does not currently support token counting."
+         )
+
+     def _as_prompt_input(self, turns: list[Turn]) -> list["ConversationMessage"]:
+         res: list["ConversationMessage"] = []
+         for turn in turns:
+             res.append(
+                 {
+                     "role": turn.role,
+                     "content": turn.text,
+                 }
+             )
+         return res
+
+     def _as_turn(self, completion, has_data_model) -> Turn:
+         return Turn("assistant", completion)
chatlas/_turn.py CHANGED
@@ -1,15 +1,17 @@
  from __future__ import annotations

- from typing import Any, Generic, Literal, Optional, Sequence, TypeVar
+ from typing import Generic, Literal, Optional, Sequence, TypeVar

- from ._content import Content, ContentText
+ from pydantic import BaseModel, ConfigDict, Field
+
+ from ._content import Content, ContentText, ContentUnion, create_content

  __all__ = ("Turn",)

  CompletionT = TypeVar("CompletionT")


- class Turn(Generic[CompletionT]):
+ class Turn(BaseModel, Generic[CompletionT]):
      """
      A user or assistant turn

@@ -64,6 +66,14 @@ class Turn(Generic[CompletionT]):
      This is only relevant for assistant turns.
      """

+     role: Literal["user", "assistant", "system"]
+     contents: list[ContentUnion] = Field(default_factory=list)
+     tokens: Optional[tuple[int, int]] = None
+     finish_reason: Optional[str] = None
+     completion: Optional[CompletionT] = Field(default=None, exclude=True)
+
+     model_config = ConfigDict(arbitrary_types_allowed=True)
+
      def __init__(
          self,
          role: Literal["user", "assistant", "system"],
@@ -72,26 +82,34 @@ class Turn(Generic[CompletionT]):
          tokens: Optional[tuple[int, int]] = None,
          finish_reason: Optional[str] = None,
          completion: Optional[CompletionT] = None,
+         **kwargs,
      ):
-         self.role = role
-
          if isinstance(contents, str):
-             contents = [ContentText(contents)]
+             contents = [ContentText(text=contents)]

          contents2: list[Content] = []
          for x in contents:
              if isinstance(x, Content):
                  contents2.append(x)
              elif isinstance(x, str):
-                 contents2.append(ContentText(x))
+                 contents2.append(ContentText(text=x))
+             elif isinstance(x, dict):
+                 contents2.append(create_content(x))
              else:
                  raise ValueError("All contents must be Content objects or str.")

-         self.contents = contents2
-         self.text = "".join(x.text for x in self.contents if isinstance(x, ContentText))
-         self.tokens = tokens
-         self.finish_reason = finish_reason
-         self.completion = completion
+         super().__init__(
+             role=role,
+             contents=contents2,
+             tokens=tokens,
+             finish_reason=finish_reason,
+             completion=completion,
+             **kwargs,
+         )
+
+     @property
+     def text(self) -> str:
+         return "".join(x.text for x in self.contents if isinstance(x, ContentText))

      def __str__(self) -> str:
          return self.text
@@ -109,18 +127,6 @@ class Turn(Generic[CompletionT]):
              res += "\n" + content.__repr__(indent=indent + 2)
          return res + "\n"

-     def __eq__(self, other: Any) -> bool:
-         if not isinstance(other, Turn):
-             return False
-         res = (
-             self.role == other.role
-             and self.contents == other.contents
-             and self.tokens == other.tokens
-             and self.finish_reason == other.finish_reason
-             and self.completion == other.completion
-         )
-         return res
-

  def user_turn(*args: Content | str) -> Turn:
      if len(args) == 0:
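
A quick sketch of what the refactor above buys (not from the package docs): `Turn` is now a pydantic `BaseModel`, so `text` is a derived property, equality comes from pydantic field comparison (replacing the removed `__eq__`), and turns can be serialized with the standard pydantic API.

```python
# Sketch: Turn as a pydantic model (constructor signature shown in the diff above).
from chatlas import Turn

t = Turn("assistant", "Hello, world!")
t.text                                    # "Hello, world!" -- now a computed property
t == Turn("assistant", "Hello, world!")   # True, via pydantic field equality
t.model_dump()                            # standard pydantic serialization
```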
chatlas/_utils.py CHANGED
@@ -61,6 +61,13 @@ def is_async_callable(
      return False


+ T = TypeVar("T")
+
+
+ def drop_none(x: dict[str, T | None]) -> dict[str, T]:
+     return {k: v for k, v in x.items() if v is not None}
+
+
  # https://docs.pytest.org/en/latest/example/simple.html#pytest-current-test-environment-variable
  def is_testing():
      return os.environ.get("PYTEST_CURRENT_TEST", None) is not None
chatlas/_version.py ADDED
@@ -0,0 +1,21 @@
+ # file generated by setuptools-scm
+ # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
+ TYPE_CHECKING = False
+ if TYPE_CHECKING:
+     from typing import Tuple
+     from typing import Union
+
+     VERSION_TUPLE = Tuple[Union[int, str], ...]
+ else:
+     VERSION_TUPLE = object
+
+ version: str
+ __version__: str
+ __version_tuple__: VERSION_TUPLE
+ version_tuple: VERSION_TUPLE
+
+ __version__ = version = '0.6.0'
+ __version_tuple__ = version_tuple = (0, 6, 0)
chatlas/types/anthropic/_submit.py CHANGED
@@ -8,10 +8,15 @@ from typing import Iterable, Literal, Mapping, Optional, TypedDict, Union
  import anthropic
  import anthropic.types.message_param
  import anthropic.types.text_block_param
+ import anthropic.types.thinking_config_disabled_param
+ import anthropic.types.thinking_config_enabled_param
+ import anthropic.types.tool_bash_20250124_param
  import anthropic.types.tool_choice_any_param
  import anthropic.types.tool_choice_auto_param
+ import anthropic.types.tool_choice_none_param
  import anthropic.types.tool_choice_tool_param
  import anthropic.types.tool_param
+ import anthropic.types.tool_text_editor_20250124_param


  class SubmitInputArgs(TypedDict, total=False):
@@ -19,6 +24,8 @@ class SubmitInputArgs(TypedDict, total=False):
      messages: Iterable[anthropic.types.message_param.MessageParam]
      model: Union[
          Literal[
+             "claude-3-7-sonnet-latest",
+             "claude-3-7-sonnet-20250219",
              "claude-3-5-haiku-latest",
              "claude-3-5-haiku-20241022",
              "claude-3-5-sonnet-latest",
@@ -41,13 +48,28 @@ class SubmitInputArgs(TypedDict, total=False):
          anthropic.NotGiven,
      ]
      temperature: float | anthropic.NotGiven
+     thinking: Union[
+         anthropic.types.thinking_config_enabled_param.ThinkingConfigEnabledParam,
+         anthropic.types.thinking_config_disabled_param.ThinkingConfigDisabledParam,
+         anthropic.NotGiven,
+     ]
      tool_choice: Union[
          anthropic.types.tool_choice_auto_param.ToolChoiceAutoParam,
          anthropic.types.tool_choice_any_param.ToolChoiceAnyParam,
          anthropic.types.tool_choice_tool_param.ToolChoiceToolParam,
+         anthropic.types.tool_choice_none_param.ToolChoiceNoneParam,
+         anthropic.NotGiven,
+     ]
+     tools: Union[
+         Iterable[
+             Union[
+                 anthropic.types.tool_param.ToolParam,
+                 anthropic.types.tool_bash_20250124_param.ToolBash20250124Param,
+                 anthropic.types.tool_text_editor_20250124_param.ToolTextEditor20250124Param,
+             ]
+         ],
          anthropic.NotGiven,
      ]
-     tools: Union[Iterable[anthropic.types.tool_param.ToolParam], anthropic.NotGiven]
      top_k: int | anthropic.NotGiven
      top_p: float | anthropic.NotGiven
      extra_headers: Optional[Mapping[str, Union[str, anthropic.Omit]]]
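
For illustration, a hedged sketch of the newly admitted fields. It assumes `chatlas.types.anthropic` re-exports `SubmitInputArgs` (as the generated snowflake package below does); the thinking budget is an arbitrary example value.

```python
# Sketch only: a SubmitInputArgs dict exercising the newly added keys.
from chatlas.types.anthropic import SubmitInputArgs

args: SubmitInputArgs = {
    "model": "claude-3-7-sonnet-latest",
    "thinking": {"type": "enabled", "budget_tokens": 2048},  # extended thinking
}
```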
chatlas/types/openai/_submit.py CHANGED
@@ -55,6 +55,10 @@ class SubmitInputArgs(TypedDict, total=False):
              "gpt-4o-audio-preview-2024-12-17",
              "gpt-4o-mini-audio-preview",
              "gpt-4o-mini-audio-preview-2024-12-17",
+             "gpt-4o-search-preview",
+             "gpt-4o-mini-search-preview",
+             "gpt-4o-search-preview-2025-03-11",
+             "gpt-4o-mini-search-preview-2025-03-11",
              "chatgpt-4o-latest",
              "gpt-4o-mini",
              "gpt-4o-mini-2024-07-18",
@@ -110,8 +114,8 @@ class SubmitInputArgs(TypedDict, total=False):
      reasoning_effort: Union[Literal["low", "medium", "high"], None, openai.NotGiven]
      response_format: Union[
          openai.types.shared_params.response_format_text.ResponseFormatText,
-         openai.types.shared_params.response_format_json_object.ResponseFormatJSONObject,
          openai.types.shared_params.response_format_json_schema.ResponseFormatJSONSchema,
+         openai.types.shared_params.response_format_json_object.ResponseFormatJSONObject,
          openai.NotGiven,
      ]
      seed: Union[int, None, openai.NotGiven]
chatlas/types/snowflake/__init__.py ADDED
@@ -0,0 +1,8 @@
+ # ---------------------------------------------------------
+ # Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
+ # ---------------------------------------------------------
+
+
+ from ._submit import SubmitInputArgs
+
+ __all__ = ("SubmitInputArgs",)
chatlas/types/snowflake/_submit.py ADDED
@@ -0,0 +1,24 @@
+ # ---------------------------------------------------------
+ # Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
+ # ---------------------------------------------------------
+
+
+ from typing import Optional, TypedDict, Union
+
+ import snowflake.cortex._complete
+ import snowflake.snowpark.column
+ import snowflake.snowpark.session
+
+
+ class SubmitInputArgs(TypedDict, total=False):
+     model: Union[str, snowflake.snowpark.column.Column]
+     prompt: Union[
+         str,
+         list[snowflake.cortex._complete.ConversationMessage],
+         snowflake.snowpark.column.Column,
+     ]
+     options: Optional[snowflake.cortex._complete.CompleteOptions]
+     session: Optional[snowflake.snowpark.session.Session]
+     stream: bool
+     timeout: Optional[float]
+     deadline: Optional[float]
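
For illustration, a sketch of the generated typed dict in use; these keys mirror the extra arguments that `SnowflakeProvider._chat_perform_args()` merges into the `snowflake.cortex.complete()` call (the values are arbitrary examples).

```python
# Sketch: typed extra arguments destined for snowflake.cortex.complete().
from chatlas.types.snowflake import SubmitInputArgs

extra: SubmitInputArgs = {
    "stream": True,
    "timeout": 30.0,  # arbitrary example value
}
```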
chatlas-0.4.0.dist-info/METADATA → chatlas-0.6.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: chatlas
- Version: 0.4.0
+ Version: 0.6.0
  Summary: A simple and consistent interface for chatting with LLMs
  Project-URL: Homepage, https://posit-dev.github.io/chatlas
  Project-URL: Documentation, https://posit-dev.github.io/chatlas
@@ -20,6 +20,12 @@ Requires-Python: >=3.9
  Requires-Dist: jinja2
  Requires-Dist: pydantic>=2.0
  Requires-Dist: rich
+ Provides-Extra: anthropic
+ Requires-Dist: anthropic; extra == 'anthropic'
+ Provides-Extra: azure-openai
+ Requires-Dist: openai; extra == 'azure-openai'
+ Provides-Extra: bedrock-anthropic
+ Requires-Dist: anthropic[bedrock]; extra == 'bedrock-anthropic'
  Provides-Extra: dev
  Requires-Dist: anthropic[bedrock]; extra == 'dev'
  Requires-Dist: google-genai>=1.2.0; extra == 'dev'
@@ -30,7 +36,9 @@ Requires-Dist: pillow; extra == 'dev'
  Requires-Dist: python-dotenv; extra == 'dev'
  Requires-Dist: ruff>=0.6.5; extra == 'dev'
  Requires-Dist: shiny; extra == 'dev'
+ Requires-Dist: snowflake-ml-python; extra == 'dev'
  Requires-Dist: tiktoken; extra == 'dev'
+ Requires-Dist: torch; (python_version <= '3.11') and extra == 'dev'
  Provides-Extra: docs
  Requires-Dist: griffe>=1; extra == 'docs'
  Requires-Dist: ipykernel; extra == 'docs'
@@ -42,11 +50,27 @@ Requires-Dist: pandas; extra == 'docs'
  Requires-Dist: pyyaml; extra == 'docs'
  Requires-Dist: quartodoc>=0.7; extra == 'docs'
  Requires-Dist: sentence-transformers; extra == 'docs'
+ Provides-Extra: github
+ Requires-Dist: openai; extra == 'github'
+ Provides-Extra: google
+ Requires-Dist: google-genai; extra == 'google'
+ Provides-Extra: groq
+ Requires-Dist: openai; extra == 'groq'
+ Provides-Extra: ollama
+ Requires-Dist: openai; extra == 'ollama'
+ Provides-Extra: openai
+ Requires-Dist: openai; extra == 'openai'
+ Provides-Extra: perplexity
+ Requires-Dist: openai; extra == 'perplexity'
+ Provides-Extra: snowflake
+ Requires-Dist: snowflake-ml-python; extra == 'snowflake'
  Provides-Extra: test
  Requires-Dist: pyright>=1.1.379; extra == 'test'
  Requires-Dist: pytest-asyncio; extra == 'test'
  Requires-Dist: pytest>=8.3.2; extra == 'test'
  Requires-Dist: syrupy>=4; extra == 'test'
+ Provides-Extra: vertex
+ Requires-Dist: google-genai; extra == 'vertex'
  Description-Content-Type: text/markdown

  <h1 class="unnumbered unlisted"> chatlas <a href="https://posit-dev.github.io/chatlas"><img src="docs/images/logo.png" align="right" height="138" alt="chatlas website" /></a> </h1>
@@ -98,6 +122,7 @@ It also supports the following enterprise cloud providers:

  * AWS Bedrock: [`ChatBedrockAnthropic()`](https://posit-dev.github.io/chatlas/reference/ChatBedrockAnthropic.html).
  * Azure OpenAI: [`ChatAzureOpenAI()`](https://posit-dev.github.io/chatlas/reference/ChatAzureOpenAI.html).
+ * Snowflake Cortex: [`ChatSnowflake()`](https://posit-dev.github.io/chatlas/reference/ChatSnowflake.html).
  * Vertex AI: [`ChatVertex()`](https://posit-dev.github.io/chatlas/reference/ChatVertex.html).

  To use a model provider that isn't listed here, you have two options:
chatlas-0.6.0.dist-info/RECORD ADDED
@@ -0,0 +1,45 @@
+ chatlas/__init__.py,sha256=IVHVEEN6pspb-5WqWfBLc9wOQH-1R8vmi1Eeh-OSVFY,1358
+ chatlas/_anthropic.py,sha256=IvTC1xJYeKi7Liz_Czt1wmkG_Tx12e2ME663xZTNpdI,24745
+ chatlas/_auto.py,sha256=4tpwla09la4VA2PAh3phAMWs2Amgtp_4Qsjx6K02ib0,6032
+ chatlas/_chat.py,sha256=czfSjsEbRX5bHLclF1IbYtfHlyZrAOH0xcP-1hzcNNk,46032
+ chatlas/_content.py,sha256=yXB1IukyMfK9-Zc8ISm4h1p09O4i79YEJandzyT4UtM,8726
+ chatlas/_content_image.py,sha256=EUK6wAint-JatLsiwvaPDu4D3W-NcIsDCkzABkXgfDg,8304
+ chatlas/_content_pdf.py,sha256=cffeuJxzhUDukQ-Srkmpy62M8X12skYpU_FVq-Wvya4,2420
+ chatlas/_display.py,sha256=eqdRIwQenyJxswmTEjnJ1n9YxxSxsa8vHVmA79449_o,4439
+ chatlas/_github.py,sha256=8_vvUIBCprgrQ5UItky5yETfEQPG2fCMM57ga77p28E,4377
+ chatlas/_google.py,sha256=lXqqLwXlqFoKh0GWx-OSgJ1pge0Dv7FH8Sg-MkcXpJs,19138
+ chatlas/_groq.py,sha256=iuFvxeXkq81sDHxVV9zbVHjf2ZuNT94P-XkuXvqtGms,4160
+ chatlas/_interpolate.py,sha256=ykwLP3x-ya9Q33U4knSU75dtk6pzJAeythEEIW-43Pc,3631
+ chatlas/_live_render.py,sha256=UMZltE35LxziDKPMEeDwQ9meZ95SeqwhJi7j-y9pcro,4004
+ chatlas/_logging.py,sha256=7a20sAl1PkW1qBNrfd_ieUbQXV8Gf4Vuf0Wn62LNBmk,2290
+ chatlas/_merge.py,sha256=SGj_BetgA7gaOqSBKOhYmW3CYeQKTEehFrXvx3y4OYE,3924
+ chatlas/_ollama.py,sha256=EgTwmphVwBV7xCIqmPC_cNlr4Uo9N5Xy4eDCb1sJoPI,3764
+ chatlas/_openai.py,sha256=xnJPzZzVuRoH7al7Tq01J7SIgF7bZm3UwcO2noDENk4,24523
+ chatlas/_perplexity.py,sha256=j-jfOIYefZC5XzGjmya9GCCGQN003cRmiAv6vmo0rTQ,4454
+ chatlas/_provider.py,sha256=YmdBbz_u5aP_kBxl6s26OPiSnWG_vZ_fvf9L2qvBmyI,3809
+ chatlas/_snowflake.py,sha256=WUNdT3irxgLVqoc1TAeDmxnYsjBWiBw-CoH-dY4mFps,10944
+ chatlas/_tokens.py,sha256=3W3EPUp9eWXUiwuzJwEPBv43AUznbK46pm59Htti7z4,2392
+ chatlas/_tokens_old.py,sha256=L9d9oafrXvEx2u4nIn_Jjn7adnQyLBnYBuPwJUE8Pl8,5005
+ chatlas/_tools.py,sha256=-qt4U1AFkebQoX9kpsBy5QXK8a2PpHX6Amgm44gcQ68,4113
+ chatlas/_turn.py,sha256=7pve6YmD-L4c7Oxd6_ZAPkDudJ8AMpa6pP-pSroA1dM,5067
+ chatlas/_typing_extensions.py,sha256=YdzmlyPSBpIEcsOkoz12e6jETT1XEMV2Q72haE4cfwY,1036
+ chatlas/_utils.py,sha256=2TPy5_8dr9QDF1YShZN-CjxRVHeArSujRiaF0SKnI4o,2895
+ chatlas/_version.py,sha256=jF9TuoEIJRaca3ScKo6qaz6PzaMlu7jjuSQIrJ3nX4U,511
+ chatlas/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ chatlas/types/__init__.py,sha256=P_EDL4eqsigKwB-u2qRmKlYQS5Y65m7oWjGC3cYmxO4,719
+ chatlas/types/anthropic/__init__.py,sha256=OwubA-DPHYpYo0XyRyAFwftOI0mOxtHzAyhUSLcDx54,417
+ chatlas/types/anthropic/_client.py,sha256=G0LRhoFBcsSOMr5qhP-0rAScsVXaVlHCpggfVp54bnQ,690
+ chatlas/types/anthropic/_client_bedrock.py,sha256=mNazQlu0pQt8JdzrYn3LKNgE4n732GjhQUJdQQK9QkY,785
+ chatlas/types/anthropic/_submit.py,sha256=xoQyZ3SUUttWDPAjZTPfFch7D1bIU0AJNsenViYhAKs,2974
+ chatlas/types/google/__init__.py,sha256=ZJhi8Kwvio2zp8T1TQqmvdHqkS-Khb6BGESPjREADgo,337
+ chatlas/types/google/_client.py,sha256=t7aKbxYq_xOA1Z3RnWcjewifdQFSHi7vKEj6MyKMCJk,729
+ chatlas/types/google/_submit.py,sha256=b-ZqMvI551Ia7pFlWdqUQJjov3neHmVwLFw-P2bgU8w,1883
+ chatlas/types/openai/__init__.py,sha256=Q2RAr1bSH1nHsxICK05nAmKmxdhKmhbBkWD_XHiVSrI,411
+ chatlas/types/openai/_client.py,sha256=YGm_EHtRSSHeeOZe-CV7oNvMJpEblEta3UTuU7lSRO8,754
+ chatlas/types/openai/_client_azure.py,sha256=jx8D_p46CLDGzTP-k-TtGzj-f3junj6or-86m8DD_0w,858
+ chatlas/types/openai/_submit.py,sha256=mflYHZ5Q3dWBR2PdVEq6lhC9qNrQGNvyMiORglYLByE,6271
+ chatlas/types/snowflake/__init__.py,sha256=NVKw_gLVnSlMNdE6BpikrQw8GV8LvIn5SR8eI8Afgbs,273
+ chatlas/types/snowflake/_submit.py,sha256=Fgcb2Z4mXYwAR2b7Kn3SdEYFlO4gJiUvkDJ3lDoN0IY,799
+ chatlas-0.6.0.dist-info/METADATA,sha256=qJ2toXASnFjXvQOYJg0cXGx5AABxsgWBDBWN-1gZf04,14409
+ chatlas-0.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ chatlas-0.6.0.dist-info/RECORD,,