shinychat 0.0.1a2__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,350 +1,315 @@
1
+ from __future__ import annotations
2
+
1
3
  import sys
2
- from abc import ABC, abstractmethod
3
- from typing import TYPE_CHECKING, Any, Optional, cast
4
+ from functools import singledispatch
4
5
 
5
- from htmltools import HTML, Tagifiable
6
+ from htmltools import HTML, Tagifiable, TagList
6
7
 
8
+ from ._chat_normalize_chatlas import tool_request_contents, tool_result_contents
7
9
  from ._chat_types import ChatMessage
8
10
 
9
- if TYPE_CHECKING:
10
- from anthropic.types import Message as AnthropicMessage
11
- from anthropic.types import MessageStreamEvent
11
+ __all__ = ["message_content", "message_content_chunk"]
12
+
13
+
14
@singledispatch
def message_content(message):
    """
    Extract content from various message types into a ChatMessage.

    Built on `singledispatch`: support for a new message type is added by
    registering a handler with the `@message_content.register` decorator.

    Parameters
    ----------
    message
        The message object to extract content from (e.g., ChatCompletion,
        BaseMessage, etc.).

    Note
    ----
    This function is implicitly called by `Chat.append_message()` to support
    handling of various message types. It is not intended to be called directly
    by users, but may be useful for debugging or advanced use cases.

    Returns
    -------
    ChatMessage
        A ChatMessage object containing the extracted content and role.

    Raises
    ------
    ValueError
        If the message type is unsupported.
    """
    # None and plain string/HTML content is wrapped directly.
    if message is None or isinstance(message, (str, HTML)):
        return ChatMessage(content=message)
    # Dict form: 'content' is required; 'role' defaults to "assistant".
    if isinstance(message, dict):
        if "content" not in message:
            raise ValueError("Message dictionary must have a 'content' key")
        role = message.get("role", "assistant")
        return ChatMessage(content=message["content"], role=role)
    raise ValueError(
        f"Don't know how to extract content for message type {type(message)}: {message}. "
        "Consider registering a function to handle this type via `@message_content.register`"
    )
18
58
 
19
- class GenerateContentResponse:
20
- text: str
21
59
 
22
- from langchain_core.messages import BaseMessage, BaseMessageChunk
23
- from openai.types.chat import ChatCompletion, ChatCompletionChunk
60
@singledispatch
def message_content_chunk(chunk):
    """
    Extract content from various message chunk types into a ChatMessage.

    Built on `singledispatch`: support for a new chunk type is added by
    registering a handler with the `@message_content_chunk.register`
    decorator.

    Parameters
    ----------
    chunk
        The message chunk object to extract content from (e.g., ChatCompletionChunk,
        BaseMessageChunk, etc.).

    Note
    ----
    This function is implicitly called by `Chat.append_message_stream()` (on every
    chunk of a message stream). It is not intended to be called directly by
    users, but may be useful for debugging or advanced use cases.

    Returns
    -------
    ChatMessage
        A ChatMessage object containing the extracted content and role.

    Raises
    ------
    ValueError
        If the chunk type is unsupported.
    """
    # None and plain string/HTML chunks are wrapped directly.
    if chunk is None or isinstance(chunk, (str, HTML)):
        return ChatMessage(content=chunk)
    # Dict form: 'content' is required; 'role' defaults to "assistant".
    if isinstance(chunk, dict):
        if "content" not in chunk:
            raise ValueError("Chunk dictionary must have a 'content' key")
        role = chunk.get("role", "assistant")
        return ChatMessage(content=chunk["content"], role=role)
    raise ValueError(
        f"Don't know how to extract content for message chunk type {type(chunk)}: {chunk}. "
        "Consider registering a function to handle this type via `@message_content_chunk.register`"
    )
24
104
 
25
105
 
26
- class BaseMessageNormalizer(ABC):
27
- @abstractmethod
28
- def normalize(self, message: Any) -> ChatMessage:
29
- pass
106
+ # ------------------------------------------------------------------
107
+ # Shiny tagifiable content extractor
108
+ # ------------------------------------------------------------------
30
109
 
31
- @abstractmethod
32
- def normalize_chunk(self, chunk: Any) -> ChatMessage:
33
- pass
34
110
 
35
- @abstractmethod
36
- def can_normalize(self, message: Any) -> bool:
37
- pass
111
@message_content.register
def _(message: Tagifiable):
    # Tagifiable objects (e.g., htmltools tags) are stored as-is in the
    # message content.
    return ChatMessage(content=message)


@message_content_chunk.register
def _(chunk: Tagifiable):
    # Same pass-through for streamed chunks.
    return ChatMessage(content=chunk)
43
119
 
44
- class StringNormalizer(BaseMessageNormalizer):
45
- def normalize(self, message: Any) -> ChatMessage:
46
- x = cast(Optional[str], message)
47
- return ChatMessage(content=x or "", role="assistant")
48
120
 
49
- def normalize_chunk(self, chunk: Any) -> ChatMessage:
50
- x = cast(Optional[str], chunk)
51
- return ChatMessage(content=x or "", role="assistant")
121
+ # -----------------------------------------------------------------
122
+ # chatlas tool call display
123
+ # -----------------------------------------------------------------
124
try:
    from chatlas import ContentToolRequest, ContentToolResult, Turn
    from chatlas.types import Content, ContentText

    @message_content.register
    def _(message: Content):
        # Fallback for any chatlas Content subclass without a more specific
        # handler registered below: use its string representation.
        return ChatMessage(content=str(message))

    @message_content_chunk.register
    def _(chunk: Content):
        return message_content(chunk)

    @message_content.register
    def _(message: ContentText):
        return ChatMessage(content=message.text)

    @message_content_chunk.register
    def _(chunk: ContentText):
        return message_content(chunk)

    # Tool requests/results get their display markup from the
    # ._chat_normalize_chatlas helpers.
    # (Parameter renamed from `chunk` to `message` for consistency with the
    # other `message_content` handlers; dispatch only uses the annotation.)
    @message_content.register
    def _(message: ContentToolRequest):
        return ChatMessage(content=tool_request_contents(message))

    @message_content_chunk.register
    def _(chunk: ContentToolRequest):
        return message_content(chunk)

    @message_content.register
    def _(message: ContentToolResult):
        return ChatMessage(content=tool_result_contents(message))

    @message_content_chunk.register
    def _(chunk: ContentToolResult):
        return message_content(chunk)

    @message_content.register
    def _(message: Turn):
        # Flatten all of a Turn's contents into a single TagList.
        contents = TagList()
        for x in message.contents:
            contents.append(message_content(x).content)
        return ChatMessage(content=contents)

    @message_content_chunk.register
    def _(chunk: Turn):
        return message_content(chunk)

    # N.B., unlike R, Python Chat stores UI state and so can replay
    # it with additional workarounds. That's why R currently has a
    # shinychat_contents() method for Chat, but Python doesn't.
except ImportError:
    pass
91
176
 
92
- def can_normalize_chunk(self, chunk: Any) -> bool:
93
- return isinstance(chunk, Tagifiable)
177
+ # ------------------------------------------------------------------
178
+ # LangChain content extractor
179
+ # ------------------------------------------------------------------
94
180
 
181
try:
    from langchain_core.messages import BaseMessage, BaseMessageChunk

    @message_content.register
    def _(message: BaseMessage):
        content = message.content
        # A list-valued `content` represents several messages; the caller
        # must append them one at a time.
        if isinstance(content, list):
            raise ValueError(
                "The `message.content` provided seems to represent numerous messages. "
                "Consider iterating over `message.content` and calling .append_message() on each iteration."
            )
        return ChatMessage(content=content, role="assistant")

    @message_content_chunk.register
    def _(chunk: BaseMessageChunk):
        content = chunk.content
        if isinstance(content, list):
            raise ValueError(
                "The `chunk.content` provided seems to represent numerous message chunks. "
                "Consider iterating over `chunk.content` and calling .append_message() on each iteration."
            )
        return ChatMessage(content=content, role="assistant")
except ImportError:
    pass
130
209
 
131
210
 
132
- class OpenAINormalizer(StringNormalizer):
133
- def normalize(self, message: Any) -> ChatMessage:
134
- x = cast("ChatCompletion", message)
135
- return super().normalize(x.choices[0].message.content)
211
+ # ------------------------------------------------------------------
212
+ # OpenAI content extractor
213
+ # ------------------------------------------------------------------
136
214
 
137
- def normalize_chunk(self, chunk: Any) -> ChatMessage:
138
- x = cast("ChatCompletionChunk", chunk)
139
- return super().normalize_chunk(x.choices[0].delta.content)
215
try:
    from openai.types.chat import ChatCompletion, ChatCompletionChunk

    @message_content.register
    def _(message: ChatCompletion):
        return ChatMessage(
            content=message.choices[0].message.content,
            role="assistant",
        )

    @message_content_chunk.register
    def _(chunk: ChatCompletionChunk):
        # Streams created with `stream_options={"include_usage": True}` end
        # with a usage-only chunk whose `choices` list is empty; treat it as
        # an empty-content chunk rather than raising IndexError.
        if not chunk.choices:
            return ChatMessage(content="", role="assistant")
        return ChatMessage(
            content=chunk.choices[0].delta.content,
            role="assistant",
        )
except ImportError:
    pass
148
233
 
149
- def can_normalize_chunk(self, chunk: Any) -> bool:
150
- try:
151
- from openai.types.chat import ChatCompletionChunk
152
234
 
153
- return isinstance(chunk, ChatCompletionChunk)
154
- except Exception:
155
- return False
235
+ # ------------------------------------------------------------------
236
+ # Anthropic content extractor
237
+ # ------------------------------------------------------------------
156
238
 
239
try:
    from anthropic.types import (  # pyright: ignore[reportMissingImports]
        Message as AnthropicMessage,
    )

    @message_content.register
    def _(message: AnthropicMessage):
        block = message.content[0]
        if block.type != "text":
            raise ValueError(
                f"Anthropic message type {block.type} not supported. "
                "Only 'text' type is currently supported"
            )
        return ChatMessage(content=block.text)

    # Old versions of singledispatch don't seem to support union types
    if sys.version_info >= (3, 11):
        from anthropic.types import (  # pyright: ignore[reportMissingImports]
            RawMessageStreamEvent,
        )

        @message_content_chunk.register
        def _(chunk: RawMessageStreamEvent):
            # Only content-block deltas carry displayable text; every other
            # stream event becomes an empty chunk.
            if chunk.type != "content_block_delta":
                return ChatMessage(content="")
            if chunk.delta.type != "text_delta":
                raise ValueError(
                    f"Anthropic message delta type {chunk.delta.type} not supported. "
                    "Only 'text_delta' type is supported"
                )
            return ChatMessage(content=chunk.delta.text)
except ImportError:
    pass
274
+
275
+
276
+ # ------------------------------------------------------------------
277
+ # Google content extractor
278
+ # ------------------------------------------------------------------
279
+
280
try:
    from google.generativeai.types.generation_types import (
        GenerateContentResponse,
    )

    @message_content.register
    def _(message: GenerateContentResponse):
        # Google responses expose their aggregated text via `.text`.
        return ChatMessage(content=message.text)

    @message_content_chunk.register
    def _(chunk: GenerateContentResponse):
        # Streamed chunks share the same type, so delegate to the
        # message handler above.
        return message_content(chunk)

except ImportError:
    pass
228
295
 
229
- return isinstance(
230
- message,
231
- gtypes.GenerateContentResponse, # pyright: ignore[reportUnknownMemberType]
232
- )
233
- except Exception:
234
- return False
235
-
236
- def can_normalize_chunk(self, chunk: Any) -> bool:
237
- return self.can_normalize(chunk)
238
-
239
-
240
- class OllamaNormalizer(DictNormalizer):
241
- def normalize(self, message: Any) -> ChatMessage:
242
- x = cast("dict[str, Any]", message["message"])
243
- return super().normalize(x)
244
-
245
- def normalize_chunk(self, chunk: "dict[str, Any]") -> ChatMessage:
246
- msg = cast("dict[str, Any]", chunk["message"])
247
- return super().normalize_chunk(msg)
248
-
249
- def can_normalize(self, message: Any) -> bool:
250
- try:
251
- from ollama import ChatResponse
252
-
253
- # Ollama<0.4 used TypedDict (now it uses pydantic)
254
- # https://github.com/ollama/ollama-python/pull/276
255
- if isinstance(ChatResponse, dict):
256
- return "message" in message and super().can_normalize(
257
- message["message"]
258
- )
259
- else:
260
- return isinstance(message, ChatResponse)
261
- except Exception:
262
- return False
263
-
264
- def can_normalize_chunk(self, chunk: Any) -> bool:
265
- return self.can_normalize(chunk)
266
-
267
-
268
- class NormalizerRegistry:
269
- def __init__(self) -> None:
270
- # Order of strategies matters (the 1st one that can normalize the message is used)
271
- # So make sure to put the most specific strategies first
272
- self._strategies: dict[str, BaseMessageNormalizer] = {
273
- "openai": OpenAINormalizer(),
274
- "anthropic": AnthropicNormalizer(),
275
- "google": GoogleNormalizer(),
276
- "langchain": LangChainNormalizer(),
277
- "ollama": OllamaNormalizer(),
278
- "tagify": TagifiableNormalizer(),
279
- "dict": DictNormalizer(),
280
- "string": StringNormalizer(),
281
- }
282
-
283
- def register(
284
- self, provider: str, strategy: BaseMessageNormalizer, force: bool = False
285
- ) -> None:
286
- if provider in self._strategies:
287
- if force:
288
- del self._strategies[provider]
289
- else:
290
- raise ValueError(f"Provider {provider} already exists in registry")
291
- # Update the strategies dict such that the new strategy is the first to be considered
292
- self._strategies = {provider: strategy, **self._strategies}
293
-
294
-
295
- message_normalizer_registry = NormalizerRegistry()
296
-
297
-
298
- def register_custom_normalizer(
299
- provider: str, normalizer: BaseMessageNormalizer, force: bool = False
300
- ) -> None:
301
- """
302
- Register a custom normalizer for handling specific message types.
303
296
 
304
- Parameters
305
- ----------
306
- provider : str
307
- A unique identifier for this normalizer in the registry
308
- normalizer : BaseMessageNormalizer
309
- A normalizer instance that can handle your specific message type
310
- force : bool, optional
311
- Whether to override an existing normalizer with the same provider name,
312
- by default False
313
-
314
- Examples
315
- --------
316
- >>> class MyCustomMessage:
317
- ... def __init__(self, content):
318
- ... self.content = content
319
- ...
320
- >>> class MyCustomNormalizer(StringNormalizer):
321
- ... def normalize(self, message):
322
- ... return ChatMessage(content=message.content, role="assistant")
323
- ... def can_normalize(self, message):
324
- ... return isinstance(message, MyCustomMessage)
325
- ...
326
- >>> register_custom_normalizer("my_provider", MyCustomNormalizer())
327
- """
328
- message_normalizer_registry.register(provider, normalizer, force)
297
+ # ------------------------------------------------------------------
298
+ # Ollama content extractor
299
+ # ------------------------------------------------------------------
329
300
 
301
try:
    from ollama import ChatResponse

    @message_content.register
    def _(message: ChatResponse):
        # Use the keyword form (`content=`) for consistency with every other
        # handler in this module.
        return ChatMessage(content=message.message.content)

    @message_content_chunk.register
    def _(chunk: ChatResponse):
        return ChatMessage(content=chunk.message.content)

except ImportError:
    pass