xiaogpt 2.22.tar.gz → 2.23.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {xiaogpt-2.22 → xiaogpt-2.23}/PKG-INFO +2 -2
  2. {xiaogpt-2.22 → xiaogpt-2.23}/pyproject.toml +2 -2
  3. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/bard_bot.py +1 -0
  4. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/gemini_bot.py +1 -0
  5. xiaogpt-2.23/xiaogpt/bot/glm_bot.py +63 -0
  6. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/qwen_bot.py +3 -4
  7. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/cli.py +2 -2
  8. xiaogpt-2.22/xiaogpt/bot/glm_bot.py +0 -46
  9. {xiaogpt-2.22 → xiaogpt-2.23}/LICENSE +0 -0
  10. {xiaogpt-2.22 → xiaogpt-2.23}/README.md +0 -0
  11. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/__init__.py +0 -0
  12. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/__main__.py +0 -0
  13. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/__init__.py +0 -0
  14. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/base_bot.py +0 -0
  15. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/chatgptapi_bot.py +0 -0
  16. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/gpt3_bot.py +0 -0
  17. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/langchain_bot.py +0 -0
  18. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/newbing_bot.py +0 -0
  19. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/config.py +0 -0
  20. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/langchain/callbacks.py +0 -0
  21. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/langchain/chain.py +0 -0
  22. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/langchain/examples/email/mail_box.py +0 -0
  23. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/langchain/examples/email/mail_summary_tools.py +0 -0
  24. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/tts/__init__.py +0 -0
  25. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/tts/base.py +0 -0
  26. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/tts/edge.py +0 -0
  27. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/tts/mi.py +0 -0
  28. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/tts/openai.py +0 -0
  29. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/utils.py +0 -0
  30. {xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/xiaogpt.py +0 -0
{xiaogpt-2.22 → xiaogpt-2.23}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: xiaogpt
- Version: 2.22
+ Version: 2.23
  Summary: Play ChatGPT or other LLM with xiaomi AI speaker
  Author-Email: yihong0618 <zouzou0208@gmail.com>
  License: MIT
@@ -13,7 +13,7 @@ Requires-Dist: miservice_fork
  Requires-Dist: openai>=1
  Requires-Dist: aiohttp
  Requires-Dist: rich
- Requires-Dist: zhipuai
+ Requires-Dist: zhipuai==2.0.1
  Requires-Dist: bardapi
  Requires-Dist: edge-tts>=6.1.3
  Requires-Dist: EdgeGPT==0.1.26
{xiaogpt-2.22 → xiaogpt-2.23}/pyproject.toml
@@ -16,7 +16,7 @@ dependencies = [
      "openai>=1",
      "aiohttp",
      "rich",
-     "zhipuai",
+     "zhipuai==2.0.1",
      "bardapi",
      "edge-tts>=6.1.3",
      "EdgeGPT==0.1.26",
@@ -28,7 +28,7 @@ dependencies = [
      "dashscope==1.10.0",
  ]
  dynamic = []
- version = "2.22"
+ version = "2.23"

  [project.license]
  text = "MIT"
{xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/bard_bot.py
@@ -1,4 +1,5 @@
  """ChatGLM bot"""
+
  from __future__ import annotations

  from typing import Any
{xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/gemini_bot.py
@@ -1,4 +1,5 @@
  """Google Gemini bot"""
+
  from __future__ import annotations

  from typing import Any
xiaogpt-2.23/xiaogpt/bot/glm_bot.py (new file)
@@ -0,0 +1,63 @@
+ """ChatGLM bot"""
+
+ from __future__ import annotations
+
+ from typing import Any
+
+ from rich import print
+
+ from xiaogpt.bot.base_bot import BaseBot, ChatHistoryMixin
+
+
+ class GLMBot(ChatHistoryMixin, BaseBot):
+     name = "Chat GLM"
+     default_options = {"model": "chatglm_turbo"}
+
+     def __init__(self, glm_key: str) -> None:
+         from zhipuai import ZhipuAI
+
+         self.model = "glm-4"  # Change glm model here
+
+         self.history = []
+         self.client = ZhipuAI(api_key=glm_key)
+
+     @classmethod
+     def from_config(cls, config):
+         return cls(glm_key=config.glm_key)
+
+     def ask(self, query, **options):
+         ms = self.get_messages()
+         kwargs = {**self.default_options, **options}
+         kwargs["model"] = self.model
+         ms.append({"role": "user", "content": f"{query}"})
+         kwargs["messages"] = ms
+         try:
+             r = self.client.chat.completions.create(**kwargs)
+         except Exception as e:
+             print(str(e))
+             return
+         message = r.choices[0].message.content
+
+         self.add_message(query, message)
+         print(message)
+         return message
+
+     async def ask_stream(self, query: str, **options: Any):
+         ms = self.get_messages()
+         kwargs = {**self.default_options, **options}
+         kwargs["model"] = self.model
+         ms.append({"role": "user", "content": f"{query}"})
+         kwargs["messages"] = ms
+         kwargs["stream"] = True
+         try:
+             r = self.client.chat.completions.create(**kwargs)
+         except Exception as e:
+             print(str(e))
+             return
+         full_content = ""
+         for chunk in r:
+             content = chunk.choices[0].delta.content
+             full_content += content
+             print(content, end="")
+             yield content
+         self.add_message(query, full_content)
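The rewritten GLM bot targets the zhipuai 2.x SDK (hence the zhipuai==2.0.1 pin above): instead of setting a module-level zhipuai.api_key and calling zhipuai.model_api.sse_invoke, it builds a ZhipuAI client and talks to the OpenAI-style chat.completions.create interface, including a real streaming path. A minimal usage sketch, not part of the package, assuming zhipuai 2.0.1 is installed; the API key value and the prompts are placeholders:

    import asyncio

    from xiaogpt.bot.glm_bot import GLMBot

    # Placeholder key: GLMBot.__init__ wraps it in a zhipuai.ZhipuAI client.
    bot = GLMBot(glm_key="your-zhipuai-api-key")

    # Blocking call: ask() appends the query to the chat history, sends it to
    # the glm-4 model, prints the reply, and returns it.
    reply = bot.ask("Introduce the Xiaomi speaker in one sentence.")

    # Streaming call: ask_stream() is an async generator that yields each
    # delta chunk produced by chat.completions.create(stream=True).
    async def stream_demo() -> None:
        async for piece in bot.ask_stream("Tell me a short joke."):
            pass  # each piece is already printed by the bot as it arrives

    asyncio.run(stream_demo())

This streaming support is also why the cli.py hunk further down drops "glm" from the no-stream check, leaving only Bard restricted.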
{xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/bot/qwen_bot.py
@@ -1,4 +1,5 @@
- """ChatGLM bot"""
+ """Qwen bot"""
+
  from __future__ import annotations

  from http import HTTPStatus
@@ -16,9 +17,7 @@ class QwenBot(ChatHistoryMixin, BaseBot):
          import dashscope
          from dashscope.api_entities.dashscope_response import Role

-         self.history = [
-             {"role": Role.SYSTEM, "content": "You are a helpful assistant."}
-         ]
+         self.history = []
          dashscope.api_key = qwen_key

      @classmethod
{xiaogpt-2.22 → xiaogpt-2.23}/xiaogpt/cli.py
@@ -195,8 +195,8 @@ def main():
      )

      options = parser.parse_args()
-     if options.bot in ["glm", "bard"] and options.stream:
-         raise Exception("For now ChatGLM do not support stream")
+     if options.bot in ["bard"] and options.stream:
+         raise Exception("For now Bard do not support stream")
      config = Config.from_options(options)

      miboy = MiGPT(config)
@@ -1,46 +0,0 @@
1
- """ChatGLM bot"""
2
- from __future__ import annotations
3
-
4
- from typing import Any
5
-
6
- from rich import print
7
-
8
- from xiaogpt.bot.base_bot import BaseBot, ChatHistoryMixin
9
-
10
-
11
- class GLMBot(ChatHistoryMixin, BaseBot):
12
- name = "Chat GLM"
13
- default_options = {"model": "chatglm_turbo"}
14
-
15
- def __init__(self, glm_key: str) -> None:
16
- import zhipuai
17
-
18
- self.history = []
19
- zhipuai.api_key = glm_key
20
-
21
- @classmethod
22
- def from_config(cls, config):
23
- return cls(glm_key=config.glm_key)
24
-
25
- def ask(self, query, **options):
26
- import zhipuai
27
-
28
- ms = self.get_messages()
29
- kwargs = {**self.default_options, **options}
30
- kwargs["prompt"] = ms
31
- ms.append({"role": "user", "content": f"{query}"})
32
- try:
33
- r = zhipuai.model_api.sse_invoke(**kwargs)
34
- except Exception as e:
35
- print(str(e))
36
- return
37
- message = ""
38
- for i in r.events():
39
- message += str(i.data)
40
-
41
- self.add_message(query, message)
42
- print(message)
43
- return message
44
-
45
- def ask_stream(self, query: str, **options: Any):
46
- raise Exception("GLM do not support stream")