xiaogpt 2.31-py3-none-any.whl → 2.40-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xiaogpt/bot/__init__.py +0 -6
- xiaogpt/cli.py +0 -26
- xiaogpt/config.py +1 -9
- xiaogpt/tts/azure.py +1 -0
- xiaogpt/tts/base.py +14 -19
- xiaogpt/xiaogpt.py +0 -2
- {xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/METADATA +2 -16
- {xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/RECORD +11 -13
- xiaogpt/bot/bard_bot.py +0 -37
- xiaogpt/bot/gpt3_bot.py +0 -90
- {xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/WHEEL +0 -0
- {xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/entry_points.txt +0 -0
- {xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/licenses/LICENSE +0 -0
xiaogpt/bot/__init__.py
CHANGED
@@ -2,21 +2,17 @@ from __future__ import annotations
 
 from xiaogpt.bot.base_bot import BaseBot
 from xiaogpt.bot.chatgptapi_bot import ChatGPTBot
-from xiaogpt.bot.gpt3_bot import GPT3Bot
 from xiaogpt.bot.newbing_bot import NewBingBot
 from xiaogpt.bot.glm_bot import GLMBot
-from xiaogpt.bot.bard_bot import BardBot
 from xiaogpt.bot.gemini_bot import GeminiBot
 from xiaogpt.bot.qwen_bot import QwenBot
 from xiaogpt.bot.langchain_bot import LangChainBot
 from xiaogpt.config import Config
 
 BOTS: dict[str, type[BaseBot]] = {
-    "gpt3": GPT3Bot,
     "newbing": NewBingBot,
     "chatgptapi": ChatGPTBot,
     "glm": GLMBot,
-    "bard": BardBot,
     "gemini": GeminiBot,
     "qwen": QwenBot,
     "langchain": LangChainBot,
@@ -31,11 +27,9 @@ def get_bot(config: Config) -> BaseBot:
 
 
 __all__ = [
-    "GPT3Bot",
     "ChatGPTBot",
     "NewBingBot",
     "GLMBot",
-    "BardBot",
     "GeminiBot",
     "QwenBot",
     "get_bot",
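The removal above drops GPT3Bot and BardBot from the BOTS registry, so "gpt3" and "bard" are no longer valid lookup keys. A minimal sketch of how a registry lookup like this is typically resolved (the actual get_bot body is not shown in this diff; the helper name and error handling below are assumptions):

# Illustrative sketch only: resolve_bot is a hypothetical stand-in for get_bot.
from xiaogpt.bot import BOTS
from xiaogpt.config import Config


def resolve_bot(config: Config):
    try:
        # As of 2.40, "gpt3" and "bard" no longer appear in BOTS.
        return BOTS[config.bot].from_config(config)
    except KeyError:
        raise ValueError(f"Unsupported bot type: {config.bot}")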
xiaogpt/cli.py
CHANGED
@@ -42,11 +42,6 @@ def main():
         dest="qwen_key",
         help="Alibaba Qwen api key",
     )
-    parser.add_argument(
-        "--bard_token",
-        dest="bard_token",
-        help="google bard token see https://github.com/dsdanielpark/Bard-API",
-    )
     parser.add_argument(
         "--serpapi_api_key",
         dest="serpapi_api_key",
@@ -114,13 +109,6 @@ def main():
         choices=["mi", "edge", "openai", "azure"],
     )
     bot_group = parser.add_mutually_exclusive_group()
-    bot_group.add_argument(
-        "--use_gpt3",
-        dest="bot",
-        action="store_const",
-        const="gpt3",
-        help="if use openai gpt3 api",
-    )
     bot_group.add_argument(
         "--use_chatgpt_api",
         dest="bot",
@@ -149,13 +137,6 @@ def main():
         const="glm",
         help="if use chatglm",
     )
-    bot_group.add_argument(
-        "--use_bard",
-        dest="bot",
-        action="store_const",
-        const="bard",
-        help="if use bard",
-    )
     bot_group.add_argument(
         "--use_qwen",
         dest="bot",
@@ -180,11 +161,9 @@ def main():
         dest="bot",
         help="bot type",
         choices=[
-            "gpt3",
             "chatgptapi",
             "newbing",
             "glm",
-            "bard",
             "gemini",
             "langchain",
             "qwen",
@@ -209,11 +188,6 @@ def main():
     )
 
     options = parser.parse_args()
-    if options.bot in ["bard"] and options.stream:
-        raise Exception("For now Bard do not support stream")
-    if options.tts in ["edge", "openai", "azure"]:
-        print("Will close stream to better tts")
-        options.stream = False
     config = Config.from_options(options)
 
     miboy = MiGPT(config)
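The deleted --use_gpt3 and --use_bard switches used the same argparse pattern as the flags that remain: store_const actions in a mutually exclusive group that all write into a single bot destination. A small self-contained sketch of that pattern (flag names and help text here are illustrative, not copied from cli.py):

# Standalone illustration of the mutually exclusive store_const pattern.
import argparse

parser = argparse.ArgumentParser()
bot_group = parser.add_mutually_exclusive_group()
bot_group.add_argument(
    "--use_chatgpt_api",
    dest="bot",
    action="store_const",
    const="chatgptapi",
    help="use the chatgpt api bot",
)
bot_group.add_argument(
    "--use_glm",
    dest="bot",
    action="store_const",
    const="glm",
    help="use the chatglm bot",
)

options = parser.parse_args(["--use_glm"])
print(options.bot)  # -> glm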
xiaogpt/config.py
CHANGED
@@ -64,7 +64,6 @@ class Config:
     glm_key: str = os.getenv("CHATGLM_KEY", "")
     gemini_key: str = os.getenv("GEMINI_KEY", "") # keep the old rule
     qwen_key: str = os.getenv("DASHSCOPE_API_KEY", "") # keep the old rule
-    bard_token: str = os.getenv("BARD_TOKEN", "")
     serpapi_api_key: str = os.getenv("SERPAPI_API_KEY", "")
     proxy: str | None = None
     mi_did: str = os.getenv("MI_DID", "")
@@ -107,16 +106,13 @@ class Config:
                 "Using Azure OpenAI needs deployment_id, read this: "
                 "https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/chatgpt?pivots=programming-language-chat-completions"
             )
-        if self.bot in ["chatgptapi", "gpt3"]:
+        if self.bot in ["chatgptapi"]:
             if not self.openai_key:
                 raise Exception(
                     "Using GPT api needs openai API key, please google how to"
                 )
         if self.tts == "azure" and not self.azure_tts_speech_key:
             raise Exception("Using Azure TTS needs azure speech key")
-        if self.tts in ["azure", "edge", "openai"]:
-            print("Will close stream when use tts: {self.tts} for better experience")
-            self.stream = False
 
     @property
     def tts_command(self) -> str:
@@ -150,8 +146,6 @@ class Config:
                 value = [kw for kw in value if kw]
             elif key == "use_chatgpt_api":
                 key, value = "bot", "chatgptapi"
-            elif key == "use_gpt3":
-                key, value = "bot", "gpt3"
             elif key == "use_newbing":
                 key, value = "bot", "newbing"
             elif key == "use_glm":
@@ -160,8 +154,6 @@ class Config:
                 key, value = "bot", "gemini"
             elif key == "use_qwen":
                 key, value = "bot", "qwen"
-            elif key == "use_bard":
-                key, value = "bot", "bard"
             elif key == "use_langchain":
                 key, value = "bot", "langchain"
             elif key == "enable_edge_tts":
xiaogpt/tts/azure.py
CHANGED
@@ -32,6 +32,7 @@ class AzureTTS(AudioFileTTS):
         # Check result
         if result.reason == speechsdk.ResultReason.SynthesizingAudioCompleted:
             logger.debug("Speech synthesized for text [{}]".format(text))
+
             return Path(output_file.name), calculate_tts_elapse(text)
         elif result.reason == speechsdk.ResultReason.Canceled:
             cancellation_details = result.cancellation_details
xiaogpt/tts/base.py
CHANGED
@@ -106,26 +106,21 @@ class AudioFileTTS(TTS):
             finished.set()
 
         task = asyncio.create_task(worker())
-        while not queue.empty() or not finished.is_set():
-            done, other = await asyncio.wait(
-                [
-                    asyncio.ensure_future(queue.get()),
-                    asyncio.ensure_future(finished.wait()),
-                ],
-                return_when=asyncio.FIRST_COMPLETED,
-            )
-            if other:
-                other.pop().cancel()
 
-
-
-
-
-
-
-
-
-
+        while True:
+            try:
+                url, duration = queue.get_nowait()
+            except asyncio.QueueEmpty:
+                if finished.is_set():
+                    break
+                else:
+                    await asyncio.sleep(0.1)
+                    continue
+            logger.debug("Playing URL %s (%s seconds)", url, duration)
+            await asyncio.gather(
+                self.mina_service.play_by_url(self.device_id, url),
+                self.wait_for_duration(duration),
+            )
         await task
 
     def _start_http_server(self):
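The new loop above replaces the asyncio.wait bookkeeping with a simple poll: try queue.get_nowait(), and only exit once the producer has set finished and the queue has drained. A self-contained sketch of that consumer pattern outside xiaogpt (the play coroutine and the queued items below are stand-ins for play_by_url and the real TTS URLs):

# Minimal reproduction of the polling consumer introduced in tts/base.py.
import asyncio


async def play(url: str, duration: float) -> None:
    # Stand-in for mina_service.play_by_url + wait_for_duration.
    await asyncio.sleep(duration)


async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    finished = asyncio.Event()

    async def worker() -> None:
        # Producer: enqueue a few fake TTS URLs, then signal completion.
        for i in range(3):
            await queue.put((f"http://example.com/{i}.mp3", 0.1))
        finished.set()

    task = asyncio.create_task(worker())
    while True:
        try:
            url, duration = queue.get_nowait()
        except asyncio.QueueEmpty:
            if finished.is_set():
                break
            await asyncio.sleep(0.1)
            continue
        print(f"Playing {url} for {duration} seconds")
        await play(url, duration)
    await task


asyncio.run(main())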
{xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: xiaogpt
-Version: 2.31
+Version: 2.40
 Summary: Play ChatGPT or other LLM with xiaomi AI speaker
 Author-Email: yihong0618 <zouzou0208@gmail.com>
 License: MIT
@@ -15,7 +15,6 @@ Requires-Dist: aiohttp
 Requires-Dist: rich
 Requires-Dist: zhipuai==2.0.1
 Requires-Dist: httpx==0.24.1
-Requires-Dist: bardapi
 Requires-Dist: edge-tts>=6.1.3
 Requires-Dist: EdgeGPT==0.1.26
 Requires-Dist: langchain>=0.0.343
@@ -43,12 +42,10 @@ Play ChatGPT and other LLM with Xiaomi AI Speaker
 
 ## Supported AI types
 
-- GPT3
 - ChatGPT
 - New Bing
 - [ChatGLM](http://open.bigmodel.cn/)
 - [Gemini](https://makersuite.google.com/app/apikey)
-- [Bard](https://github.com/dsdanielpark/Bard-API)
 - [通义千问](https://help.aliyun.com/zh/dashscope/developer-reference/api-details)
 
 ## Getting the Xiaomi speaker DID
@@ -102,7 +99,6 @@ export MI_DID=xxxx
 - Use `--account ${account} --password ${password}`
 - If you are able to, you can replace the wake word yourself, or remove it
 - Using `--use_chatgpt_api` gives a smoother and much faster conversation, close to a real dialogue experience, [openai api](https://platform.openai.com/account/api-keys), command `--use_chatgpt_api`
-- Using the gpt-3 api also gives a smoother, fast conversation, please google how to use the [openai api](https://platform.openai.com/account/api-keys), command --use_gpt3
- If you are blocked and need Cloudflare Workers to replace api_base, use `--api_base ${url}` to replace it. **Note that the api you enter here should look like '`https://xxxx/v1`', and the domain must be wrapped in quotes**
 - You can say `开始持续对话` to XiaoAi to automatically enter continuous conversation mode, and `结束持续对话` to leave it.
 - You can use `--tts edge` for better tts
@@ -123,9 +119,6 @@ xiaogpt --hardware LX06 --account ${your_xiaomi_account} --password ${your_password}
 xiaogpt --hardware LX06 --mute_xiaoai --use_chatgpt_api
 # Use streaming responses for faster replies
 xiaogpt --hardware LX06 --mute_xiaoai --stream
-# If you want to use gpt3 ai
-export OPENAI_API_KEY=${your_api_key}
-xiaogpt --hardware LX06 --mute_xiaoai --use_gpt3
 # If you want to use google's gemini
 xiaogpt --hardware LX06 --mute_xiaoai --use_gemini --gemini_key ${gemini_key}
 # If you want to use Alibaba's 通义千问 (Qwen)
@@ -151,13 +144,8 @@ python3 xiaogpt.py --hardware LX06 --account ${your_xiaomi_account} --password ${your_password}
 python3 xiaogpt.py --hardware LX06 --mute_xiaoai
 # Use streaming responses for faster replies
 python3 xiaogpt.py --hardware LX06 --mute_xiaoai --stream
-# If you want to use gpt3 ai
-export OPENAI_API_KEY=${your_api_key}
-python3 xiaogpt.py --hardware LX06 --mute_xiaoai --use_gpt3
 # If you want to use the ChatGLM api
 python3 xiaogpt.py --hardware LX06 --mute_xiaoai --use_glm --glm_key ${glm_key}
-# If you want to use google's bard
-python3 xiaogpt.py --hardware LX06 --mute_xiaoai --use_bard --bard_token ${bard_token}
 # If you want to use google's gemini
 python3 xiaogpt.py --hardware LX06 --mute_xiaoai --use_gemini --gemini_key ${gemini_key}
 # If you want to use Alibaba's 通义千问 (Qwen)
@@ -202,7 +190,6 @@ python3 xiaogpt.py
 
 For what each parameter does, see the [Open AI API docs](https://platform.openai.com/docs/api-reference/chat/create).
 ChatGLM [docs](http://open.bigmodel.cn/doc/api#chatglm_130b)
-Bard-API [reference](https://github.com/dsdanielpark/Bard-API)
 
 ## Configuration options
 
@@ -216,13 +203,12 @@ Bard-API [reference](https://github.com/dsdanielpark/Bard-API)
 | glm_key | chatglm apikey | | |
 | gemini_key | gemini apikey [reference](https://makersuite.google.com/app/apikey) | | |
 | qwen_key | qwen apikey [reference](https://help.aliyun.com/zh/dashscope/developer-reference/api-details) | | |
-| bard_token | bard token, see [Bard-API](https://github.com/dsdanielpark/Bard-API) | | |
 | cookie | XiaoAi account cookie (optional if you log in with the password above) | | |
 | mi_did | device did | | |
 | use_command | use MI command to interact with XiaoAi | `false` | |
 | mute_xiaoai | quickly stop XiaoAi's own answer | `true` | |
 | verbose | whether to print verbose logs | `false` | |
-| bot | bot type to use, currently supports
+| bot | bot type to use, currently supports chatgptapi, newbing, qwen, gemini | `chatgptapi` | |
 | tts | TTS type to use | `mi` | `edge`, `openai`, `azure` |
 | tts_voice | TTS voice | `zh-CN-XiaoxiaoNeural`(edge), `alloy`(openai), `zh-CN-XiaoxiaoMultilingualNeural`(azure) | |
 | prompt | custom prompt | `请用100字以内回答` | |
{xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/RECORD
CHANGED
@@ -1,31 +1,29 @@
-xiaogpt-2.
-xiaogpt-2.
-xiaogpt-2.
-xiaogpt-2.
+xiaogpt-2.40.dist-info/METADATA,sha256=g5S1znqm_74UkhNwydLgOqPUxEUOdZg9euKWVprXm7Y,23668
+xiaogpt-2.40.dist-info/WHEEL,sha256=N2J68yzZqJh3mI_Wg92rwhw0rtJDFpZj9bwQIMJgaVg,90
+xiaogpt-2.40.dist-info/entry_points.txt,sha256=zLFzA72qQ_eWBepdA2YU5vdXFqORH8wXhv2Ox1vnYP8,46
+xiaogpt-2.40.dist-info/licenses/LICENSE,sha256=XdClh516MvlnOf9749JZHCxSB7y6_fyXcWmLDz6IkZY,1063
 xiaogpt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 xiaogpt/__main__.py,sha256=MSmt_5Xg84uHqzTN38JwgseJK8rsJn_11A8WD99VtEo,61
-xiaogpt/bot/__init__.py,sha256=
-xiaogpt/bot/bard_bot.py,sha256=qC8m87mWvtbiwipKl_OMYCqRbh79gkyZEytgz5DysC0,840
+xiaogpt/bot/__init__.py,sha256=7K9v6j6xDkuvIrJwjby0Ec_1ywd4Si0Ww9S_R8M3LeU,919
 xiaogpt/bot/base_bot.py,sha256=oKn6LLFHXol4hKrSrjnxknrOqrcGICtT_GPPYRNxpkw,1467
 xiaogpt/bot/chatgptapi_bot.py,sha256=JYlq1D-YZxRwAPpd5dTFSBU7h1KNdWA7FTwgyvMxr7c,3656
 xiaogpt/bot/gemini_bot.py,sha256=udKrWYP7U83AWpNBggwRp9bvgR2DTHqLMX9E_DLFv-I,1840
 xiaogpt/bot/glm_bot.py,sha256=QoMJbnu5_rHDz4tzwn7gh3IoAuw7E4hZQLAfziMAvNY,1825
-xiaogpt/bot/gpt3_bot.py,sha256=enX45_wrGjAtOh-anf8KnjCJluWSARZbjTGD_WZgoms,2781
 xiaogpt/bot/langchain_bot.py,sha256=4Uz5iOYzA2ongCklS-9zBse2fw-7kEE_9wITH7wdVCc,1944
 xiaogpt/bot/newbing_bot.py,sha256=afUmw6tyMXbgGZvfQQWaA5h0-e0V0isFolW-WGhd0Vs,2289
 xiaogpt/bot/qwen_bot.py,sha256=325lMa4Z38rRh47HDa3J4XjvSs4SWOqMVhrMWzkGNo4,3657
-xiaogpt/cli.py,sha256=
-xiaogpt/config.py,sha256=
+xiaogpt/cli.py,sha256=GnRj-AawthaZ5oE2jUzp_ML64afsSBnGXFvbnaqgJHE,4738
+xiaogpt/config.py,sha256=bhaOdmAr18qe9mv2CtL64aJxtf_-_g_-dDI0C7pLdpE,6301
 xiaogpt/langchain/callbacks.py,sha256=yR9AXQt9OHVYBWC47Q1I_BUT4Xg9iM44vnW2vv0BLpE,2616
 xiaogpt/langchain/chain.py,sha256=z0cqRlL0ElWnf31ByxZBN7AKOT-svXQDt5_NDft_nYc,1495
 xiaogpt/langchain/examples/email/mail_box.py,sha256=xauqrjE4-G4XPQnokUPE-MZgAaHQ_VrUDLlbfYTdCoo,6372
 xiaogpt/langchain/examples/email/mail_summary_tools.py,sha256=6cWvBJUaA7iaywcHdbUoww8WiCtaNw3TmwyxyF4DY7E,1561
 xiaogpt/tts/__init__.py,sha256=SZ0FVKbKVbuV7xfRtpwUt5cmqyNQaFa7LyGRYsmdDNE,220
-xiaogpt/tts/azure.py,sha256=
-xiaogpt/tts/base.py,sha256
+xiaogpt/tts/azure.py,sha256=JuE1wirQQAsYnnHmlc3sziuVXQUU0yGeGt5cHgiY388,3979
+xiaogpt/tts/base.py,sha256=a7J5cpcDNefr7deXJQWwDKw9XPFm6EQQL9O-GLe23hM,4660
 xiaogpt/tts/edge.py,sha256=yMFGxRTi086XS1d_mbMzQ365bvG4KgAz8ZptaoDAfGU,1172
 xiaogpt/tts/mi.py,sha256=9HkgGWByAs7k8sTpRdVlgJnnmjc44RNAccJa6tGDlXk,1096
 xiaogpt/tts/openai.py,sha256=_Qk12zYY-UuXLKvQVe3PqIvCmoRW9OcVCqQRoGCXvNc,1533
 xiaogpt/utils.py,sha256=B7NCH7g19hcwHDXsnBJPTU6UcWnXoEntKWm-pgcet2I,2072
-xiaogpt/xiaogpt.py,sha256=
-xiaogpt-2.
+xiaogpt/xiaogpt.py,sha256=YyDTm5DaO_US7cJOg-nPcjdQ_Xug0-tT3yQWg9APpBs,15337
+xiaogpt-2.40.dist-info/RECORD,,
xiaogpt/bot/bard_bot.py
DELETED
@@ -1,37 +0,0 @@
-"""ChatGLM bot"""
-
-from __future__ import annotations
-
-from typing import Any
-
-from rich import print
-
-from xiaogpt.bot.base_bot import BaseBot, ChatHistoryMixin
-
-
-class BardBot(ChatHistoryMixin, BaseBot):
-    name = "Bard"
-
-    def __init__(
-        self,
-        bard_token: str,
-    ) -> None:
-        from bardapi import BardAsync
-
-        self._bot = BardAsync(token=bard_token)
-        self.history = []
-
-    @classmethod
-    def from_config(cls, config):
-        return cls(bard_token=config.bard_token)
-
-    async def ask(self, query, **options):
-        try:
-            r = await self._bot.get_answer(query)
-        except Exception as e:
-            print(str(e))
-        print(r["content"])
-        return r["content"]
-
-    def ask_stream(self, query: str, **options: Any):
-        raise Exception("Bard do not support stream")
xiaogpt/bot/gpt3_bot.py
DELETED
@@ -1,90 +0,0 @@
-from __future__ import annotations
-
-import dataclasses
-from typing import ClassVar
-
-import httpx
-from rich import print
-
-from xiaogpt.bot.base_bot import BaseBot, ChatHistoryMixin
-from xiaogpt.utils import split_sentences
-
-
-@dataclasses.dataclass
-class GPT3Bot(ChatHistoryMixin, BaseBot):
-    name: ClassVar[str] = "GPT3"
-    openai_key: str
-    api_base: str | None = None
-    proxy: str | None = None
-    history: list[tuple[str, str]] = dataclasses.field(default_factory=list, init=False)
-
-    @classmethod
-    def from_config(cls, config):
-        return cls(
-            openai_key=config.openai_key, api_base=config.api_base, proxy=config.proxy
-        )
-
-    async def ask(self, query, **options):
-        import openai
-
-        data = {
-            "prompt": query,
-            "model": "text-davinci-003",
-            "max_tokens": 1024,
-            "temperature": 1,
-            "top_p": 1,
-            **options,
-        }
-        httpx_kwargs = {}
-        if self.config.proxy:
-            httpx_kwargs["proxies"] = self.config.proxy
-        async with httpx.AsyncClient(trust_env=True, **httpx_kwargs) as sess:
-            client = openai.AsyncOpenAI(
-                api_key=self.openai_key, http_client=sess, base_url=self.api_base
-            )
-            try:
-                completion = await client.completions.create(**data)
-            except Exception as e:
-                print(str(e))
-                return ""
-            print(completion.choices[0].text)
-            return completion.choices[0].text
-
-    async def ask_stream(self, query, **options):
-        import openai
-
-        data = {
-            "prompt": query,
-            "model": "text-davinci-003",
-            "max_tokens": 1024,
-            "temperature": 1,
-            "top_p": 1,
-            "stream": True,
-            **options,
-        }
-        httpx_kwargs = {}
-        if self.config.proxy:
-            httpx_kwargs["proxies"] = self.config.proxy
-        async with httpx.AsyncClient(trust_env=True, **httpx_kwargs) as sess:
-            client = openai.AsyncOpenAI(
-                api_key=self.openai_key, http_client=sess, base_url=self.api_base
-            )
-            try:
-                completion = await client.completions.create(**data)
-            except Exception as e:
-                print(str(e))
-                return
-
-        async def text_gen():
-            async for event in completion:
-                if not event.choices:
-                    continue
-                text = event.choices[0].text
-                print(text, end="")
-                yield text
-
-        try:
-            async for sentence in split_sentences(text_gen()):
-                yield sentence
-        finally:
-            print()
{xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/WHEEL
File without changes
{xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/entry_points.txt
File without changes
{xiaogpt-2.31.dist-info → xiaogpt-2.40.dist-info}/licenses/LICENSE
File without changes