llm-client-redis 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_client_redis-0.1.0/MANIFEST.in +2 -0
- llm_client_redis-0.1.0/PKG-INFO +94 -0
- llm_client_redis-0.1.0/README.md +74 -0
- llm_client_redis-0.1.0/pyproject.toml +39 -0
- llm_client_redis-0.1.0/setup.cfg +4 -0
- llm_client_redis-0.1.0/src/__init__.py +0 -0
- llm_client_redis-0.1.0/src/config/config.ini +54 -0
- llm_client_redis-0.1.0/src/config/llm_resources.json +92 -0
- llm_client_redis-0.1.0/src/llm_client_redis/__init__.py +3 -0
- llm_client_redis-0.1.0/src/llm_client_redis/chat_session_main.py +31 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/__init__.py +9 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/chat_session.py +238 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/cmd_templates/__init__.py +9 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/cmd_templates/send_template_cmd.py +128 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/cmd_templates/show_text_block_cmd.py +81 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/cmd_templates/text_block_command.py +115 -0
- llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/command_def.py +204 -0
- llm_client_redis-0.1.0/src/llm_client_redis/llm_client.py +269 -0
- llm_client_redis-0.1.0/src/llm_client_redis/tools/__init__.py +10 -0
- llm_client_redis-0.1.0/src/llm_client_redis/tools/llm_redis_manager.py +481 -0
- llm_client_redis-0.1.0/src/llm_client_redis/tools/llm_resources_tools.py +48 -0
- llm_client_redis-0.1.0/src/llm_client_redis/tools/output_tools.py +39 -0
- llm_client_redis-0.1.0/src/llm_client_redis.egg-info/PKG-INFO +94 -0
- llm_client_redis-0.1.0/src/llm_client_redis.egg-info/SOURCES.txt +26 -0
- llm_client_redis-0.1.0/src/llm_client_redis.egg-info/dependency_links.txt +1 -0
- llm_client_redis-0.1.0/src/llm_client_redis.egg-info/entry_points.txt +2 -0
- llm_client_redis-0.1.0/src/llm_client_redis.egg-info/requires.txt +5 -0
- llm_client_redis-0.1.0/src/llm_client_redis.egg-info/top_level.txt +3 -0
llm_client_redis-0.1.0/PKG-INFO
@@ -0,0 +1,94 @@
+Metadata-Version: 2.4
+Name: llm_client_redis
+Version: 0.1.0
+Summary: Client program for the Redis-mediated LLM service
+Author-email: Fang JiaWei <fjw12998@hotmail.com>
+License-Expression: GPL-3.0-or-later
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+Requires-Dist: langchain_core<0.4.0,>=0.3.72
+Requires-Dist: pytest<9.0.0,==8.4.1
+Requires-Dist: redis<6.0.0,==5.3.0
+Requires-Dist: Requests<3.0.0,==2.32.4
+Requires-Dist: llm_tokenizers>=0.1.2
+
+# llm_client_redis
+
+
+
+## Features
+
+
+
+
+# llm_client_redis
+
+## Introduction
+Integrates API access to multiple LLMs, using Redis as a message queue so that multiple clients can call LLM services concurrently. This project is the calling side; a companion project
+consumes the Redis messages, communicates with the LLMs, and writes the results back to Redis.
+
+## Architecture
+Architecture description
+
+
+## Installation
+
+1. xxxx
+2. xxxx
+3. xxxx
+
+## Usage
+
+### 1. Python API
+
+`llm_client_redis.llm_client.py`
+
+Fetches the complete answer in a single call; the wait for the response to appear can be long.
+
+```python
+
+from src.llm_client_redis import LLMClientRedis
+from langchain_core.messages import BaseMessage, SystemMessage, HumanMessage
+from typing import List
+
+llm_client_redis: LLMClientRedis = LLMClientRedis(llm_json_path="../config/llm_resources.json",
+                                                  config_path="../config/config.ini")
+
+model: str = "home_qwen3:32b"
+
+messages: List[BaseMessage] = [SystemMessage("You are a helpful assistant"), HumanMessage("Hello")]
+
+data = llm_client_redis.request(messages=messages, model=model)
+
+print(data)
+
+```
+
+### 2. Command line
+
+```shell
+chat-session
+```
+
+## Contributing
+
+1. Fork this repository
+2. Create a Feat_xxx branch
+3. Commit your code
+4. Open a Pull Request
+
+
+## Gitee extras
+
+1. Use Readme_XXX.md to support multiple languages, e.g. Readme_en.md, Readme_zh.md
+2. Official Gitee blog: [blog.gitee.com](https://blog.gitee.com)
+3. Visit [https://gitee.com/explore](https://gitee.com/explore) to discover excellent open-source projects on Gitee
+4. [GVP](https://gitee.com/gvp), short for Gitee's Most Valuable Projects, is a curated selection of outstanding open-source projects
+5. The official Gitee user manual: [https://gitee.com/help](https://gitee.com/help)
+6. Gitee Cover People is a column showcasing Gitee members: [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/)
llm_client_redis-0.1.0/README.md
@@ -0,0 +1,74 @@
+# llm_client_redis
+
+
+
+## Features
+
+
+
+
+# llm_client_redis
+
+## Introduction
+Integrates API access to multiple LLMs, using Redis as a message queue so that multiple clients can call LLM services concurrently. This project is the calling side; a companion project
+consumes the Redis messages, communicates with the LLMs, and writes the results back to Redis.
+
+## Architecture
+Architecture description
+
+
+## Installation
+
+1. xxxx
+2. xxxx
+3. xxxx
+
+## Usage
+
+### 1. Python API
+
+`llm_client_redis.llm_client.py`
+
+Fetches the complete answer in a single call; the wait for the response to appear can be long.
+
+```python
+
+from src.llm_client_redis import LLMClientRedis
+from langchain_core.messages import BaseMessage, SystemMessage, HumanMessage
+from typing import List
+
+llm_client_redis: LLMClientRedis = LLMClientRedis(llm_json_path="../config/llm_resources.json",
+                                                  config_path="../config/config.ini")
+
+model: str = "home_qwen3:32b"
+
+messages: List[BaseMessage] = [SystemMessage("You are a helpful assistant"), HumanMessage("Hello")]
+
+data = llm_client_redis.request(messages=messages, model=model)
+
+print(data)
+
+```
+
+### 2. Command line
+
+```shell
+chat-session
+```
+
+## Contributing
+
+1. Fork this repository
+2. Create a Feat_xxx branch
+3. Commit your code
+4. Open a Pull Request
+
+
+## Gitee extras
+
+1. Use Readme_XXX.md to support multiple languages, e.g. Readme_en.md, Readme_zh.md
+2. Official Gitee blog: [blog.gitee.com](https://blog.gitee.com)
+3. Visit [https://gitee.com/explore](https://gitee.com/explore) to discover excellent open-source projects on Gitee
+4. [GVP](https://gitee.com/gvp), short for Gitee's Most Valuable Projects, is a curated selection of outstanding open-source projects
+5. The official Gitee user manual: [https://gitee.com/help](https://gitee.com/help)
+6. Gitee Cover People is a column showcasing Gitee members: [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/)
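
The README example above uses the blocking `request` call. The chat_session.py shown later in this diff also consumes a streaming interface, `request_stream`, which yields the answer chunk by chunk. A minimal sketch of that streaming usage, assuming `LLMClientRedis.request_stream(messages=..., model=...)` is a generator of text chunks, matching how cmd_chat/chat_session.py calls it:

```python
# Minimal sketch (not from the package docs): streaming variant of the README
# example. Assumes request_stream(messages=..., model=...) yields text chunks,
# as it is called in cmd_chat/chat_session.py.
from typing import List

from src.llm_client_redis import LLMClientRedis
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage

client: LLMClientRedis = LLMClientRedis(llm_json_path="../config/llm_resources.json",
                                        config_path="../config/config.ini")

messages: List[BaseMessage] = [SystemMessage("You are a helpful assistant"),
                               HumanMessage("Hello")]

# Print the answer as it arrives from the Redis response stream.
for chunk in client.request_stream(messages=messages, model="home_qwen3:32b"):
    print(chunk, end="", flush=True)
print()
```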
llm_client_redis-0.1.0/pyproject.toml
@@ -0,0 +1,39 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "llm_client_redis"
+version = "0.1.0"
+authors = [
+    { name="Fang JiaWei", email="fjw12998@hotmail.com" },
+]
+description = "Client program for the Redis-mediated LLM service"
+readme = "README.md"
+requires-python = ">=3.9"
+dependencies = [
+    "langchain_core>=0.3.72,<0.4.0",
+    "pytest==8.4.1,<9.0.0",
+    "redis==5.3.0,<6.0.0",
+    "Requests==2.32.4,<3.0.0",
+    "llm_tokenizers>=0.1.2"
+]
+classifiers = [
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Operating System :: OS Independent"
+]
+license = "GPL-3.0-or-later"
+
+[tool.setuptools.package-data]
+"src.config" = ["*.ini", "*.json"]
+
+[project.scripts]
+chat-session = "chat_session_main:main"
+
+[project.urls]
+## Homepage = "https://github.com/yourusername/llm_tools"
+## Documentation = "https://github.com/yourusername/llm_tools/docs"

llm_client_redis-0.1.0/src/__init__.py
File without changes
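
The `[project.scripts]` table above is what provides the `chat-session` command: at install time setuptools generates a console launcher that imports `chat_session_main` and calls its `main()` function. A rough hand-written equivalent of that generated launcher, assuming the module is importable as `chat_session_main`, as the entry point declares:

```python
# Sketch of what the generated `chat-session` console script does, per
# [project.scripts]; the real launcher is emitted by setuptools at install time.
import sys

from chat_session_main import main

if __name__ == "__main__":
    sys.exit(main())
```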
llm_client_redis-0.1.0/src/config/config.ini
@@ -0,0 +1,54 @@
+# config.ini v0.1.0
+
+[redis_server]
+# Host address of the Redis server (redis03.home)
+host = 006.jq48.cn
+
+# Port of the Redis server
+port = 26379
+
+# Name of the environment variable holding the Redis password (the password is read from the environment)
+password_env_var_name = REDIS03_AUTHENTICATION
+
+# Redis database number
+db = 2
+
+# Name of the Redis stream used for requests
+request_stream_name = request_stream
+
+# Name of the Redis map used for answers
+answer_map_name = answer_map
+
+# Prefix for the Redis response streams; the prefix includes the colon, and appending a sequence number gives the name of the response list
+chunk_stream_prefix = chunk_stream:
+
+# Prefix for the Redis reasoning streams; the prefix includes the colon, and appending a sequence number gives the name of the response list
+reasoning_stream_prefix = reasoning_stream:
+
+[logging]
+# Log level; valid values are DEBUG, INFO, WARNING, ERROR, CRITICAL
+level = INFO
+
+
+[redis_arch]
+# Whether Redis archiving is enabled; true enables it, false disables it
+redis_arch_enable = true
+
+# Host address of the Redis archive server (redis03.home)
+redis_arch_host = 006.jq48.cn
+
+# Port of the Redis archive server
+redis_arch_port = 26379
+
+# Name of the environment variable holding the archive server password (read from the environment)
+redis_arch_password_env_var_name = REDIS03_AUTHENTICATION
+
+# Database number used by the Redis archive server
+redis_arch_db = 3
+
+# Name of the Redis archive data stream
+redis_arch_data_stream_name = arch_stream
+
+[local_llm]
+local_llm_id = home_qwen3:32b
+
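
Note that `[redis_server]` stores only the *name* of the environment variable holding the Redis password, never the password itself. A minimal sketch of turning that section into a connection with the pinned `redis` dependency (the helper below is illustrative, not part of the package):

```python
# Illustrative helper, not package code: build a Redis connection from the
# [redis_server] section of config.ini. The password is read from the
# environment variable named by password_env_var_name.
import os
from configparser import ConfigParser

import redis


def redis_from_config(config_path: str) -> redis.Redis:
    parser = ConfigParser()
    parser.read(config_path, encoding="utf-8")
    section = parser["redis_server"]
    return redis.Redis(host=section["host"],
                       port=section.getint("port"),
                       db=section.getint("db"),
                       password=os.environ.get(section["password_env_var_name"]))
```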
llm_client_redis-0.1.0/src/config/llm_resources.json
@@ -0,0 +1,92 @@
+{
+  "deepseek_r1": {
+    "model": "deepseek-reasoner",
+    "version": "R1",
+    "base_url": "https://api.deepseek.com",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "DEEPSEEK_API_KEY",
+    "response_type": "deepseek-reasoner",
+    "description": "LangChain interface for the DeepSeek R1 model"
+  },
+  "deepseek_v3": {
+    "model": "deepseek-chat",
+    "version": "V3",
+    "base_url": "https://api.deepseek.com",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "DEEPSEEK_API_KEY",
+    "response_type": "deepseek-chat",
+    "description": "LangChain interface for the DeepSeek V3 model"
+  },
+  "huawei_deepseek_r1_32k": {
+    "model": "DeepSeek-R1",
+    "version": "R1",
+    "base_url": "https://maas-cn-southwest-2.modelarts-maas.com/deepseek-r1/v1",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "HUAWEI_MODEL_ART_API_KEY",
+    "response_type": "deepseek-reasoner",
+    "description": "LangChain DeepSeek interface for the Huawei Cloud DeepSeek R1 32K model"
+  },
+  "huawei_deepseek_v3_32k": {
+    "model": "DeepSeek-V3",
+    "version": "V1",
+    "base_url": "https://maas-cn-southwest-2.modelarts-maas.com/deepseek-v3/v1",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "HUAWEI_MODEL_ART_API_KEY",
+    "response_type": "deepseek-chat",
+    "description": "LangChain DeepSeek interface for the Huawei Cloud DeepSeek V3 32K model"
+  },
+  "huawei_DeepSeek-R1-32K-0528": {
+    "model": "deepseek-r1-250528",
+    "version": "R1",
+    "base_url": "https://api.modelarts-maas.com/v1",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "HUAWEI_MODEL_ART_API_KEY",
+    "response_type": "deepseek-reasoner",
+    "description": "LangChain DeepSeek interface for the Huawei Cloud DeepSeek-R1-32K-0528 model"
+  },
+  "huawei_qwen3-32b": {
+    "model": "qwen3-32b",
+    "version": "V1",
+    "base_url": "https://api.modelarts-maas.com/v1",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "HUAWEI_MODEL_ART_API_KEY",
+    "response_type": "deepseek-chat",
+    "description": "LangChain DeepSeek interface for the Huawei Cloud qwen3-32b model"
+  },
+  "huawei_qwen3-235b-a22b": {
+    "model": "qwen3-235b-a22b",
+    "version": "V1",
+    "base_url": "https://api.modelarts-maas.com/v1",
+    "type": "BaseChatOpenAI",
+    "provider": "langchain-deepseek",
+    "env_api_key_name": "HUAWEI_MODEL_ART_API_KEY",
+    "response_type": "deepseek-chat",
+    "description": "LangChain DeepSeek interface for the Huawei Cloud qwen3-235b-a22b model"
+  },
+  "home_deepseek-r1:32b": {
+    "model": "deepseek-r1:32b",
+    "version": "32b",
+    "base_url": "https://localhost:11434",
+    "type": "BaseLLM",
+    "provider": "langchain-ollama",
+    "env_api_key_name": null,
+    "response_type": "deepseek-reasoner",
+    "description": "LangChain interface for the local DeepSeek R1 32b model"
+  },
+  "home_qwen3:32b": {
+    "model": "qwen3:32b",
+    "version": "32b",
+    "base_url": "https://localhost:11434",
+    "type": "BaseLLM",
+    "provider": "langchain-ollama",
+    "env_api_key_name": null,
+    "response_type": "deepseek-reasoner",
+    "description": "LangChain interface for the local qwen3:32b model"
+  }
+}
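
Each top-level key in llm_resources.json is a model ID that callers pass as the `model` argument (the README uses `home_qwen3:32b`); the entry records the underlying model name, endpoint, provider, and the environment variable holding its API key. A minimal sketch of reading the file, roughly what `llm_resources_tools.list_llm_def()` appears to expose (the function below is illustrative, not the package's API):

```python
# Illustrative, not the package's API: list the model IDs defined in
# llm_resources.json and show where each one points.
import json


def list_llm_ids(llm_json_path: str) -> list:
    with open(llm_json_path, encoding="utf-8") as f:
        resources: dict = json.load(f)
    for llm_id, spec in resources.items():
        print(f"{llm_id}: {spec['model']} @ {spec['base_url']} ({spec['provider']})")
    return list(resources)
```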
llm_client_redis-0.1.0/src/llm_client_redis/chat_session_main.py
@@ -0,0 +1,31 @@
+from configparser import ConfigParser
+
+from cmd_chat import ChatSession
+from cmd_chat.cmd_templates import SendTemplateCommand
+from cmd_chat.cmd_templates import ShowTextBlockCommand
+from cmd_chat.cmd_templates import TextBlockCommand
+
+
+def main():
+
+    config_path: str = "../config/config.ini"
+
+    configparser: ConfigParser = ConfigParser()
+    configparser.read(config_path, encoding="utf-8")
+
+    model: str = configparser['local_llm']['local_llm_id']
+
+    chat = ChatSession(model=model,
+                       llm_json_path="../config/llm_resources.json",
+                       config_path=config_path,
+                       max_history=8)  # keep the last 4 turns of dialogue
+
+    chat.command_registry.register(SendTemplateCommand())
+    chat.command_registry.register(TextBlockCommand())
+    chat.command_registry.register(ShowTextBlockCommand())
+
+    chat.start()
+    pass
+
+if __name__ == '__main__':
+    main()
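
`main()` reads `../config/config.ini` relative to the current working directory, so the `chat-session` command only finds its configuration when launched from a matching directory. A defensive sketch (not package code) that anchors the paths to the script's own location instead:

```python
# Sketch, not package code: resolve the config files against the directory of
# this file rather than the current working directory.
import os

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

config_path = os.path.join(BASE_DIR, "..", "config", "config.ini")
llm_json_path = os.path.join(BASE_DIR, "..", "config", "llm_resources.json")
```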
llm_client_redis-0.1.0/src/llm_client_redis/cmd_chat/chat_session.py
@@ -0,0 +1,238 @@
+import datetime
+import os
+
+from langchain_core.messages import HumanMessage, SystemMessage
+
+from src.llm_client_redis.tools import OutputTools
+from .command_def import Command
+from .command_def import AbstractChatSession
+
+# ------------------------ Command system ------------------------
+class ExitCommand(Command):
+    """Exit command."""
+    def __init__(self):
+        super().__init__("exit", ["quit"], "Exit the program")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        print("Goodbye!")
+        exit(0)
+
+class HistoryCommand(Command):
+    """Show-history command."""
+    def __init__(self):
+        super().__init__("history", [], "Show the conversation history")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.show_history()
+
+class DeleteHistoryCommand(Command):
+    """Delete-history command."""
+    def __init__(self):
+        super().__init__("hdel", [], "Delete the history entry at the given index: hdel[index]")
+
+    def match(self, input_str: str, session: AbstractChatSession) -> bool:
+
+        if input_str.lower().startswith(self.name):
+            idx: int = int(input_str.lower().strip()[len("hdel"):])
+            return 0 <= idx < len(session.history)
+        else:
+            return False
+
+
+    def execute(self, session: AbstractChatSession, args=None):
+        idx_str = session.user_input[len(self.name):]
+        try:
+            idx = int(idx_str)
+            session.del_history(idx)
+        except ValueError:
+            print(f"Invalid index: {idx_str}")
+
+class ClearHistoryCommand(Command):
+    """Clear-history command."""
+    def __init__(self):
+        super().__init__("clear", [], "Clear the conversation history")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.clear_history()
+
+class HelpCommand(Command):
+    """Help command."""
+    def __init__(self):
+        super().__init__("help", ["?"], "Show help information")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.show_welcome()
+
+class ListModelsCommand(Command):
+    """List-models command."""
+    def __init__(self):
+        super().__init__("list", [], "List all supported models")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.list_models()
+
+class CurrentModelCommand(Command):
+    """Current-model command."""
+    def __init__(self):
+        super().__init__("model", [], "Show the model currently in use")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.show_model()
+
+class SummaryCommand(Command):
+    """Summary command."""
+    def __init__(self):
+        super().__init__("summary", [], "Summarize the conversation history")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.summary_history()
+
+class SaveCommand(Command):
+    """Save command."""
+    def __init__(self):
+        super().__init__("save", [], "Save the conversation history")
+
+    def execute(self, session: AbstractChatSession, args=None):
+        session.save_history()
+
+class ChangeModelCommand(Command):
+    """Change-model command."""
+    def __init__(self):
+        super().__init__("chg", [], "Switch the current model to the model at [index] as returned by list")
+
+    def match(self, input_str: str, session: AbstractChatSession) -> bool:
+
+        if input_str.lower().startswith(self.name):
+            idx: int = int(input_str.lower().strip()[len("chg"):])
+            return 0 <= idx < len(session.llm_redis.llm_resources_tools.list_llm_def())
+        else:
+            return False
+
+    def execute(self, session: AbstractChatSession, args=None):
+
+        idx_str = session.user_input[len(self.name):]
+
+        model: str = session.model
+        try:
+            idx = int(idx_str)
+            model = session.llm_redis.llm_resources_tools.list_llm_def()[idx]
+        except ValueError:
+            print(f"Invalid index: {idx_str}")
+
+        session.model = model
+        print(f"Current model changed to: {session.model}")
+
+
+
+
+# ------------------------ The ChatSession class ------------------------
+class ChatSession(AbstractChatSession):
+
+    def init_commands(self):
+        """Initialize all commands."""
+        # Register the built-in commands
+        self.command_registry.register(ExitCommand())
+        self.command_registry.register(HistoryCommand())
+        self.command_registry.register(DeleteHistoryCommand())
+        self.command_registry.register(ClearHistoryCommand())
+        self.command_registry.register(HelpCommand())
+        self.command_registry.register(ListModelsCommand())
+        self.command_registry.register(CurrentModelCommand())
+        self.command_registry.register(SummaryCommand())
+        self.command_registry.register(SaveCommand())
+        self.command_registry.register(ChangeModelCommand())
+
+    def show_history(self) -> str:
+        """
+        Show the conversation history and return it as a string.
+
+        :return: the formatted history text
+        """
+        text: str = ""  # accumulates the history text
+
+        text += "\n=== Conversation history ===\n"
+        for idx, line in enumerate(self.history[-self.max_history:]):
+            text += f"{idx}. {line.type}: {line.content}\n"  # number each record
+        text += "=================\n"
+
+        print(text)
+        return text
+
+    def del_history(self, idx: int):
+        """
+        Delete the history entry at the given index.
+        :param idx: index of the entry to delete
+        :return: None
+        """
+        # Check that the index is within range
+        if idx < 0 or idx >= len(self.history):
+            # Invalid index: report it and return
+            print(f"Invalid history index: {idx}")
+            return
+        # Delete the entry at the given index
+        del self.history[idx]
+        # Report the deletion
+        print(f"Deleted history entry: {idx}")
+        # Show the remaining history
+        self.show_history()
+        # Done
+        return
+
+    def summary_history(self):
+        print("Summarizing the entire history into a single context prompt.")
+        self.show_history()
+        self.history.append(HumanMessage("Summarize the entire conversation history into a single context prompt, "
+                                         "so that later turns retain the core content of the earlier topics."))
+
+        print("\nSystem: ", end="")
+
+        answer_text: str = ""
+        # Call the LLM service
+        for answer_chunk in self.llm_redis.request_stream(messages=self.history.copy(), model=self.model):
+            answer_text += answer_chunk
+            print(answer_chunk, end="", flush=True)
+
+        print("Replacing the original context with the summary")
+        self.history = [SystemMessage(OutputTools.remove_think(answer_text))]
+        self.show_history()
+
+    def save_history(self):
+        # Get the history text (show_history also prints it)
+        text: str = self.show_history()
+
+        # Build the output path; the file name embeds the current time
+        # in the format chat_history_YYYYMMDD_HHMMSS.txt
+        file_path: str = os.path.join(self.save_output_path,
+                                      f"chat_history_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.txt")
+        # Write the history as UTF-8 text; the file is created if it does not exist
+        with open(file_path, 'w', encoding='utf-8') as f:
+            f.write(text)
+        # Report where the history was saved
+        print(f"Saved conversation history to: {file_path}")
+
+    def list_models(self):
+        models: list = self.llm_redis.llm_resources_tools.list_llm_def()
+
+        print("\nThe following models are supported:")
+        for idx, model in enumerate(models):
+            print(f"{idx} - {model}")
+        print()
+
+    def show_model(self):
+        print(f"\nCurrent model: {self.model}")
+
+    def clear_history(self):
+        self.history = []
+        print("Conversation history cleared.")
+
+
+if __name__ == "__main__":
+    chat = ChatSession(model="home_qwen3:32b",
+                       llm_json_path="../config/llm_resources.json",
+                       config_path="../config/config.ini",
+                       max_history=8)  # keep the last 4 turns of dialogue
+    chat.start()
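
chat_session_main.py extends this session with extra commands (SendTemplateCommand and friends) through the same `command_registry` that `init_commands` uses, so adding a command only requires subclassing `Command` with the constructor and `execute` signatures shown above. A minimal sketch (the `stats` command below is invented for illustration):

```python
# Illustrative only: a custom command in the style of the subclasses above.
class StatsCommand(Command):
    """Hypothetical command reporting how many messages the history holds."""

    def __init__(self):
        super().__init__("stats", [], "Show the number of messages in the history")

    def execute(self, session: AbstractChatSession, args=None):
        print(f"History contains {len(session.history)} messages")


# Registered the same way chat_session_main.py registers its template commands:
# chat.command_registry.register(StatsCommand())
```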