nonebot-plugin-githubmodels 0.2.8__tar.gz → 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,27 +1,18 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: nonebot-plugin-githubmodels
3
- Version: 0.2.8
4
- Summary: 一个调用 GitHub Models 的 AI 对话插件
5
- Home-page: https://github.com/lyqgzbl/nonebot-plugin-githubmodels
6
- License: MIT
7
- Keywords: nonebot2,GitHub Models
3
+ Version: 1.0.0
4
+ Summary: 一个调用 GitHub Models 的 AI 对话插件
8
5
  Author: lyqgzbl
9
- Author-email: admin@lyqgzbl.com
10
- Requires-Python: >=3.8,<4.0
11
- Classifier: License :: OSI Approved :: MIT License
12
- Classifier: Programming Language :: Python :: 3
13
- Classifier: Programming Language :: Python :: 3.8
14
- Classifier: Programming Language :: Python :: 3.9
15
- Classifier: Programming Language :: Python :: 3.10
16
- Classifier: Programming Language :: Python :: 3.11
17
- Classifier: Programming Language :: Python :: 3.12
18
- Classifier: Programming Language :: Python :: 3.13
19
- Requires-Dist: nonebot-plugin-alconna (>=0.54.0,<0.55.0)
20
- Requires-Dist: nonebot-plugin-htmlrender (>=0.5.0,<0.6.0)
21
- Requires-Dist: nonebot2 (>=2.2.1,<3.0.0)
22
- Requires-Dist: openai (>=1.44.1,<2.0.0)
23
- Project-URL: Documentation, https://github.com/lyqgzbl/nonebot-plugin-githubmodels#readme
24
- Project-URL: Repository, https://github.com/lyqgzbl/nonebot-plugin-githubmodels
6
+ Author-email: lyqgzbl <122811297+lyqgzbl@users.noreply.github.com>
7
+ License-Expression: MIT
8
+ Requires-Dist: nonebot2>=2.2.1,<3.0.0
9
+ Requires-Dist: azure-ai-inference>=1.0.0,<2.0.0
10
+ Requires-Dist: nonebot-plugin-htmlrender>=0.5.0,<1.0.0
11
+ Requires-Dist: nonebot-plugin-alconna>=0.54.0,<1.0.0
12
+ Requires-Python: >=3.10, <4.0
13
+ Project-URL: documentation, https://github.com/lyqgzbl/nonebot-plugin-githubmodels#readme
14
+ Project-URL: homepage, https://github.com/lyqgzbl/nonebot-plugin-githubmodels
15
+ Project-URL: repository, https://github.com/lyqgzbl/nonebot-plugin-githubmodels
25
16
  Description-Content-Type: text/markdown
26
17
 
27
18
  <!-- markdownlint-disable MD033 MD036 MD041 -->
@@ -90,7 +81,7 @@ pip install nonebot-plugin-githubmodels
90
81
  ### ai_model_name [选填]
91
82
 
92
83
  - 类型: `str`
93
- - 默认: `gpt-4o-mini`
84
+ - 默认: `openai/gpt-4.1-mini`
94
85
  - 说明: 所使用的模型
95
86
 
96
87
  ### ai_temperature [选填]
@@ -99,14 +90,8 @@ pip install nonebot-plugin-githubmodels
99
90
  - 默认: `1.0`
100
91
  - 说明: 生成的文本的多样性和连贯性
101
92
 
102
- ### ai_max_tokens [选填]
103
-
104
- - 类型: `int`
105
- - 默认: `1024`
106
- - 说明: 模型生成文本的字数
107
-
108
93
  ## ai_top_p [选填]
109
94
 
110
95
  - 类型: `float`
111
96
  - 默认: `1.0`
112
- - 说明: 生成文本随机性
97
+ - 说明: 生成文本随机性
@@ -64,7 +64,7 @@ pip install nonebot-plugin-githubmodels
64
64
  ### ai_model_name [选填]
65
65
 
66
66
  - 类型: `str`
67
- - 默认: `gpt-4o-mini`
67
+ - 默认: `openai/gpt-4.1-mini`
68
68
  - 说明: 所使用的模型
69
69
 
70
70
  ### ai_temperature [选填]
@@ -73,12 +73,6 @@ pip install nonebot-plugin-githubmodels
73
73
  - 默认: `1.0`
74
74
  - 说明: 生成的文本的多样性和连贯性
75
75
 
76
- ### ai_max_tokens [选填]
77
-
78
- - 类型: `int`
79
- - 默认: `1024`
80
- - 说明: 模型生成文本的字数
81
-
82
76
  ## ai_top_p [选填]
83
77
 
84
78
  - 类型: `float`
@@ -0,0 +1,45 @@
1
+ [project]
2
+ name = "nonebot-plugin-githubmodels"
3
+ version = "1.0.0"
4
+ description = "一个调用 GitHub Models 的 AI 对话插件"
5
+ readme = "README.md"
6
+ authors = [
7
+ { name = "lyqgzbl", email = "122811297+lyqgzbl@users.noreply.github.com" }
8
+ ]
9
+ requires-python = ">=3.10, <4.0"
10
+ license = "MIT"
11
+ dependencies = [
12
+ "nonebot2 >=2.2.1, <3.0.0",
13
+ "azure-ai-inference >=1.0.0, <2.0.0",
14
+ "nonebot-plugin-htmlrender >=0.5.0, <1.0.0",
15
+ "nonebot-plugin-alconna >=0.54.0, <1.0.0",
16
+ ]
17
+
18
+ [project.urls]
19
+ homepage = "https://github.com/lyqgzbl/nonebot-plugin-githubmodels"
20
+ repository = "https://github.com/lyqgzbl/nonebot-plugin-githubmodels"
21
+ documentation = "https://github.com/lyqgzbl/nonebot-plugin-githubmodels#readme"
22
+
23
+ [build-system]
24
+ requires = ["uv_build>=0.8.17,<0.9.0"]
25
+ build-backend = "uv_build"
26
+
27
+ [dependency-groups]
28
+ dev = [
29
+ "ruff>=0.14.0",
30
+ ]
31
+
32
+ [tool.ruff]
33
+ line-length = 88
34
+ target-version = "py310"
35
+
36
+ [tool.ruff.lint]
37
+ select = ["E", "W", "F", "UP", "C", "T", "PYI", "PT", "Q"]
38
+ ignore = ["E402", "C901"]
39
+ fixable = ["ALL"]
40
+
41
+ [tool.ruff.format]
42
+ quote-style = "double"
43
+ indent-style = "space"
44
+ skip-magic-trailing-comma = false
45
+ line-ending = "auto"
@@ -1,17 +1,17 @@
1
1
  import datetime
2
2
  from pathlib import Path
3
3
 
4
- import nonebot
5
- from nonebot import require, get_plugin_config
4
+
6
5
  from nonebot.rule import Rule
7
6
  from nonebot.log import logger
7
+ from nonebot import require, get_plugin_config
8
+ from azure.ai.inference.models import SystemMessage
8
9
  from nonebot.plugin import PluginMetadata, inherit_supported_adapters
9
10
  require("nonebot_plugin_alconna")
10
11
  require("nonebot_plugin_htmlrender")
11
- from openai import BadRequestError
12
- from arclet.alconna import Args, Option, Alconna, MultiVar, CommandMeta
13
- from nonebot_plugin_alconna import UniMessage, on_alconna, Match
14
12
  from nonebot_plugin_htmlrender import md_to_pic
13
+ from nonebot_plugin_alconna import UniMessage, on_alconna, Match
14
+ from arclet.alconna import Args, Option, Alconna, MultiVar, CommandMeta
15
15
 
16
16
  from .config import Config
17
17
  from .openai_handler import OPENAI_Handler
@@ -34,16 +34,16 @@ REPLY_IMAGE = plugin_config.ai_reply_image
34
34
 
35
35
 
36
36
  if not plugin_config.github_token:
37
- logger.opt(colors=True).warning("<yellow>缺失必要配置项 'github_token',已禁用该插件</yellow>")
38
- openai_handler = None
37
+ logger.opt(colors=True).warning("<yellow>缺失必要配置项 'github_token'," \
38
+ "已禁用该插件</yellow>")
39
+ Openai_Handler = None
39
40
  else:
40
- openai_handler = OPENAI_Handler(
41
+ Openai_Handler = OPENAI_Handler(
41
42
  api_key=plugin_config.github_token,
42
- endpoint="https://models.inference.ai.azure.com",
43
+ endpoint="https://models.github.ai/inference",
43
44
  model_name=plugin_config.ai_model_name,
44
45
  max_context_length=plugin_config.max_context_length,
45
46
  temperature=plugin_config.ai_temperature,
46
- max_tokens=plugin_config.ai_max_tokens,
47
47
  top_p=plugin_config.ai_top_p,
48
48
  )
49
49
 
@@ -54,7 +54,7 @@ def is_enable() -> Rule:
54
54
  return Rule(_rule)
55
55
 
56
56
 
57
- context_manager = ContextManager(max_context_length=plugin_config.max_context_length)
57
+ Context_Manager = ContextManager(max_context_length=plugin_config.max_context_length)
58
58
 
59
59
 
60
60
  ai = on_alconna(
@@ -79,7 +79,7 @@ ai = on_alconna(
79
79
 
80
80
  @ai.assign("reset")
81
81
  async def ai_reset():
82
- context_manager.reset_context()
82
+ Context_Manager.reset_context()
83
83
  await ai.finish("上下文已重置")
84
84
 
85
85
 
@@ -101,23 +101,26 @@ async def handle_function(user_input: Match[tuple[str]]):
101
101
  async def got_location(user_input: str):
102
102
  global REPLY_IMAGE
103
103
  try:
104
- messages = [{"role": "system", "content": "回答尽量简练,请始终用中文回答"}]
105
- context_manager.add_message("user", user_input)
106
- messages += context_manager.get_context()
107
- reply = await openai_handler.get_response(messages)
108
- context_manager.add_message("assistant", reply)
104
+ messages = [SystemMessage(content="回答尽量简练,请始终用中文回答")]
105
+ Context_Manager.add_message("user", user_input)
106
+ messages += Context_Manager.get_context()
107
+ if Openai_Handler is None:
108
+     raise RuntimeError("缺失必要配置项 'github_token',无法调用模型")
109
+ reply = await Openai_Handler.get_response(messages)
110
+ Context_Manager.add_message("assistant", reply)
109
111
  if REPLY_IMAGE:
110
112
  current_hour = datetime.datetime.now().hour
111
113
  is_dark_mode = 18 <= current_hour or current_hour < 6
112
114
  css_file = (
113
- Path(__file__).parent / "css" / ("dark.css" if is_dark_mode else "light.css")
115
+ Path(__file__).
116
+ parent / "css" / ("dark.css" if is_dark_mode else "light.css")
114
117
  )
115
118
  pic = await md_to_pic(md=reply, css_path=str(css_file))
116
119
  await UniMessage.image(raw=pic).send(reply_to=True)
117
120
  else:
118
121
  await UniMessage.text(reply).send(reply_to=True)
119
- except BadRequestError as e:
122
+ except Exception as e:
120
123
  logger.opt(colors=True).error(f"<red>API 请求失败: {e}</red>")
121
124
  await ai.send("问题触发了内容过滤策略,请修改问题后重试")
122
125
  finally:
123
- REPLY_IMAGE = plugin_config.ai_reply_image
126
+ REPLY_IMAGE = plugin_config.ai_reply_image
@@ -1,11 +1,9 @@
1
- from typing import Optional
2
1
  from pydantic import BaseModel, Field
3
2
 
4
3
  class Config(BaseModel):
5
- github_token: Optional[str] = None
4
+ github_token: str | None = None
6
5
  max_context_length: int = Field(20)
7
6
  ai_reply_image: bool = False
8
- ai_model_name: str = "gpt-4o-mini"
7
+ ai_model_name: str = "openai/gpt-4.1-mini"
9
8
  ai_temperature: float = Field(1.0)
10
- ai_max_tokens: int = Field(1024)
11
- ai_top_p: float = Field(1.0)
9
+ ai_top_p: float = Field(1.0)
@@ -0,0 +1,41 @@
1
+ from __future__ import annotations
2
+ from azure.ai.inference.models import (
3
+ AssistantMessage,
4
+ ChatRequestMessage,
5
+ SystemMessage,
6
+ UserMessage,
7
+ )
8
+
9
+
10
+ class ContextManager:
11
+ def __init__(self, max_context_length: int):
12
+ self.max_context_length = max_context_length
13
+ self.shared_context: list[ChatRequestMessage] = []
14
+
15
+ def add_message(self, role: str, content: str):
16
+ if self.max_context_length <= 0:
17
+ return
18
+
19
+ message = self._create_message(role, content)
20
+ if message is None:
21
+ return
22
+
23
+ self.shared_context.append(message)
24
+ if len(self.shared_context) > self.max_context_length:
25
+ self.shared_context = self.shared_context[-self.max_context_length:]
26
+
27
+ def get_context(self) -> list[ChatRequestMessage]:
28
+ return self.shared_context.copy() if self.max_context_length > 0 else []
29
+
30
+ def reset_context(self):
31
+ self.shared_context = []
32
+
33
+ @staticmethod
34
+ def _create_message(role: str, content: str) -> ChatRequestMessage | None:
35
+ if role == "user":
36
+ return UserMessage(content=content)
37
+ if role == "assistant":
38
+ return AssistantMessage(content=content)
39
+ if role == "system":
40
+ return SystemMessage(content=content)
41
+ return None
@@ -0,0 +1,36 @@
1
+ from collections.abc import Sequence
2
+ from azure.ai.inference.aio import ChatCompletionsClient
3
+ from azure.ai.inference.models import ChatRequestMessage
4
+ from azure.core.credentials import AzureKeyCredential
5
+
6
+
7
+ class OPENAI_Handler:
8
+ def __init__(
9
+ self,
10
+ api_key: str,
11
+ endpoint: str,
12
+ model_name: str,
13
+ max_context_length: int,
14
+ temperature: float,
15
+ top_p: float,
16
+ ):
17
+ self.client = ChatCompletionsClient(
18
+ endpoint=endpoint,
19
+ credential=AzureKeyCredential(api_key)
20
+ )
21
+ self.model_name = model_name
22
+ self.max_context_length = max_context_length
23
+ self.temperature = temperature
24
+ self.top_p = top_p
25
+
26
+ async def get_response(self, messages: Sequence[ChatRequestMessage]) -> str:
27
+ response = await self.client.complete(
28
+ messages=list(messages),
29
+ model=self.model_name,
30
+ temperature=self.temperature,
31
+ top_p=self.top_p,
32
+ )
33
+ content = response.choices[0].message.content
34
+ if not content:
35
+ raise ValueError("模型未返回内容")
36
+ return content
@@ -1,19 +0,0 @@
1
- The MIT License (MIT)
2
- Copyright (c) 2024 lyqgzbl
3
-
4
- Permission is hereby granted, free of charge, to any person obtaining a copy of
5
- this software and associated documentation files (the "Software"), to deal in
6
- the Software without restriction, including without limitation the rights to
7
- use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
8
- the Software, and to permit persons to whom the Software is furnished to do so,
9
- subject to the following conditions:
10
-
11
- The above copyright notice and this permission notice shall be included in all
12
- copies or substantial portions of the Software.
13
-
14
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
16
- FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
17
- COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
18
- IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
19
- CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,16 +0,0 @@
1
- class ContextManager:
2
- def __init__(self, max_context_length: int):
3
- self.max_context_length = max_context_length
4
- self.shared_context = []
5
-
6
- def add_message(self, role: str, content: str):
7
- if self.max_context_length > 0:
8
- self.shared_context.append({"role": role, "content": content})
9
- if len(self.shared_context) > self.max_context_length:
10
- self.shared_context = self.shared_context[-self.max_context_length:]
11
-
12
- def get_context(self):
13
- return self.shared_context.copy() if self.max_context_length > 0 else []
14
-
15
- def reset_context(self):
16
- self.shared_context = []
@@ -1,29 +0,0 @@
1
- from openai import AsyncOpenAI
2
-
3
- class OPENAI_Handler:
4
- def __init__(
5
- self,
6
- api_key: str,
7
- endpoint: str,
8
- model_name: str,
9
- max_context_length: int,
10
- temperature: float,
11
- max_tokens: int,
12
- top_p: float,
13
- ):
14
- self.client = AsyncOpenAI(base_url=endpoint, api_key=api_key)
15
- self.model_name = model_name
16
- self.max_context_length = max_context_length
17
- self.temperature = temperature
18
- self.max_tokens = max_tokens
19
- self.top_p = top_p
20
-
21
- async def get_response(self, messages: list) -> str:
22
- response = await self.client.chat.completions.create(
23
- messages=messages,
24
- model=self.model_name,
25
- temperature=self.temperature,
26
- max_tokens=self.max_tokens,
27
- top_p=self.top_p,
28
- )
29
- return response.choices[0].message.content
@@ -1,22 +0,0 @@
1
- [tool.poetry]
2
- name = "nonebot-plugin-githubmodels"
3
- version = "0.2.8"
4
- description = "一个调用 GitHub Models 的 AI 对话插件"
5
- readme = "README.md"
6
- authors = ["lyqgzbl <admin@lyqgzbl.com>"]
7
- license = "MIT"
8
- homepage = "https://github.com/lyqgzbl/nonebot-plugin-githubmodels"
9
- repository = "https://github.com/lyqgzbl/nonebot-plugin-githubmodels"
10
- documentation = "https://github.com/lyqgzbl/nonebot-plugin-githubmodels#readme"
11
- keywords = ["nonebot2", "GitHub Models"]
12
-
13
- [tool.poetry.dependencies]
14
- python = "^3.8"
15
- nonebot2 = "^2.2.1"
16
- openai = "^1.44.1"
17
- nonebot-plugin-htmlrender = "^0.5.0"
18
- nonebot-plugin-alconna = "^0.54.0"
19
-
20
- [build-system]
21
- requires = ["poetry-core>=1.0.0"]
22
- build-backend = "poetry.core.masonry.api"