entari-plugin-hyw 4.0.0rc5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of entari-plugin-hyw might be problematic. Click here for more details.
- entari_plugin_hyw/__init__.py +532 -0
- entari_plugin_hyw/assets/card-dist/index.html +387 -0
- entari_plugin_hyw/assets/card-dist/logos/anthropic.svg +1 -0
- entari_plugin_hyw/assets/card-dist/logos/cerebras.svg +9 -0
- entari_plugin_hyw/assets/card-dist/logos/deepseek.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/gemini.svg +1 -0
- entari_plugin_hyw/assets/card-dist/logos/google.svg +1 -0
- entari_plugin_hyw/assets/card-dist/logos/grok.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/huggingface.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/microsoft.svg +15 -0
- entari_plugin_hyw/assets/card-dist/logos/minimax.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/mistral.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/nvida.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/openai.svg +1 -0
- entari_plugin_hyw/assets/card-dist/logos/openrouter.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/perplexity.svg +24 -0
- entari_plugin_hyw/assets/card-dist/logos/qwen.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/xai.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/xiaomi.png +0 -0
- entari_plugin_hyw/assets/card-dist/logos/zai.png +0 -0
- entari_plugin_hyw/assets/card-dist/vite.svg +1 -0
- entari_plugin_hyw/assets/icon/anthropic.svg +1 -0
- entari_plugin_hyw/assets/icon/cerebras.svg +9 -0
- entari_plugin_hyw/assets/icon/deepseek.png +0 -0
- entari_plugin_hyw/assets/icon/gemini.svg +1 -0
- entari_plugin_hyw/assets/icon/google.svg +1 -0
- entari_plugin_hyw/assets/icon/grok.png +0 -0
- entari_plugin_hyw/assets/icon/huggingface.png +0 -0
- entari_plugin_hyw/assets/icon/microsoft.svg +15 -0
- entari_plugin_hyw/assets/icon/minimax.png +0 -0
- entari_plugin_hyw/assets/icon/mistral.png +0 -0
- entari_plugin_hyw/assets/icon/nvida.png +0 -0
- entari_plugin_hyw/assets/icon/openai.svg +1 -0
- entari_plugin_hyw/assets/icon/openrouter.png +0 -0
- entari_plugin_hyw/assets/icon/perplexity.svg +24 -0
- entari_plugin_hyw/assets/icon/qwen.png +0 -0
- entari_plugin_hyw/assets/icon/xai.png +0 -0
- entari_plugin_hyw/assets/icon/xiaomi.png +0 -0
- entari_plugin_hyw/assets/icon/zai.png +0 -0
- entari_plugin_hyw/browser/__init__.py +10 -0
- entari_plugin_hyw/browser/engines/base.py +13 -0
- entari_plugin_hyw/browser/engines/bing.py +95 -0
- entari_plugin_hyw/browser/engines/searxng.py +137 -0
- entari_plugin_hyw/browser/landing.html +172 -0
- entari_plugin_hyw/browser/manager.py +153 -0
- entari_plugin_hyw/browser/service.py +275 -0
- entari_plugin_hyw/card-ui/.gitignore +24 -0
- entari_plugin_hyw/card-ui/README.md +5 -0
- entari_plugin_hyw/card-ui/index.html +16 -0
- entari_plugin_hyw/card-ui/package-lock.json +2342 -0
- entari_plugin_hyw/card-ui/package.json +31 -0
- entari_plugin_hyw/card-ui/public/logos/anthropic.svg +1 -0
- entari_plugin_hyw/card-ui/public/logos/cerebras.svg +9 -0
- entari_plugin_hyw/card-ui/public/logos/deepseek.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/gemini.svg +1 -0
- entari_plugin_hyw/card-ui/public/logos/google.svg +1 -0
- entari_plugin_hyw/card-ui/public/logos/grok.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/huggingface.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/microsoft.svg +15 -0
- entari_plugin_hyw/card-ui/public/logos/minimax.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/mistral.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/nvida.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/openai.svg +1 -0
- entari_plugin_hyw/card-ui/public/logos/openrouter.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/perplexity.svg +24 -0
- entari_plugin_hyw/card-ui/public/logos/qwen.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/xai.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/xiaomi.png +0 -0
- entari_plugin_hyw/card-ui/public/logos/zai.png +0 -0
- entari_plugin_hyw/card-ui/public/vite.svg +1 -0
- entari_plugin_hyw/card-ui/src/App.vue +756 -0
- entari_plugin_hyw/card-ui/src/assets/vue.svg +1 -0
- entari_plugin_hyw/card-ui/src/components/HelloWorld.vue +41 -0
- entari_plugin_hyw/card-ui/src/components/MarkdownContent.vue +382 -0
- entari_plugin_hyw/card-ui/src/components/SectionCard.vue +41 -0
- entari_plugin_hyw/card-ui/src/components/StageCard.vue +240 -0
- entari_plugin_hyw/card-ui/src/main.ts +5 -0
- entari_plugin_hyw/card-ui/src/style.css +29 -0
- entari_plugin_hyw/card-ui/src/test_regex.js +103 -0
- entari_plugin_hyw/card-ui/src/types.ts +61 -0
- entari_plugin_hyw/card-ui/tsconfig.app.json +16 -0
- entari_plugin_hyw/card-ui/tsconfig.json +7 -0
- entari_plugin_hyw/card-ui/tsconfig.node.json +26 -0
- entari_plugin_hyw/card-ui/vite.config.ts +16 -0
- entari_plugin_hyw/definitions.py +130 -0
- entari_plugin_hyw/history.py +248 -0
- entari_plugin_hyw/image_cache.py +274 -0
- entari_plugin_hyw/misc.py +135 -0
- entari_plugin_hyw/modular_pipeline.py +351 -0
- entari_plugin_hyw/render_vue.py +401 -0
- entari_plugin_hyw/search.py +116 -0
- entari_plugin_hyw/stage_base.py +88 -0
- entari_plugin_hyw/stage_instruct.py +328 -0
- entari_plugin_hyw/stage_instruct_review.py +92 -0
- entari_plugin_hyw/stage_summary.py +164 -0
- entari_plugin_hyw-4.0.0rc5.dist-info/METADATA +116 -0
- entari_plugin_hyw-4.0.0rc5.dist-info/RECORD +99 -0
- entari_plugin_hyw-4.0.0rc5.dist-info/WHEEL +5 -0
- entari_plugin_hyw-4.0.0rc5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,532 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from importlib.metadata import version as get_version
|
|
3
|
+
from typing import List, Dict, Any, Optional, Union
|
|
4
|
+
import time
|
|
5
|
+
import asyncio
|
|
6
|
+
|
|
7
|
+
# Read the version number from the installed package metadata (sourced from
# pyproject.toml) so it is maintained in only one place.
try:
    __version__ = get_version("entari_plugin_hyw")
except Exception:
    # Package not installed (e.g. running from a source checkout): fall back
    # to a harmless placeholder version.
    __version__ = "0.0.0"
|
|
12
|
+
|
|
13
|
+
from arclet.alconna import Alconna, Args, AllParam, CommandMeta, Option, Arparma, MultiVar, store_true
|
|
14
|
+
from arclet.entari import metadata, listen, Session, plugin_config, BasicConfModel, plugin, command
|
|
15
|
+
from arclet.letoderea import on
|
|
16
|
+
from arclet.entari import MessageChain, Text, Image, MessageCreatedEvent, Quote, At
|
|
17
|
+
from satori.element import Custom
|
|
18
|
+
from loguru import logger
|
|
19
|
+
import arclet.letoderea as leto
|
|
20
|
+
from arclet.entari.event.command import CommandReceive
|
|
21
|
+
|
|
22
|
+
from .modular_pipeline import ModularPipeline
|
|
23
|
+
from .history import HistoryManager
|
|
24
|
+
from .render_vue import ContentRenderer, get_content_renderer
|
|
25
|
+
from .misc import process_onebot_json, process_images, resolve_model_name, render_refuse_answer, REFUSE_ANSWER_MARKDOWN
|
|
26
|
+
from arclet.entari.event.lifespan import Cleanup
|
|
27
|
+
|
|
28
|
+
import os
|
|
29
|
+
import secrets
|
|
30
|
+
import base64
|
|
31
|
+
|
|
32
|
+
import re
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def parse_color(color: str) -> str:
    """Normalize a user-supplied color to a CSS hex string.

    Supported inputs: ``#fff``, ``#ff0000``, ``fff``, ``ff0000``,
    ``(255, 0, 0)`` and ``255,0,0`` (RGB components are clamped to 0-255).

    Returns the color as ``#rgb`` / ``#rrggbb``; falls back to the default
    theme red ``#ef4444`` for empty or unparseable input.
    """
    if not color:
        return "#ef4444"

    color = str(color).strip()

    # Hex format: validate the digits too, so values like "#zzzzzz" do not
    # leak into the rendered CSS (the old length-only check let them through).
    if re.fullmatch(r'#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})', color):
        return color if color.startswith('#') else f'#{color}'

    # RGB tuple: "(r, g, b)" or "r,g,b"; each component clamped to [0, 255].
    rgb_match = re.match(r'^\(?(\d+)[,\s]+(\d+)[,\s]+(\d+)\)?$', color)
    if rgb_match:
        r, g, b = (max(0, min(255, int(x))) for x in rgb_match.groups())
        return f'#{r:02x}{g:02x}{b:02x}'

    logger.warning(f"Invalid color '{color}', using default #ef4444")
    return "#ef4444"
|
|
59
|
+
|
|
60
|
+
class _RecentEventDeduper:
    """Tracks recently seen event keys so duplicate deliveries can be dropped.

    A key counts as "seen" if it was recorded within the last ``ttl_seconds``.
    The internal map is pruned lazily once it grows past ``max_size``.
    """

    def __init__(self, ttl_seconds: float = 30.0, max_size: int = 2048):
        self.ttl_seconds = ttl_seconds
        self.max_size = max_size
        self._seen: Dict[str, float] = {}

    def seen_recently(self, key: str) -> bool:
        """Return True if *key* was recorded within the TTL; otherwise record it now."""
        now = time.time()
        if len(self._seen) > self.max_size:
            self._prune(now)
        last = self._seen.get(key)
        fresh = last is not None and now - last <= self.ttl_seconds
        if not fresh:
            self._seen[key] = now
        return fresh

    def _prune(self, now: float):
        """Drop expired entries, then evict oldest entries down to ``max_size``."""
        for stale in [k for k, ts in self._seen.items() if now - ts > self.ttl_seconds]:
            self._seen.pop(stale, None)
        overflow = len(self._seen) - self.max_size
        if overflow > 0:
            for stale, _ in sorted(self._seen.items(), key=lambda kv: kv[1])[:overflow]:
                self._seen.pop(stale, None)


_event_deduper = _RecentEventDeduper()
|
|
85
|
+
|
|
86
|
+
@dataclass
class ModelConfig:
    """Model configuration for a specific stage.

    Any field left as ``None`` falls back to the next source in
    ``HywConfig.get_model_config`` (stage -> main stage -> root defaults).
    """
    # Model identifier passed to the pipeline (short names are resolved
    # against HywConfig.models by resolve_model_name).
    model_name: Optional[str] = None
    # API key for this stage's endpoint.
    api_key: Optional[str] = None
    # OpenAI-compatible endpoint base URL.
    base_url: Optional[str] = None
    # Extra request-body parameters forwarded verbatim to the API.
    extra_body: Optional[Dict[str, Any]] = None
    # Provider label — presumably used for logo/branding on the card; verify against renderer.
    model_provider: Optional[str] = None
    # Token pricing used for cost stats (units not shown here — TODO confirm per-1K vs per-1M).
    input_price: Optional[float] = None
    output_price: Optional[float] = None
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@dataclass
class HywConfig(BasicConfModel):
    """Plugin configuration: root-level defaults plus optional per-stage overrides."""

    # Core Settings
    admins: List[str] = field(default_factory=list)
    # Known model entries used by resolve_model_name for short-name lookup.
    models: List[Dict[str, Any]] = field(default_factory=list)
    question_command: str = "/q"
    language: str = "Simplified Chinese"
    temperature: float = 0.4

    # Root-level defaults (backward compatible)
    model_name: Optional[str] = None
    api_key: Optional[str] = None
    base_url: str = "https://openrouter.ai/api/v1"
    extra_body: Optional[Dict[str, Any]] = None
    model_provider: Optional[str] = None
    input_price: Optional[float] = None
    output_price: Optional[float] = None

    # Nested Stage Configs
    instruct: Optional[ModelConfig] = None
    qa: Optional[ModelConfig] = None
    main: Optional[ModelConfig] = None  # Summary stage

    # Search/Fetch Settings
    search_engine: str = "bing"
    enable_domain_blocking: bool = True
    page_content_mode: str = "text"

    # Rendering Settings
    headless: bool = False
    render_timeout_ms: int = 6000
    render_image_timeout_ms: int = 3000

    # Bot Behavior
    save_conversation: bool = False
    reaction: bool = False
    quote: bool = True

    # UI Theme
    theme_color: str = "#ef4444"

    def __post_init__(self):
        """Parse and normalize theme color after initialization."""
        self.theme_color = parse_color(self.theme_color)
        # Convert dicts to ModelConfig if needed — config loaders may hand us
        # raw dicts for the nested stage sections.
        if isinstance(self.instruct, dict):
            self.instruct = ModelConfig(**self.instruct)
        if isinstance(self.qa, dict):
            self.qa = ModelConfig(**self.qa)
        if isinstance(self.main, dict):
            self.main = ModelConfig(**self.main)

    def get_model_config(self, stage: str) -> Dict[str, Any]:
        """
        Get resolved model config for a stage.

        Args:
            stage: "instruct", "qa", or "main" (summary)

        Returns:
            Dict with model_name, api_key, base_url, extra_body, etc.
            Each value is resolved stage -> main -> root (first non-None wins).
        """
        # Determine primary and secondary config sources
        primary = None
        secondary = None

        if stage == "instruct":
            primary = self.instruct
            secondary = self.main  # Fallback to main
        elif stage == "qa":
            # QA fallback to main as well if ever used
            primary = self.qa
            secondary = self.main
        elif stage == "main":
            primary = self.main

        # Build result with fallback logic
        def resolve(field_name: str, is_essential: bool = True):
            """Resolve a field with fallback: Primary -> Secondary -> Root."""
            # NOTE(review): is_essential is currently unused — confirm whether
            # non-essential fields were meant to skip the root-level fallback.
            # 1. Try Primary
            val = getattr(primary, field_name, None) if primary else None

            # 2. Try Secondary (if value missing)
            if val is None and secondary:
                val = getattr(secondary, field_name, None)

            # 3. Try Root (if value still missing)
            if val is None:
                val = getattr(self, field_name, None)
            return val

        return {
            "model_name": resolve("model_name"),
            "api_key": resolve("api_key"),
            "base_url": resolve("base_url"),
            "extra_body": resolve("extra_body", is_essential=False),
            "model_provider": resolve("model_provider", is_essential=False),
            "input_price": resolve("input_price", is_essential=False),
            "output_price": resolve("output_price", is_essential=False),
        }
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
# Module-level singletons shared by every handler in this plugin.
conf = plugin_config(HywConfig)
history_manager = HistoryManager()
renderer = ContentRenderer(headless=conf.headless)
from .render_vue import set_global_renderer
set_global_renderer(renderer)

# Pre-start Crawl4AI browser for fast fetching/screenshots
from .browser.service import prestart_browser, close_screenshot_service
# prestart_browser(headless=conf.headless) # Removed to avoid RuntimeError: no running event loop
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
class GlobalCache:
    """Process-wide scratch cache; a single shared instance is created below."""
    # Path of a previously rendered models-list image, if any.
    models_image_path: Optional[str] = None

global_cache = GlobalCache()
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
@listen(Cleanup)
async def cleanup_screenshot_service():
    """Release the shared screenshot service and browser manager at shutdown."""
    try:
        await close_screenshot_service()
        # The shared browser manager has its own lifecycle; shut it down too.
        from .browser.manager import close_shared_browser
        await close_shared_browser()
    except Exception as e:
        # Best-effort teardown: shutdown must not fail because of this.
        logger.warning(f"Failed to cleanup browser services: {e}")
|
|
227
|
+
|
|
228
|
+
async def react(session: Session, emoji: str):
    """Attach an emoji reaction to the triggering message.

    No-op when reactions are disabled in config; failures are logged only.
    """
    if not conf.reaction:
        return
    try:
        await session.reaction_create(emoji=emoji)
    except Exception as e:
        logger.warning(f"Reaction failed: {e}")
|
|
234
|
+
|
|
235
|
+
async def process_request(
    session: Session[MessageCreatedEvent],
    all_param: Optional[MessageChain] = None,
    selected_model: Optional[str] = None,
    selected_vision_model: Optional[str] = None,
    conversation_key_override: Optional[str] = None,
    local_mode: bool = False,
) -> None:
    """Core request handler: gather the message, run the LLM pipeline, render
    the answer to an image card, send it, and persist conversation history.

    Args:
        session: Triggering message session.
        all_param: Raw message chain captured by the command parser.
        selected_model: Explicit model override (short name or full id).
        selected_vision_model: Explicit vision-model override.
        conversation_key_override: Continue an existing conversation thread.
        local_mode: Currently unused in this function — TODO confirm intent.
    """
    mc = MessageChain(all_param)
    if session.reply:
        try:
            # Check if reply is from self (the bot)
            # 1. Check by Message ID (reliable for bot's own messages if recorded)
            reply_msg_id = str(session.reply.origin.id) if hasattr(session.reply.origin, 'id') else None
            is_bot = False

            if reply_msg_id and history_manager.is_bot_message(reply_msg_id):
                is_bot = True

            if is_bot:
                pass # Reply is from bot - ignoring
            else:
                mc.extend(MessageChain(" ") + session.reply.origin.message)
        except Exception as e:
            logger.warning(f"Failed to process reply origin: {e}")
            # NOTE(review): falls back to extending anyway; if accessing
            # origin.message itself raised above, this re-raises — confirm.
            mc.extend(MessageChain(" ") + session.reply.origin.message)

    # Filter and reconstruct MessageChain
    filtered_elements = mc.get(Text) + mc.get(Image) + mc.get(Custom)
    mc = MessageChain(filtered_elements)

    text_content = str(mc.get(Text)).strip()
    # Remove HTML image tags from text content to prevent "unreasonable code behavior"
    text_content = re.sub(r'<img[^>]+>', '', text_content, flags=re.IGNORECASE)

    # Nothing usable in the message: bail out silently.
    if not text_content and not mc.get(Image) and not mc.get(Custom):
        return

    # History & Context
    hist_key = conversation_key_override
    if not hist_key and session.reply and hasattr(session.reply.origin, 'id'):
        hist_key = history_manager.get_conversation_id(str(session.reply.origin.id))

    hist_payload = history_manager.get_history(hist_key) if hist_key else []
    meta = history_manager.get_metadata(hist_key) if hist_key else {}
    context_id = f"guild_{session.guild.id}" if session.guild else f"user_{session.user.id}"

    # Acknowledge receipt with a reaction (react() re-checks the flag too).
    if conf.reaction: await react(session, "✨")

    try:
        msg_text = str(mc.get(Text)).strip() if mc.get(Text) else ""
        msg_text = re.sub(r'<img[^>]+>', '', msg_text, flags=re.IGNORECASE)

        # If message is empty but has images, use a placeholder
        if not msg_text and (mc.get(Image) or mc.get(Custom)):
            msg_text = "[图片]"

        # Decode the first onebot:json custom payload (e.g. forwarded cards).
        for custom in [e for e in mc if isinstance(e, Custom)]:
            if custom.tag == 'onebot:json':
                if decoded := process_onebot_json(custom.attributes()): msg_text += f"\n{decoded}"
                break

        # Model Selection (Step 1)
        # Resolve model names from config if they are short names/keywords
        model = selected_model or meta.get("model")
        if model and model != "off":
            resolved, err = resolve_model_name(model, conf.models)
            if resolved:
                model = resolved
            elif err:
                logger.warning(f"Model resolution warning for {model}: {err}")

        vision_model = selected_vision_model or meta.get("vision_model")
        if vision_model and vision_model != "off":
            resolved_v, err_v = resolve_model_name(vision_model, conf.models)
            if resolved_v:
                vision_model = resolved_v
            elif err_v:
                logger.warning(f"Vision model resolution warning for {vision_model}: {err_v}")

        # NOTE(review): err returned by process_images is never checked — confirm
        # image-processing failures are safe to ignore here.
        images, err = await process_images(mc, vision_model)

        # Start preparing render tab (async)
        renderer = await get_content_renderer()
        render_tab_task = asyncio.create_task(renderer.prepare_tab())
        tab_id = None

        # Call Pipeline directly
        safe_input = msg_text
        pipeline = ModularPipeline(conf)
        try:
            resp = await pipeline.execute(
                safe_input,
                hist_payload,
                model_name=model,
                images=images,
                selected_vision_model=vision_model,
            )
        finally:
            await pipeline.close()

        # Step 1 Results
        # NOTE(review): the step1_* values below are currently unused (the
        # optional Step 2 appears to have been removed) — candidates for cleanup.
        step1_vision_model = resp.get("vision_model_used")
        step1_model = resp.get("model_used")
        step1_history = resp.get("conversation_history", [])
        step1_stats = resp.get("stats", {})

        final_resp = resp

        # Step 2 (Optional)

        # Extract Response Data
        content = final_resp.get("llm_response", "")
        structured = final_resp.get("structured_response", {})

        # Wait for tab preparation if needed (should be ready by now)
        try:
            tab_id = await render_tab_task
        except Exception as e:
            logger.warning(f"Failed to prepare render tab: {e}")
            tab_id = None

        # Render: reserve a temp file path for the output image.
        import tempfile
        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tf:
            output_path = tf.name
        model_used = final_resp.get("model_used")

        # Determine session short code
        if hist_key:
            display_session_id = history_manager.get_code_by_key(hist_key)
            if not display_session_id:
                display_session_id = history_manager.generate_short_code()
        else:
            display_session_id = history_manager.generate_short_code()

        # Use stats_list if available, otherwise standard stats
        stats_to_render = final_resp.get("stats_list", final_resp.get("stats", {}))

        # Check if refuse_answer was triggered
        if final_resp.get("refuse_answer"):
            logger.info(f"Refuse answer triggered. Rendering refuse image. Reason: {final_resp.get('refuse_reason', '')}")
            render_ok = await render_refuse_answer(
                renderer=renderer,
                output_path=output_path,
                reason=final_resp.get('refuse_reason', 'Instruct 专家分配此任务流程失败,请尝试提出其他问题~'),
                theme_color=conf.theme_color,
                tab_id=tab_id,
            )
        else:
            render_ok = await renderer.render(
                markdown_content=content,
                output_path=output_path,
                tab_id=tab_id,
                stats=stats_to_render,
                references=structured.get("references", []),
                page_references=structured.get("page_references", []),
                image_references=structured.get("image_references", []),
                stages_used=final_resp.get("stages_used", []),
                theme_color=conf.theme_color,
            )

        # Send & Save
        if not render_ok:
            logger.error("Render failed; skipping reply.")
            if os.path.exists(output_path):
                try:
                    os.remove(output_path)
                except Exception as exc:
                    logger.warning(f"Failed to delete render output {output_path}: {exc}")
            sent = None
        else:
            # Convert to base64
            with open(output_path, "rb") as f:
                img_data = base64.b64encode(f.read()).decode()

            # Build single reply chain (image only now)
            elements = []
            elements.append(Image(src=f'data:image/png;base64,{img_data}'))

            msg_chain = MessageChain(*elements)

            if conf.quote:
                msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain

            # Use reply_to instead of manual Quote insertion to avoid ActionFailed errors
            sent = await session.send(msg_chain)

        # Record history keyed by the sent message id so follow-up replies
        # can continue the conversation.
        sent_id = next((str(e.id) for e in sent if hasattr(e, 'id')), None) if sent else None
        msg_id = str(session.event.message.id) if hasattr(session.event, 'message') else str(session.event.id)
        related = [msg_id] + ([str(session.reply.origin.id)] if session.reply and hasattr(session.reply.origin, 'id') else [])

        history_manager.remember(
            sent_id,
            final_resp.get("conversation_history", []),
            related,
            {
                "model": model_used,
                "trace_markdown": final_resp.get("trace_markdown"),
            },
            context_id,
            code=display_session_id,
        )

        if conf.save_conversation and sent_id:
            try:
                # Pass web_results to save fetched pages as markdown, and output image
                history_manager.save_to_disk(
                    sent_id,
                    web_results=final_resp.get("web_results"),
                    image_path=output_path if 'output_path' in locals() else None
                )
            except Exception as e:
                logger.warning(f"Failed to save conversation: {e}")

        # Cleanup temp image
        if 'output_path' in locals() and output_path and os.path.exists(output_path):
            try:
                os.remove(output_path)
            except Exception:
                pass

    except Exception as e:
        logger.exception(f"Error: {e}")
        err_msg = f"Error: {e}"
        if conf.quote:
            await session.send([Quote(session.event.message.id), err_msg])
        else:
            await session.send(err_msg)

        # Save conversation on error if response was generated
        if 'resp' in locals() and resp and conf.save_conversation:
            try:
                # Use a temporary ID for error cases
                error_id = f"error_{int(time.time())}_{secrets.token_hex(4)}"

                # Try to salvage history
                partial_hist = []
                if 'resp' in locals() and resp:
                    partial_hist = resp.get("conversation_history", [])
                elif 'context' in locals() and context and hasattr(context, 'instruct_history'):
                    partial_hist = context.instruct_history

                related_ids = []
                if 'session' in locals():
                    msg_id = str(session.event.message.id) if hasattr(session.event, 'message') else str(session.event.id)
                    related_ids = [msg_id]

                history_manager.remember(error_id, partial_hist, related_ids, {"model": "error", "error": str(e)}, context_id, code=display_session_id if 'display_session_id' in locals() else None)

                # Save debug data on error
                web_res = context.web_results if 'context' in locals() and context else []

                history_manager.save_to_disk(
                    error_id,
                    web_results=web_res
                )

            except Exception as save_err:
                logger.error(f"Failed to save error conversation: {save_err}")
|
|
501
|
+
|
|
502
|
+
|
|
503
|
+
# Command definition: the question command captures the remainder of the
# message (text and attachments) as one optional catch-all parameter.
alc = Alconna(
    conf.question_command,
    Args["all_param;?", AllParam],
)
|
|
507
|
+
|
|
508
|
+
@command.on(alc)
async def handle_question_command(session: Session[MessageCreatedEvent], result: Arparma):
    """Handle main Question command: drop duplicate deliveries, then delegate.

    Some adapters can deliver the same command event more than once in quick
    succession; the deduper drops repeats keyed by (account id, message id).
    """
    try:
        logger.info(f"Question Command Triggered. Message: {result}")
        mid = str(session.event.message.id) if getattr(session.event, "message", None) else str(session.event.id)
        dedupe_key = f"{getattr(session.account, 'id', 'account')}:{mid}"
        if _event_deduper.seen_recently(dedupe_key):
            logger.warning(f"Duplicate command event ignored: {dedupe_key}")
            return
    except Exception:
        # Best-effort dedupe: never block handling because the check failed.
        pass

    # (Previously the "Question Command Triggered" line was logged a second
    # time here — removed as redundant.)

    args = result.all_matched_args

    await process_request(session, args.get("all_param"), selected_model=None, selected_vision_model=None, conversation_key_override=None)
|
|
526
|
+
|
|
527
|
+
# Register plugin metadata with Entari (name, author, version, config model).
metadata("hyw", author=[{"name": "kumoSleeping", "email": "zjr2992@outlook.com"}], version=__version__, config=HywConfig)
|
|
528
|
+
|
|
529
|
+
|
|
530
|
+
@listen(CommandReceive)
async def remove_at(content: MessageChain):
    """Strip leading @-mention elements so commands parse after an "@bot" prefix."""
    cleaned = content.lstrip(At)
    return cleaned
|