entari-plugin-hyw 4.0.0rc17__py3-none-any.whl → 4.0.0rc19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of entari-plugin-hyw might be problematic.
- entari_plugin_hyw-4.0.0rc19.dist-info/METADATA +26 -0
- entari_plugin_hyw-4.0.0rc19.dist-info/RECORD +4 -0
- entari_plugin_hyw-4.0.0rc19.dist-info/top_level.txt +1 -0
- entari_plugin_hyw/__init__.py +0 -914
- entari_plugin_hyw/filters.py +0 -83
- entari_plugin_hyw/history.py +0 -251
- entari_plugin_hyw/misc.py +0 -214
- entari_plugin_hyw/search_cache.py +0 -253
- entari_plugin_hyw-4.0.0rc17.dist-info/METADATA +0 -119
- entari_plugin_hyw-4.0.0rc17.dist-info/RECORD +0 -52
- entari_plugin_hyw-4.0.0rc17.dist-info/top_level.txt +0 -2
- hyw_core/__init__.py +0 -94
- hyw_core/agent.py +0 -876
- hyw_core/browser_control/__init__.py +0 -63
- hyw_core/browser_control/assets/card-dist/index.html +0 -429
- hyw_core/browser_control/assets/card-dist/logos/anthropic.svg +0 -1
- hyw_core/browser_control/assets/card-dist/logos/cerebras.svg +0 -9
- hyw_core/browser_control/assets/card-dist/logos/deepseek.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/gemini.svg +0 -1
- hyw_core/browser_control/assets/card-dist/logos/google.svg +0 -1
- hyw_core/browser_control/assets/card-dist/logos/grok.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/huggingface.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/microsoft.svg +0 -15
- hyw_core/browser_control/assets/card-dist/logos/minimax.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/mistral.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/nvida.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/openai.svg +0 -1
- hyw_core/browser_control/assets/card-dist/logos/openrouter.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/perplexity.svg +0 -24
- hyw_core/browser_control/assets/card-dist/logos/qwen.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/xai.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/xiaomi.png +0 -0
- hyw_core/browser_control/assets/card-dist/logos/zai.png +0 -0
- hyw_core/browser_control/assets/card-dist/vite.svg +0 -1
- hyw_core/browser_control/engines/__init__.py +0 -15
- hyw_core/browser_control/engines/base.py +0 -13
- hyw_core/browser_control/engines/default.py +0 -166
- hyw_core/browser_control/engines/duckduckgo.py +0 -171
- hyw_core/browser_control/landing.html +0 -172
- hyw_core/browser_control/manager.py +0 -173
- hyw_core/browser_control/renderer.py +0 -446
- hyw_core/browser_control/service.py +0 -1002
- hyw_core/config.py +0 -154
- hyw_core/core.py +0 -454
- hyw_core/crawling/__init__.py +0 -18
- hyw_core/crawling/completeness.py +0 -437
- hyw_core/crawling/models.py +0 -88
- hyw_core/definitions.py +0 -166
- hyw_core/image_cache.py +0 -274
- hyw_core/pipeline.py +0 -502
- hyw_core/search.py +0 -169
- hyw_core/stages/__init__.py +0 -21
- hyw_core/stages/base.py +0 -95
- hyw_core/stages/summary.py +0 -218
- {entari_plugin_hyw-4.0.0rc17.dist-info → entari_plugin_hyw-4.0.0rc19.dist-info}/WHEEL +0 -0
entari_plugin_hyw/__init__.py
DELETED
@@ -1,914 +0,0 @@
"""
entari-plugin-hyw - Entari Plugin for HYW

Use large language models to interpret chat messages.
"""

from dataclasses import dataclass, field
from importlib.metadata import version as get_version
from typing import List, Dict, Any, Optional
import asyncio
import os
import base64
import re
import tempfile

from arclet.alconna import Alconna, Args, AllParam, Arparma
from arclet.entari import metadata, listen, Session, plugin_config, BasicConfModel, command
from arclet.entari import MessageChain, Text, Image, MessageCreatedEvent, Quote, At
from satori.element import Custom
from loguru import logger
from arclet.entari.event.command import CommandReceive
from arclet.entari.event.lifespan import Cleanup

# Import from internal hyw_core
from hyw_core import HywCore, HywCoreConfig, QueryRequest
from hyw_core.browser_control import (
    ContentRenderer,
    get_content_renderer,
    set_global_renderer,
    close_screenshot_service,
)
from hyw_core.browser_control.manager import close_shared_browser

# Local modules
from .history import HistoryManager
from .misc import (
    process_onebot_json,
    process_images,
    resolve_model_name,
    render_refuse_answer,
    render_image_unsupported,
    parse_color,
    RecentEventDeduper,
)
from .filters import parse_filter_syntax
from .search_cache import SearchResultCache, parse_single_index, parse_multi_indices, crop_to_square_thumbnail


try:
    __version__ = get_version("entari_plugin_hyw")
except Exception:
    __version__ = "4.0.0-rc8"


_event_deduper = RecentEventDeduper()


class TaskManager:
    """Manages async tasks for cancellation"""
    def __init__(self):
        self.tasks: Dict[str, asyncio.Task] = {}
        self.cleanups: Dict[str, callable] = {}

    def register(self, msg_id: str, task: asyncio.Task, cleanup: Optional[callable] = None):
        self.tasks[msg_id] = task
        if cleanup:
            self.cleanups[msg_id] = cleanup

    def unregister(self, msg_id: str):
        self.tasks.pop(msg_id, None)
        self.cleanups.pop(msg_id, None)

    async def cancel(self, msg_id: str) -> bool:
        task = self.tasks.get(msg_id)
        if task and not task.done():
            task.cancel()

            # Run cleanup if available
            cleanup = self.cleanups.get(msg_id)
            if cleanup:
                try:
                    if asyncio.iscoroutinefunction(cleanup):
                        await cleanup()
                    else:
                        cleanup()
                except Exception as e:
                    logger.warning(f"Cleanup failed for task {msg_id}: {e}")

            self.unregister(msg_id)
            return True
        return False

_task_manager = TaskManager()


@dataclass
class HywConfig(BasicConfModel):
    """Plugin configuration"""
    admins: List[str] = field(default_factory=list)
    models: List[Dict[str, Any]] = field(default_factory=list)
    question_command: str = "/q"
    web_command: str = "/w"
    stop_command: str = "/x"
    help_command: str = "/h"
    language: str = "Simplified Chinese"
    temperature: float = 0.4

    model_name: Optional[str] = None
    api_key: Optional[str] = None
    base_url: str = "https://openrouter.ai/api/v1"

    search_engine: str = "duckduckgo"

    headless: bool = False
    save_conversation: bool = False
    reaction: bool = False
    quote: bool = False
    theme_color: str = "#ff0000"

    # Main model configuration (used for summary/main LLM calls)
    main: Optional[Dict[str, Any]] = None

    def __post_init__(self):
        self.theme_color = parse_color(self.theme_color)

    def to_hyw_core_config(self) -> HywCoreConfig:
        main_cfg = self.main or {}

        return HywCoreConfig.from_dict({
            "models": self.models,
            "model_name": self.model_name or "",
            "api_key": self.api_key or "",
            "base_url": self.base_url,
            "temperature": self.temperature,
            "search_engine": self.search_engine,
            "headless": self.headless,
            "language": self.language,
            "theme_color": self.theme_color,

            # Map nested 'main' config to summary stage
            "summary_model": main_cfg.get("model_name"),
            "summary_api_key": main_cfg.get("api_key"),
            "summary_base_url": main_cfg.get("base_url"),
            "summary_extra_body": main_cfg.get("extra_body"),
        })


conf = plugin_config(HywConfig)
history_manager = HistoryManager()
renderer = ContentRenderer(headless=conf.headless)
set_global_renderer(renderer)
search_cache = SearchResultCache(ttl_seconds=600.0)  # 10 minutes

# Initialize HywCore immediately at plugin load time (not lazy)
# This avoids the 2s delay on first user request caused by AsyncOpenAI client creation
_hyw_core: HywCore = HywCore(conf.to_hyw_core_config())

def get_hyw_core() -> HywCore:
    return _hyw_core


@listen(Cleanup)
async def cleanup_screenshot_service():
    global _hyw_core
    try:
        if _hyw_core:
            await _hyw_core.close()
            _hyw_core = None
        await close_screenshot_service()
        close_shared_browser()
    except Exception as e:
        logger.warning(f"Failed to cleanup: {e}")


async def react(session: Session, emoji: str):
    if not conf.reaction: return
    try:
        await session.reaction_create(emoji=emoji)
    except Exception as e:
        logger.warning(f"Reaction failed: {e}")


async def process_request(
    session: Session[MessageCreatedEvent],
    all_param: Optional[MessageChain] = None,
    selected_model: Optional[str] = None,
) -> None:
    mc = MessageChain(all_param)
    if session.reply:
        try:
            reply_msg_id = str(session.reply.origin.id) if hasattr(session.reply.origin, 'id') else None
            if not (reply_msg_id and history_manager.is_bot_message(reply_msg_id)):
                mc.extend(MessageChain(" ") + session.reply.origin.message)
        except Exception:
            mc.extend(MessageChain(" ") + session.reply.origin.message)

    filtered = mc.get(Text) + mc.get(Image) + mc.get(Custom)
    mc = MessageChain(filtered)

    text_content = str(mc.get(Text)).strip()
    text_content = re.sub(r'<img[^>]+>', '', text_content, flags=re.IGNORECASE)

    if not text_content and not mc.get(Image) and not mc.get(Custom):
        return

    hist_key = None
    if session.reply and hasattr(session.reply.origin, 'id'):
        hist_key = history_manager.get_conversation_id(str(session.reply.origin.id))

    hist_payload = history_manager.get_history(hist_key) if hist_key else []
    context_id = f"guild_{session.guild.id}" if session.guild else f"user_{session.user.id}"

    if conf.reaction: await react(session, "✨")

    try:
        msg_text = str(mc.get(Text)).strip() if mc.get(Text) else ""
        msg_text = re.sub(r'<img[^>]+>', '', msg_text, flags=re.IGNORECASE)

        if not msg_text and (mc.get(Image) or mc.get(Custom)):
            msg_text = "[图片]"

        for custom in [e for e in mc if isinstance(e, Custom)]:
            if custom.tag == 'onebot:json':
                if decoded := process_onebot_json(custom.attributes()):
                    msg_text += f"\n{decoded}"
                break

        model = selected_model
        if model:
            resolved, _ = resolve_model_name(model, conf.models)
            if resolved:
                model = resolved

        images, _ = await process_images(mc, None)

        # Prepare renderer
        local_renderer = await get_content_renderer()
        render_tab_task = asyncio.create_task(local_renderer.prepare_tab())

        # Register cleanup for this specific request's resources
        msg_id = str(session.event.message.id) if hasattr(session.event, 'message') else str(session.event.id)

        async def cleanup_resources():
            try:
                # If tab task is still running, cancel it
                if not render_tab_task.done():
                    render_tab_task.cancel()
                else:
                    # If tab is ready, close it
                    try:
                        tab_id = render_tab_task.result()
                        if tab_id:
                            await local_renderer.close_tab(tab_id)
                    except:
                        pass
            except Exception as e:
                logger.warning(f"Resource cleanup failed: {e}")

        # Update task manager with cleanup callback
        if _task_manager.tasks.get(msg_id):
            _task_manager.cleanups[msg_id] = cleanup_resources

        async def send_noti(msg: str):
            try:
                if conf.quote:
                    await session.send([Quote(session.event.message.id), msg])
                else:
                    await session.send(msg)
            except Exception as e:
                logger.warning(f"Failed to send notification: {e}")

        request = QueryRequest(
            user_input=msg_text,
            images=images,
            conversation_history=hist_payload,
            model_name=model,
            send_notification=send_noti
        )

        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tf:
            output_path = tf.name

        core = get_hyw_core()
        # Use agent mode with tool-calling capability
        # Agent can autonomously call web_tool up to 2 times, with IM notifications
        response = await core.query_agent(request, output_path=None)

        # 2. Get the warmed-up tab
        try:
            tab_id = await render_tab_task
        except Exception:
            tab_id = None

        display_session_id = history_manager.generate_short_code()

        if response.should_refuse:
            render_ok = await render_refuse_answer(
                renderer=local_renderer,
                output_path=output_path,
                reason=response.refuse_reason or 'Refused',
                theme_color=conf.theme_color,
                tab_id=tab_id,
            )
        elif not response.success:
            await session.send(f"Error: {response.error}")
            return
        else:
            # Process screenshots: cache full images and create thumbnails
            for ref in response.references:
                if ref.get("raw_screenshot_b64"):
                    full_b64 = ref["raw_screenshot_b64"]
                    url = ref.get("url", "")

                    # Cache full screenshot
                    cache_id = search_cache.store_screenshot(full_b64, url)
                    ref["screenshot_cache_id"] = cache_id

                    # Create thumbnail (1:1 crop from top)
                    thumbnail = crop_to_square_thumbnail(full_b64, max_size=400)
                    if thumbnail:
                        ref["raw_screenshot_b64"] = thumbnail
                        ref["is_thumbnail"] = True

            # 3. Explicit External Render using the Parallel Tab
            render_ok = await core.render(
                markdown_content=response.content,
                output_path=output_path,
                stats={"total_time": response.total_time},
                references=response.references,
                page_references=response.page_references,
                image_references=response.image_references,
                stages_used=response.stages_used,
                tab_id=tab_id
            )
            if render_ok:
                response.image_path = output_path

        if render_ok:
            with open(output_path, "rb") as f:
                img_data = base64.b64encode(f.read()).decode()

            msg_chain = MessageChain(Image(src=f'data:image/png;base64,{img_data}'))
            if conf.quote:
                msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain

            sent = await session.send(msg_chain)

            sent_id = next((str(e.id) for e in sent if hasattr(e, 'id')), None) if sent else None
            msg_id = str(session.event.message.id) if hasattr(session.event, 'message') else str(session.event.id)

            updated_history = hist_payload + [
                {"role": "user", "content": msg_text},
                {"role": "assistant", "content": response.content}
            ]

            # Save to Memory
            history_manager.remember(
                sent_id, updated_history, [msg_id],
                {"model": model}, context_id, code=display_session_id,
            )

            # Store web results in search cache for continuous conversation context
            # This allows users to reply to this message and have the AI "remember" the search results
            if response.web_results and sent_id:
                search_cache.store(sent_id, response.web_results, f"Context for {msg_id}")

            # Save to Disk (Debug/Logging)
            if conf.save_conversation:
                # Extract traces from response
                trace = response.stages_trace
                instruct_traces = trace.get("instruct_rounds") if trace else None

                # Check for web_results in response (needs Core update)
                web_results = getattr(response, "web_results", [])

                history_manager.save_to_disk(
                    key=sent_id,
                    image_path=output_path,
                    web_results=web_results,
                    instruct_traces=instruct_traces,
                    vision_trace=None  # Vision integrated into Instruct now
                )

        if os.path.exists(output_path):
            os.remove(output_path)

    except Exception as e:
        logger.exception(f"Error: {e}")
        await session.send(f"Error: {e}")



alc = Alconna(conf.question_command, Args["all_param;?", AllParam])

@command.on(alc)
async def handle_question_command(session: Session[MessageCreatedEvent], result: Arparma):
    try:
        mid = str(session.event.message.id) if getattr(session.event, "message", None) else str(session.event.id)
        dedupe_key = f"{getattr(session.account, 'id', 'account')}:{mid}"
        if _event_deduper.seen_recently(dedupe_key):
            return
    except Exception:
        pass

    args = result.all_matched_args
    all_param = args.get("all_param")

    # Check if replying to a cached search result (/w context summary)
    reply_msg_id = None
    if session.reply and hasattr(session.reply.origin, 'id'):
        reply_msg_id = str(session.reply.origin.id)

    if reply_msg_id:
        cached = search_cache.get(reply_msg_id)
        if cached:
            # Extract current user query
            if all_param:
                if isinstance(all_param, MessageChain):
                    current_query = str(all_param.get(Text)).strip()
                else:
                    current_query = str(all_param).strip()
            else:
                current_query = ""

            # If empty query, assume request for summary
            if not current_query:
                current_query = "请详细总结上述搜索结果"

            # Build full context from cached results
            context_parts = []
            for i, res in enumerate(cached.results):
                title = res.get("title", f"Result {i+1}")
                url = res.get("url", "")
                content = res.get("content", "") or res.get("snippet", "")
                context_parts.append(f"## [{i+1}] {title}\nURL: {url}\n\n{content}")

            full_context = "\n\n".join(context_parts)

            # Construct augmented prompt
            new_prompt = f"基于以下搜索结果回答问题:\n\n【搜索上下文】\nSearch Query: {cached.query}\n\n{full_context}\n\n【用户问题】\n{current_query}"

            # Use MessageChain with Text for compatibility
            # This injects the search context into the prompt while maintaining the 'reply' link in history
            all_param = MessageChain(Text(new_prompt))

            # Log for debug
            logger.info(f"Injecting search context from message {reply_msg_id} into query")

    # Normal query mode (Standard Agentic Chat)
    # Register task for cancellation
    msg_id = str(session.event.message.id) if hasattr(session.event, 'message') else str(session.event.id)
    task = asyncio.create_task(process_request(session, all_param))

    # Define cleanup to close potential tabs (handled inside process_request but good to have backup)
    # process_request handles its own cleanup, but we need to track the task itself
    _task_manager.register(msg_id, task)

    try:
        await task
    except asyncio.CancelledError:
        logger.info(f"Task {msg_id} cancelled by user")
        await session.send("❌ 任务已停止")
    except Exception as e:
        logger.error(f"Task failed: {e}")
    finally:
        _task_manager.unregister(msg_id)


# Search/Web Command (/w)
alc_search = Alconna(conf.web_command, Args["query;?", AllParam])

@command.on(alc_search)
async def handle_web_command(session: Session[MessageCreatedEvent], result: Arparma):
    """
    Handle web command /w:
    - If query is index + Quote -> Screenshot cached result
    - If query is URL -> Screenshot
    - If query is text -> Search
    """
    query = result.all_matched_args.get("query")

    # Extract query text
    if query:
        if isinstance(query, MessageChain):
            query = str(query.get(Text)).strip()
        query = str(query).strip()
    else:
        query = ""

    # Check if replying to a cached search result
    reply_msg_id = None
    if session.reply and hasattr(session.reply.origin, 'id'):
        reply_msg_id = str(session.reply.origin.id)

    # Quote + Index mode: Screenshot specific cached result(s)
    if reply_msg_id:
        cached = search_cache.get(reply_msg_id)
        if cached:
            # Case 1: No query - show all results as Sources card
            if not query:
                local_renderer = await get_content_renderer()
                tab_task = asyncio.create_task(local_renderer.prepare_tab())

                # Build references from cached results
                references = []
                for i, res in enumerate(cached.results[:10]):
                    references.append({
                        "title": res.get("title", f"Result {i+1}"),
                        "url": res.get("url", ""),
                        "snippet": res.get("content", "") or res.get("snippet", ""),
                        "original_idx": i + 1,
                    })

                try:
                    tab_id = await tab_task
                except Exception:
                    tab_id = None

                with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tf:
                    output_path = tf.name

                core = get_hyw_core()
                render_ok = await core.render(
                    markdown_content=f"# 搜索结果: {cached.query}",
                    output_path=output_path,
                    stats={"total_time": 0},
                    references=references,
                    page_references=[],
                    stages_used=[{"name": "cache", "description": f"缓存结果 ({len(references)} 条)", "time": 0}],
                    tab_id=tab_id
                )

                if render_ok and os.path.exists(output_path):
                    with open(output_path, "rb") as f:
                        img_data = base64.b64encode(f.read()).decode()

                    msg_chain = MessageChain(Image(src=f'data:image/png;base64,{img_data}'))
                    if conf.quote:
                        msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain

                    sent = await session.send(msg_chain)

                    # Re-cache with new message ID for chaining
                    sent_id = next((str(e.id) for e in sent if hasattr(e, 'id')), None) if sent else None
                    if sent_id:
                        search_cache.store(sent_id, cached.results[:10], cached.query)

                    os.remove(output_path)
                else:
                    await session.send("渲染搜索结果失败")

                search_cache.cleanup()
                return

            # Case 2: Multi-index mode - try parsing multiple indices first
            indices = parse_multi_indices(query)
            if indices is not None:
                # Validate all indices
                invalid_indices = [i + 1 for i in indices if i >= len(cached.results)]
                if invalid_indices:
                    await session.send(f"序号超出范围: {invalid_indices} (最大: {len(cached.results)})")
                    search_cache.cleanup()
                    return

                # Collect URLs to screenshot
                urls_to_screenshot = []
                for idx in indices:
                    target_url = cached.results[idx].get("url", "")
                    if target_url and target_url not in urls_to_screenshot:
                        urls_to_screenshot.append(target_url)

                if not urls_to_screenshot:
                    await session.send("所选结果无有效URL")
                    search_cache.cleanup()
                    return

                if conf.reaction:
                    asyncio.create_task(react(session, "📸"))

                core = get_hyw_core()
                screenshot_results = await core.screenshot_batch(urls_to_screenshot)

                images = [Image(src=f'data:image/jpeg;base64,{b64}') for b64 in screenshot_results if b64]

                if images:
                    msg_chain = MessageChain(images)
                    if conf.quote:
                        msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain
                    await session.send(msg_chain)
                else:
                    await session.send("截图失败")

                search_cache.cleanup()
                return

            # Case 3: Single index fallback
            idx = parse_single_index(query)
            if idx is None:
                # No valid index - show prompt
                await session.send("请指定序号,如: /w 1 或 /w 2、3")
                search_cache.cleanup()  # Lazy cleanup
                return

            if idx >= len(cached.results):
                await session.send(f"序号超出范围 (1-{len(cached.results)})")
                search_cache.cleanup()
                return

            # Screenshot the cached URL - check if already cached first
            target_result = cached.results[idx]
            target_url = target_result.get("url", "")
            screenshot_cache_id = target_result.get("screenshot_cache_id")

            if not target_url:
                await session.send("该结果无有效URL")
                search_cache.cleanup()
                return

            # Try to get from cache first
            b64_img = None
            if screenshot_cache_id:
                b64_img = search_cache.get_screenshot(screenshot_cache_id)
                if b64_img:
                    logger.info(f"/w using cached screenshot: {screenshot_cache_id}")

            # Fetch if not in cache
            if not b64_img:
                if conf.reaction:
                    asyncio.create_task(react(session, "📸"))

                core = get_hyw_core()
                b64_img = await core.screenshot(target_url)
            else:
                if conf.reaction:
                    asyncio.create_task(react(session, "✨"))

            if b64_img:
                msg_chain = MessageChain(Image(src=f'data:image/jpeg;base64,{b64_img}'))
                if conf.quote:
                    msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain
                await session.send(msg_chain)
            else:
                await session.send(f"截图失败: {target_url}")

            search_cache.cleanup()
            return
        else:
            # Reply to a non-cached message: append reply content to query
            try:
                # session.reply.origin.message is a list, wrap it in MessageChain
                reply_msg = MessageChain(session.reply.origin.message)
                reply_content = str(reply_msg.get(Text)).strip()
                if reply_content:
                    query = f"{query} {reply_content}".strip() if query else reply_content
                    logger.info(f"/w appended reply content, new query: '{query}'")
            except Exception as e:
                logger.warning(f"/w failed to extract reply content: {e}")

    # No query and no cache context - nothing to do
    if not query:
        return

    try:
        core = get_hyw_core()

        # 1. URL Detection
        url_pattern = re.compile(r'^https?://(?:[-\w./?=&%#]+)')
        if url_pattern.match(query):
            # === URL Screenshot Mode ===
            if conf.reaction: asyncio.create_task(react(session, "📸"))

            with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tf:
                output_path = tf.name

            b64_img = await core.screenshot(query)

            if b64_img:
                with open(output_path, "wb") as f:
                    f.write(base64.b64decode(b64_img))

                msg_chain = MessageChain(Image(src=f'data:image/jpeg;base64,{b64_img}'))
                if conf.quote:
                    msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain

                await session.send(msg_chain)

                if conf.save_conversation:
                    mid = str(session.event.message.id) if getattr(session.event, "message", None) else str(session.event.id)
                    context_id = f"guild_{session.guild.id}" if session.guild else "user"
                    history_manager.remember(mid, [{"role": "user", "content": f"/w {query}"}], [], {}, context_id=context_id)
                    history_manager.save_to_disk(mid, image_path=output_path, web_results=[{"url": query, "title": "Screenshot", "_type": "screenshot"}])

                os.remove(output_path)
            else:
                await session.send(f"Failed to screenshot URL: {query}")
            return

        # 2. Search Mode (Fallthrough)

        # Parse enhanced filter syntax
        filters, search_query, filter_error = parse_filter_syntax(query, max_count=3)

        if filter_error:
            await session.send(filter_error)
            return

        # Start search first
        local_renderer = await get_content_renderer()
        search_task = asyncio.create_task(core.search([search_query]))

        # Only pre-warm tab if NOT in filter mode (filter mode = screenshots only, no card render)
        tab_task = None
        if not filters:
            tab_task = asyncio.create_task(local_renderer.prepare_tab())

        if conf.reaction:
            asyncio.create_task(react(session, "🔍"))

        results = await search_task
        flat_results = results[0] if results else []

        if not flat_results:
            if tab_task:
                try: await tab_task
                except: pass
            await session.send("Search returned no results.")
            return

        visible = [r for r in flat_results if not r.get("_hidden", False)]

        if not visible:
            if tab_task:
                try: await tab_task
                except: pass
            await session.send("Search returned no visible results.")
            return

        # === Filter Mode: Screenshot matching links (NO tab needed) ===
        if filters:

            urls_to_screenshot = []

            for filter_type, filter_value, count in filters:
                if filter_type == 'index':
                    # Index-based (1-based)
                    idx = filter_value - 1
                    if 0 <= idx < len(visible):
                        url = visible[idx].get("url", "")
                        if url and url not in urls_to_screenshot:
                            urls_to_screenshot.append(url)
                    else:
                        await session.send(f"⚠️ 序号 {filter_value} 超出范围 (1-{len(visible)})")
                        return
                else:
                    # Link filter: find URLs containing filter term
                    found_count = 0
                    for res in visible:
                        url = res.get("url", "")
                        title = res.get("title", "")
                        # Match filter against both URL and title
                        if (filter_value in url.lower() or filter_value in title.lower()) and url not in urls_to_screenshot:
                            urls_to_screenshot.append(url)
                            found_count += 1
                            if found_count >= count:
                                break

                    if found_count == 0:
                        await session.send(f"⚠️ 未找到包含 \"{filter_value}\" 的链接")
                        return

            if not urls_to_screenshot:
                await session.send("⚠️ 未找到匹配的链接")
                return

            if conf.reaction:
                asyncio.create_task(react(session, "📸"))

            # Take screenshots concurrently
            screenshot_tasks = [core.screenshot(url) for url in urls_to_screenshot]
            screenshot_results = await asyncio.gather(*screenshot_tasks)

            images = [Image(src=f'data:image/jpeg;base64,{b64}') for b64 in screenshot_results if b64]

            if images:
                msg_chain = MessageChain(images)
                if conf.quote:
                    msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain
                await session.send(msg_chain)

                if conf.save_conversation:
                    mid = str(session.event.message.id) if getattr(session.event, "message", None) else str(session.event.id)
                    context_id = f"guild_{session.guild.id}" if session.guild else "user"
                    history_manager.remember(mid, [{"role": "user", "content": f"/w {query}"}], [], {}, context_id=context_id)
            else:
                await session.send("截图失败")
            return

        # === Normal Search Mode: Render search results as Sources card ===

        # Build references from search results for Sources card
        references = []
        for i, res in enumerate(visible[:10]):
            references.append({
                "title": res.get("title", f"Result {i+1}"),
                "url": res.get("url", ""),
                "snippet": res.get("content", "") or res.get("snippet", ""),
                "original_idx": i + 1,
            })

        try:
            tab_id = await tab_task
        except Exception:
            tab_id = None

        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tf:
            output_path = tf.name

        # Render Sources card with search results (no markdown content, just references)
        render_ok = await core.render(
            markdown_content=f"# 搜索结果: {search_query}",
            output_path=output_path,
            stats={"total_time": 0},
            references=references,
            page_references=[],
            stages_used=[{"name": "search", "description": f"搜索 \"{search_query}\"", "time": 0}],
            tab_id=tab_id
        )

        if render_ok and os.path.exists(output_path):
            with open(output_path, "rb") as f:
                img_data = base64.b64encode(f.read()).decode()

            msg_chain = MessageChain(Image(src=f'data:image/png;base64,{img_data}'))
            if conf.quote:
                msg_chain = MessageChain(Quote(session.event.message.id)) + msg_chain

            sent = await session.send(msg_chain)

            # Store in cache for future /w and /q lookups
            sent_id = next((str(e.id) for e in sent if hasattr(e, 'id')), None) if sent else None
            if sent_id:
                search_cache.store(sent_id, visible[:10], search_query)

            if conf.save_conversation:
                mid = str(session.event.message.id) if getattr(session.event, "message", None) else str(session.event.id)
                context_id = f"guild_{session.guild.id}" if session.guild else "user"
                history_manager.remember(mid, [{"role": "user", "content": f"/w {query}"}], [], {}, context_id=context_id)

            os.remove(output_path)
        else:
            await session.send("渲染搜索结果失败")

        search_cache.cleanup()  # Lazy cleanup

    except Exception as e:
        logger.error(f"Search command failed: {e}")
        await session.send(f"Search error: {e}")


metadata("hyw", author=[{"name": "kumoSleeping", "email": "zjr2992@outlook.com"}], version=__version__, config=HywConfig)

# Help command (/h)
alc_help = Alconna(conf.help_command)

@command.on(alc_help)
async def handle_help_command(session: Session[MessageCreatedEvent], result: Arparma):
    """Display help information for all commands."""
    help_text = f"""HYW Plugin v{__version__}

Question Agent:
• {conf.question_command} tell me...
• {conf.question_command} [picture] tell me...
Stop Task:
• {conf.stop_command} (reply to the question/web command)
Web_tool Search:
• {conf.web_command} query
Web_tool Screenshot:
• {conf.web_command} https://example.com
Web_tool Filter(search and screenshot):
• {conf.web_command} github: fastapi
• {conf.web_command} 1,2: minecraft
• {conf.web_command} mcmod=2: forge mod
Web_tool Context(screenshot):
• [quote: web_tool search] + {conf.web_command} 1
• [quote: web_tool search] + {conf.web_command} 1, 3
"""

    await session.send(help_text)

# Stop command (/x)
alc_stop = Alconna(conf.stop_command)

@command.on(alc_stop)
async def handle_stop_command(session: Session[MessageCreatedEvent], result: Arparma):
    """Stop a running task by replying to the original command message."""
    if not session.reply or not hasattr(session.reply.origin, 'id'):
        await session.send("请回复正在执行的任务消息以停止它")
        return

    target_msg_id = str(session.reply.origin.id)

    if await _task_manager.cancel(target_msg_id):
        # Determine notification based on reaction config
        if conf.reaction:
            asyncio.create_task(react(session, "🛑"))
        else:
            await session.send("正在停止任务...")
    else:
        await session.send("未找到可停止的任务或任务已结束")


@listen(CommandReceive)
async def remove_at(content: MessageChain):
    return content.lstrip(At)