entari-plugin-hyw 3.3.4.tar.gz → 3.3.6.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of entari-plugin-hyw might be problematic.

Files changed (53)
  1. {entari_plugin_hyw-3.3.4/src/entari_plugin_hyw.egg-info → entari_plugin_hyw-3.3.6}/PKG-INFO +2 -1
  2. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/pyproject.toml +2 -1
  3. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/__init__.py +14 -351
  4. entari_plugin_hyw-3.3.6/src/entari_plugin_hyw/assets/libs/tailwind.css +1 -0
  5. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/tailwind.input.css +1 -1
  6. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/template.j2 +113 -20
  7. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/core/config.py +2 -0
  8. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/core/pipeline.py +116 -112
  9. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/core/render.py +39 -42
  10. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/utils/prompts.py +26 -15
  11. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/utils/search.py +234 -4
  12. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6/src/entari_plugin_hyw.egg-info}/PKG-INFO +2 -1
  13. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw.egg-info/SOURCES.txt +0 -1
  14. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw.egg-info/requires.txt +1 -0
  15. entari_plugin_hyw-3.3.4/src/entari_plugin_hyw/assets/libs/tailwind.css +0 -1
  16. entari_plugin_hyw-3.3.4/src/entari_plugin_hyw/core/render.py.bak +0 -926
  17. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/MANIFEST.in +0 -0
  18. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/README.md +0 -0
  19. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/setup.cfg +0 -0
  20. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/anthropic.svg +0 -0
  21. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/deepseek.png +0 -0
  22. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/gemini.svg +0 -0
  23. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/google.svg +0 -0
  24. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/grok.png +0 -0
  25. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/microsoft.svg +0 -0
  26. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/minimax.png +0 -0
  27. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/mistral.png +0 -0
  28. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/nvida.png +0 -0
  29. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/openai.svg +0 -0
  30. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/openrouter.png +0 -0
  31. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/perplexity.svg +0 -0
  32. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/qwen.png +0 -0
  33. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/xai.png +0 -0
  34. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/icon/zai.png +0 -0
  35. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/libs/highlight.css +0 -0
  36. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/libs/highlight.js +0 -0
  37. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/libs/katex-auto-render.js +0 -0
  38. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/libs/katex.css +0 -0
  39. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/libs/katex.js +0 -0
  40. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/package-lock.json +0 -0
  41. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/package.json +0 -0
  42. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/tailwind.config.js +0 -0
  43. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/template.html +0 -0
  44. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/assets/template.html.bak +0 -0
  45. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/core/__init__.py +0 -0
  46. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/core/history.py +0 -0
  47. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/core/hyw.py +0 -0
  48. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/utils/__init__.py +0 -0
  49. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/utils/browser.py +0 -0
  50. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/utils/misc.py +0 -0
  51. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw/utils/playwright_tool.py +0 -0
  52. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw.egg-info/dependency_links.txt +0 -0
  53. {entari_plugin_hyw-3.3.4 → entari_plugin_hyw-3.3.6}/src/entari_plugin_hyw.egg-info/top_level.txt +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: entari_plugin_hyw
-Version: 3.3.4
+Version: 3.3.6
 Summary: Use large language models to interpret chat messages
 Author-email: kumoSleeping <zjr2992@outlook.com>
 License: MIT
@@ -22,6 +22,7 @@ Requires-Dist: httpx
 Requires-Dist: markdown>=3.10
 Requires-Dist: crawl4ai>=0.7.8
 Requires-Dist: jinja2>=3.0
+Requires-Dist: ddgs>=9.10.0
 Provides-Extra: dev
 Requires-Dist: entari-plugin-server>=0.5.0; extra == "dev"
 Requires-Dist: satori-python-adapter-onebot11>=0.2.5; extra == "dev"

pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "entari_plugin_hyw"
-version = "3.3.4"
+version = "3.3.6"
 description = "Use large language models to interpret chat messages"
 authors = [{name = "kumoSleeping", email = "zjr2992@outlook.com"}]
 dependencies = [
@@ -14,6 +14,7 @@ dependencies = [
     "markdown>=3.10",
     "crawl4ai>=0.7.8",
     "jinja2>=3.0",
+    "ddgs>=9.10.0",
 ]
 requires-python = ">=3.10"
 readme = "README.md"
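
The only new runtime dependency in 3.3.6 is ddgs (>=9.10.0), which lines up with the new `ddgs` choice for `search_provider` further down and with the expanded `src/entari_plugin_hyw/utils/search.py` (+234 lines, not shown here). A rough sketch of what a ddgs-backed lookup can look like, assuming ddgs keeps the `DDGS` class and `text()` method from the duckduckgo_search lineage; the plugin's real wrapper in utils/search.py may differ:

```python
# Sketch only: assumes ddgs exposes DDGS with a text() method returning
# duckduckgo_search-style dicts with "title", "href" and "body" keys.
from urllib.parse import urlparse

from ddgs import DDGS


def ddgs_text_search(query: str, max_results: int = 5) -> list[dict]:
    """Return hits as {title, url, domain} dicts, the shape the renderer's reference list uses."""
    hits = DDGS().text(query, max_results=max_results)
    return [
        {
            "title": h.get("title", ""),
            "url": h.get("href", ""),
            "domain": urlparse(h.get("href", "")).netloc,
        }
        for h in hits
    ]
```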

src/entari_plugin_hyw/__init__.py

@@ -14,7 +14,7 @@ from .core.hyw import HYW
 from .core.history import HistoryManager
 from .core.render import ContentRenderer
 from .utils.misc import process_onebot_json, process_images, resolve_model_name
-from arclet.entari.event.lifespan import Startup, Ready, Cleanup
+from arclet.entari.event.lifespan import Cleanup
 
 import os
 import secrets
@@ -85,120 +85,21 @@ class HywConfig(BasicConfModel):
     # Instruct model pricing overrides (defaults to main model pricing if not set)
     intruct_input_price: Optional[float] = None
     intruct_output_price: Optional[float] = None
-
     # Provider Names
     search_name: str = "DuckDuckGo"
-    search_provider: str = "Crawl4AI"
+    search_provider: str = "crawl4ai" # crawl4ai | httpx | ddgs
     model_provider: Optional[str] = None
     vision_model_provider: Optional[str] = None
     intruct_model_provider: Optional[str] = None
-
-    start_test: Optional[Union[str, bool]] = None
+
+
 
 conf = plugin_config(HywConfig)
 history_manager = HistoryManager()
 renderer = ContentRenderer()
 hyw = HYW(config=conf)
 
-@listen(Ready, once=True)
-async def _run_ui_test():
-    """Run UI rendering test on startup if configured."""
-    # Debug log to confirm listener is active
-    logger.info(f"UI TEST Listener Active. start_test config: {conf.start_test} (type: {type(conf.start_test)})")
-
-    if not conf.start_test:
-        return
-
-    test_file = ""
-    if isinstance(conf.start_test, str):
-        test_file = conf.start_test
-    elif conf.start_test is True:
-        # User enabled boolean toggle, assume default path
-        # Try a few locations
-        candidates = ["data/conversations/ui-test.md", "ui-test.md", "README.md"]
-        for c in candidates:
-            if os.path.exists(c):
-                test_file = c
-                break
-        if not test_file:
-            logger.warning("UI TEST: start_test=True but no default test file found (tried: data/conversations/ui-test.md, ui-test.md, README.md)")
-            return
-
-    logger.info(f"UI TEST: Starting render test with file {test_file}")
-
-    if not os.path.exists(test_file):
-        logger.error(f"UI TEST: File not found: {test_file}")
-        return
-
-    try:
-        with open(test_file, "r", encoding="utf-8") as f:
-            content = f.read()
-
-        # Mock Data for Full UI Test
-        stats = {
-            "total_time": 12.5,
-            "vision_duration": 3.2,
-            "cost": 0.0015
-        }
-
-        stages = [
-            {"name": "Vision", "model": "google/gemini-pro-vision", "time": 3.2, "cost": 0.0005, "provider": "Google", "icon_config": "google"},
-            {"name": "Search", "model": "duckduckgo", "time": 1.5, "cost": 0.0, "provider": "DDG", "icon_config": "search",
-             "children": {"references": [
-                 {"title": "Crawl4AI, Open-source LLM-Friendly Web Crawler & Scraper", "url": "https://docs.crawl4ai.com/core/llmtxt", "domain": "docs.crawl4ai.com"}
-             ]}},
-            {"name": "Crawler", "model": "Crawl4AI", "time": 2.5, "cost": 0.0, "provider": "Page Fetcher", "icon_config": "browser",
-             "children": {"crawled_pages": [
-                 {"title": "Quick Start - Crawl4AI Documentation (v0.7.x)", "url": "https://docs.crawl4ai.com/core/quickstart/", "domain": "docs.crawl4ai.com"},
-                 {"title": "Crawl4AI Explained: The AI-Friendly Web Crawling Framework", "url": "https://scrapfly.io/blog/posts/crawl4AI-explained/", "domain": "scrapfly.io"},
-                 {"title": "Llmtxt - Crawl4AI Documentation (v0.7.x)", "url": "https://docs.crawl4ai.com/core/llmtxt/", "domain": "docs.crawl4ai.com"},
-                 {"title": "Multi-URL Crawling - Crawl4AI Documentation (v0.7.x)", "url": "https://docs.crawl4ai.com/advanced/multi-url-crawling/", "domain": "docs.crawl4ai.com"}
-             ]}},
-            {"name": "Agent", "model": "anthropic/claude-3-5-sonnet", "time": 7.8, "cost": 0.0010, "provider": "Anthropic", "icon_config": "anthropic"}
-        ]
-
-        # References come from search results
-        references = [
-            {"title": "Crawl4AI, Open-source LLM-Friendly Web Crawler & Scraper", "url": "https://docs.crawl4ai.com/core/llmtxt", "domain": "docs.crawl4ai.com"}
-        ]
-
-        # Page references come from crawled pages
-        page_references = [
-            {"title": "Quick Start - Crawl4AI Documentation (v0.7.x)", "url": "https://docs.crawl4ai.com/core/quickstart/", "domain": "docs.crawl4ai.com"},
-            {"title": "Crawl4AI Explained: The AI-Friendly Web Crawling Framework", "url": "https://scrapfly.io/blog/posts/crawl4AI-explained/", "domain": "scrapfly.io"},
-            {"title": "Llmtxt - Crawl4AI Documentation (v0.7.x)", "url": "https://docs.crawl4ai.com/core/llmtxt/", "domain": "docs.crawl4ai.com"},
-            {"title": "Multi-URL Crawling - Crawl4AI Documentation (v0.7.x)", "url": "https://docs.crawl4ai.com/advanced/multi-url-crawling/", "domain": "docs.crawl4ai.com"}
-        ]
-
-        output_dir = "data/cache"
-        os.makedirs(output_dir, exist_ok=True)
-        output_path = f"{output_dir}/ui_test_result.jpg"
-
-        logger.info(f"UI TEST: Rendering to {output_path}...")
-
-        start = time.time()
-        success = await renderer.render(
-            markdown_content=content,
-            output_path=output_path,
-            stats=stats,
-            stages_used=stages,
-            references=references,
-            page_references=page_references,
-            flow_steps=[],
-            model_name="CLAUDE-3-5-SONNET",
-            provider_name="Anthropic",
-            behavior_summary="Automated Test",
-            icon_config="anthropic",
-            render_timeout_ms=10000
-        )
-
-        if success:
-            logger.success(f"UI TEST: Render completed in {time.time() - start:.2f}s. Saved to {output_path}")
-        else:
-            logger.error("UI TEST: Render FAILED.")
-
-    except Exception as e:
-        logger.error(f"UI TEST: Exception during test: {e}")
+
 
 
 @listen(Cleanup, once=True)
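
The `search_provider` field now documents three backends (`crawl4ai | httpx | ddgs`). The actual selection logic lives in `utils/search.py` and `core/pipeline.py`, which are only summarized in the file list above; a minimal sketch of that kind of config-driven dispatch, with hypothetical function names, could look like this:

```python
# Hypothetical dispatch keyed on HywConfig.search_provider; the function names
# (crawl4ai_search, httpx_search, ddgs_search) are illustrative, not the plugin's API.
from typing import Awaitable, Callable

SearchFn = Callable[[str, int], Awaitable[list[dict]]]


async def crawl4ai_search(query: str, max_results: int) -> list[dict]:
    return []  # placeholder: would drive Crawl4AI


async def httpx_search(query: str, max_results: int) -> list[dict]:
    return []  # placeholder: would issue the search request directly over httpx


async def ddgs_search(query: str, max_results: int) -> list[dict]:
    return []  # placeholder: would call the ddgs library (see the sketch above)


PROVIDERS: dict[str, SearchFn] = {
    "crawl4ai": crawl4ai_search,
    "httpx": httpx_search,
    "ddgs": ddgs_search,
}


async def run_search(query: str, provider: str = "crawl4ai", max_results: int = 5) -> list[dict]:
    # Unknown values fall back to the default, matching the config default of "crawl4ai".
    return await PROVIDERS.get(provider.lower(), crawl4ai_search)(query, max_results)
```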
@@ -261,8 +162,7 @@ async def react(session: Session, emoji: str):
 
 async def process_request(session: Session[MessageCreatedEvent], all_param: Optional[MessageChain] = None,
                           selected_model: Optional[str] = None, selected_vision_model: Optional[str] = None,
-                          conversation_key_override: Optional[str] = None, local_mode: bool = False,
-                          next_prompt: Optional[str] = None, next_text_model: Optional[str] = None, next_vision_model: Optional[str] = None):
+                          conversation_key_override: Optional[str] = None, local_mode: bool = False):
     logger.info(f"Processing request: {all_param}")
     mc = MessageChain(all_param)
     logger.info(f"reply: {session.reply}")
@@ -358,73 +258,7 @@ async def process_request(session: Session[MessageCreatedEvent], all_param: Opti
     final_resp = resp
 
     # Step 2 (Optional)
-    if next_prompt:
-        logger.info(f"Executing Step 2 with prompt: {next_prompt}")
-
-        # Use Step 1 history as base for Step 2
-        # hyw.agent already returns the updated history including the new turn
-        # So we just pass step1_history
-
-        # Determine Step 2 models
-        # If not specified, inherit from Step 1 or config?
-        # Usually inherit from config or meta if not specified in -n
-        step2_model = next_text_model or model
-        if step2_model and step2_model != "off":
-            resolved_s2, err_s2 = resolve_model_name(step2_model, conf.models)
-            if resolved_s2:
-                step2_model = resolved_s2
-
-        step2_vision_model = next_vision_model or vision_model # Probably not used if no new images, but consistent
-        if step2_vision_model and step2_vision_model != "off":
-            resolved_s2v, err_s2v = resolve_model_name(step2_vision_model, conf.models)
-            if resolved_s2v:
-                step2_vision_model = resolved_s2v
-
-        # No new images for Step 2 usually, unless we want to carry over images?
-        # The user said "First round image model, second round text model".
-        # Usually Step 2 is text-only follow-up.
-        # But hyw.agent stateless? No, we pass history.
-        # We don't pass 'images' again to Step 2 unless we want them re-analyzed.
-        # If Step 1 analyzed images, the analysis is in history (as assistant message or system message?).
-        # In hyw.agent, image analysis result is added to history.
-        # So we don't need to pass images again.
-
-        resp2 = await hyw.agent(str(next_prompt), conversation_history=step1_history, images=None,
-                                selected_model=step2_model, selected_vision_model=step2_vision_model, local_mode=local_mode)
-
-        final_resp = resp2
-
-        # Merge Stats
-        # Instead of merging into a single dict, we prepare a list of stats for the renderer
-        # But we also need a combined stats for history recording?
-        # History manager likely expects a single dict or doesn't care much (it stores what we give)
-
-        # Let's keep step1_stats and resp2["stats"] separate for rendering
-        # But for history, maybe we still want a merged one?
-        # The code below uses final_resp["stats"] for rendering AND history.
-
-        # Let's create a list for rendering
-        stats_for_render = [step1_stats, resp2.get("stats", {})]
-
-        # And a merged one for history/final_resp
-        merged_stats = step1_stats.copy()
-        if "stats" in resp2:
-            for k, v in resp2["stats"].items():
-                if isinstance(v, (int, float)) and k in merged_stats:
-                    merged_stats[k] += v
-                elif k == "visited_domains":
-                    merged_stats[k] = list(set(merged_stats.get(k, []) + v))
-                else:
-                    merged_stats[k] = v
-
-        final_resp["stats"] = merged_stats
-        final_resp["stats_list"] = stats_for_render # Pass this to renderer if available
-
-        # Merge Model Info for Display
-        # We want to show Step 1 Vision Model AND Step 2 Text Model
-        if step1_vision_model:
-            final_resp["vision_model_used"] = step1_vision_model
-        # final_resp["model_used"] is already from Step 2
+
 
 
     # Extract Response Data
@@ -528,6 +362,7 @@ async def process_request(session: Session[MessageCreatedEvent], all_param: Opti
         stats=stats_to_render,
         references=structured.get("references", []),
         page_references=structured.get("page_references", []),
+        image_references=structured.get("image_references", []),
         flow_steps=structured.get("flow_steps", []),
         stages_used=final_resp.get("stages_used", []),
         model_name=render_model_name,
@@ -607,45 +442,19 @@ async def process_request(session: Session[MessageCreatedEvent], all_param: Opti
         except Exception as save_err:
             logger.error(f"Failed to save error conversation: {save_err}")
 
-# Secondary Parser for -n content
-next_alc = Alconna(
-    "next",
-    Option("-v|--vision", Args["vision_model", str], help_text="设置视觉模型(设为off禁用)"),
-    Option("-t|--text", Args["text_model", str], help_text="设置文本模型"),
-    Args["prompt", AllParam],
-)
+
 
 # Main Command (Question)
 alc = Alconna(
     conf.question_command,
-    Option("-v|--vision", Args["vision_model", str]),
-    Option("-t|--text", Args["text_model", str]),
-    Option("-c|--code", Args["code", str]),
-    Option("-n|--next", Args["next_input", AllParam]),
-    Args["list_models;?", "-m|--models"],
-    Args["all_chat;?", "-a"],
-    Args["local_mode;?", "-l"],
-    Args["all_param?", MultiVar(str | Image | Custom)],
-    meta=CommandMeta(
-        compact=False,
-        description=f"""使用方法:
-{conf.question_command} -a : 列出所有会话
-{conf.question_command} -m : 列出所有模型
-{conf.question_command} -v <模型名> : 设置主要视觉模型, 设为 off 禁用
-{conf.question_command} -t <模型名> : 设置主要文本模型
-{conf.question_command} -l : 开启本地模式 (关闭Web索引)
-{conf.question_command} -c <4位消息码> : 继续指定会话
-{conf.question_command} -n <后续提示词> : 在第一步完成后执行后续操作 (支持 -t/-v)
-{conf.question_command} <问题> : 发起问题
-特性:
-"""
-    )
+    Args["all_param;?", AllParam],
 )
 
 @command.on(alc)
 async def handle_question_command(session: Session[MessageCreatedEvent], result: Arparma):
     """Handle main Question command"""
     try:
+        # logger.info(f"Question Command Triggered. Message: {result}")
         mid = str(session.event.message.id) if getattr(session.event, "message", None) else str(session.event.id)
        dedupe_key = f"{getattr(session.account, 'id', 'account')}:{mid}"
         if _event_deduper.seen_recently(dedupe_key):
@@ -659,156 +468,10 @@ async def handle_question_command(session: Session[MessageCreatedEvent], result:
         args = result.all_matched_args
         logger.info(f"Matched Args: {args}")
 
-        text_model_val = args.get("text_model")
-        vision_model_val = args.get("vision_model")
-        code_val = args.get("code")
-        all_flag_val = args.get("all_chat")
-        list_models_val = args.get("list_models")
-        local_mode_val = True if args.get("local_mode") else False
-        logger.info(f"Local mode: {local_mode_val} (type: {type(local_mode_val)})")
-
-        # Handle -m (List Models)
-        if list_models_val:
-            # global_cache is already imported/defined in __init__.py
-
-            if global_cache.models_image_path and os.path.exists(global_cache.models_image_path):
-                logger.info(f"Using cached models list: {global_cache.models_image_path}")
-                with open(global_cache.models_image_path, "rb") as f:
-                    img_data = base64.b64encode(f.read()).decode()
-                msg = MessageChain(Image(src=f'data:image/png;base64,{img_data}'))
-                if conf.quote: msg = MessageChain(Quote(session.event.message.id)) + msg
-                await session.send(msg)
-                return
-
-            output_dir = "data/cache"
-            os.makedirs(output_dir, exist_ok=True)
-            output_path = f"{output_dir}/models_list_cache.png"
-
-            await renderer.render_models_list(
-                conf.models,
-                output_path,
-                default_base_url=conf.base_url,
-                render_timeout_ms=conf.render_timeout_ms,
-            )
-            global_cache.models_image_path = os.path.abspath(output_path)
-
-            with open(output_path, "rb") as f:
-                img_data = base64.b64encode(f.read()).decode()
-            msg = MessageChain(Image(src=f'data:image/png;base64,{img_data}'))
-            if conf.quote: msg = MessageChain(Quote(session.event.message.id)) + msg
-            await session.send(msg)
-            return
-
-        # Handle -a (List History)
-        if all_flag_val:
-            context_id = f"guild_{session.guild.id}" if session.guild else f"user_{session.user.id}"
-            keys = history_manager.list_by_context(context_id, limit=10)
-            if not keys:
-                msg = "暂无历史会话"
-                if conf.quote: await session.send([Quote(session.event.message.id), msg])
-                else: await session.send(msg)
-                return
-
-            msg = "历史会话 [最近10条]\n"
-            for i, key in enumerate(keys):
-                short_code = history_manager.get_code_by_key(key) or "????"
-                hist = history_manager.get_history(key)
-                preview = "..."
-                if hist and len(hist) > 0:
-                    last_content = hist[-1].get("content", "")
-                    preview = (last_content[:20] + "...") if len(last_content) > 20 else last_content
-
-                msg += f"{short_code} {preview}\n"
-            if conf.quote: await session.send([Quote(session.event.message.id), msg])
-            else: await session.send(msg)
-            return
+        # Only all_param is supported now
+        # Context ID for history lookup is automatically handled in process_request
 
-        selected_vision_model = None
-        selected_text_model = None
-
-        if vision_model_val:
-            if vision_model_val.lower() == "off":
-                selected_vision_model = "off"
-            else:
-                selected_vision_model, err = resolve_model_name(vision_model_val, conf.models)
-                if err:
-                    if conf.quote: await session.send([Quote(session.event.message.id), err])
-                    else: await session.send(err)
-                    return
-            logger.info(f"Selected vision model: {selected_vision_model}")
-
-        if text_model_val:
-            selected_text_model, err = resolve_model_name(text_model_val, conf.models)
-            if err:
-                if conf.quote: await session.send([Quote(session.event.message.id), err])
-                else: await session.send(err)
-                return
-            logger.info(f"Selected text model: {selected_text_model}")
-
-        # Determine History to Continue
-        target_key = None
-        context_id = f"guild_{session.guild.id}" if session.guild else f"user_{session.user.id}"
-
-        # 1. Explicit Code
-        if code_val:
-            target_code = code_val
-            target_key = history_manager.get_key_by_code(target_code)
-            if not target_key:
-                msg = f"未找到代码为 {target_code} 的会话"
-                if conf.quote: await session.send([Quote(session.event.message.id), msg])
-                else: await session.send(msg)
-                return
-            logger.info(f"Question: Continuing session {target_code} -> {target_key}")
-
-        next_input_val = args.get("next_input")
-        next_text_model = None
-        next_vision_model = None
-        next_prompt = None
-
-        if next_input_val:
-            # Parse secondary command
-            # next_input_val is likely a MessageChain or string depending on AllParam behavior with Alconna
-            # We need to ensure it's a string or compatible input for parse
-            logger.info(f"Parsing next input: {next_input_val}")
-            try:
-                # Convert next_input_val to string
-                if isinstance(next_input_val, list):
-                    # It's a list of segments (e.g. [Text(...)])
-                    # We need to join them into a string
-                    # Assuming they are Satori elements or similar
-                    cmd_str = "".join(str(x) for x in next_input_val)
-                else:
-                    cmd_str = str(next_input_val)
-
-                # Prepend 'next' header for Alconna
-                parse_target = f"next {cmd_str}"
-
-                next_res = next_alc.parse(parse_target)
-                if next_res.matched:
-                    next_args = next_res.all_matched_args
-                    next_text_model = next_args.get("text_model")
-                    next_vision_model = next_args.get("vision_model")
-                    next_prompt = next_args.get("prompt")
-
-                    # If prompt is AllParam, it might be captured as a list or string depending on Alconna version
-                    # If it's a list, join it back to string
-                    if isinstance(next_prompt, list):
-                        next_prompt = "".join(str(x) for x in next_prompt)
-
-                    logger.info(f"Next Command Parsed: text={next_text_model}, vision={next_vision_model}, prompt={next_prompt}")
-                else:
-                    logger.warning(f"Next command parsing failed or no match for: {parse_target}")
-                    # Fallback: treat the whole string as prompt if parsing failed (e.g. if it didn't match options but Alconna should have matched prompt)
-                    # But next_alc has Args["prompt", AllParam], so it should match everything else.
-                    # If it failed, maybe something else is wrong.
-                    # Let's assume if it failed, we just use the raw string as prompt?
-                    # But wait, if we prepend "next ", and next_alc starts with "next", it should match.
-                    pass
-            except Exception as e:
-                logger.error(f"Failed to parse next command: {e}")
-
-        await process_request(session, args.get("all_param"), selected_model=selected_text_model, selected_vision_model=selected_vision_model, conversation_key_override=target_key, local_mode=local_mode_val,
-                              next_prompt=next_prompt, next_text_model=next_text_model, next_vision_model=next_vision_model)
+        await process_request(session, args.get("all_param"), selected_model=None, selected_vision_model=None, conversation_key_override=None, local_mode=False)
 
 metadata("hyw", author=[{"name": "kumoSleeping", "email": "zjr2992@outlook.com"}], version="3.2.105", config=HywConfig)
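
The question command loses all of its options in 3.3.6: `-v/-t/-c/-n/-m/-a/-l` are gone, and everything after the command name is captured as `all_param` and forwarded to `process_request` with default settings. A self-contained sketch of the reduced command surface, assuming the usual `arclet.alconna` import path and using "hyw" as a stand-in for `conf.question_command`:

```python
# Sketch of the 3.3.6 command definition; "hyw" stands in for conf.question_command.
from arclet.alconna import Alconna, AllParam, Args

alc = Alconna("hyw", Args["all_param;?", AllParam])

result = alc.parse("hyw what changed between 3.3.4 and 3.3.6?")
if result.matched:
    # No options are routed any more; the whole tail ends up in all_param,
    # mirroring process_request(session, args.get("all_param"), ...) above.
    print(result.all_matched_args.get("all_param"))
```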
 
src/entari_plugin_hyw/assets/libs/tailwind.css (new file)

@@ -0,0 +1 @@
+ *,:after,:before{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:rgba(59,130,246,.5);--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }/*! 
tailwindcss v3.4.17 | MIT License | https://tailwindcss.com*/*,:after,:before{box-sizing:border-box;border:0 solid #e5e7eb}:after,:before{--tw-content:""}:host,html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:Inter,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,sans-serif;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:SFMono-Regular,Consolas,Liberation Mono,Menlo,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0}fieldset,legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}.relative{position:relative}.-top-0\.5{top:-.125rem}.z-20{z-index:20}.m-0{margin:0}.mx-0\.5{margin-left:.125rem;margin-right:.125rem}.mx-auto{margin-left:auto;margin-right:auto}.mb-1{margin-bottom:.25rem}.mb-3{margin-bottom:.75rem}.ml-2{margin-left:.5rem}.ml-4{margin-left:1rem}.ml-auto{margin-left:auto}.mt-0\.5{margin-top:.125rem}.mt-1{margin-top:.25rem}.mt-2{margin-top:.5rem}.box-border{box-sizing:border-box}.block{display:block}.inline{display:inline}.flex{display:flex}.inline-flex{display:inline-flex}.grid{display:grid}.h-10{height:2.5rem}.h-2{height:.5rem}.h-3\.5{height:.875rem}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-8{height:2rem}.h-fit{height:-moz-fit-content;height:fit-content}.w-10{width:2.5rem}.w-2{width:.5rem}.w-3\.5{width:.875rem}.w-5{width:1.25rem}.w-6{width:1.5rem}.w-8{width:2rem}.w-full{width:100%}.min-w-0{min-width:0}.min-w-\[14px\]{min-width:14px}.min-w-\[16px\]{min-width:16px}.max-w-\[450px\]{max-width:450px}.max-w-\[80px\]{max-width:80px}.flex-1{flex:1 1 
0%}.shrink-0{flex-shrink:0}.cursor-default{cursor:default}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.flex-col{flex-direction:column}.items-start{align-items:flex-start}.items-center{align-items:center}.items-baseline{align-items:baseline}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-0{gap:0}.gap-0\.5{gap:.125rem}.gap-1\.5{gap:.375rem}.gap-2{gap:.5rem}.gap-2\.5{gap:.625rem}.gap-3{gap:.75rem}.gap-4{gap:1rem}.overflow-hidden,.truncate{overflow:hidden}.truncate{white-space:nowrap}.text-ellipsis,.truncate{text-overflow:ellipsis}.whitespace-nowrap{white-space:nowrap}.rounded{border-radius:.25rem}.rounded-2xl{border-radius:1rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:.5rem}.rounded-md{border-radius:.375rem}.rounded-sm{border-radius:.125rem}.border{border-width:1px}.border-b{border-bottom-width:1px}.border-l-2{border-left-width:2px}.border-blue-200{--tw-border-opacity:1;border-color:rgb(191 219 254/var(--tw-border-opacity,1))}.border-gray-100{--tw-border-opacity:1;border-color:rgb(243 244 246/var(--tw-border-opacity,1))}.border-gray-200{--tw-border-opacity:1;border-color:rgb(229 231 235/var(--tw-border-opacity,1))}.border-orange-200{--tw-border-opacity:1;border-color:rgb(254 215 170/var(--tw-border-opacity,1))}.border-white\/50{border-color:hsla(0,0%,100%,.5)}.bg-\[\#f2f2f2\]{--tw-bg-opacity:1;background-color:rgb(242 242 242/var(--tw-bg-opacity,1))}.bg-blue-50{--tw-bg-opacity:1;background-color:rgb(239 246 255/var(--tw-bg-opacity,1))}.bg-gray-50{--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1))}.bg-green-400{--tw-bg-opacity:1;background-color:rgb(74 222 128/var(--tw-bg-opacity,1))}.bg-orange-50{--tw-bg-opacity:1;background-color:rgb(255 247 237/var(--tw-bg-opacity,1))}.bg-pink-500{--tw-bg-opacity:1;background-color:rgb(236 72 153/var(--tw-bg-opacity,1))}.bg-purple-400{--tw-bg-opacity:1;background-color:rgb(192 132 252/var(--tw-bg-opacity,1))}.bg-white{--tw-bg-opacity:1;background-color:rgb(255 255 255/var(--tw-bg-opacity,1))}.bg-white\/60{background-color:hsla(0,0%,100%,.6)}.bg-white\/80{background-color:hsla(0,0%,100%,.8)}.object-contain{-o-object-fit:contain;object-fit:contain}.p-0{padding:0}.p-2\.5{padding:.625rem}.px-0\.5{padding-left:.125rem;padding-right:.125rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-3\.5{padding-left:.875rem;padding-right:.875rem}.px-4{padding-left:1rem;padding-right:1rem}.px-5{padding-left:1.25rem;padding-right:1.25rem}.px-6{padding-left:1.5rem;padding-right:1.5rem}.py-1{padding-top:.25rem;padding-bottom:.25rem}.py-1\.5{padding-top:.375rem;padding-bottom:.375rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-2\.5{padding-top:.625rem;padding-bottom:.625rem}.py-3\.5{padding-top:.875rem;padding-bottom:.875rem}.pb-2{padding-bottom:.5rem}.pb-3{padding-bottom:.75rem}.pb-5{padding-bottom:1.25rem}.pb-8{padding-bottom:2rem}.pl-4{padding-left:1rem}.pt-0{padding-top:0}.pt-2{padding-top:.5rem}.pt-5{padding-top:1.25rem}.pt-7{padding-top:1.75rem}.align-top{vertical-align:top}.font-mono{font-family:SFMono-Regular,Consolas,Liberation Mono,Menlo,monospace}.font-sans{font-family:Inter,-apple-system,BlinkMacSystemFont,Segoe 
UI,Roboto,sans-serif}.text-\[10px\]{font-size:10px}.text-\[12px\]{font-size:12px}.text-\[13px\]{font-size:13px}.text-\[15px\]{font-size:15px}.text-\[16px\]{font-size:16px}.text-\[9px\]{font-size:9px}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.font-bold{font-weight:700}.font-medium{font-weight:500}.uppercase{text-transform:uppercase}.leading-relaxed{line-height:1.625}.leading-tight{line-height:1.25}.tracking-wide{letter-spacing:.025em}.tracking-wider{letter-spacing:.05em}.text-blue-600{--tw-text-opacity:1;color:rgb(37 99 235/var(--tw-text-opacity,1))}.text-gray-400{--tw-text-opacity:1;color:rgb(156 163 175/var(--tw-text-opacity,1))}.text-gray-500{--tw-text-opacity:1;color:rgb(107 114 128/var(--tw-text-opacity,1))}.text-gray-600{--tw-text-opacity:1;color:rgb(75 85 99/var(--tw-text-opacity,1))}.text-gray-700{--tw-text-opacity:1;color:rgb(55 65 81/var(--tw-text-opacity,1))}.text-gray-800{--tw-text-opacity:1;color:rgb(31 41 55/var(--tw-text-opacity,1))}.text-gray-900{--tw-text-opacity:1;color:rgb(17 24 39/var(--tw-text-opacity,1))}.text-inherit{color:inherit}.text-orange-600{--tw-text-opacity:1;color:rgb(234 88 12/var(--tw-text-opacity,1))}.text-orange-700{--tw-text-opacity:1;color:rgb(194 65 12/var(--tw-text-opacity,1))}.text-pink-600{--tw-text-opacity:1;color:rgb(219 39 119/var(--tw-text-opacity,1))}.underline{text-decoration-line:underline}.no-underline{text-decoration-line:none}.decoration-gray-300{text-decoration-color:#d1d5db}.decoration-0{text-decoration-thickness:0}.decoration-1{text-decoration-thickness:1px}.underline-offset-2{text-underline-offset:2px}.opacity-80{opacity:.8}.shadow-sm{--tw-shadow:0 1px 2px 0 rgba(0,0,0,.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.ring-1{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.ring-inset{--tw-ring-inset:inset}.ring-black\/5{--tw-ring-color:rgba(0,0,0,.05)}.filter{filter:var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.backdrop-blur{--tw-backdrop-blur:blur(8px)}.backdrop-blur,.backdrop-blur-sm{-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) 
var(--tw-backdrop-sepia)}.backdrop-blur-sm{--tw-backdrop-blur:blur(4px)}.transition-colors{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-shadow{transition-property:box-shadow;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.\[page\:1\]{page:1}.\[page\:N\]{page:N}.\[search\:1\]{search:1}.\[search\:N\]{search:N}pre::-webkit-scrollbar{height:8px;background-color:transparent}pre::-webkit-scrollbar-thumb{background-color:hsla(0,0%,100%,.2);border-radius:4px}.markdown-body h1{margin-top:1.5rem;margin-bottom:1rem;border-bottom-width:1px;--tw-border-opacity:1;border-color:rgb(229 231 235/var(--tw-border-opacity,1));padding-bottom:.5rem;font-size:1.5rem;line-height:2rem;font-weight:700;--tw-text-opacity:1;color:rgb(31 41 55/var(--tw-text-opacity,1))}.markdown-body h2{margin-top:1.25rem;margin-bottom:.75rem;font-size:1.25rem}.markdown-body h2,.markdown-body h3{line-height:1.75rem;font-weight:700;--tw-text-opacity:1;color:rgb(31 41 55/var(--tw-text-opacity,1))}.markdown-body h3{margin-top:1rem;margin-bottom:.5rem;font-size:1.125rem}.markdown-body h4{margin-top:.75rem;margin-bottom:.5rem;font-size:1rem;line-height:1.5rem;font-weight:700;--tw-text-opacity:1;color:rgb(31 41 55/var(--tw-text-opacity,1))}.markdown-body p{margin-bottom:1rem;line-height:1.75rem;--tw-text-opacity:1;color:rgb(55 65 81/var(--tw-text-opacity,1))}.markdown-body ul{margin-bottom:1rem;list-style-type:disc}.markdown-body ul>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.25rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.25rem*var(--tw-space-y-reverse))}.markdown-body ul{padding-left:1.25rem;--tw-text-opacity:1;color:rgb(55 65 81/var(--tw-text-opacity,1))}.markdown-body ol{margin-bottom:1rem;list-style-type:decimal}.markdown-body ol>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.25rem*(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.25rem*var(--tw-space-y-reverse))}.markdown-body ol{padding-left:1.25rem;--tw-text-opacity:1;color:rgb(55 65 81/var(--tw-text-opacity,1))}.markdown-body li{padding-left:.25rem}.markdown-body li>p{margin-bottom:.25rem}.markdown-body a{--tw-text-opacity:1;color:rgb(37 99 235/var(--tw-text-opacity,1));text-decoration-line:underline;text-decoration-color:#93c5fd;text-underline-offset:2px;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.markdown-body a:hover{--tw-text-opacity:1;color:rgb(29 78 216/var(--tw-text-opacity,1));text-decoration-color:#1d4ed8}.markdown-body blockquote{margin-top:1rem;margin-bottom:1rem;border-top-right-radius:.25rem;border-bottom-right-radius:.25rem;border-left-width:4px;--tw-border-opacity:1;border-color:rgb(209 213 219/var(--tw-border-opacity,1));--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1));padding:.5rem .5rem .5rem 1rem;font-style:italic;--tw-text-opacity:1;color:rgb(75 85 
99/var(--tw-text-opacity,1))}.hljs-built_in,.hljs-keyword,.hljs-name,.hljs-selector-tag,.hljs-tag{color:#db2777!important}.hljs-addition,.hljs-attribute,.hljs-literal,.hljs-section,.hljs-string,.hljs-template-tag,.hljs-template-variable,.hljs-title,.hljs-type{color:#d97706!important}.hljs-comment,.hljs-deletion,.hljs-meta,.hljs-quote{color:#9ca3af!important}.hljs-bullet,.hljs-number,.hljs-symbol{color:#ea580c!important}.hljs-doctag,.hljs-keyword,.hljs-literal,.hljs-name,.hljs-section,.hljs-selector-tag,.hljs-strong,.hljs-title,.hljs-type{font-weight:700}.markdown-body code{border-radius:.25rem;--tw-bg-opacity:1;background-color:rgb(243 244 246/var(--tw-bg-opacity,1));padding:.125rem .375rem;font-family:SFMono-Regular,Consolas,Liberation Mono,Menlo,monospace;font-size:.875rem;line-height:1.25rem;--tw-text-opacity:1;color:rgb(55 65 81/var(--tw-text-opacity,1))}.markdown-body pre{margin-bottom:1rem;overflow-x:auto;border-radius:.5rem;border-width:1px;--tw-border-opacity:1;border-color:rgb(243 244 246/var(--tw-border-opacity,1));--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1));padding:1rem;font-size:.875rem;line-height:1.25rem;line-height:1.5;--tw-shadow:0 1px 2px 0 rgba(0,0,0,.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.markdown-body pre code{border-style:none;background-color:transparent;padding:0;--tw-text-opacity:1;color:rgb(31 41 55/var(--tw-text-opacity,1))}.markdown-body img{margin-top:1rem;margin-bottom:1rem;height:auto;border-radius:.5rem;border-width:1px;--tw-border-opacity:1;border-color:rgb(243 244 246/var(--tw-border-opacity,1));--tw-shadow:0 1px 2px 0 rgba(0,0,0,.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.markdown-body table{margin-bottom:1rem;width:100%;border-collapse:collapse;font-size:.875rem;line-height:1.25rem}.markdown-body th{--tw-border-opacity:1;--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1));text-align:left;font-weight:600;--tw-text-opacity:1}.markdown-body td,.markdown-body th{border-width:1px;border-color:rgb(209 213 219/var(--tw-border-opacity,1));padding:.5rem 1rem;color:rgb(55 65 81/var(--tw-text-opacity,1))}.markdown-body td{--tw-border-opacity:1;--tw-text-opacity:1}.markdown-body hr{margin-top:1.5rem;margin-bottom:1.5rem;--tw-border-opacity:1;border-color:rgb(229 231 235/var(--tw-border-opacity,1))}.markdown-body>:last-child{margin-bottom:0}.katex-display{overflow-x:auto;overflow-y:hidden;padding-top:.5rem;padding-bottom:.5rem}.mermaid{margin-top:1rem;margin-bottom:1rem;display:flex;justify-content:center;border-radius:.5rem;border-width:1px;--tw-border-opacity:1;border-color:rgb(243 244 246/var(--tw-border-opacity,1));--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1));padding:1rem}.citation-ref{display:inline-flex;align-items:center;justify-content:center;vertical-align:super;font-size:.75em;font-weight:700;color:#4b5563;background-color:#f3f4f6;border:1px solid #e5e7eb;border-radius:9999px;width:1.4em;height:1.4em;margin-left:2px;cursor:pointer;text-decoration:none!important;transition:all 
.2s;position:relative;line-height:1;box-shadow:none!important}.citation-ref:hover{background-color:#4b5563;color:#fff;text-decoration:none!important}.citation-tooltip{position:absolute;bottom:100%;left:50%;transform:translateX(-50%);margin-bottom:8px;background:#fff;border:1px solid #e5e7eb;box-shadow:0 4px 6px -1px rgba(0,0,0,.1),0 2px 4px -1px rgba(0,0,0,.06);border-radius:8px;padding:8px 12px;width:-moz-max-content;width:max-content;max-width:300px;z-index:50;opacity:0;visibility:hidden;transition:all .2s;pointer-events:none;font-size:12px;line-height:1.4;color:#374151;text-align:left;font-weight:400}.citation-ref:hover .citation-tooltip{opacity:1;visibility:visible;transform:translateX(-50%) translateY(-4px)}.citation-tooltip-title{font-weight:600;color:#111827;margin-bottom:2px;display:block;white-space:nowrap;overflow:hidden;text-overflow:ellipsis;max-width:280px}.citation-tooltip-url{color:#6b7280;font-size:10px;display:flex;align-items:center;gap:4px}.hover\:bg-gray-50:hover{--tw-bg-opacity:1;background-color:rgb(249 250 251/var(--tw-bg-opacity,1))}.hover\:text-black:hover{--tw-text-opacity:1;color:rgb(0 0 0/var(--tw-text-opacity,1))}.hover\:decoration-gray-500:hover{text-decoration-color:#6b7280}.hover\:shadow:hover{--tw-shadow:0 1px 3px 0 rgba(0,0,0,.1),0 1px 2px -1px rgba(0,0,0,.1);--tw-shadow-colored:0 1px 3px 0 var(--tw-shadow-color),0 1px 2px -1px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.\[\&\>\*\:first-child\]\:\!mt-0>:first-child{margin-top:0!important}

src/entari_plugin_hyw/assets/tailwind.input.css

@@ -121,7 +121,7 @@ pre::-webkit-scrollbar-thumb {
 }
 
 .markdown-body img {
-  @apply max-w-[60%] h-auto rounded-lg my-4 shadow-sm border border-gray-100;
+  @apply h-auto rounded-lg my-4 shadow-sm border border-gray-100;
 }
 
 .markdown-body table {