MeUtils 2025.2.6.13.5.49__py3-none-any.whl → 2025.2.6.19.30.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/METADATA +27 -27
- {MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/RECORD +12 -12
- meutils/apis/search/__init__.py +0 -1
- meutils/apis/siliconflow/images.py +12 -1
- meutils/data/VERSION +1 -1
- meutils/llm/completions/agents/search.py +2 -3
- meutils/llm/completions/reasoner.py +99 -25
- meutils/schemas/oneapi/common.py +8 -2
- {MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/LICENSE +0 -0
- {MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/WHEEL +0 -0
- {MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/top_level.txt +0 -0
{MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: MeUtils
-Version: 2025.2.6.13.5.49
+Version: 2025.2.6.19.30.6
 Summary: description
 Home-page: https://github.com/yuanjie-ai/MeUtils
 Author: yuanjie
@@ -64,41 +64,41 @@ Requires-Dist: sse-starlette ; extra == 'ai'
 Requires-Dist: openai ; extra == 'ai'
 Requires-Dist: langchain ; extra == 'ai'
 Provides-Extra: all
-Requires-Dist:
+Requires-Dist: reportlab ; extra == 'all'
+Requires-Dist: pandas-profiling[notebook] ; extra == 'all'
+Requires-Dist: faiss-cpu ; extra == 'all'
+Requires-Dist: jieba ; extra == 'all'
 Requires-Dist: langchain ; extra == 'all'
-Requires-Dist:
-Requires-Dist: geopy ; extra == 'all'
-Requires-Dist: seaborn ; extra == 'all'
-Requires-Dist: pyarrow ; extra == 'all'
+Requires-Dist: jinja2 ; extra == 'all'
 Requires-Dist: streamlit ; extra == 'all'
-Requires-Dist:
-Requires-Dist: iteration-utilities ; extra == 'all'
-Requires-Dist: fastapi ; extra == 'all'
-Requires-Dist: thefuck ; extra == 'all'
-Requires-Dist: pandas-summary ; extra == 'all'
-Requires-Dist: dataframe-image ; extra == 'all'
-Requires-Dist: gunicorn ; extra == 'all'
-Requires-Dist: asyncmy ; extra == 'all'
-Requires-Dist: pretty-errors ; extra == 'all'
-Requires-Dist: polars ; extra == 'all'
-Requires-Dist: pandas-profiling[notebook] ; extra == 'all'
+Requires-Dist: filetype ; extra == 'all'
 Requires-Dist: sse-starlette ; extra == 'all'
 Requires-Dist: fastapi[all] ; extra == 'all'
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: redis-py-cluster ; extra == 'all'
+Requires-Dist: pretty-errors ; extra == 'all'
+Requires-Dist: geopy ; extra == 'all'
 Requires-Dist: schedule ; extra == 'all'
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: sqlalchemy ; extra == 'all'
+Requires-Dist: openai ; extra == 'all'
 Requires-Dist: pymupd ; extra == 'all'
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: thefuck ; extra == 'all'
+Requires-Dist: uvicorn ; extra == 'all'
+Requires-Dist: simplejson ; extra == 'all'
 Requires-Dist: cachetools ; extra == 'all'
-Requires-Dist:
+Requires-Dist: seaborn ; extra == 'all'
 Requires-Dist: pymongo ; extra == 'all'
-Requires-Dist: thriftpy2 ; extra == 'all'
 Requires-Dist: pymysql ; extra == 'all'
+Requires-Dist: thriftpy2 ; extra == 'all'
+Requires-Dist: fastapi ; extra == 'all'
+Requires-Dist: redis-py-cluster ; extra == 'all'
+Requires-Dist: pandas-summary ; extra == 'all'
+Requires-Dist: polars ; extra == 'all'
+Requires-Dist: iteration-utilities ; extra == 'all'
+Requires-Dist: dataframe-image ; extra == 'all'
+Requires-Dist: asyncmy ; extra == 'all'
+Requires-Dist: pymilvus ; extra == 'all'
+Requires-Dist: pyarrow ; extra == 'all'
+Requires-Dist: missingno ; extra == 'all'
+Requires-Dist: gunicorn ; extra == 'all'
 Provides-Extra: ann
 Requires-Dist: pymilvus ; extra == 'ann'
 Requires-Dist: faiss-cpu ; extra == 'ann'
{MeUtils-2025.2.6.13.5.49.dist-info → MeUtils-2025.2.6.19.30.6.dist-info}/RECORD CHANGED

@@ -431,7 +431,7 @@ meutils/apis/replicateai/images.py,sha256=dDdi6IG2Ry5WFAEy06d9koarnrnS8T91f8BIja
 meutils/apis/replicateai/raw.py,sha256=v_Hztoa8uplkkUSDrnxlhc4qizW3-4tyra685Qnwxtk,1605
 meutils/apis/runwayml/__init__.py,sha256=90oL_3o8YkFPYm4R4PCxd3m9hgnHpOCLr0u-X4BDg-w,244
 meutils/apis/runwayml/gen.py,sha256=JrfegC4pI2r7DprDLGMi5px0K1C1pSgopnz0lcJU5RY,5194
-meutils/apis/search/__init__.py,sha256=
+meutils/apis/search/__init__.py,sha256=1lg0ogEBgtwTi3XvkL5hMfSd14A6YNLO7M9itEJmaYI,244
 meutils/apis/search/baichuan.py,sha256=OUD0TH7KzqSxtuRJgjJPWO8XRQIoUwW5RX7lWMo7BXw,268
 meutils/apis/search/metaso.py,sha256=V7gOUM9_8aNwxVaPhM0GSTFHRMc853nZRdVN_26W40A,9871
 meutils/apis/search/metaso_.py,sha256=PqMX3FLYbbAcc9qpqrPZ58LuSF29h0asZO1XMYmXbes,3144
@@ -440,7 +440,7 @@ meutils/apis/search/searxng.py,sha256=RBPeq-AYj5D42gROrZNg0SxIWwWEW0oqrbQ3wEDH9k
 meutils/apis/siliconflow/__init__.py,sha256=DQ-A6wAWGna49pmyGhcIWgc2zx6TN9DfQmSUdAW7qjk,241
 meutils/apis/siliconflow/audio.py,sha256=pk1OROATtAURPQ6VkswmR5gJFOFYJJPnjp-boDAf8X0,2017
 meutils/apis/siliconflow/image_to_image.py,sha256=EnoT8p2IS35XGZ0ivFKTBII3b_iIG9NR_Ae2nOA-orc,89442
-meutils/apis/siliconflow/images.py,sha256=
+meutils/apis/siliconflow/images.py,sha256=zdiXL6drHWQjZoGs8q79h-gkjj4o2jTyUgaV8tq35ow,6548
 meutils/apis/siliconflow/rerankers.py,sha256=GuKT33MmAiVULqNaAO9IIy1Zp570-kYJ6xP03eLCp5Y,1259
 meutils/apis/siliconflow/text_to_image.py,sha256=ZSyvCF_CfH6pV4BXM4TJjmQiu2x4nF197GliDddWI8Q,4458
 meutils/apis/siliconflow/utils.py,sha256=uPpJVgqVmTzTN0LuHWliTWaf1rzneNyA-mELwwrUbdQ,2092
@@ -518,7 +518,7 @@ meutils/config_utils/lark_utils/demo.py,sha256=3g0Fs7oLaeW75T60gYWMLgyNg1OnfOjfH
 meutils/config_utils/lark_utils/x.py,sha256=MlMQGhehP9xMEgetxVCX68XFaosfKoW1JA5cZ3JqN2w,1857
 meutils/crawlers/__init__.py,sha256=TBU4xA-IOsHV-0yIkW7YXxn_QT7TT8NncqxO7IykEfs,271
 meutils/data/SimHei.ttf,sha256=-XEnekS5yHP_URkT4XBI2w22ylV-KxudhkeIYFbrILA,10062565
-meutils/data/VERSION,sha256=
+meutils/data/VERSION,sha256=KW0YQXuKie7B4BhaUQetRNs-K-FmkZvbrbcwBSmD9z0,19
 meutils/data/_FLAG,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 meutils/data/_SUCCESS,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 meutils/data/__init__.py,sha256=Hfwkkxs4zHqKhxht0YrhS566a9P5axtmgNvM5wF8ceQ,243
@@ -585,7 +585,7 @@ meutils/llm/completions/modelscope.py,sha256=umPlJuHSL2RR0oNdTIAqts8iqEmGp_MR6Vh
 meutils/llm/completions/oi.py,sha256=DmvzxJfdLA_I3MWcULhFtiUdo90j7V2dLvYvmZ-Tm9U,293
 meutils/llm/completions/qwenllm.py,sha256=d3E51-iKSCuL2HrVO4aUmGN_fa5Jwz5K1ENbK7MtXEE,2043
 meutils/llm/completions/rag.py,sha256=cpQ1aZX2QI1QDfFpODTxWHPYglK6_3qrlUM_KHLBNhc,1112
-meutils/llm/completions/reasoner.py,sha256=
+meutils/llm/completions/reasoner.py,sha256=3spXH61ZByy-L3mvVhG4fLJScYgv6CTjayhKrDfPLTA,5318
 meutils/llm/completions/tryblend.py,sha256=NfVRktn1QiE2k60PzEI-LmKuxZCc7mtL_KlsrVcZg4k,8957
 meutils/llm/completions/tune.py,sha256=FypfUgsB34t-E9xI6WydJYEk_-phscrLOX1iUCeKb_E,10138
 meutils/llm/completions/x.py,sha256=XE8aCyuhkpYq0nho1mq9lAdRU1yOPc8BpvCpNYYMYqM,630
@@ -593,7 +593,7 @@ meutils/llm/completions/xx.py,sha256=pPfVRczvXZFUvAUOAFr0MMHMb5kVPNfKVhOLKugq52M
 meutils/llm/completions/yuanbao.py,sha256=aN3MwR6yORFv5pGA1ODiLqlNKCfAKQslIfpruAD6m08,5468
 meutils/llm/completions/agents/__init__.py,sha256=Wklnf7FTHm43hyVwfT8FXX44nqdOTykPb_HORIqDuuw,270
 meutils/llm/completions/agents/file.py,sha256=4SGCkHSBEdAB6NbxPaViXYUJq5giGtEF9FEgq2WxyBY,4934
-meutils/llm/completions/agents/search.py,sha256=
+meutils/llm/completions/agents/search.py,sha256=oFM7fh5z5Uujkv53m4yM2sRdbnL52eDIc_T1H0IEv0I,4560
 meutils/llm/completions/rag/__init__.py,sha256=VH4g9H0pqMM1Rkjc1xefQb4Uh8f9vlTt6tjdD6G6Wc0,272
 meutils/llm/completions/rag/fire.py,sha256=0a_POkY9mEy1YwZHXOGnqf9EVyjiJwx3fWhmv_bQ54U,5743
 meutils/llm/completions/rag/qwen.py,sha256=1dBNLLbQDRsM-7EGaStcWFU8HRa-rp3RxJ9cpxu6TBg,265
@@ -744,7 +744,7 @@ meutils/schemas/db/__init__.py,sha256=m1maURVoM6dIW0yt6ELZrZTzULtkHybVOSXtHNJRVI
 meutils/schemas/db/oneapi_types.py,sha256=YanT0q9pU7dva2ZBPWjCpwGNUuifnJh0zUJJCQ9070c,3848
 meutils/schemas/oneapi/__init__.py,sha256=uevbi3QAvFzN9WPbx9bYKTDyKt7P2ueZO6W0nSiD0sk,289
 meutils/schemas/oneapi/_types.py,sha256=ClvAaNy3SahEN8lL8KEErHTD6HANelXUeKc_3iLfosQ,1488
-meutils/schemas/oneapi/common.py,sha256=
+meutils/schemas/oneapi/common.py,sha256=OdxupI70-Vukr37xG_ERUuInaH5KPe5BySDGpjyUhI0,27706
 meutils/schemas/oneapi/icons.py,sha256=T7W5gInBJoHe62wzMimbG_UI-wn3_-rmQ1O4O2z-CQY,1089
 meutils/schemas/oneapi/model_group_info.py,sha256=rGtflYJuFIjk5MsVEvK9JUR4IciX8jfErqeLqf8DIlQ,1586
 meutils/schemas/oneapi/model_info.py,sha256=_uwKEPIIqm7ZYfhmpxtXB2QNsS83SpJY-OaBzvogC9w,772
@@ -872,9 +872,9 @@ meutils/tools/seize.py,sha256=nOKAS63w-Lbi48I0m2MPhdsokUTwxco0laPxYVmW4Mw,1064
 meutils/tools/service_monitor.py,sha256=ibsLtBN2g2DL7ZnLJ8vhiZOiOcqTAyx711djDdBK-3M,1255
 meutils/tools/sys_monitor.py,sha256=6MoyzrItqDUOSjfHcMJmMofQkEPTW36CT_aKui0rg84,429
 meutils/tools/token_monitor.py,sha256=Np-YK-R4P4IPAXyZvMxwvXI4sFmNJQAQK1lSegNaYpA,997
-MeUtils-2025.2.6.13.5.49.dist-info/LICENSE,sha256=
-MeUtils-2025.2.6.13.5.49.dist-info/METADATA,sha256=
-MeUtils-2025.2.6.13.5.49.dist-info/WHEEL,sha256=
-MeUtils-2025.2.6.13.5.49.dist-info/entry_points.txt,sha256=
-MeUtils-2025.2.6.13.5.49.dist-info/top_level.txt,sha256=
-MeUtils-2025.2.6.13.5.49.dist-info/RECORD,,
+MeUtils-2025.2.6.19.30.6.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+MeUtils-2025.2.6.19.30.6.dist-info/METADATA,sha256=h4jhcsxNjJwqoqZiMc7XQPhHEDZNlboNaOkblPwVX-4,6141
+MeUtils-2025.2.6.19.30.6.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+MeUtils-2025.2.6.19.30.6.dist-info/entry_points.txt,sha256=lufZlBHRqqZKdY-ZQJ4CSZb0qhV5hQC37egZna9M7ug,357
+MeUtils-2025.2.6.19.30.6.dist-info/top_level.txt,sha256=cInfxMmkgNOskurdjwP5unau4rA7Uw48nu07tYhS7KY,22
+MeUtils-2025.2.6.19.30.6.dist-info/RECORD,,
meutils/apis/siliconflow/images.py CHANGED
@@ -81,7 +81,7 @@ async def generate(request: ImageRequest, api_key: Optional[str] = None):
     request.model = MODELS.get(request.model, DEFAULT_MODEL)
     logger.debug(request)
 
-    if any(i in request.model.lower() for i in {"pro-max", }):
+    if any(i in request.model.lower() for i in {"pro-max", "pro"}):
         request.num_inference_steps = 20
     api_key = api_key or await get_next_token_for_polling(
         FEISHU_URL,
@@ -136,6 +136,17 @@ if __name__ == '__main__':
         'negative_prompt': '', 'n': 1, 'response_format': 'url', 'size': '1152x2048', 'num_inference_steps': 20,
         'seed': None}
 
+    data = {
+        "model": "flux-dev",
+        "prompt": "(Chinese dragon soaring through the clouds).(majestic, colorful, mythical, powerful, ancient).(DSLR camera).(wide-angle lens).(dawn)(fantasy photography).(Kodak Ektar 100)",
+        "negative_prompt": "",
+        "n": 1,
+        "response_format": "url",
+        "size": "1366x768",
+        "num_inference_steps": 20,
+        "seed": None
+    }
+
     # request = FluxImageRequest(model="flux", prompt="a dog", size="1024x1024", num_inference_steps=1)
     # request = FluxImageRequest(model="flux-pro", prompt="a dog", size="10x10", num_inference_steps=1)
     request = FluxImageRequest(**data)
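The functional change in `generate` is the widened marker set. Because the check is a plain substring test over the lowercased model name, adding `"pro"` means every model whose name contains `pro` (not only the `pro-max` variants) is now bumped to 20 inference steps. A standalone sketch of that check; the model names below are illustrative only, not taken from the package's model table:

```python
# Minimal sketch of the substring check shown in the diff above.
MARKERS = {"pro-max", "pro"}

def bumps_inference_steps(model: str) -> bool:
    # Mirrors the diff: any marker appearing anywhere in the lowercased model name.
    return any(marker in model.lower() for marker in MARKERS)

print(bumps_inference_steps("FLUX-Pro"))      # True: now matched via "pro"
print(bumps_inference_steps("flux-pro-max"))  # True: matched before and after the change
print(bumps_inference_steps("flux-dev"))      # False: keeps the requested step count
```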
meutils/data/VERSION CHANGED

@@ -1 +1 @@
-2025.02.06.
+2025.02.06.19.30.06
meutils/llm/completions/agents/search.py CHANGED

@@ -96,7 +96,7 @@ class Completions(object):
         data['model'] = "web-search-pro"
         data['stream'] = False
         search_completion = await zhipuai_client.chat.completions.create(**data)
-        logger.debug(search_completion)
+        logger.debug(search_completion)  # todo: return the detailed search results
 
         # LLM call
         request.messages.append({
@@ -105,7 +105,6 @@ class Completions(object):
         })
 
         data = to_openai_params(request)
-
         completion = await chatfire_client.chat.completions.create(**data)
         return completion
 
@@ -121,7 +120,7 @@ if __name__ == '__main__':
         # model="doubao",
 
         messages=[
-            {"role": "user", "content": "
+            {"role": "user", "content": "deepseek-r1 vs o3-mini"}
         ],
 
         stream=False
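The hunks above only touch logging and the example prompt, but they sit inside a two-step flow: first call zhipuai's `web-search-pro` tool model, then append the search output to the conversation and ask the actual chat model. A minimal sketch of that flow, assuming `zhipuai_client` and `chatfire_client` are `AsyncOpenAI` instances configured elsewhere (the client setup, endpoint URLs, and default model name here are assumptions, not code from the package):

```python
from openai import AsyncOpenAI

# Placeholder clients; the package configures these elsewhere.
zhipuai_client = AsyncOpenAI(base_url="https://open.bigmodel.cn/api/paas/v4/", api_key="zhipu-key")
chatfire_client = AsyncOpenAI(base_url="https://api.example.com/v1", api_key="chat-key")

async def search_then_answer(messages: list[dict], model: str = "deepseek-chat") -> str:
    # Step 1: non-streaming call to zhipuai's web-search-pro tool model.
    search = await zhipuai_client.chat.completions.create(
        model="web-search-pro", messages=messages, stream=False,
    )
    # Step 2: append the raw search result as extra context and ask the chat model.
    messages = messages + [{"role": "user", "content": search.model_dump_json()}]
    completion = await chatfire_client.chat.completions.create(model=model, messages=messages)
    return completion.choices[0].message.content
```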
meutils/llm/completions/reasoner.py CHANGED

@@ -6,8 +6,12 @@
 # @Author : betterme
 # @WeChat : meutils
 # @Software : PyCharm
-# @Description :
+# @Description :
+"""
+1. Adapt <think>-tag reasoning output for any client
+2. Standardize for open source
 
+"""
 
 from openai import AsyncOpenAI
 
@@ -17,38 +21,102 @@ from meutils.llm.openai_utils import to_openai_params
 
 from meutils.schemas.openai_types import chat_completion, chat_completion_chunk, ChatCompletionRequest, CompletionUsage
 
+reasoner_system = {
+    'role': 'system',
+    'content': '你是个AI助手,请务必在深度思考之后,再回答用户问题'
+}
+# re.DOTALL lets "." match newline characters
+think_pattern = re.compile(r'<think>(.*?)</think>', re.DOTALL)
+
+
+def extract_think_content(content):
+    # Match the content between <think> and </think>, including newlines
+    # re.DOTALL lets "." match newline characters
+    think_content = think_pattern.search(content)
+
+    if think_content:
+        # Text inside the think tag
+        think_text = think_content.group(1)
+        # Remove the whole think tag and its content
+        cleaned_content = think_pattern.sub('', content)
+        # Return the think content and the cleaned text
+        return think_text.strip(), cleaned_content.strip()
+    return "", content
+
+
+reasoning = True
+
 
 class Completions(object):
 
-    def __init__(self, api_key: Optional[str] = None, base_url: Optional[str] = None):
-
-        self.
+    def __init__(self, api_key: Optional[str] = None, base_url: Optional[str] = None, reasoning_stream: bool = True):
+
+        self.reasoning_stream = reasoning_stream
 
         self.client = AsyncOpenAI(
-            base_url=
+            base_url=base_url, api_key=api_key,
         )
 
     async def create(self, request: ChatCompletionRequest):
+        """Adapt any client"""
+        s = time.perf_counter()
+        request.messages.insert(0, reasoner_system)
+
         data = to_openai_params(request)
 
         if request.stream:
-
-
-
-
-
-
-
-
-
-
+            completion_chunks = await self.client.chat.completions.create(**data)
+
+            is_reasoning_content = True
+            reasoning_prefix = "> Reasoning\n"
+            reasoning_suffix = "Reasoned for "
+
+            async for chunk in completion_chunks:
+                # chunk.model = "deepseek-reasoner"
+                message = chunk.choices[0].delta
+                message.content = message.content or ""
+
+                if not hasattr(message, 'reasoning_content'):  # standardize
+                    message.reasoning_content = ""
+                    if is_reasoning_content:
+                        message.reasoning_content = (
+                            message.content.replace("<think>", "")
+                            .replace("</think>", "")
+                            .replace("\n\n", "\n")
+                        )
+                    if message.content == "</think>":  # thinking finished
+                        is_reasoning_content = False
+                        message.content = ""
+
+                logger.debug(message)
+
+                if self.reasoning_stream:  # adapt any client: surface the reasoning content
+
+                    if message.reasoning_content.strip():  # thinking starts
+                        message.content = f"{reasoning_prefix}{message.reasoning_content}"
+                        reasoning_prefix = ""
+
+                    elif message.content:  # thinking finished
+                        if reasoning_suffix:
+                            message.content = f"{reasoning_suffix} {time.perf_counter() - s:.0f} seconds.\n{message.content}"
+                            reasoning_suffix = ""
+
+                    yield chunk
                 else:
-                    yield
+                    yield chunk
 
-
-
-        else:
+        else:  # non-streaming
             completions = await self.client.chat.completions.create(**data)
+
+            completions.model = "deepseek-reasoner"
+            message = completions.choices[0].message
+
+            if not hasattr(message, 'reasoning_content'):
+                reasoning_content, content = extract_think_content(message.content)
+
+                completions.choices[0].message.reasoning_content = reasoning_content
+                completions.choices[0].message.content = content
+
             yield completions
 
 
@@ -68,20 +136,26 @@ if __name__ == '__main__':
         # model="qwen-max-latest",
         # model="qwen-plus-latest",
 
-        model="deepseek-r1:1.5b",
+        # model="deepseek-r1:1.5b",
+        model="deepseek-r1:32b",
+
         # model="deepseek-r1",
 
         messages=[
-            {
-
-
-            },
+            # {
+            #     'role': 'system',
+            #     'content': '深度思考之后在回答问题并给出详细的理由'
+            # },
+            # {
+            #     'role': 'system',
+            #     'content': '你是个AI助手,请务必在深度思考之后,再回答用户问题'
+            # },
             {
                 'role': 'user',
                 'content': '你好'
            },
 
        ],
-        stream=
+        stream=False,
    )
    arun(Completions().create(request))
meutils/schemas/oneapi/common.py CHANGED

@@ -579,7 +579,10 @@ MODEL_RATIO = {
     "gemini-1.5-flash-exp-0827": 0.1,
     "google/gemini-flash-1.5-exp": 0.1,  # free on openrouter
     "google/gemini-flash-1.5-8b-exp": 0.1,  # free on openrouter
-
+
+    "gemini-2.0-flash": 0.75,
+    "gemini-2.0-flash-001": 0.75,
+
     "gemini-2.0-flash-exp": 0.5,
     "gemini-2.0-flash-thinking-exp": 1,
     "gemini-2.0-flash-thinking-exp-1219": 1,
@@ -751,7 +754,10 @@ COMPLETION_RATIO = {
 
     "gemini-exp-1206": 5,
 
-
+
+    "gemini-2.0-flash": 4,
+    "gemini-2.0-flash-001": 4,
+
     "gemini-2.0-flash-exp": 5,
     "gemini-2.0-flash-thinking-exp": 5,
     "gemini-2.0-flash-thinking-exp-1219": 5,
File without changes
|
File without changes
|
File without changes
|
File without changes
|