MeUtils 2025.5.27.14.55.19__py3-none-any.whl → 2025.5.29.18.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/METADATA +263 -263
- {MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/RECORD +21 -19
- examples/_openaisdk/openai_sophnet.py +47 -0
- meutils/apis/audio/minimax.py +202 -0
- meutils/apis/jimeng/videos.py +6 -3
- meutils/apis/search/metaso.py +26 -8
- meutils/apis/siliconflow/videos.py +1 -1
- meutils/apis/volcengine_apis/__init__.py +0 -1
- meutils/apis/volcengine_apis/images.py +5 -4
- meutils/apis/volcengine_apis/tasks.py +74 -8
- meutils/config_utils/lark_utils/common.py +1 -1
- meutils/data/VERSION +1 -1
- meutils/llm/check_utils.py +35 -3
- meutils/llm/completions/qwenllm.py +12 -2
- meutils/schemas/metaso_types.py +9 -3
- meutils/schemas/oneapi/common.py +24 -2
- meutils/schemas/openai_types.py +5 -2
- {MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/LICENSE +0 -0
- {MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/WHEEL +0 -0
- {MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/top_level.txt +0 -0
meutils/llm/check_utils.py
CHANGED
@@ -7,6 +7,7 @@
 # @WeChat : meutils
 # @Software : PyCharm
 # @Description :
+import os

 from meutils.pipe import *
 from meutils.decorators.retry import retrying
@@ -156,7 +157,7 @@ async def check_token_for_ppinfra(api_key, threshold: float = 1):
     try:
         client = AsyncOpenAI(base_url="https://api.ppinfra.com/v3/user", api_key=api_key)
         data = await client.get("", cast_to=object)
-
+        logger.debug(data)  # credit_balance
         return data["credit_balance"] > threshold
     except TimeoutException as e:
         raise
@@ -166,8 +167,30 @@ async def check_token_for_ppinfra(api_key, threshold: float = 1):
         return False


+@retrying()
+async def check_token_for_sophnet(api_key, threshold: float = 1):
+    if not isinstance(api_key, str):
+        return await check_tokens(api_key, check_token_for_sophnet)
+
+    try:
+        client = AsyncOpenAI(base_url=os.getenv("SOPHNET_BASE_URL"), api_key=api_key)
+        data = await client.chat.completions.create(
+            model="DeepSeek-v3",
+            messages=[{"role": "user", "content": "hi"}],
+            stream=True,
+            max_tokens=1
+        )
+        return True
+    except TimeoutException as e:
+        raise
+
+    except Exception as e:
+        logger.error(f"Error: {e}\n{api_key}")
+        return False
+
+
 if __name__ == '__main__':
-    from meutils.config_utils.lark_utils import get_next_token_for_polling
+    from meutils.config_utils.lark_utils import get_next_token_for_polling, get_series

     check_valid_token = partial(check_token_for_siliconflow, threshold=-1)
@@ -189,4 +212,13 @@ if __name__ == '__main__':

     # arun(check_token_for_moonshot("sk-Qnr87vtf2Q6MEfc2mVNkVZ4qaoZg3smH9527I25QgcFe7HrT"))

-    arun(check_token_for_ppinfra("sk_DkIaRrPq7sTiRPevhjV9WFZN3FvLk6WhCXOj1JAwu6c"))
+    # arun(check_token_for_ppinfra("sk_DkIaRrPq7sTiRPevhjV9WFZN3FvLk6WhCXOj1JAwu6c"))
+
+    # from meutils.config_utils.lark_utils import get_next_token_for_polling, get_series
+    #
+    # arun(get_series("https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=PP1PGr"))
+
+    # arun(check_token_for_sophnet(["gzHpp_zRtGaw1IjpepCiWu_ySyke3Hu5wR5VNNYMLyXwAESqZoZWUZ4T3tiWUxtac6n9Hk-kRRo4_jPQmndo-g"]))
+
+
+    arun(check_token_for_ppinfra("sk_F0kgPyCMTzmOH_-VCEJucOK8HIrbnLGYm_IWxBToHZQ"))
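The new check_token_for_sophnet validates a Sophnet key by issuing a one-token streaming chat completion against the endpoint in SOPHNET_BASE_URL, and delegates list input to check_tokens. A minimal usage sketch, assuming SOPHNET_BASE_URL is exported and using placeholder keys; the aggregate result for list input depends on what check_tokens returns.

# Usage sketch for check_token_for_sophnet (placeholder keys; SOPHNET_BASE_URL must be set)
import asyncio

from meutils.llm.check_utils import check_token_for_sophnet


async def main():
    # Single key: True if the one-token probe succeeds, False otherwise
    print(await check_token_for_sophnet("sk-placeholder"))

    # A list of keys is delegated to check_tokens, which runs this checker per key
    print(await check_token_for_sophnet(["sk-placeholder-1", "sk-placeholder-2"]))


asyncio.run(main())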
meutils/llm/completions/qwenllm.py
CHANGED
@@ -85,7 +85,17 @@ async def create(request: CompletionRequest, token: Optional[str] = None, cookie
     )
     # qwen结构
     model = request.model.lower()
-    if any(i in model for i in ("
+    if any(i in model for i in ("research",)):  # 遇到错误 任意切换
+        request.model = np.random.choice({""})
+        request.messages[-1]['chat_type'] = "deep_research"
+
+        # request.messages["extra"] = {
+        #     "meta": {
+        #         "subChatType": "deep_thinking"
+        #     }
+        # }
+
+    elif any(i in model for i in ("search",)):
         request.model = "qwen-max-latest"
         request.messages[-1]['chat_type'] = "search"
@@ -263,7 +273,7 @@ if __name__ == '__main__':
         # 'content': "9.8 9.11哪个大",
         'content': 'https://oss.ffire.cc/files/AIGC.pdf 总结下',

-        # "chat_type": "search",
+        # "chat_type": "search", deep_research

         # 'content': user_content,
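The first hunk adds a "research" branch ahead of the existing "search" branch: any model name containing "research" gets its last message tagged with chat_type = "deep_research". A standalone sketch of that routing, using plain dict messages instead of the project's CompletionRequest and omitting the np.random.choice model re-selection (both simplifications are assumptions for brevity).

# Routing sketch for the new branch, using plain dicts instead of CompletionRequest
def route_chat_type(model: str, messages: list[dict]) -> list[dict]:
    model = model.lower()
    if "research" in model:
        # deep research: the last message carries the chat_type marker
        messages[-1]["chat_type"] = "deep_research"
    elif "search" in model:
        # plain web search, as in the pre-existing branch
        messages[-1]["chat_type"] = "search"
    return messages


messages = route_chat_type("qwen-deep-research", [{"role": "user", "content": "hi"}])
print(messages[-1]["chat_type"])  # deep_research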
meutils/schemas/metaso_types.py
CHANGED
@@ -15,15 +15,20 @@ FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?shee


 class MetasoRequest(BaseModel):
-    model: Optional[Literal["ds-r1",]] = None
+    model: Optional[Literal["ds-r1", "fast_thinking"]] = None

-    """search-mini search search-pro
+    """search-mini search search-pro
+
+    model-mode
+
+    """
     mode: Union[str, Literal["concise", "detail", "research", "strong-research"]] = "detail"  # concise detail research

     question: str = "Chatfire"

-    """全网 文库 学术 图片 播客"""
+    """全网 文库 学术 图片 播客 视频"""
     scholarSearchDomain: str = "all"
+    engineType: Optional[Literal["pdf", "scholar", "image", "podcast", "video"]] = None

     url: str = "https://metaso.cn/"
     lang: str = "zh"
@@ -35,6 +40,7 @@ class MetasoRequest(BaseModel):
     newEngine: str = 'true'
     enableImage: str = 'true'

+    #

     # question: hi
     # mode: detail
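MetasoRequest now accepts "fast_thinking" as a model value and gains an engineType field that narrows the search domain (pdf, scholar, image, podcast, video). A construction sketch; only fields visible in this hunk are set, and every other field keeps its declared default.

# Construction sketch for the extended MetasoRequest (engineType is the new field)
from meutils.schemas.metaso_types import MetasoRequest

request = MetasoRequest(
    model="fast_thinking",   # newly allowed literal
    mode="detail",           # concise / detail / research / strong-research
    question="Chatfire",
    engineType="scholar",    # pdf / scholar / image / podcast / video, or None for the default all-web search
)
print(request)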
meutils/schemas/oneapi/common.py
CHANGED
@@ -49,8 +49,8 @@ MODEL_PRICE = {
     "kling_effects": 1,

     "kling_video": 1.2,
-    "kling_extend": 1
-    "kling_lip_sync": 1
+    "kling_extend": 1,
+    "kling_lip_sync": 1,

     "minimax_files_retrieve": 0.01,
@@ -66,6 +66,16 @@ MODEL_PRICE = {
     "minimax_video-01": MINIMAX_VIDEO,
     "minimax_video-01-live2d": MINIMAX_VIDEO,

+    # 火山
+    "api-volcengine-high_aes_general_v30l_zt2i": 0.05,
+    "api-volcengine-byteedit_v2.0": 0.05,
+
+    # videos
+    "api-videos-wan-ai/wan2.1-t2v-14b": 1.2,
+    "api-videos-wan-ai/wan2.1-t2v-14b-turbo": 1.2,
+    "api-videos-wan-ai/wan2.1-i2v-14b-720p": 1.2,
+    "api-videos-wan-ai/wan2.1-i2v-14b-720p-turbo": 1.2,
+
     # chatfire
     "ppu-0001": 0.0001,
     "ppu-001": 0.001,
@@ -297,7 +307,15 @@ MODEL_PRICE = {
     "net-gpt-4": 0.1,
     "perplexity": 0.01,
     "net-claude": 0.015,
+
+    # 秘塔
     "meta-search": 0.02,
+    "meta-deepsearch": 0.05,
+    "meta-deepresearch": 0.1,
+
+    "meta-search:scholar": 0.02,
+    "meta-deepsearch:scholar": 0.05,
+    "meta-deepresearch:scholar": 0.1,

     # 逆向
     "cogview-3": 0.01,
@@ -580,6 +598,7 @@ MODEL_RATIO = {
     "deepseek-v3": 1,
     "deepseek-v3-0324": 1,
     "deepseek-v3-250324": 1,
+    "deepseek-v3-fast": 1,

     "deepseek-v3-8k": 0.5,
     "deepseek-v3-128k": 5,
@@ -597,6 +616,7 @@ MODEL_RATIO = {
     'deepseek-reasoner': 2,
     'deepseek-reasoner-8k': 1,
     "deepseek-r1-250120": 2,
+    "deepseek-r1-0528": 2,

     "deepseek-search": 1,
     'deepseek-r1-search': 2,
@@ -1205,11 +1225,13 @@ COMPLETION_RATIO = {
     "deepseek-v3-0324": 4,
     "deepseek-v3-250324": 4,
     "deepseek-chat": 4,
+    "deepseek-v3-fast": 4,

     'deepseek-r1': 4,
     'deepseek-reasoner': 4,
     "deepseek-reasoner-164k": 8,
     "deepseek-r1-250120": 4,
+    "deepseek-r1-0528": 4,

     "deepseek-chat:function": 4,
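The pricing tables pick up the Volcengine image models, the wan2.1 video models, the meta-search/deepsearch/deepresearch tiers, and ratio entries for deepseek-v3-fast and deepseek-r1-0528. A lookup sketch against the updated dicts; reading MODEL_PRICE as a per-call price and MODEL_RATIO/COMPLETION_RATIO as prompt/completion multipliers is an assumption about how oneapi consumes these tables, and the describe helper is illustrative, not part of the module.

# Illustrative lookup against the updated tables (the helper itself is not part of the module)
from meutils.schemas.oneapi.common import MODEL_PRICE, MODEL_RATIO, COMPLETION_RATIO


def describe(model: str) -> str:
    if model in MODEL_PRICE:
        return f"{model}: fixed price {MODEL_PRICE[model]} per call"
    return (f"{model}: prompt ratio {MODEL_RATIO.get(model)}, "
            f"completion ratio {COMPLETION_RATIO.get(model)}")


print(describe("meta-deepsearch:scholar"))  # fixed price 0.05 per call
print(describe("deepseek-r1-0528"))         # prompt ratio 2, completion ratio 4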
meutils/schemas/openai_types.py
CHANGED
@@ -427,11 +427,14 @@ class ImagesResponse(_ImagesResponse):

 class TTSRequest(BaseModel):
     model: Optional[Union[str, Literal["tts-1", "tts-1-hd"]]] = 'tts'
-    voice: Optional[Union[str, Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer"
+    voice: Optional[Union[str, Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer",
+                                       "male", "femal",
+    ]]] = ""

     input: str
     instructions: Optional[str] = None
-
+    emotion: Optional[Literal[
+        "happy", "angry", "surprise", "coldness", "disgust", "fear", "excited", "hate", "sad", "fearful", "disgusted", "surprised", "neutral"]] = None

     speed: Optional[float] = None
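TTSRequest now lists "male" and "femal" (spelled as in the source) alongside the OpenAI-style voices and adds an optional emotion label. A construction sketch using only fields visible in this hunk.

# Construction sketch for the extended TTSRequest ("femal" is spelled as in the source)
from meutils.schemas.openai_types import TTSRequest

request = TTSRequest(
    model="tts-1",
    voice="alloy",
    input="Hello from MeUtils",
    emotion="happy",  # new optional field; one of the Literal values, or None
)
print(request)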
{MeUtils-2025.5.27.14.55.19.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/entry_points.txt
RENAMED
File without changes