MeUtils 2025.5.27.14.55.19__py3-none-any.whl → 2025.5.29.18.35.35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,13 +15,28 @@ https://www.volcengine.com/docs/6791/1384311
 import os
 
 from meutils.pipe import *
-from meutils.io.files_utils import to_url
+from meutils.caches import rcache
+
+from meutils.decorators.retry import retrying
+from meutils.db.redis_db import redis_aclient
+from meutils.config_utils.lark_utils import get_next_token_for_polling
 from meutils.schemas.jimeng_types import VideoRequest, ImageRequest
 
 from volcengine.visual.VisualService import VisualService
+from fastapi import APIRouter, File, UploadFile, Query, Form, Depends, Request, HTTPException, status, BackgroundTasks
+
+FEISHU = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2knEd?sheet=OiHxsE"
+
 
+# and "Access Denied" not in str(r)
+
+@retrying(max_retries=5, predicate=lambda r: "Concurrent Limit" in str(r))  # 限流
+async def create_task(request: Union[ImageRequest, VideoRequest, dict], token: Optional[str] = None):
+    """https://www.volcengine.com/docs/6791/1399614"""
+    token = token or await get_next_token_for_polling(FEISHU)
+
+    logger.debug(token)
 
-def create_task(request: Union[ImageRequest, VideoRequest], token: Optional[str] = None):
     visual_service = VisualService()
 
     if token:
@@ -30,26 +45,77 @@ def create_task(request: Union[ImageRequest, VideoRequest], token: Optional[str]
         visual_service.set_sk(sk)
 
     # request
-    payload = request.model_dump(exclude_none=True)
+    if not isinstance(request, dict):
+        request = request.model_dump(exclude_none=True)
+
+    response = visual_service.cv_sync2async_submit_task(request)  # 同步转异步
+
+    """
+    {'code': 10000,
+     'data': {'task_id': '15106285208671192523'},
+     'message': 'Success',
+     'request_id': '202505291707517FC0D2B135CEE77BC4A5',
+     'status': 10000,
+     'time_elapsed': '150.967328ms'}
+    """
+
+    logger.debug(response)
+    if response.get('code') == 10000:
+        await redis_aclient.set(response['data']['task_id'], token, ex=7 * 24 * 3600)
+    else:
+        raise Exception(response)
+
+    return response
+
+
+#
+# @retrying(max_retries=5, predicate=lambda r: "Concurrent Limit" in str(r))  # 限流
+# @rcache(ttl=5)
+async def get_task(request: dict):
+    task_id = request.get("task_id", "")
+    token = await redis_aclient.get(task_id)  # 绑定对应的 token
+    token = token and token.decode()
+    if not token:
+        raise HTTPException(status_code=404, detail="TaskID not found")
+
+    visual_service = VisualService()
+
+    if token:
+        ak, sk = token.split('|')
+        visual_service.set_ak(ak)
+        visual_service.set_sk(sk)
+
+    response = visual_service.cv_get_result(request)  # 同步转异步
+
+    logger.debug(response)
 
-    response = visual_service.cv_submit_task(payload)
     return response
 
 
 if __name__ == '__main__':
     token = f"""{os.getenv("VOLC_ACCESSKEY")}|{os.getenv("VOLC_SECRETKEY")}"""
+    token = "AKLTOWM5ZTc5ZDFhZWNlNDIzODkwYmZiNjEyNzYwNzE0MTI|T0RCbFpHRTJaRFEyWmpjeE5ERXpNR0ptWlRCaU16WmhPRE0wWVdKa01tTQ=="
     prompt = """
 3D魔童哪吒 c4d 搬砖 很开心, 很快乐, 精神抖擞, 背景是数不清的敖丙虚化 视觉冲击力强 大师构图 色彩鲜艳丰富 吸引人 背景用黄金色艺术字写着“搬砖挣钱” 冷暖色对比
     """
 
     request = ImageRequest(
+        req_key="high_aes_general_v30l_zt2i",
         prompt=prompt,
     )
 
-    request = VideoRequest(
-        prompt=prompt
-    )
+    # request = VideoRequest(
+    #     prompt=prompt
+    # )
 
+    # arun(create_task(request, token))
+    # arun(create_task(request))
 
+    # request = {
+    #     "task_id": "141543714223689974",
+    #     "req_key": "high_aes_general_v30l_zt2i"
+    # }
+    # #
+    # arun(get_task(request))
 
-    print(create_task(request, token))
+    print(bjson(request))
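The new flow splits submission and retrieval: `create_task` pulls an `ak|sk` pair from the Feishu sheet when no token is passed and binds it to the returned `task_id` in Redis (7-day TTL), while `get_task` re-reads that token before calling `cv_get_result`. A minimal sketch of driving the pair end to end; the module's import path and the result's status field are not shown in the diff and are assumptions here:

```python
import asyncio

from meutils.schemas.jimeng_types import ImageRequest

# from <the patched module> import create_task, get_task  # fill in the actual path


async def submit_and_poll():
    request = ImageRequest(
        req_key="high_aes_general_v30l_zt2i",
        prompt="a corgi surfing a wave, cinematic lighting",
    )

    # create_task binds the chosen ak|sk token to the returned task_id in Redis.
    submitted = await create_task(request)
    task_id = submitted["data"]["task_id"]

    # get_task looks the token up again, so the caller only needs to keep task_id.
    for _ in range(30):
        result = await get_task({"task_id": task_id, "req_key": "high_aes_general_v30l_zt2i"})
        status = result.get("data", {}).get("status")  # field name assumed; not shown in the diff
        if status not in (None, "in_queue", "generating"):  # assumed in-progress values
            return result
        await asyncio.sleep(2)


if __name__ == "__main__":
    asyncio.run(submit_and_poll())
```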
@@ -307,7 +307,7 @@ async def get_dataframe(iloc_tuple: Optional[tuple] = None, feishu_url: Optional
 async def get_series(feishu_url: str, index: int = 0, duplicated: bool = False):  # 系统配置
     df = await aget_spreadsheet_values(feishu_url=feishu_url, to_dataframe=True)
     series = df[index]
-    values = [i for i in series if i]  # todo: 非标准字符串处理
+    values = [i for i in series if i and isinstance(i, str)]  # todo: 非标准字符串处理
     if duplicated:  # 有序去重
         values = values | xUnique
 
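The `isinstance` guard matters because empty spreadsheet cells typically surface as float NaN once the sheet lands in a DataFrame, and NaN is truthy, so `if i` alone does not drop it. A standalone illustration using plain pandas (not the Feishu client):

```python
import pandas as pd

series = pd.Series(["sk-aaa", None, "sk-bbb", float("nan"), ""])

kept_old = [i for i in series if i]                          # NaN slips through: bool(nan) is True
kept_new = [i for i in series if i and isinstance(i, str)]   # strings only

print(kept_old)  # ['sk-aaa', 'sk-bbb', nan]
print(kept_new)  # ['sk-aaa', 'sk-bbb']
```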
meutils/data/VERSION CHANGED
@@ -1 +1 @@
-2025.05.27.14.55.19
+2025.05.29.18.35.35
@@ -7,6 +7,7 @@
 # @WeChat : meutils
 # @Software : PyCharm
 # @Description :
+import os
 
 from meutils.pipe import *
 from meutils.decorators.retry import retrying
@@ -156,7 +157,7 @@ async def check_token_for_ppinfra(api_key, threshold: float = 1):
     try:
         client = AsyncOpenAI(base_url="https://api.ppinfra.com/v3/user", api_key=api_key)
         data = await client.get("", cast_to=object)
-        # logger.debug(data)  # credit_balance
+        logger.debug(data)  # credit_balance
         return data["credit_balance"] > threshold
     except TimeoutException as e:
         raise
@@ -166,8 +167,30 @@ async def check_token_for_ppinfra(api_key, threshold: float = 1):
         return False
 
 
+@retrying()
+async def check_token_for_sophnet(api_key, threshold: float = 1):
+    if not isinstance(api_key, str):
+        return await check_tokens(api_key, check_token_for_sophnet)
+
+    try:
+        client = AsyncOpenAI(base_url=os.getenv("SOPHNET_BASE_URL"), api_key=api_key)
+        data = await client.chat.completions.create(
+            model="DeepSeek-v3",
+            messages=[{"role": "user", "content": "hi"}],
+            stream=True,
+            max_tokens=1
+        )
+        return True
+    except TimeoutException as e:
+        raise
+
+    except Exception as e:
+        logger.error(f"Error: {e}\n{api_key}")
+        return False
+
+
 if __name__ == '__main__':
-    from meutils.config_utils.lark_utils import get_next_token_for_polling
+    from meutils.config_utils.lark_utils import get_next_token_for_polling, get_series
 
     check_valid_token = partial(check_token_for_siliconflow, threshold=-1)
 
@@ -189,4 +212,13 @@ if __name__ == '__main__':
 
     # arun(check_token_for_moonshot("sk-Qnr87vtf2Q6MEfc2mVNkVZ4qaoZg3smH9527I25QgcFe7HrT"))
 
-    arun(check_token_for_ppinfra("sk_DkIaRrPq7sTiRPevhjV9WFZN3FvLk6WhCXOj1JAwu6c"))
+    # arun(check_token_for_ppinfra("sk_DkIaRrPq7sTiRPevhjV9WFZN3FvLk6WhCXOj1JAwu6c"))
+
+    # from meutils.config_utils.lark_utils import get_next_token_for_polling, get_series
+    #
+    # arun(get_series("https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=PP1PGr"))
+
+    # arun(check_token_for_sophnet(["gzHpp_zRtGaw1IjpepCiWu_ySyke3Hu5wR5VNNYMLyXwAESqZoZWUZ4T3tiWUxtac6n9Hk-kRRo4_jPQmndo-g"]))
+
+
+    arun(check_token_for_ppinfra("sk_F0kgPyCMTzmOH_-VCEJucOK8HIrbnLGYm_IWxBToHZQ"))
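`check_token_for_sophnet` validates a key by opening a one-token streamed chat completion rather than hitting a balance endpoint, so any auth failure surfaces as an exception and maps to False. A hedged usage sketch in the module's own `__main__` style; the keys are placeholders, and the shape of the aggregate result from `check_tokens` is not shown in the diff:

```python
# Assumes this runs inside the same module, so check_token_for_sophnet and arun are in scope.
single_ok = arun(check_token_for_sophnet("sk-placeholder"))  # -> True / False

# Passing a list delegates to check_tokens; the return shape is whatever that helper produces.
batch = arun(check_token_for_sophnet(["sk-placeholder-1", "sk-placeholder-2"]))

print(single_ok, batch)
```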
@@ -85,7 +85,17 @@ async def create(request: CompletionRequest, token: Optional[str] = None, cookie
     )
     # qwen结构
     model = request.model.lower()
-    if any(i in model for i in ("search",)):
+    if any(i in model for i in ("research",)):  # 遇到错误 任意切换
+        request.model = np.random.choice({""})
+        request.messages[-1]['chat_type'] = "deep_research"
+
+        # request.messages["extra"] = {
+        #     "meta": {
+        #         "subChatType": "deep_thinking"
+        #     }
+        # }
+
+    elif any(i in model for i in ("search",)):
         request.model = "qwen-max-latest"
         request.messages[-1]['chat_type'] = "search"
 
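The branch routes "research"-flavored model names to the `deep_research` chat type before falling back to the existing "search" handling. Note that `np.random.choice` expects a 1-D sequence; the set literal `{""}` in the diff would raise a ValueError at call time, so a list of candidate names is the usual form. A sketch of the same name-based routing under that assumption (the fallback pool is made up here, since the diff leaves it empty):

```python
import numpy as np

# Assumed pool of fallback model names -- not taken from the package.
DEEP_RESEARCH_MODELS = ["qwen-max-latest"]


def route(model: str, message: dict) -> str:
    """Name-based routing in the same spirit as the diff: 'research' wins over 'search'."""
    model = model.lower()
    if "research" in model:
        message["chat_type"] = "deep_research"
        return np.random.choice(DEEP_RESEARCH_MODELS)  # works on a list; a set is not a valid 1-D sequence
    if "search" in model:
        message["chat_type"] = "search"
        return "qwen-max-latest"
    return model


msg = {"role": "user", "content": "hi"}
print(route("qwen-deep-research", msg), msg["chat_type"])
```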
@@ -263,7 +273,7 @@ if __name__ == '__main__':
         # 'content': "9.8 9.11哪个大",
         'content': 'https://oss.ffire.cc/files/AIGC.pdf 总结下',
 
-        # "chat_type": "search",
+        # "chat_type": "search", deep_research
 
         # 'content': user_content,
 
@@ -15,15 +15,20 @@ FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?shee
 
 
 class MetasoRequest(BaseModel):
-    model: Optional[Literal["ds-r1",]] = None
+    model: Optional[Literal["ds-r1", "fast_thinking"]] = None
 
-    """search-mini search search-pro"""
+    """search-mini search search-pro
+
+    model-mode
+
+    """
     mode: Union[str, Literal["concise", "detail", "research", "strong-research"]] = "detail"  # concise detail research
 
     question: str = "Chatfire"
 
-    """全网 文库 学术 图片 播客"""
+    """全网 文库 学术 图片 播客 视频"""
     scholarSearchDomain: str = "all"
+    engineType: Optional[Literal["pdf", "scholar", "image", "podcast", "video"]] = None
 
     url: str = "https://metaso.cn/"
     lang: str = "zh"
@@ -35,6 +40,7 @@ class MetasoRequest(BaseModel):
     newEngine: str = 'true'
     enableImage: str = 'true'
 
+    #
 
     # question: hi
     # mode: detail
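With the widened `model` literal and the new `engineType` field, an academic deep-research request can be expressed directly on the schema. A construction sketch; the import path and how `engineType` interacts with `mode` are assumptions, only the field names and literals come from the diff:

```python
from meutils.schemas.metaso_types import MetasoRequest  # assumed module path

request = MetasoRequest(
    model="ds-r1",          # or the new "fast_thinking"
    mode="research",        # concise | detail | research | strong-research
    engineType="scholar",   # new field: pdf | scholar | image | podcast | video
    question="survey of RLHF methods",
)
print(request.model_dump(exclude_none=True))
```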
@@ -49,8 +49,8 @@ MODEL_PRICE = {
     "kling_effects": 1,
 
     "kling_video": 1.2,
-    "kling_extend": 1.2,
-    "kling_lip_sync": 1.2,
+    "kling_extend": 1,
+    "kling_lip_sync": 1,
 
     "minimax_files_retrieve": 0.01,
 
@@ -66,6 +66,16 @@ MODEL_PRICE = {
     "minimax_video-01": MINIMAX_VIDEO,
     "minimax_video-01-live2d": MINIMAX_VIDEO,
 
+    # 火山
+    "api-volcengine-high_aes_general_v30l_zt2i": 0.05,
+    "api-volcengine-byteedit_v2.0": 0.05,
+
+    # videos
+    "api-videos-wan-ai/wan2.1-t2v-14b": 1.2,
+    "api-videos-wan-ai/wan2.1-t2v-14b-turbo": 1.2,
+    "api-videos-wan-ai/wan2.1-i2v-14b-720p": 1.2,
+    "api-videos-wan-ai/wan2.1-i2v-14b-720p-turbo": 1.2,
+
     # chatfire
     "ppu-0001": 0.0001,
     "ppu-001": 0.001,
@@ -297,7 +307,15 @@ MODEL_PRICE = {
     "net-gpt-4": 0.1,
     "perplexity": 0.01,
     "net-claude": 0.015,
+
+    # 秘塔
     "meta-search": 0.02,
+    "meta-deepsearch": 0.05,
+    "meta-deepresearch": 0.1,
+
+    "meta-search:scholar": 0.02,
+    "meta-deepsearch:scholar": 0.05,
+    "meta-deepresearch:scholar": 0.1,
 
     # 逆向
     "cogview-3": 0.01,
@@ -580,6 +598,7 @@ MODEL_RATIO = {
     "deepseek-v3": 1,
     "deepseek-v3-0324": 1,
     "deepseek-v3-250324": 1,
+    "deepseek-v3-fast": 1,
 
     "deepseek-v3-8k": 0.5,
     "deepseek-v3-128k": 5,
@@ -597,6 +616,7 @@ MODEL_RATIO = {
     'deepseek-reasoner': 2,
     'deepseek-reasoner-8k': 1,
     "deepseek-r1-250120": 2,
+    "deepseek-r1-0528": 2,
 
     "deepseek-search": 1,
     'deepseek-r1-search': 2,
@@ -1205,11 +1225,13 @@ COMPLETION_RATIO = {
     "deepseek-v3-0324": 4,
     "deepseek-v3-250324": 4,
     "deepseek-chat": 4,
+    "deepseek-v3-fast": 4,
 
     'deepseek-r1': 4,
     'deepseek-reasoner': 4,
     "deepseek-reasoner-164k": 8,
     "deepseek-r1-250120": 4,
+    "deepseek-r1-0528": 4,
 
     "deepseek-chat:function": 4,
 
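The new deepseek entries follow the existing v3/r1 pattern: prompt ratio 1 or 2, completion tokens weighted 4x. Assuming the usual one-api-style convention that MODEL_RATIO scales a base per-token price and COMPLETION_RATIO multiplies it again for completion tokens (the convention and the base price below are assumptions, not values from the package), a rough cost estimate looks like this:

```python
# Assumed convention: cost = base * MODEL_RATIO[m] * (prompt_tokens + COMPLETION_RATIO[m] * completion_tokens)
MODEL_RATIO = {"deepseek-v3-fast": 1, "deepseek-r1-0528": 2}
COMPLETION_RATIO = {"deepseek-v3-fast": 4, "deepseek-r1-0528": 4}


def estimate(model: str, prompt_tokens: int, completion_tokens: int, base: float = 0.000002) -> float:
    # base is a placeholder per-token price; the real value is deployment-specific.
    return base * MODEL_RATIO[model] * (prompt_tokens + COMPLETION_RATIO[model] * completion_tokens)


print(estimate("deepseek-r1-0528", 1_000, 500))  # 2x prompt ratio, completions weighted 4x
```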
@@ -427,11 +427,14 @@ class ImagesResponse(_ImagesResponse):
 
 class TTSRequest(BaseModel):
     model: Optional[Union[str, Literal["tts-1", "tts-1-hd"]]] = 'tts'
-    voice: Optional[Union[str, Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer"]]] = ""
+    voice: Optional[Union[str, Literal["alloy", "echo", "fable", "onyx", "nova", "shimmer",
+                                       "male", "femal",
+                                       ]]] = ""
 
     input: str
     instructions: Optional[str] = None
-    # emotion: Optional[Literal["happy", "angry", "surprise", "coldness", "disgust", "fear", "excited", "hate"]] = None
+    emotion: Optional[Literal[
+        "happy", "angry", "surprise", "coldness", "disgust", "fear", "excited", "hate", "sad", "fearful", "disgusted", "surprised", "neutral"]] = None
 
     speed: Optional[float] = None
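The TTS schema now exposes `emotion` as a first-class field and widens `voice` (note the upstream literal is spelled "femal"). A construction sketch; the import path is an assumption, the field names and literals come from the diff:

```python
from meutils.schemas.openai_types import TTSRequest  # assumed import path

req = TTSRequest(
    model="tts-1",
    voice="alloy",
    input="Nice weather today.",
    emotion="happy",   # newly allowed literal
    speed=1.0,
)
print(req.model_dump(exclude_none=True))
```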