MeUtils 2025.6.18.16.41.23__py3-none-any.whl → 2025.6.20.18.7.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.20.18.7.13.dist-info}/METADATA +261 -261
  2. {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.20.18.7.13.dist-info}/RECORD +29 -27
  3. examples/_openaisdk/openai_doubao.py +7 -7
  4. examples/_openaisdk/openai_modelscope.py +1 -0
  5. examples/_openaisdk/openai_ppinfra.py +7 -2
  6. meutils/apis/hailuoai/videos.py +9 -1
  7. meutils/apis/oneapi/channel.py +57 -6
  8. meutils/apis/{veo → ppio}/__init__.py +1 -1
  9. meutils/apis/ppio/videos.py +142 -0
  10. meutils/apis/proxy/kdlapi.py +18 -14
  11. meutils/apis/transparent_transmission/__init__.py +10 -0
  12. meutils/apis/transparent_transmission/tasks.py +28 -0
  13. meutils/apis/utils.py +4 -4
  14. meutils/apis/videos/veo.py +68 -0
  15. meutils/apis/volcengine_apis/videos.py +5 -2
  16. meutils/config_utils/lark_utils/common.py +19 -3
  17. meutils/data/VERSION +1 -1
  18. meutils/llm/check_utils.py +2 -1
  19. meutils/llm/models_mapping.py +76 -0
  20. meutils/schemas/hailuo_types.py +13 -4
  21. meutils/schemas/oneapi/_types.py +19 -7
  22. meutils/schemas/oneapi/common.py +13 -6
  23. meutils/serving/fastapi/dependencies/auth.py +2 -1
  24. meutils/str_utils/__init__.py +21 -0
  25. meutils/str_utils/regular_expression.py +8 -1
  26. examples/pyinstxtractor.py +0 -340
  27. meutils/apis/kuaidi.py +0 -32
  28. meutils/apis/veo/videos.py +0 -48
  29. {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.20.18.7.13.dist-info}/LICENSE +0 -0
  30. {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.20.18.7.13.dist-info}/WHEEL +0 -0
  31. {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.20.18.7.13.dist-info}/entry_points.txt +0 -0
  32. {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.20.18.7.13.dist-info}/top_level.txt +0 -0
meutils/apis/utils.py CHANGED
@@ -8,8 +8,8 @@
  # @Software : PyCharm
  # @Description : textinparsex

- from openai import AsyncClient
  from meutils.pipe import *
+ from openai import AsyncClient


  # upstream_base_url: Optional[str] = Header(None),
@@ -17,16 +17,16 @@ from meutils.pipe import *

  async def make_request(
          base_url: str,
-         path: str,
          api_key: Optional[str] = None,
-
-         method: str = "GET",
          headers: Optional[dict] = None,

+         path: str = "/",
+
          params: Optional[dict] = None,
          payload: Optional[dict] = None,
          files: Optional[dict] = None,

+         method: str = "POST",
  ):
      client = AsyncClient(base_url=base_url, api_key=api_key, default_headers=headers)

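With this reordering, `path` and `method` gain defaults (`"/"` and `"POST"`), so callers that previously passed `path` or `method` positionally must switch to keyword arguments. A minimal sketch of a call under the new signature (the base URL, path and payload below are illustrative, not taken from the package):

    import asyncio
    from meutils.apis.utils import make_request

    # `method` now defaults to "POST" (was "GET") and `path` defaults to "/".
    response = asyncio.run(make_request(
        base_url="https://example.com/v1",   # illustrative
        api_key="sk-...",                    # placeholder
        path="/video/create",                # illustrative
        payload={"prompt": "demo"},          # illustrative
    ))
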
meutils/apis/videos/veo.py ADDED
@@ -0,0 +1,68 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # @Project : AI. @by PyCharm
+ # @File : videos
+ # @Time : 2025/6/18 16:34
+ # @Author : betterme
+ # @WeChat : meutils
+ # @Software : PyCharm
+ # @Description :
+
+ from meutils.pipe import *
+ from meutils.db.redis_db import redis_aclient
+ from meutils.apis.utils import make_request
+ from fastapi import APIRouter, File, UploadFile, Query, Form, Depends, Request, HTTPException, status, BackgroundTasks
+
+ base_url = "https://api.gptgod.online"
+
+
+ async def create_task(
+         request: dict,
+         api_key: Optional[str] = None
+ ):
+     response = await make_request(
+         base_url=base_url,
+         api_key=api_key,
+         path="/v1/video/create",
+         payload=request,
+     )
+
+     if task_id := response.get('id'):
+         await redis_aclient.set(task_id, api_key, ex=7 * 24 * 3600)
+
+     return response
+
+
+ async def get_task(task_id: str):
+     token = await redis_aclient.get(task_id)  # 绑定对应的 token
+     token = token and token.decode()
+     if not token:
+         raise HTTPException(status_code=404, detail="TaskID not found")
+
+     response = await make_request(
+         base_url=base_url,
+         api_key=token,
+         path=f"/v1/video/query?id={task_id}",
+         method="GET",
+     )
+     return response
+
+
+ if __name__ == '__main__':
+     api_key = "sk-h0Cgw9qeyotIUC9WnWSBEB0aO4RbjgEbhZmtg2Ja0kL5npDZ1"
+     payload = {
+         "prompt": "牛飞上天了",
+         "model": "veo3",
+         "images": [
+             "https://filesystem.site/cdn/20250612/VfgB5ubjInVt8sG6rzMppxnu7gEfde.png",
+             "https://filesystem.site/cdn/20250612/998IGmUiM2koBGZM3UnZeImbPBNIUL.png"
+         ],
+         "enhance_prompt": True
+     }
+
+     # arun(create_task(payload, api_key))
+
+     arun(get_task("veo3:2ba161ec-747f-4d5b-b58b-2a610bfc2c31"))
+
+
+
@@ -6,7 +6,8 @@
  # @Author : betterme
  # @WeChat : meutils
  # @Software : PyCharm
- # @Description :
+ # @Description :
+ import time

  from meutils.pipe import *
  from meutils.io.files_utils import to_url
@@ -27,14 +28,16 @@ FEISHU_URL = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2knEd?shee

  async def get_valid_token(tokens: Optional[list] = None):
      tokens = tokens or await get_series(FEISHU_URL)
+
      for token in tokens:
          if await check(token):
+
              logger.debug(f"有效 {token}")

              return token
          else:
              logger.debug(f"无效 {token}")
-     _ = f"{FEISHU_URL}\n\n所有token无效"
+     _ = f"{time.ctime()}\n\n{FEISHU_URL}\n\n所有token无效"
      logger.error(_)
      send_message(_, n=3)

meutils/config_utils/lark_utils/common.py CHANGED
@@ -10,14 +10,14 @@

  from meutils.pipe import *
  from meutils.caches import rcache
-
+ from meutils.str_utils import parse_slice
  from meutils.decorators.retry import retrying
  from meutils.decorators.contextmanagers import try_catcher
  from meutils.notice.feishu import send_message
  from meutils.db.redis_db import redis_client, redis_aclient
  from typing import Optional, Union

- from urllib.parse import urlparse, parse_qs
+ from urllib.parse import urlparse, parse_qs, unquote

  FEISHU_BASE_URL = "https://open.feishu.cn/open-apis/"

@@ -307,12 +307,22 @@ async def get_dataframe(iloc_tuple: Optional[tuple] = None, feishu_url: Optional


  async def get_series(feishu_url: str, index: int = 0, duplicated: bool = False):  # 系统配置
+     # 前置处理
+     # https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo[:2]
+     slice_obj = None
+     if feishu_url.endswith(']') and '[' in feishu_url:  # 是否解码
+         logger.debug(feishu_url)
+         feishu_url, slice_string = feishu_url.rsplit('[', maxsplit=1)
+         slice_obj = parse_slice(f"[{slice_string}")
+
      df = await aget_spreadsheet_values(feishu_url=feishu_url, to_dataframe=True)
      series = df[index]
      values = [i for i in series if i and isinstance(i, str)]  # todo: 非标准字符串处理
      if duplicated:  # 有序去重
          values = values | xUnique

+     if slice_obj:
+         values = values[slice_obj]
      return values


@@ -395,4 +405,10 @@ if __name__ == '__main__':
      # token = arun(get_next_token(feishu_url))

      # FEISHU_URL = "https://xchatllm.feishu.cn/sheets/XfjqszII3hZAEvtTOgecOgv2nye?sheet=c14b34"
-     # print(arun(get_series(FEISHU_URL)))
+     FEISHU_URL = "https://xchatllm.feishu.cn/sheets/RIv6sAUtFhlZYItyYa6ctdv1nvb?sheet=0bcf4a[:2]"
+     FEISHU_URL = "https://xchatllm.feishu.cn/sheets/RIv6sAUtFhlZYItyYa6ctdv1nvb?sheet=0bcf4a"
+
+     # FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo%5B%3A2%5D"
+
+     r = arun(get_series(FEISHU_URL))
+
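The new suffix handling means a sheet URL may carry a Python-style slice in square brackets. A minimal sketch of the behaviour (URL and values are illustrative; `parse_slice` is the helper added to `meutils/str_utils/__init__.py` below):

    from meutils.str_utils import parse_slice

    feishu_url = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo[:2]"

    # Same pre-processing as get_series: split off the trailing "[...]" and turn it into a slice.
    feishu_url, slice_string = feishu_url.rsplit('[', maxsplit=1)  # ".../sheet=Y7HVfo", ":2]"
    slice_obj = parse_slice(f"[{slice_string}")                    # slice(None, 2, None)

    values = ["token-a", "token-b", "token-c"]                     # illustrative sheet column
    print(values[slice_obj])                                       # ['token-a', 'token-b']
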
meutils/data/VERSION CHANGED
@@ -1 +1 @@
- 2025.06.18.16.41.23
+ 2025.06.20.18.07.13
meutils/llm/check_utils.py CHANGED
@@ -258,4 +258,5 @@ if __name__ == '__main__':

      # arun(check_token_for_ppinfra("sk_F0kgPyCMTzmOH_-VCEJucOK8HIrbnLGYm_IWxBToHZQ"))

-     arun(check_token_for_volc("07139a08-e360-44e2-ba31-07f379bf99ed"))
+     # arun(check_token_for_volc("07139a08-e360-44e2-ba31-07f379bf99ed"))
+     arun(check_token_for_ppinfra("sk_mCb5sRGTi6GXkSRp5F679Rbs0V_Hfee3p85lccGXCOo"))
meutils/llm/models_mapping.py ADDED
@@ -0,0 +1,76 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # @Project : AI. @by PyCharm
+ # @File : models
+ # @Time : 2025/4/14 11:09
+ # @Author : betterme
+ # @WeChat : meutils
+ # @Software : PyCharm
+ # @Description :
+
+ """
+
+ [Model(id='LLM-Research/c4ai-command-r-plus-08-2024', created=1725120000, object='model', owned_by='system'),
+ Model(id='mistralai/Mistral-Small-Instruct-2409', created=1725120000, object='model', owned_by='system'),
+ Model(id='mistralai/Ministral-8B-Instruct-2410', created=1727712000, object='model', owned_by='system'),
+ Model(id='mistralai/Mistral-Large-Instruct-2407', created=1719763200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-Coder-32B-Instruct', created=1731340800, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-Coder-14B-Instruct', created=1731340800, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-Coder-7B-Instruct', created=1731340800, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-72B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-32B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-14B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-7B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/QwQ-32B-Preview', created=1737907200, object='model', owned_by='system'),
+ Model(id='LLM-Research/Llama-3.3-70B-Instruct', created=1733414400, object='model', owned_by='system'),
+ Model(id='opencompass/CompassJudger-1-32B-Instruct', created=1733414400, object='model', owned_by='system'),
+ Model(id='Qwen/QVQ-72B-Preview', created=1735056000, object='model', owned_by='system'),
+ Model(id='LLM-Research/Meta-Llama-3.1-405B-Instruct', created=1721664000, object='model', owned_by='system'),
+ Model(id='LLM-Research/Meta-Llama-3.1-8B-Instruct', created=1721664000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2-VL-7B-Instruct', created=1726675200, object='model', owned_by='system'),
+ Model(id='LLM-Research/Meta-Llama-3.1-70B-Instruct', created=1721664000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-14B-Instruct-1M', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-7B-Instruct-1M', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-3B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-7B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-72B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Llama-70B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Llama-8B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-14B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-7B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-V3', created=1737302400, object='model', owned_by='system'),
+ Model(id='Qwen/QwQ-32B', created=1732517497, object='model', owned_by='system'),
+ Model(id='XGenerationLab/XiYanSQL-QwenCoder-32B-2412', created=1732517497, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-32B-Instruct', created=1732517497, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-V3-0324', created=1732517497, object='model', owned_by='system'),
+ Model(id='Wan-AI/Wan2.1-T2V-1.3B', created=0, object='model', owned_by='system'),
+ Model(id='LLM-Research/Llama-4-Scout-17B-16E-Instruct', created=1732517497, object='model', owned_by='system'),
+ Model(id='LLM-Research/Llama-4-Maverick-17B-128E-Instruct', created=1732517497, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-0.6B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-1.7B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-4B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-8B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-14B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-30B-A3B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-32B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-235B-A22B', created=1745856000, object='model', owned_by='system'),
+ Model(id='XGenerationLab/XiYanSQL-QwenCoder-32B-2504', created=1732517497, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-0528', created=1748361600, object='model', owned_by='system')]
+ """
+
+ modelscope_model_mapping = {
+     "deepseek-reasoner": "deepseek-ai/DeepSeek-R1-0528",
+     "deepseek-r1": "deepseek-ai/DeepSeek-R1-0528",
+     "deepseek-r1-0528": "deepseek-ai/DeepSeek-R1-0528",
+     "deepseek-r1-250528": "deepseek-ai/DeepSeek-R1-0528",
+
+     "deepseek-chat": "deepseek-ai/DeepSeek-V3",
+     "deepseek-v3": "deepseek-ai/DeepSeek-V3",
+     "deepseek-v3-0324": "deepseek-ai/DeepSeek-V3-0324",
+     "deepseek-v3-250324": "deepseek-ai/DeepSeek-V3-0324",
+
+     "majicflus_v1": "MAILAND/majicflus_v1",
+
+ }
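A short sketch of how an alias table like this is typically applied when translating an incoming model name (the lookup pattern below is an assumption, not shown in this diff):

    from meutils.llm.models_mapping import modelscope_model_mapping

    model = "deepseek-r1"                                  # illustrative incoming alias
    resolved = modelscope_model_mapping.get(model, model)  # fall back to the original name
    print(resolved)                                        # deepseek-ai/DeepSeek-R1-0528
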
meutils/schemas/hailuo_types.py CHANGED
@@ -18,8 +18,6 @@ FEISHU_URL_ABROAD = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2kn

  FEISHU_URL_OSS = "https://xchatllm.feishu.cn/sheets/MekfsfVuohfUf1tsWV0cCvTmn3c?sheet=Kcg6QC"

-
-
  # """生成中是1 成功是2 失败是5 内容审核7 排队中11"""
  # "status": 12,
  # "message": "Optimizing prompt in progress...",
@@ -51,7 +49,8 @@ class VideoRequest(BaseModel):

      {"desc":"跳动","useOriginPrompt":true,"fileList":[{"id":"304987062153912323","name":"3a71b0bb-3cab-4e69-b1f0-592976d0897b_00001_.png","type":"png"}]}
      """
-     model: Union[str, Literal["video-01", "T2V-01", "I2V-01", "I2V-01-live", "S2V-01"]] = "video-01"
+     model: Union[
+         str, Literal["MiniMax-Hailuo-02", "video-01", "T2V-01", "I2V-01", "I2V-01-live", "S2V-01"]] = "video-01"

      """生成视频的描述。(注:需少于2000字)"""
      prompt: Optional[str] = None
@@ -66,6 +65,9 @@
      """本参数仅当model选择为S2V-01时可用。模型将依据此参数中上传的主体来生成视频。目前仅支持单主体参考(数组长度为 1)。"""
      # subject_reference: list = [{"type": "character", "image": ""}]

+     duration: Literal[6, 10] = 6
+     resolution: Union[str, Literal["768P", "1080P"]] = "768P"
+
      callback_url: Optional[str] = None

      "n"
@@ -161,7 +163,7 @@ class VideoResponse(BaseModel):
              self.file_id = self.videos[0].downloadURL

          if self.videos:
-             self.status = status_mapper.get(self.videos[0].status, "Failed")
+             self.status = status_mapper.get(self.videos[0].status or 2, "Failed")


  #
@@ -240,3 +242,10 @@ class MusicRequet(BaseModel):
  class MusicResponse(BaseModel):
      trace_id: str
      base_resp: BaseResponse
+
+
+ if __name__ == '__main__':
+
+     print(VideoResponse(task_id='1', base_resp={"status_code": 0, "status_msg": "success"}).model_dump())
+
+     print(Video(x='xxx'))
meutils/schemas/oneapi/_types.py CHANGED
@@ -38,7 +38,7 @@ class ModelInfo(BaseModel):


  class ChannelInfo(BaseModel):
-     id: Optional[int] = None  # 不存在就新建
+     id: Optional[Union[int, str]] = None  # 不存在就新建
      type: int = 1  # 枚举值 openai

      name: str = ''
@@ -46,7 +46,7 @@ class ChannelInfo(BaseModel):

      group: str = 'default'

      base_url: str = ''
-     key: str
+     key: str  # 与id相对应
      models: str = 'MODEL'

      access_token: str = ''
@@ -87,11 +87,14 @@ class ChannelInfo(BaseModel):

      def __init__(self, /, **data: Any):
          super().__init__(**data)
-         self.name = self.name or self.base_url or "NAME"
-         self.tag = self.tag or self.base_url or "TAG"
+
+         self.id = self.id or None
+
+         self.name = f"""{str(datetime.datetime.now())[:10]} {self.name or self.base_url or "NAME"}"""
+         self.tag = f"""{str(datetime.datetime.now())[:10]} {self.tag or self.base_url or "TAG"}"""
          self.group = self.group or self.base_url or "GROUP"

-         self.setting= self.setting or ""
+         self.setting = self.setting or ""
          self.param_override = self.param_override or ""
          if isinstance(self.model_mapping, dict):
              self.model_mapping = json.dumps(self.model_mapping)
@@ -103,6 +106,15 @@ if __name__ == '__main__':

      from meutils.apis.oneapi import option, channel

-     option()
+     # option()
      #
-     arun(channel.edit_channel(MODEL_PRICE))
+     # arun(channel.edit_channel(MODEL_PRICE))
+
+     c = ChannelInfo(key='key')
+
+     c_ = c.copy()
+     l = []
+     for i in range(3):
+         c_.key = f"######{i}"
+         l.append(c_)
+     print(l)
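Note that the `__main__` demo above runs `c_ = c.copy()` once before the loop and then mutates that one object, so `l` ends up holding three references to the same ChannelInfo with the last key. A minimal sketch that copies per iteration to get three distinct channels (variable names follow the demo; import path inferred from the file location):

    from meutils.schemas.oneapi._types import ChannelInfo

    c = ChannelInfo(key='key')

    channels = []
    for i in range(3):
        c_ = c.copy()            # copy inside the loop so each entry is independent
        c_.key = f"######{i}"
        channels.append(c_)
    print(channels)              # three ChannelInfo objects, keys ######0 .. ######2
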
meutils/schemas/oneapi/common.py CHANGED
@@ -44,8 +44,6 @@ MODEL_PRICE = {
      "gemini-2.5-flash-video": 0.05,
      "gemini-2.5-pro-video": 0.1,

-     "veo3": 4,
-
      # rix
      "kling_image": 0.05,
      "kling_virtual_try_on": 1,
@@ -95,6 +93,18 @@ MODEL_PRICE = {
      "api-videos-wan-ai/wan2.1-i2v-14b-720p": 1.2,
      "api-videos-wan-ai/wan2.1-i2v-14b-720p-turbo": 1.2,

+     # veo
+     "veo3": 2,
+     "api-veo3": 2,
+     "api-veo3-pro": 8,
+     "api-veo3-pro-frames": 8,
+
+     # hailuo
+     "api-minimax-hailuo-01-6s": 0.5,
+     "api-minimax-hailuo-02-6s-768p": 1,
+     "api-minimax-hailuo-02-10s-768p": 1,
+     "api-minimax-hailuo-02-6s-1080p": 2,
+
      # chatfire
      "ppu-0001": 0.0001,
      "ppu-001": 0.001,
@@ -146,9 +156,6 @@ MODEL_PRICE = {

      "official-api-vidu": 0.5,

-     "official-api-hailuo-video": 0.5,
-     "api-hailuo-video": 0.5,
-
      "api-videos-seedream-3.0": 0.5,

      "api-kling-video/v2/master/text-to-video": 5,
@@ -1018,7 +1025,7 @@ MODEL_RATIO = {

  COMPLETION_RATIO = {
      "minimax-text-01": 8,
-     "minimax-m1-80k":4,
+     "minimax-m1-80k": 4,

      # 智能体
      "gpt-4-plus": 5,
meutils/serving/fastapi/dependencies/auth.py CHANGED
@@ -46,7 +46,8 @@ async def get_bearer_token(

      elif token.startswith("http") and "feishu.cn" in token:  # feishu 取所有 keys 主要针对 channel
          feishu_url = token
-         tokens = await get_series(feishu_url)
+
+         tokens = await get_series(feishu_url, duplicated=True)
          token = '\n'.join(tokens)  # 多渠道

      elif ',' in token:  # 内存里随机轮询
meutils/str_utils/__init__.py CHANGED
@@ -246,6 +246,16 @@ def validate_url(url):

      return False

+ def parse_slice(slice_str):
+     content = slice_str.strip()[1:-1]
+     parts = content.split(':', 2)
+     parts += [''] * (3 - len(parts))  # 确保有3部分
+     return slice(
+         int(parts[0]) if parts[0] else None,
+         int(parts[1]) if parts[1] else None,
+         int(parts[2]) if parts[2] else None
+     )
+

  if __name__ == '__main__':
      # print(str_replace('abcd', {'a': '8', 'd': '88'}))
@@ -291,3 +301,14 @@ if __name__ == '__main__':
      print(validate_url(url))

      print(re.findall("http", url*2))
+
+     # 示例用法
+     slice_str = "[:2]"
+     slice_str = ":2]"
+
+     # slice_str="https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo[:2]"
+     # https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo%5B%3A2%5D
+     slice_obj = parse_slice(slice_str)
+     l = list(range(10))
+     print(l[slice_obj])  # [0, 1]
+
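For reference, a few worked inputs for the new `parse_slice` helper, assuming the full bracketed form that `get_series` passes in (note that the bare `":2]"` string in the demo above loses its colon to the `[1:-1]` strip, so it parses as `slice(2, None, None)` rather than the first two items):

    from meutils.str_utils import parse_slice

    print(parse_slice("[:2]"))    # slice(None, 2, None) -> first two items
    print(parse_slice("[1:5]"))   # slice(1, 5, None)
    print(parse_slice("[::2]"))   # slice(None, None, 2) -> every other item
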
meutils/str_utils/regular_expression.py CHANGED
@@ -8,6 +8,7 @@
  # @Software : PyCharm
  # @Description :
  import json
+ import mimetypes
  import re

  from meutils.pipe import *
@@ -97,6 +98,8 @@ def parse_url(text: str, for_image=False, fn: Optional[Callable] = None):
      # url_pattern = r'https?://[\w\-\.]+/\S+\.(?:png|jpg|jpeg|gif)'
      url_pattern = r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[#]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\.(?:jpg|jpeg|png|gif|svg|webp)'

+     # "https://i.miji.bid/2025/06/10/d018000aed9b872c7b248dccf14c4450.pngA" 纠正
+
      urls = re.findall(url_pattern, text)

      valid_urls = []
@@ -176,8 +179,10 @@ if __name__ == '__main__':
  https://p26-bot-workflow-sign.byteimg.com/tos-cn-i-mdko3gqilj/f13171faeed2447b8b9c301ba912f25c.jpg~tplv-mdko3gqilj-image.image?rk3s=81d4c505&x-expires=1779880356&x-signature=AJop4%2FM8VjCUfjqiEzUugprc0CI%3D&x-wf-file_name=B0DCGKG71N.MAIN.jpg

  还有这种url,两个.jpg的也能兼容么
+
+ https://i.miji.bid/2025/06/10/d018000aed9b872c7b248dccf14c4450.pngA
  """
-     print(parse_url(text))
+     print(parse_url(text, for_image=True))


      # print(parse_url(text, for_image=False))
@@ -186,3 +191,5 @@ https://p26-bot-workflow-sign.byteimg.com/tos-cn-i-mdko3gqilj/f13171faeed2447b8b
      # print(parse_url(text))
      #
      # valid_urls = parse_url(text, for_image=True)
+
+     print(mimetypes.guess_type("xx.ico"))