MeUtils 2025.6.18.16.41.23__py3-none-any.whl → 2025.6.24.11.13.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.24.11.13.32.dist-info}/METADATA +260 -260
- {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.24.11.13.32.dist-info}/RECORD +40 -36
- examples/_openaisdk/openai_doubao.py +7 -7
- examples/_openaisdk/openai_edits.py +48 -0
- examples/_openaisdk/openai_images.py +10 -3
- examples/_openaisdk/openai_lingyi.py +44 -10
- examples/_openaisdk/openai_modelscope.py +1 -0
- examples/_openaisdk/openai_ppinfra.py +7 -2
- examples/_openaisdk/openai_siliconflow.py +3 -3
- meutils/apis/hailuoai/images.py +2 -2
- meutils/apis/hailuoai/videos.py +10 -0
- meutils/apis/oneapi/channel.py +57 -6
- meutils/apis/oneapi/user.py +3 -3
- meutils/apis/{veo → ppio}/__init__.py +1 -1
- meutils/apis/ppio/videos.py +142 -0
- meutils/apis/proxy/kdlapi.py +18 -14
- meutils/apis/transparent_transmission/__init__.py +10 -0
- meutils/apis/transparent_transmission/tasks.py +28 -0
- meutils/apis/utils.py +10 -9
- meutils/apis/videos/veo.py +68 -0
- meutils/apis/volcengine_apis/videos.py +5 -2
- meutils/config_utils/lark_utils/common.py +19 -3
- meutils/data/VERSION +1 -1
- meutils/llm/check_utils.py +26 -13
- meutils/llm/completions/qwenllm.py +5 -4
- meutils/llm/models_mapping.py +76 -0
- meutils/llm/openai_utils/usage_utils.py +19 -0
- meutils/llm/utils.py +1 -54
- meutils/schemas/hailuo_types.py +13 -4
- meutils/schemas/image_types.py +1 -1
- meutils/schemas/oneapi/_types.py +19 -7
- meutils/schemas/oneapi/common.py +20 -12
- meutils/serving/fastapi/dependencies/auth.py +2 -1
- meutils/serving/fastapi/dependencies/headers.py +19 -15
- meutils/str_utils/__init__.py +21 -0
- meutils/str_utils/regular_expression.py +8 -1
- examples/pyinstxtractor.py +0 -340
- meutils/apis/kuaidi.py +0 -32
- meutils/apis/veo/videos.py +0 -48
- {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.24.11.13.32.dist-info}/LICENSE +0 -0
- {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.24.11.13.32.dist-info}/WHEEL +0 -0
- {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.24.11.13.32.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.6.18.16.41.23.dist-info → MeUtils-2025.6.24.11.13.32.dist-info}/top_level.txt +0 -0
meutils/llm/models_mapping.py
ADDED
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : models
+# @Time : 2025/4/14 11:09
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+
+"""
+
+[Model(id='LLM-Research/c4ai-command-r-plus-08-2024', created=1725120000, object='model', owned_by='system'),
+ Model(id='mistralai/Mistral-Small-Instruct-2409', created=1725120000, object='model', owned_by='system'),
+ Model(id='mistralai/Ministral-8B-Instruct-2410', created=1727712000, object='model', owned_by='system'),
+ Model(id='mistralai/Mistral-Large-Instruct-2407', created=1719763200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-Coder-32B-Instruct', created=1731340800, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-Coder-14B-Instruct', created=1731340800, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-Coder-7B-Instruct', created=1731340800, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-72B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-32B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-14B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-7B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/QwQ-32B-Preview', created=1737907200, object='model', owned_by='system'),
+ Model(id='LLM-Research/Llama-3.3-70B-Instruct', created=1733414400, object='model', owned_by='system'),
+ Model(id='opencompass/CompassJudger-1-32B-Instruct', created=1733414400, object='model', owned_by='system'),
+ Model(id='Qwen/QVQ-72B-Preview', created=1735056000, object='model', owned_by='system'),
+ Model(id='LLM-Research/Meta-Llama-3.1-405B-Instruct', created=1721664000, object='model', owned_by='system'),
+ Model(id='LLM-Research/Meta-Llama-3.1-8B-Instruct', created=1721664000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2-VL-7B-Instruct', created=1726675200, object='model', owned_by='system'),
+ Model(id='LLM-Research/Meta-Llama-3.1-70B-Instruct', created=1721664000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-14B-Instruct-1M', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-7B-Instruct-1M', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-3B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-7B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-72B-Instruct', created=1737907200, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Llama-70B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Llama-8B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-14B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-7B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B', created=1737302400, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-V3', created=1737302400, object='model', owned_by='system'),
+ Model(id='Qwen/QwQ-32B', created=1732517497, object='model', owned_by='system'),
+ Model(id='XGenerationLab/XiYanSQL-QwenCoder-32B-2412', created=1732517497, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen2.5-VL-32B-Instruct', created=1732517497, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-V3-0324', created=1732517497, object='model', owned_by='system'),
+ Model(id='Wan-AI/Wan2.1-T2V-1.3B', created=0, object='model', owned_by='system'),
+ Model(id='LLM-Research/Llama-4-Scout-17B-16E-Instruct', created=1732517497, object='model', owned_by='system'),
+ Model(id='LLM-Research/Llama-4-Maverick-17B-128E-Instruct', created=1732517497, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-0.6B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-1.7B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-4B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-8B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-14B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-30B-A3B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-32B', created=1745856000, object='model', owned_by='system'),
+ Model(id='Qwen/Qwen3-235B-A22B', created=1745856000, object='model', owned_by='system'),
+ Model(id='XGenerationLab/XiYanSQL-QwenCoder-32B-2504', created=1732517497, object='model', owned_by='system'),
+ Model(id='deepseek-ai/DeepSeek-R1-0528', created=1748361600, object='model', owned_by='system')]
+"""
+
+modelscope_model_mapping = {
+    "deepseek-reasoner": "deepseek-ai/DeepSeek-R1-0528",
+    "deepseek-r1": "deepseek-ai/DeepSeek-R1-0528",
+    "deepseek-r1-0528": "deepseek-ai/DeepSeek-R1-0528",
+    "deepseek-r1-250528": "deepseek-ai/DeepSeek-R1-0528",
+
+    "deepseek-chat": "deepseek-ai/DeepSeek-V3",
+    "deepseek-v3": "deepseek-ai/DeepSeek-V3",
+    "deepseek-v3-0324": "deepseek-ai/DeepSeek-V3-0324",
+    "deepseek-v3-250324": "deepseek-ai/DeepSeek-V3-0324",
+
+    "majicflus_v1": "MAILAND/majicflus_v1",
+
+}
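The new `modelscope_model_mapping` is a plain alias table from short model names to full ModelScope ids. A minimal sketch of how such a table is typically consumed before a request is forwarded (the `resolve_model` helper below is illustrative and not part of the package):

```python
from meutils.llm.models_mapping import modelscope_model_mapping

def resolve_model(model: str) -> str:
    # Return the registered ModelScope id for a known alias,
    # otherwise pass the requested name through unchanged.
    return modelscope_model_mapping.get(model, model)

print(resolve_model("deepseek-r1"))    # deepseek-ai/DeepSeek-R1-0528
print(resolve_model("Qwen/Qwen3-8B"))  # Qwen/Qwen3-8B (no alias entry)
```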
meutils/llm/openai_utils/usage_utils.py
ADDED
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : usage_utils
+# @Time : 2025/6/24 08:53
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+"""
+1. 同步任务(流 非流)
+    - 按次
+    - 按量
+2. 异步任务
+    - 按次
+    - 按量
+"""
+
+from meutils.pipe import *
meutils/llm/utils.py
CHANGED
@@ -17,59 +17,6 @@ from meutils.notice.feishu import send_message
 from meutils.apis.oneapi.utils import get_user_quota
 
 
-async def ppu(
-        model: str = 'ppu',
-        api_key: Optional[str] = None,
-        base_url: Optional[str] = None,
-):
-    if model not in MODEL_PRICE:
-        send_message(f"模型未找到{model},ppu-1默认", title=__name__)
-        model = "ppu-1"
-
-    client = AsyncOpenAI(api_key=api_key, base_url=base_url)
-    r = await client.chat.completions.create(messages=[{'role': 'user', 'content': 'hi'}], model=model)
-
-
-@asynccontextmanager
-async def ppu_flow(
-        api_key: str,
-        base_url: Optional[str] = None,
-
-        model: str = "ppu-1",  # 后计费
-
-        n: float = 1,  # 计费次数
-
-        **kwargs
-):
-    """
-    查余额
-    失败,先扣费
-    成功,充足,后扣费
-    成功,不足,报错
-    """
-    n = int(np.ceil(n))  # 0 不计费
-
-    if n:  # 计费
-        try:
-            money = await get_user_quota(api_key)
-            logger.debug(f"PREPAY: USER 余额 {money}")
-        except Exception as e:
-            logger.error(e)
-            money = None
-
-        if money and money > MODEL_PRICE.get(model, 0.1):
-            yield  # 先执行
-
-        # 执行计费逻辑
-        await asyncio.gather(*[ppu(model, api_key=api_key, base_url=base_url) for _ in range(n)])
-
-        if money is None:
-            yield  # 后执行
-
-    else:  # 不计费
-        yield
-
-
 def oneturn2multiturn(messages, template: Optional[str] = None, ignore_system: bool = True):
     """todo: https://github.com/hiyouga/LLaMA-Factory/blob/e898fabbe3efcd8b44d0e119e7afaed4542a9f39/src/llmtuner/data/template.py#L423-L427
 
@@ -162,4 +109,4 @@ if __name__ == '__main__':
 
     ]
 
-    print(oneturn2multiturn(messages,ignore_system=False))
+    print(oneturn2multiturn(messages, ignore_system=False))
meutils/schemas/hailuo_types.py
CHANGED
@@ -18,8 +18,6 @@ FEISHU_URL_ABROAD = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2kn
 
 FEISHU_URL_OSS = "https://xchatllm.feishu.cn/sheets/MekfsfVuohfUf1tsWV0cCvTmn3c?sheet=Kcg6QC"
 
-
-
 # """生成中是1 成功是2 失败是5 内容审核7 排队中11"""
 # "status": 12,
 # "message": "Optimizing prompt in progress...",
@@ -51,7 +49,8 @@ class VideoRequest(BaseModel):
 
     {"desc":"跳动","useOriginPrompt":true,"fileList":[{"id":"304987062153912323","name":"3a71b0bb-3cab-4e69-b1f0-592976d0897b_00001_.png","type":"png"}]}
     """
-    model: Union[
+    model: Union[
+        str, Literal["MiniMax-Hailuo-02", "video-01", "T2V-01", "I2V-01", "I2V-01-live", "S2V-01"]] = "video-01"
 
     """生成视频的描述。(注:需少于2000字)"""
     prompt: Optional[str] = None
@@ -66,6 +65,9 @@
     """本参数仅当model选择为S2V-01时可用。模型将依据此参数中上传的主体来生成视频。目前仅支持单主体参考(数组长度为 1)。"""
     # subject_reference: list = [{"type": "character", "image": ""}]
 
+    duration: Literal[6, 10] = 6
+    resolution: Union[str, Literal["768P", "1080P"]] = "768P"
+
     callback_url: Optional[str] = None
 
     "n"
@@ -161,7 +163,7 @@ class VideoResponse(BaseModel):
         self.file_id = self.videos[0].downloadURL
 
         if self.videos:
-            self.status = status_mapper.get(self.videos[0].status, "Failed")
+            self.status = status_mapper.get(self.videos[0].status or 2, "Failed")
 
 
     #
@@ -240,3 +242,10 @@ class MusicRequet(BaseModel):
 class MusicResponse(BaseModel):
     trace_id: str
     base_resp: BaseResponse
+
+
+if __name__ == '__main__':
+
+    print(VideoResponse(task_id='1', base_resp={"status_code": 0, "status_msg": "success"}).model_dump())
+
+    print(Video(x='xxx'))
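The two fields added to `VideoRequest` default to a 6-second, 768P clip. A minimal sketch of building a request with the new fields, assuming the remaining fields keep their defaults (no API call is made):

```python
from meutils.schemas.hailuo_types import VideoRequest

req = VideoRequest(
    model="MiniMax-Hailuo-02",
    prompt="a cat surfing a wave",
    duration=10,          # new field: 6 or 10 seconds
    resolution="1080P",   # new field: "768P" or "1080P"
)
print(req.model_dump(exclude_none=True))
```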
meutils/schemas/image_types.py
CHANGED
meutils/schemas/oneapi/_types.py
CHANGED
@@ -38,7 +38,7 @@ class ModelInfo(BaseModel):
 
 
 class ChannelInfo(BaseModel):
-    id: Optional[int] = None  # 不存在就新建
+    id: Optional[Union[int, str]] = None  # 不存在就新建
     type: int = 1  # 枚举值 openai
 
     name: str = ''
@@ -46,7 +46,7 @@ class ChannelInfo(BaseModel):
     group: str = 'default'
 
     base_url: str = ''
-    key: str
+    key: str  # 与id相对应
     models: str = 'MODEL'
 
     access_token: str = ''
@@ -87,11 +87,14 @@ class ChannelInfo(BaseModel):
 
     def __init__(self, /, **data: Any):
         super().__init__(**data)
-
-        self.
+
+        self.id = self.id or None
+
+        self.name = f"""{str(datetime.datetime.now())[:10]} {self.name or self.base_url or "NAME"}"""
+        self.tag = f"""{str(datetime.datetime.now())[:10]} {self.tag or self.base_url or "TAG"}"""
         self.group = self.group or self.base_url or "GROUP"
 
-        self.setting= self.setting or ""
+        self.setting = self.setting or ""
         self.param_override = self.param_override or ""
         if isinstance(self.model_mapping, dict):
             self.model_mapping = json.dumps(self.model_mapping)
@@ -103,6 +106,15 @@ if __name__ == '__main__':
 
     from meutils.apis.oneapi import option, channel
 
-    option()
+    # option()
     #
-    arun(channel.edit_channel(MODEL_PRICE))
+    # arun(channel.edit_channel(MODEL_PRICE))
+
+    c = ChannelInfo(key='key')
+
+    c_ = c.copy()
+    l = []
+    for i in range(3):
+        c_.key = f"######{i}"
+        l.append(c_)
+    print(l)
meutils/schemas/oneapi/common.py
CHANGED
@@ -44,8 +44,6 @@ MODEL_PRICE = {
     "gemini-2.5-flash-video": 0.05,
     "gemini-2.5-pro-video": 0.1,
 
-    "veo3": 4,
-
     # rix
     "kling_image": 0.05,
     "kling_virtual_try_on": 1,
@@ -95,6 +93,20 @@ MODEL_PRICE = {
     "api-videos-wan-ai/wan2.1-i2v-14b-720p": 1.2,
     "api-videos-wan-ai/wan2.1-i2v-14b-720p-turbo": 1.2,
 
+    # veo
+    "veo3": 2,
+    "veo3-pro": 8,
+    "veo3-pro-frames": 8,
+    "api-veo3": 2,
+    "api-veo3-pro": 8,
+    "api-veo3-pro-frames": 8,
+
+    # hailuo
+    "api-minimax-hailuo-01-6s": 0.5,
+    "api-minimax-hailuo-02-6s-768p": 1,
+    "api-minimax-hailuo-02-10s-768p": 2,
+    "api-minimax-hailuo-02-6s-1080p": 2,
+
     # chatfire
     "ppu-0001": 0.0001,
     "ppu-001": 0.001,
@@ -126,8 +138,7 @@ MODEL_PRICE = {
     "ppt-islide": 0.1,
 
     # grok
-    "grok-3": 0.
-    "grok-3-image": 0.02,
+    "grok-3-image": 0.1,
 
     # 虚拟换衣fish
     "api-kolors-virtual-try-on": 0.1,
@@ -146,9 +157,6 @@ MODEL_PRICE = {
 
     "official-api-vidu": 0.5,
 
-    "official-api-hailuo-video": 0.5,
-    "api-hailuo-video": 0.5,
-
     "api-videos-seedream-3.0": 0.5,
 
     "api-kling-video/v2/master/text-to-video": 5,
@@ -471,10 +479,10 @@ MODEL_RATIO = {
     "grok-2": 1,
     "grok-2-1212": 1,
     "grok-2-vision-1212": 1,
-    "grok-3":
-    "grok-3-deepsearch":
-    "grok-3-reasoner":
-    "grok-3-deepersearch":
+    "grok-3": 1.5,
+    "grok-3-deepsearch": 1.5,
+    "grok-3-reasoner": 1.5,
+    "grok-3-deepersearch": 1.5,
 
     "grok-3-beta": 1.5,
     "grok-3-fast-beta": 2.5,
@@ -1018,7 +1026,7 @@ MODEL_RATIO = {
 
 COMPLETION_RATIO = {
     "minimax-text-01": 8,
-    "minimax-m1-80k":4,
+    "minimax-m1-80k": 4,
 
     # 智能体
     "gpt-4-plus": 5,
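The veo and hailuo entries added to `MODEL_PRICE` are flat per-call prices keyed by duration and resolution. A minimal sketch of a lookup; the `hailuo_price` helper and the key layout are inferred from the entries above, not from any helper shipped in the package:

```python
from meutils.schemas.oneapi.common import MODEL_PRICE

def hailuo_price(duration: int = 6, resolution: str = "768P") -> float:
    # Key layout inferred from the new entries, e.g. "api-minimax-hailuo-02-10s-768p".
    key = f"api-minimax-hailuo-02-{duration}s-{resolution.lower()}"
    return MODEL_PRICE.get(key, 0.5)  # fall back to a default per-call price

print(hailuo_price(10, "768P"))  # 2
```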
meutils/serving/fastapi/dependencies/auth.py
CHANGED
@@ -46,7 +46,8 @@ async def get_bearer_token(
 
     elif token.startswith("http") and "feishu.cn" in token:  # feishu 取所有 keys 主要针对 channel
         feishu_url = token
-
+
+        tokens = await get_series(feishu_url, duplicated=True)
         token = '\n'.join(tokens)  # 多渠道
 
     elif ',' in token:  # 内存里随机轮询
meutils/serving/fastapi/dependencies/headers.py
CHANGED
@@ -13,19 +13,23 @@ from fastapi import FastAPI, Request, Depends, HTTPException
 from typing import Dict, Optional
 
 
-# 定义一个依赖函数来获取所有请求头
-# def get_headers(request: Request) -> Dict[str, str]:
-#     return dict(request.headers)
-
 def get_headers(request: Request):
-
-
-
-
-
-
-
-
-
-
-
+    dic = dict(request.headers)
+    _dic = {k.replace('-', '_'): v for k, v in dic.items()}
+
+    return {**dic, **_dic}
+
+
+if __name__ == '__main__':
+    def get_headers():
+        d = {"upstream-base-url": 'xx'}
+        d = {}
+        dic = {}
+        # upstream_base_url = headers.get('upstream-base-url')
+        if d:
+            dic = {k.replace('-', '_'): v for k, v in d.items()}
+
+        return {**d, **dic}
+
+
+    print(get_headers())
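After this change `get_headers` returns every request header twice, once with dashes and once with underscores, so downstream code can read either spelling. A minimal sketch of using it as a FastAPI dependency (the `/echo` route is illustrative only):

```python
from fastapi import Depends, FastAPI
from meutils.serving.fastapi.dependencies.headers import get_headers

app = FastAPI()

@app.get("/echo")
async def echo(headers: dict = Depends(get_headers)):
    # "upstream-base-url" and "upstream_base_url" now resolve to the same value.
    return {"upstream_base_url": headers.get("upstream_base_url")}
```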
meutils/str_utils/__init__.py
CHANGED
@@ -246,6 +246,16 @@ def validate_url(url):
 
     return False
 
+def parse_slice(slice_str):
+    content = slice_str.strip()[1:-1]
+    parts = content.split(':', 2)
+    parts += [''] * (3 - len(parts))  # 确保有3部分
+    return slice(
+        int(parts[0]) if parts[0] else None,
+        int(parts[1]) if parts[1] else None,
+        int(parts[2]) if parts[2] else None
+    )
+
 
 if __name__ == '__main__':
     # print(str_replace('abcd', {'a': '8', 'd': '88'}))
@@ -291,3 +301,14 @@ if __name__ == '__main__':
     print(validate_url(url))
 
     print(re.findall("http", url*2))
+
+    # 示例用法
+    slice_str = "[:2]"
+    slice_str = ":2]"
+
+    # slice_str="https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo[:2]"
+    # https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo%5B%3A2%5D
+    slice_obj = parse_slice(slice_str)
+    l = list(range(10))
+    print(l[slice_obj])  # [0, 1]
+
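`parse_slice` turns a string such as "[:2]" into a Python `slice` object. A minimal sketch of the use case hinted at in the commented-out example above, assuming the caller splits the trailing slice off the sheet URL itself:

```python
from meutils.str_utils import parse_slice

url = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=Y7HVfo[:2]"
base, _, suffix = url.partition("[")     # suffix -> ":2]"
keys = ["key0", "key1", "key2", "key3"]  # e.g. tokens read from the sheet

print(keys[parse_slice("[" + suffix)])   # ['key0', 'key1'], only the first two
```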
meutils/str_utils/regular_expression.py
CHANGED
@@ -8,6 +8,7 @@
 # @Software : PyCharm
 # @Description :
 import json
+import mimetypes
 import re
 
 from meutils.pipe import *
@@ -97,6 +98,8 @@ def parse_url(text: str, for_image=False, fn: Optional[Callable] = None):
     # url_pattern = r'https?://[\w\-\.]+/\S+\.(?:png|jpg|jpeg|gif)'
     url_pattern = r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[#]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\.(?:jpg|jpeg|png|gif|svg|webp)'
 
+    # "https://i.miji.bid/2025/06/10/d018000aed9b872c7b248dccf14c4450.pngA" 纠正
+
     urls = re.findall(url_pattern, text)
 
     valid_urls = []
@@ -176,8 +179,10 @@ if __name__ == '__main__':
 https://p26-bot-workflow-sign.byteimg.com/tos-cn-i-mdko3gqilj/f13171faeed2447b8b9c301ba912f25c.jpg~tplv-mdko3gqilj-image.image?rk3s=81d4c505&x-expires=1779880356&x-signature=AJop4%2FM8VjCUfjqiEzUugprc0CI%3D&x-wf-file_name=B0DCGKG71N.MAIN.jpg
 
 还有这种url,两个.jpg的也能兼容么
+
+https://i.miji.bid/2025/06/10/d018000aed9b872c7b248dccf14c4450.pngA
     """
-    print(parse_url(text))
+    print(parse_url(text, for_image=True))
 
 
     # print(parse_url(text, for_image=False))
@@ -186,3 +191,5 @@ https://p26-bot-workflow-sign.byteimg.com/tos-cn-i-mdko3gqilj/f13171faeed2447b8b
     # print(parse_url(text))
     #
     # valid_urls = parse_url(text, for_image=True)
+
+    print(mimetypes.guess_type("xx.ico"))