MeUtils 2025.2.20.17.30.7__py3-none-any.whl → 2025.2.25.19.14.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/METADATA +261 -261
- {MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/RECORD +38 -32
- examples/_openaisdk/4v.py +3 -2
- examples/_openaisdk/openai_chatfire.py +7 -3
- examples/_openaisdk/openai_files.py +11 -7
- examples/_openaisdk/openai_images.py +15 -10
- examples/_openaisdk/openai_jina.py +58 -0
- meutils/apis/jimeng/images.py +3 -2
- meutils/apis/kling/api.py +1 -1
- meutils/apis/niutrans.py +2 -0
- meutils/apis/oneapi/token.py +0 -2
- meutils/apis/siliconflow/images.py +4 -1
- meutils/apis/sunoai/suno_api.py +42 -0
- meutils/apis/to_image/md.py +24 -2
- meutils/apis/translator/deeplx.py +2 -1
- meutils/caches/acache.py +51 -7
- meutils/data/VERSION +1 -1
- meutils/data/oneapi/FOOTER.md +2 -2
- meutils/data/oneapi/NOTICE.md +1 -151
- meutils/data/oneapi/_NOTICE.md +140 -0
- meutils/files/__init__.py +11 -0
- meutils/files/qwen_files.py +30 -0
- meutils/io/files_utils.py +2 -2
- meutils/llm/check_utils.py +2 -1
- meutils/llm/clients.py +5 -2
- meutils/llm/completions/qwenllm.py +55 -8
- meutils/llm/utils.py +1 -0
- meutils/schemas/oneapi/common.py +26 -2
- meutils/schemas/openai_types.py +60 -3
- meutils/schemas/suno_types.py +1 -1
- meutils/schemas/task_types.py +1 -0
- meutils/schemas/vidu_types.py +4 -4
- meutils/serving/fastapi/dependencies/auth.py +8 -2
- meutils/serving/fastapi/dependencies/headers.py +31 -0
- {MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/LICENSE +0 -0
- {MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/WHEEL +0 -0
- {MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/top_level.txt +0 -0
meutils/schemas/oneapi/common.py
CHANGED
@@ -101,8 +101,9 @@ MODEL_PRICE = {
     "api-tripo3d": 0.1,

     # images / audio / video
-    "recraftv3": 0.
-    "
+    "recraftv3": 0.1,  # official
+    "recraft-api": 0.1,
+    "chat-recraftv3": 0.1,

     "api-asr": 0.01,
     "api-stt": 0.01,
@@ -277,6 +278,7 @@ MODEL_PRICE = {

     'deepseek-search': 0.01,
     'deepseek-r1-search': 0.01,
+    "deepseek-r1-search-pro": 0.02,
     'deepseek-reasoner-search': 0.01,

     'deepseek-r1-metasearch': 0.03,
@@ -334,6 +336,9 @@ MODEL_RATIO = {
     # agents
     "gpt-4-plus": 2.5,
     "gpt-4o-plus": 2.5,
+    "jina-deepsearch": 2,
+    "deepresearch": 2,
+    "deepsearch": 2,

     # embedding & rerank
     "rerank-multilingual-v2.0": 0.1,
@@ -364,9 +369,13 @@ MODEL_RATIO = {
     "claude-3-5-haiku-20241022": 0.5,
     "anthropic/claude-3-5-haiku-20241022:beta": 0.5,

+    # grok
     "grok-2": 1,
     "grok-2-1212": 1,
     "grok-2-vision-1212": 1,
+    "grok-3": 2,
+    "grok-3-deepsearch": 2,
+    "grok-3-reasoner": 2,

     # custom
     "lingxi-all": 1,
@@ -594,6 +603,10 @@ MODEL_RATIO = {
     "anthropic/claude-3.5-sonnet": 1.5,
     "anthropic/claude-3.5-sonnet:beta": 4,  # 1022

+    "claude-3-7-sonnet-thinking": 1.5,
+    "claude-3-7-sonnet-latest": 1.5,
+    "claude-3-7-sonnet-20250219": 1.5,
+
     "command": 0.5 * 2,
     "command-light": 0.5 * 2,
     "command-light-nightly": 0.5 * 2,
@@ -743,11 +756,18 @@ COMPLETION_RATIO = {
     # agents
     "gpt-4-plus": 5,
     "gpt-4o-plus": 5,
+    "jina-deepsearch": 4,
+    "deepresearch": 4,
+    "deepsearch": 4,

     "grok-2": 5,
     "grok-2-1212": 5,
     "grok-2-vision-1212": 5,

+    "grok-3": 5,
+    "grok-3-deepsearch": 5,
+    "grok-3-reasoner": 5,
+
     "claude-3-5-haiku-20241022": 5,
     "anthropic/claude-3-5-haiku-20241022:beta": 5,

@@ -772,6 +792,10 @@ COMPLETION_RATIO = {
     "anthropic/claude-3.5-sonnet": 5,
     "anthropic/claude-3.5-sonnet:beta": 5,

+    "claude-3-7-sonnet-think": 5,
+    "claude-3-7-sonnet-latest": 5,
+    "claude-3-7-sonnet-20250219": 5,
+
     "llama-3.1-70b-instruct": 2,
     "meta-llama/Meta-Llama-3.1-70B-Instruct": 2,

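For context, these tables follow the common one-api pricing pattern: MODEL_PRICE holds per-call prices, while MODEL_RATIO weights prompt tokens and COMPLETION_RATIO additionally weights completion tokens. A minimal sketch of that arithmetic, assuming a hypothetical base price and a made-up estimate_cost helper (not part of MeUtils):

# Hypothetical illustration only -- estimate_cost and base_price_per_1k are not MeUtils APIs.
MODEL_RATIO = {"grok-3": 2, "claude-3-7-sonnet-latest": 1.5}
COMPLETION_RATIO = {"grok-3": 5, "claude-3-7-sonnet-latest": 5}

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int,
                  base_price_per_1k: float = 0.002) -> float:
    # prompt tokens are billed at the model ratio; completion tokens at
    # model ratio * completion ratio (assumed one-api convention)
    ratio = MODEL_RATIO.get(model, 1)
    completion_ratio = COMPLETION_RATIO.get(model, 1)
    prompt_cost = prompt_tokens / 1000 * base_price_per_1k * ratio
    completion_cost = completion_tokens / 1000 * base_price_per_1k * ratio * completion_ratio
    return prompt_cost + completion_cost

print(estimate_cost("grok-3", 1000, 500))  # 0.014 with the assumed base price
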
meutils/schemas/openai_types.py
CHANGED
@@ -87,6 +87,62 @@ chat_completion_chunk_stop = ChatCompletionChunk(
 # chat_completion.choices[0].message.content = "*"
 # chat_completion_chunk.choices[0].delta.content = "*"

+class CompletionRequest(BaseModel):
+    """
+    prompt_filter_result.content_filter_results
+    choice.content_filter_results
+
+    todo: ['messages', 'model', 'frequency_penalty', 'function_call', 'functions', 'logit_bias', 'logprobs', 'max_tokens', 'n', 'presence_penalty', 'response_format', 'seed', 'stop', 'stream', 'temperature', 'tool_choice', 'tools', 'top_logprobs', 'top_p', 'user']
+    """
+    model: str = ''  # "gpt-3.5-turbo-file-id"
+
+    # [{'role': 'user', 'content': 'hi'}]
+    # [{'role': 'user', 'content': [{"type": "text", "text": ""}]]
+    # [{'role': 'user', 'content': [{"type": "image_url", "image_url": ""}]}]  # files are also accepted
+    # [{'role': 'user', 'content': [{"type": "image_url", "image_url": {"url": ""}}]}]  # files are also accepted
+    # [{'role': 'user', 'content': [{"type": "file", "file_url": ""}]]
+    messages: Optional[List[Dict[str, Any]]] = None  # normalized
+
+    stream: Optional[bool] = False
+    stream_options: Optional[dict] = None
+
+    top_p: Optional[float] = 0.7
+    temperature: Optional[float] = 0.7
+
+    n: Optional[int] = 1
+    max_tokens: Optional[int] = None
+    stop: Optional[Union[str, List[str]]] = None
+
+    frequency_penalty: Optional[float] = None
+    user: Optional[str] = None
+
+    # extended fields
+    last_message: Optional[Dict[str, Any]] = None
+
+    user_content: Optional[Any] = None  # str dict
+
+    system_messages: Optional[list] = None
+    last_content: Optional[Any] = None
+    urls: List[str] = []
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+        last_message = self.messages[-1]  # role content
+        if last_message.get("role") == "user":
+            user_content = last_message.get("content")
+            if isinstance(user_content, list):  # todo: multimodal 'image_url', 'video_url', 'video', 'file', audio
+                for i, c in enumerate(user_content):
+                    if c.get("type") == "image_url":
+                        user_content[i]["type"] = "image_url"
+                        user_content[i]["image_url"] = user_content[i].get("image_url", {}).get("url", "")
+
+        self.messages = self.messages or [{'role': 'user', 'content': 'hi'}]
+        self.system_messages = [m for m in self.messages if m.get("role") == "system"]
+
+    class Config:
+        extra = "allow"
+

 class ChatCompletionRequest(BaseModel):
     """
@@ -170,8 +226,10 @@ class ChatCompletionRequest(BaseModel):
         if self.max_tokens:
             self.max_tokens = min(self.max_tokens, 4096)

-
-    "
+    class Config:
+        extra = "allow"
+
+        json_schema_extra = {
            "examples": [
                {
                    "model": "gpt-3.5-turbo",
@@ -222,7 +280,6 @@ class ChatCompletionRequest(BaseModel):

            ]
        }
-    }


 class ImageRequest(BaseModel):
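The new CompletionRequest normalizes incoming OpenAI-style messages in __init__: nested {"image_url": {"url": ...}} parts are flattened into plain URL strings and system messages are collected into system_messages. A rough usage sketch, assuming the class is importable from meutils.schemas.openai_types as this diff indicates:

# Illustrative usage of the CompletionRequest added above (import path per this diff).
from meutils.schemas.openai_types import CompletionRequest

request = CompletionRequest(
    model="qwen-max",  # hypothetical model name
    messages=[{
        "role": "user",
        "content": [
            {"type": "text", "text": "describe the image"},
            {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
        ],
    }],
)

# __init__ flattened the nested url dict into a plain string
print(request.messages[-1]["content"][1]["image_url"])  # https://example.com/cat.png
print(request.system_messages)                          # [] (no system message supplied)
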
meutils/schemas/suno_types.py
CHANGED
meutils/schemas/task_types.py
CHANGED
meutils/schemas/vidu_types.py
CHANGED
@@ -92,7 +92,8 @@ class VideoRequest(BaseModel):

 class ViduRequest(BaseModel):
     """quality: 2x multiplier"""
-    model: Union[
+    model: Union[
+        str, Literal['vidu-2.0', 'vidu-1.5', 'vidu-high-performance', 'vidu-high-quality']] = "vidu-high-performance"

     prompt: Optional[str] = None

@@ -156,13 +157,12 @@ class ViduRequest(BaseModel):
             }
         )

-
-        if self.model in ("vidu-1.5", "vidu-2.0"):
+        if self.model in ("vidu-2.0",):
             self.payload = {
                 "input": input,
                 "type": self.type or type,
                 "settings": {
-                    "model_version": "2.0",
+                    "model_version": "2.0",  #######
                     "style": "general",
                     "duration": self.duration,

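The behavioural change in the second hunk is that only "vidu-2.0" now builds the model_version "2.0" settings payload; "vidu-1.5" no longer takes that branch. A standalone sketch of just that branch (the build_settings helper is hypothetical, not MeUtils code):

from typing import Optional

def build_settings(model: str, duration: int = 4) -> Optional[dict]:
    # mirrors the condition after this change; previously ("vidu-1.5", "vidu-2.0")
    if model in ("vidu-2.0",):
        return {"model_version": "2.0", "style": "general", "duration": duration}
    return None  # other models are handled by branches not shown in this hunk

print(build_settings("vidu-2.0"))  # {'model_version': '2.0', 'style': 'general', 'duration': 4}
print(build_settings("vidu-1.5"))  # None
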
meutils/serving/fastapi/dependencies/auth.py
CHANGED
@@ -14,6 +14,8 @@ from typing import Optional, Union
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from fastapi import Depends, HTTPException, status

+from meutils.config_utils.lark_utils import get_series, get_next_token
+
 http_bearer = HTTPBearer()


@@ -32,9 +34,13 @@ async def get_bearer_token(

     token = auth.credentials
     if token.startswith('redis:'):  # initialize here? too long?
-
-
+        if "feishu.cn" in token:
+            feishu_url = token.removeprefix("redis:")
+            token = await get_next_token(feishu_url)  # initialize redis
+
+        elif ',' in token:  # todo: initialize redis
             pass
+
     elif ',' in token:  # delimiter
         token = np.random.choice(token.split(','))

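After this change get_bearer_token appears to accept three token shapes: a plain key, a comma-separated key pool (one key picked at random), and a redis:-prefixed Feishu sheet URL resolved through get_next_token. A client-side sketch; the endpoint, port and sheet URL are placeholders:

# Illustrative only: endpoint, port and Feishu URL are hypothetical.
import httpx

token_forms = [
    "sk-xxxx",                                   # used as-is
    "sk-a,sk-b,sk-c",                            # one key chosen via np.random.choice
    "redis:https://xxx.feishu.cn/sheets/xxxx",   # resolved via get_next_token
]

headers = {"Authorization": f"Bearer {token_forms[2]}"}
resp = httpx.get("http://localhost:8000/v1/models", headers=headers)
print(resp.status_code)
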
meutils/serving/fastapi/dependencies/headers.py
ADDED
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project      : AI.  @by PyCharm
+# @File         : headers
+# @Time         : 2025/2/23 00:20
+# @Author       : betterme
+# @WeChat       : meutils
+# @Software     : PyCharm
+# @Description  :
+
+
+from fastapi import FastAPI, Request, Depends, HTTPException
+from typing import Dict, Optional
+
+
+# dependency function that collects all request headers
+# def get_headers(request: Request) -> Dict[str, str]:
+#     return dict(request.headers)
+
+def get_headers(request: Request):
+    return request.headers
+
+# lambda request: dict(request.headers)
+# @app.get("/headers/")
+# async def read_headers(headers: Dict[str, str] = Depends(get_headers)):
+#     # here you can use the headers dict
+#     if "upstream_api_key" not in headers:
+#         raise HTTPException(status_code=400, detail="API key is required")
+#
+#     # return all request headers
+#     return {"headers": headers}
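get_headers is a plain FastAPI dependency that returns the raw request headers. A minimal wiring sketch; the route name is illustrative and the import path follows the file list above:

from fastapi import Depends, FastAPI
from meutils.serving.fastapi.dependencies.headers import get_headers

app = FastAPI()

@app.get("/echo-headers")  # illustrative route
async def echo_headers(headers=Depends(get_headers)):
    # get_headers returns the Starlette Headers object; dict() makes it JSON-serializable
    return {"headers": dict(headers)}
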
{MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/LICENSE
RENAMED
File without changes
{MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/WHEEL
RENAMED
File without changes
{MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/entry_points.txt
RENAMED
File without changes
{MeUtils-2025.2.20.17.30.7.dist-info → MeUtils-2025.2.25.19.14.31.dist-info}/top_level.txt
RENAMED
File without changes