MeUtils 2025.5.15.14.3.22__py3-none-any.whl → 2025.5.29.18.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.5.15.14.3.22.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/METADATA +262 -262
- {MeUtils-2025.5.15.14.3.22.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/RECORD +44 -37
- examples/_openaisdk/openai_audio.py +5 -3
- examples/_openaisdk/openai_chatfire.py +5 -0
- examples/_openaisdk/openai_sophnet.py +47 -0
- meutils/ai_audio/asr/openai_asr.py +5 -5
- meutils/apis/audio/minimax.py +202 -0
- meutils/apis/dreamfaceapp/__init__.py +11 -0
- meutils/apis/dreamfaceapp/audios.py +31 -0
- meutils/apis/dreamfaceapp/images.py +110 -0
- meutils/apis/dreamfaceapp/videos.py +115 -0
- meutils/apis/fal/videos.py +1 -0
- meutils/apis/google/chat.py +3 -0
- meutils/apis/google/gemini_sdk.py +34 -0
- meutils/apis/images/recraft.py +11 -8
- meutils/apis/jimeng/audio.py +1 -1
- meutils/apis/jimeng/doubao_images.py +70 -0
- meutils/apis/jimeng/doubao_utils.py +4 -4
- meutils/apis/jimeng/videos.py +8 -3
- meutils/apis/oneapi/channel.py +11 -7
- meutils/apis/oneapi/user.py +1 -1
- meutils/apis/search/metaso.py +26 -8
- meutils/apis/siliconflow/videos.py +9 -16
- meutils/apis/volcengine_apis/__init__.py +0 -1
- meutils/apis/volcengine_apis/images.py +5 -4
- meutils/apis/volcengine_apis/tasks.py +74 -8
- meutils/config_utils/lark_utils/common.py +5 -5
- meutils/data/VERSION +1 -1
- meutils/io/files_utils.py +5 -3
- meutils/io/openai_files.py +3 -2
- meutils/llm/check_utils.py +35 -3
- meutils/llm/completions/chat_spark.py +31 -13
- meutils/llm/completions/qwenllm.py +25 -18
- meutils/llm/openai_polling/chat.py +2 -2
- meutils/office_automation/pdf.py +38 -2
- meutils/pipe.py +4 -1
- meutils/schemas/dreamfaceapp_types.py +12 -0
- meutils/schemas/metaso_types.py +9 -3
- meutils/schemas/oneapi/common.py +59 -21
- meutils/schemas/openai_types.py +5 -2
- meutils/apis/jimeng/doubao.py +0 -68
- {MeUtils-2025.5.15.14.3.22.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/LICENSE +0 -0
- {MeUtils-2025.5.15.14.3.22.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/WHEEL +0 -0
- {MeUtils-2025.5.15.14.3.22.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.5.15.14.3.22.dist-info → MeUtils-2025.5.29.18.12.8.dist-info}/top_level.txt +0 -0
@@ -20,19 +20,17 @@ from openai import OpenAI, AsyncOpenAI
 
 check_token = partial(check_token_for_siliconflow, threshold=0.01)
 
-MODELS_MAP = {
-    "hunyuan-video": "tencent/HunyuanVideo",
-    "hunyuanvideo": "tencent/HunyuanVideo",
-    "mochi-1-preview": "genmo/mochi-1-preview",
-    "ltx-video": "Lightricks/LTX-Video",
-}
 """
+
+tencent/HunyuanVideo-HD
+
 Wan-AI/Wan2.1-T2V-14B
 Wan-AI/Wan2.1-T2V-14B-Turbo
 
 Wan-AI/Wan2.1-I2V-14B-720P
 Wan-AI/Wan2.1-I2V-14B-720P-Turbo
 
+
 16:9 👉 1280×720
 9:16 👉 720×1280
 1:1 👉 960×960

@@ -44,17 +42,12 @@ Wan-AI/Wan2.1-I2V-14B-720P-Turbo
 async def create_task(request: VideoRequest, token: Optional[str] = None):
     token = token or await get_next_token_for_polling(FEISHU_URL_FREE, check_token=check_token, from_redis=True)
 
-    if request.
-        request.
+    if 'Wan-AI' in request.model:
+        request.model = "Wan-AI/Wan2.1-T2V-14B-720P-Turbo"
 
-    if request.
-        request.
-
-    elif request.model == "Wan-AI/Wan2.1-I2V-14B-720P":
-        request.model = "Wan-AI/Wan2.1-I2V-14B-720P-Turbo"
-
-    else:
-        request.model = "Wan-AI/Wan2.1-I2V-14B-720P-Turbo" if request.image else "Wan-AI/Wan2.1-T2V-14B-Turbo"
+    if request.image:
+        # request.image = await to_base64(request.image)
+        request.model = request.model.replace("-T2V-", "-I2V-")
 
     payload = request.model_dump(exclude_none=True)
 
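The create_task hunk above replaces the removed MODELS_MAP lookup with plain string routing. A minimal, self-contained sketch of that routing; VideoRequest here is a hypothetical stand-in dataclass, not the package's actual schema:

from dataclasses import dataclass
from typing import Optional


@dataclass
class VideoRequest:  # hypothetical stand-in for the package's VideoRequest
    model: str
    image: Optional[str] = None  # an image switches the request to image-to-video


def route_model(request: VideoRequest) -> str:
    if 'Wan-AI' in request.model:
        request.model = "Wan-AI/Wan2.1-T2V-14B-720P-Turbo"
    if request.image:
        # swap the text-to-video model for its image-to-video counterpart
        request.model = request.model.replace("-T2V-", "-I2V-")
    return request.model


print(route_model(VideoRequest(model="Wan-AI/Wan2.1-T2V-14B")))                   # Wan-AI/Wan2.1-T2V-14B-720P-Turbo
print(route_model(VideoRequest(model="Wan-AI/Wan2.1-T2V-14B", image="cat.png")))  # Wan-AI/Wan2.1-I2V-14B-720P-Turbo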
@@ -90,14 +90,15 @@ async def generate(request: ImageRequest, token: Optional[str] = None):
 
 if __name__ == '__main__':
     token = f"""{os.getenv("VOLC_ACCESSKEY")}|{os.getenv("VOLC_SECRETKEY")}"""
+    token = "AKLTOWM5ZTc5ZDFhZWNlNDIzODkwYmZiNjEyNzYwNzE0MTI|T0RCbFpHRTJaRFEyWmpjeE5ERXpNR0ptWlRCaU16WmhPRE0wWVdKa01tTQ=="
     prompt = """
     3D魔童哪吒 c4d 搬砖 很开心, 很快乐, 精神抖擞, 背景是数不清的敖丙虚化 视觉冲击力强 大师构图 色彩鲜艳丰富 吸引人 背景用黄金色艺术字写着“搬砖挣钱” 冷暖色对比
     """
 
-    prompt = """
-    https://oss.ffire.cc/files/kling_watermark.png
-    让这个女人带上眼镜 衣服换个颜色
-    """
+    # prompt = """
+    # https://oss.ffire.cc/files/kling_watermark.png
+    # 让这个女人带上眼镜 衣服换个颜色
+    # """
 
     request = ImageRequest(
         model="high_aes_general_v30l_zt2i",
@@ -15,13 +15,28 @@ https://www.volcengine.com/docs/6791/1384311
 import os
 
 from meutils.pipe import *
-from meutils.
+from meutils.caches import rcache
+
+from meutils.decorators.retry import retrying
+from meutils.db.redis_db import redis_aclient
+from meutils.config_utils.lark_utils import get_next_token_for_polling
 from meutils.schemas.jimeng_types import VideoRequest, ImageRequest
 
 from volcengine.visual.VisualService import VisualService
+from fastapi import APIRouter, File, UploadFile, Query, Form, Depends, Request, HTTPException, status, BackgroundTasks
+
+FEISHU = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2knEd?sheet=OiHxsE"
+
 
+# and "Access Denied" not in str(r)
+
+@retrying(max_retries=5, predicate=lambda r: "Concurrent Limit" in str(r))  # 限流
+async def create_task(request: Union[ImageRequest, VideoRequest, dict], token: Optional[str] = None):
+    """https://www.volcengine.com/docs/6791/1399614"""
+    token = token or await get_next_token_for_polling(FEISHU)
+
+    logger.debug(token)
 
-def create_task(request: Union[ImageRequest, VideoRequest], token: Optional[str] = None):
     visual_service = VisualService()
 
     if token:

@@ -30,26 +45,77 @@ def create_task(request: Union[ImageRequest, VideoRequest], token: Optional[str]
         visual_service.set_sk(sk)
 
     # request
-
+    if not isinstance(request, dict):
+        request = request.model_dump(exclude_none=True)
+
+    response = visual_service.cv_sync2async_submit_task(request)  # 同步转异步
+
+    """
+    {'code': 10000,
+     'data': {'task_id': '15106285208671192523'},
+     'message': 'Success',
+     'request_id': '202505291707517FC0D2B135CEE77BC4A5',
+     'status': 10000,
+     'time_elapsed': '150.967328ms'}
+    """
+
+    logger.debug(response)
+    if response.get('code') == 10000:
+        await redis_aclient.set(response['data']['task_id'], token, ex=7 * 24 * 3600)
+    else:
+        raise Exception(response)
+
+    return response
+
+
+#
+# @retrying(max_retries=5, predicate=lambda r: "Concurrent Limit" in str(r))  # 限流
+# @rcache(ttl=5)
+async def get_task(request: dict):
+    task_id = request.get("task_id", "")
+    token = await redis_aclient.get(task_id)  # 绑定对应的 token
+    token = token and token.decode()
+    if not token:
+        raise HTTPException(status_code=404, detail="TaskID not found")
+
+    visual_service = VisualService()
+
+    if token:
+        ak, sk = token.split('|')
+        visual_service.set_ak(ak)
+        visual_service.set_sk(sk)
+
+    response = visual_service.cv_get_result(request)  # 同步转异步
+
+    logger.debug(response)
 
-    response = visual_service.cv_submit_task(payload)
     return response
 
 
 if __name__ == '__main__':
     token = f"""{os.getenv("VOLC_ACCESSKEY")}|{os.getenv("VOLC_SECRETKEY")}"""
+    token = "AKLTOWM5ZTc5ZDFhZWNlNDIzODkwYmZiNjEyNzYwNzE0MTI|T0RCbFpHRTJaRFEyWmpjeE5ERXpNR0ptWlRCaU16WmhPRE0wWVdKa01tTQ=="
     prompt = """
     3D魔童哪吒 c4d 搬砖 很开心, 很快乐, 精神抖擞, 背景是数不清的敖丙虚化 视觉冲击力强 大师构图 色彩鲜艳丰富 吸引人 背景用黄金色艺术字写着“搬砖挣钱” 冷暖色对比
     """
 
     request = ImageRequest(
+        req_key="high_aes_general_v30l_zt2i",
         prompt=prompt,
     )
 
-    request = VideoRequest(
-
-    )
+    # request = VideoRequest(
+    #     prompt=prompt
+    # )
 
+    # arun(create_task(request, token))
+    # arun(create_task(request))
 
+    # request = {
+    #     "task_id": "141543714223689974",
+    #     "req_key": "high_aes_general_v30l_zt2i"
+    # }
+    # #
+    # arun(get_task(request))
 
-    print(
+    print(bjson(request))
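Assuming these two hunks belong to meutils/apis/volcengine_apis/tasks.py (per the file list above), the new flow is: create_task submits through cv_sync2async_submit_task and caches task_id → ak|sk token in Redis for seven days, and get_task later resolves that token to call cv_get_result. A hedged usage sketch; the module path and payload keys are taken from this diff, everything else is illustrative:

from meutils.apis.volcengine_apis.tasks import create_task, get_task  # assumed module path


async def demo():
    # submit: the returned task_id is bound to the ak|sk token in Redis (ex=7 days)
    submitted = await create_task(
        {"req_key": "high_aes_general_v30l_zt2i", "prompt": "a cat"},
        token="VOLC_ACCESSKEY|VOLC_SECRETKEY",
    )
    task_id = submitted["data"]["task_id"]

    # poll: get_task looks the token up by task_id and raises a 404 if it is unknown
    return await get_task({"task_id": task_id, "req_key": "high_aes_general_v30l_zt2i"})

# run with asyncio.run(demo()); requires volcengine credentials and a reachable Redis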
@@ -80,7 +80,7 @@ def get_spreadsheet_values(
 
 
 @alru_cache(ttl=600)
-@rcache(ttl=
+@rcache(ttl=300, serializer='pickle')  # 缓存
 async def aget_spreadsheet_values(
         spreadsheet_token: Optional[str] = None,
         sheet_id: Optional[str] = None,

@@ -101,10 +101,10 @@ async def aget_spreadsheet_values(
     if response.is_success:
         data = response.json()
         if to_dataframe:
-
-
+            values = data.get('data').get('valueRange').get('values')
+
+            return pd.DataFrame(values)
 
-            return pd.DataFrame(data.get('data').get('valueRange').get('values'))
         return data
     else:
         send_message(

@@ -307,7 +307,7 @@ async def get_dataframe(iloc_tuple: Optional[tuple] = None, feishu_url: Optional
 async def get_series(feishu_url: str, index: int = 0, duplicated: bool = False):  # 系统配置
     df = await aget_spreadsheet_values(feishu_url=feishu_url, to_dataframe=True)
     series = df[index]
-    values = [i for i in series if i]  # todo: 非标准字符串处理
+    values = [i for i in series if i and isinstance(i, str)]  # todo: 非标准字符串处理
     if duplicated:  # 有序去重
         values = values | xUnique
 
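The get_series change tightens the cell filter: besides dropping falsy values, it now also drops non-string cells (numbers, None) that a sheet can return. A tiny illustration of the difference:

series = ["sk-aaa", None, "", 0, 123, "sk-bbb"]

old_values = [i for i in series if i]                         # keeps the stray number 123
new_values = [i for i in series if i and isinstance(i, str)]  # only non-empty strings survive

print(old_values)  # ['sk-aaa', 123, 'sk-bbb']
print(new_values)  # ['sk-aaa', 'sk-bbb']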
meutils/data/VERSION CHANGED
@@ -1 +1 @@
-2025.05.
+2025.05.29.18.12.08
meutils/io/files_utils.py CHANGED
@@ -26,7 +26,7 @@ from httpx import AsyncClient
 def guess_mime_type(file):
     mime_type = None
     if isinstance(file, str):
-        mime_type, _ = mimetypes.guess_type(file)
+        mime_type, _ = mimetypes.guess_type(file.strip())
     return mime_type or "application/octet-stream"
 
 

@@ -280,5 +280,7 @@ if __name__ == '__main__':
 
     # arun(to_bytes("https://oss.ffire.cc/files/kling_watermark.png"))
 
-    file = "https://v3.fal.media/files/penguin/Rx-8V0MVgkVZM6PJ0RiPD_douyin.mp4"
-    arun(to_bytes(file))
+    # file = "https://v3.fal.media/files/penguin/Rx-8V0MVgkVZM6PJ0RiPD_douyin.mp4"
+    # arun(to_bytes(file))
+
+    print(guess_mime_type("http://admin.ilovechatgpt.top/file/ceshiwendangdocx_31118702.docx "))
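The .strip() added to guess_mime_type matters for URLs that arrive with trailing whitespace, like the one exercised in __main__ above: mimetypes keys off the file extension, and a trailing space defeats the lookup. A quick illustration (the exact MIME string is platform-dependent):

import mimetypes

url = "http://admin.ilovechatgpt.top/file/ceshiwendangdocx_31118702.docx "  # note the trailing space

print(mimetypes.guess_type(url)[0])          # None, because ".docx " is not a known extension
print(mimetypes.guess_type(url.strip())[0])  # usually the Word/OpenXML MIME type, so no fallback to octet-stream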
meutils/io/openai_files.py CHANGED
@@ -50,7 +50,7 @@ async def delete_files(client, threshold: int = 666):
 
 
 @rcache(ttl=7 * 24 * 3600)
-async def file_extract(file):
+async def file_extract(file, enable_reader: bool = True):
     """
 
     :param file: url bytes path

@@ -63,8 +63,9 @@ async def file_extract(file):
     filename = Path(file).name if isinstance(file, str) else 'untitled'
     mime_type = guess_mime_type(file)
 
-    if str(file).startswith("http") and mime_type in {"application/octet-stream", "text/html"}:
+    if enable_reader and str(file).startswith("http") and mime_type in {"application/octet-stream", "text/html"}:
         logger.debug(f"jina reader")
+
         markdown_content = await url_reader(file)
         return {
             'filename': filename,
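file_extract now takes an enable_reader flag: by default an http(s) URL whose guessed MIME type is octet-stream or text/html is routed through the url_reader ("jina reader") branch shown above, while enable_reader=False skips that branch so the target is parsed as a plain file. A hedged sketch of the two call styles, using the import path from this diff:

from meutils.io.openai_files import file_extract


async def extract_both_ways(url: str):
    as_page = await file_extract(url)                       # may go through url_reader for html/octet-stream URLs
    as_file = await file_extract(url, enable_reader=False)  # always treated as a downloadable file
    return as_page, as_file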
meutils/llm/check_utils.py CHANGED
@@ -7,6 +7,7 @@
 # @WeChat : meutils
 # @Software : PyCharm
 # @Description :
+import os
 
 from meutils.pipe import *
 from meutils.decorators.retry import retrying

@@ -156,7 +157,7 @@ async def check_token_for_ppinfra(api_key, threshold: float = 1):
     try:
         client = AsyncOpenAI(base_url="https://api.ppinfra.com/v3/user", api_key=api_key)
         data = await client.get("", cast_to=object)
-
+        logger.debug(data)  # credit_balance
         return data["credit_balance"] > threshold
     except TimeoutException as e:
         raise

@@ -166,8 +167,30 @@ async def check_token_for_ppinfra(api_key, threshold: float = 1):
         return False
 
 
+@retrying()
+async def check_token_for_sophnet(api_key, threshold: float = 1):
+    if not isinstance(api_key, str):
+        return await check_tokens(api_key, check_token_for_sophnet)
+
+    try:
+        client = AsyncOpenAI(base_url=os.getenv("SOPHNET_BASE_URL"), api_key=api_key)
+        data = await client.chat.completions.create(
+            model="DeepSeek-v3",
+            messages=[{"role": "user", "content": "hi"}],
+            stream=True,
+            max_tokens=1
+        )
+        return True
+    except TimeoutException as e:
+        raise
+
+    except Exception as e:
+        logger.error(f"Error: {e}\n{api_key}")
+        return False
+
+
 if __name__ == '__main__':
-    from meutils.config_utils.lark_utils import get_next_token_for_polling
+    from meutils.config_utils.lark_utils import get_next_token_for_polling, get_series
 
     check_valid_token = partial(check_token_for_siliconflow, threshold=-1)
 

@@ -189,4 +212,13 @@ if __name__ == '__main__':
 
     # arun(check_token_for_moonshot("sk-Qnr87vtf2Q6MEfc2mVNkVZ4qaoZg3smH9527I25QgcFe7HrT"))
 
-    arun(check_token_for_ppinfra("sk_DkIaRrPq7sTiRPevhjV9WFZN3FvLk6WhCXOj1JAwu6c"))
+    # arun(check_token_for_ppinfra("sk_DkIaRrPq7sTiRPevhjV9WFZN3FvLk6WhCXOj1JAwu6c"))
+
+    # from meutils.config_utils.lark_utils import get_next_token_for_polling, get_series
+    #
+    # arun(get_series("https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=PP1PGr"))
+
+    # arun(check_token_for_sophnet(["gzHpp_zRtGaw1IjpepCiWu_ySyke3Hu5wR5VNNYMLyXwAESqZoZWUZ4T3tiWUxtac6n9Hk-kRRo4_jPQmndo-g"]))
+
+
+    arun(check_token_for_ppinfra("sk_F0kgPyCMTzmOH_-VCEJucOK8HIrbnLGYm_IWxBToHZQ"))
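The new check_token_for_sophnet validates a key by issuing a one-token streamed chat completion against SOPHNET_BASE_URL and mapping any non-timeout failure to False; a non-string argument is fanned out through check_tokens. A usage sketch with placeholder keys:

from meutils.llm.check_utils import check_token_for_sophnet


async def main():
    ok = await check_token_for_sophnet("sk-xxx")                 # True if the 1-token probe succeeds
    valid = await check_token_for_sophnet(["sk-aaa", "sk-bbb"])  # list input is delegated to check_tokens
    print(ok, valid)

# run with asyncio.run(main()); requires SOPHNET_BASE_URL and real keys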
@@ -33,7 +33,7 @@ class Completions(object):
         file_url, *texts = request.last_user_content.split(maxsplit=1) + ["总结下"]
         text = texts[0]
 
-        file_content = await file_extract(file_url)
+        file_content = await file_extract(file_url, enable_reader=False)
 
         request.messages = [
             {

@@ -73,7 +73,7 @@ class Completions(object):
 
         text, file_url = texts[-1], file_urls[-1]
         if file_url in image_urls:
-            file_content = await file_extract(file_url)
+            file_content = await file_extract(file_url, enable_reader=False)
 
             message["content"] = f"""{json.dumps(file_content, ensure_ascii=False)}\n\n{text}"""
 

@@ -104,7 +104,7 @@ if __name__ == '__main__':
     request = CompletionRequest(
         # model="qwen-turbo-2024-11-01",
         # model="claude-3-5-sonnet-20241022",
-        model="
+        model="deepseek-chat",
 
         messages=[
             # {

@@ -133,15 +133,15 @@ if __name__ == '__main__':
             {
                 'role': 'user',
                 # "content": '你好',
-                "content": [
-
-
-
-
-
-
-
-                ],
+                # "content": [
+                #     {"type": "text", "text": "https://oss.ffire.cc/files/kling_watermark.png 描述第一张图片"},
+                #
+                #     # {"type": "text", "text": "描述第一张图片"},
+                #     #
+                #     # {"type": "image_url", "image_url": "https://oss.ffire.cc/files/kling_watermark.png"},
+                #     # # {"type": "image_url", "image_url": "https://oss.ffire.cc/files/nsfw.jpg"}
+                #     #
+                # ],
 
                 # 'content': {
                 #     "type": "file_url",

@@ -153,7 +153,7 @@ if __name__ == '__main__':
                 # "content": "https://oss.ffire.cc/files/百炼系列手机产品介绍.docx 总结下"
                 # "content": "https://mj101-1317487292.cos.ap-shanghai.myqcloud.com/ai/test.pdf\n\n总结下"
 
-
+                "content": "http://admin.ilovechatgpt.top/file/4docx_86529298.docx 我无法确定你是否准确识别word里面的论文?",
                 # "content": "http://admin.ilovechatgpt.top/file/xinjianMicrosoftWordwendangdoc-9052714901036-bGSJLeKbqQdnIZZn.doc 111111234234",
 
             },

@@ -276,4 +276,22 @@ if __name__ == '__main__':
     # {},
     # {'role': 'user', 'content': '总结一下'}]
 
+    request = {
+        "model": "gemini-all",
+        "messages": [
+            {
+                "role": "system",
+                "content": "\\n Current date: 2025-05-21"
+            },
+            {
+                "role": "user",
+                "content": "http://admin.ilovechatgpt.top/file/ceshiwendangdocx_31118702.docx 你好"
+            }
+        ],
+        "stream": True,
+        "top_p": 0.7,
+        "temperature": 0.8,
+        "n": 1
+    }
+    request = CompletionRequest(**request)
     arun(c.create(request))
@@ -34,9 +34,6 @@ base_url = "https://chat.qwen.ai/api"
 from fake_useragent import UserAgent
 
 ua = UserAgent()
-cookie = storage_to_cookie(
-    "cookies.json"
-)
 
 thinking_budget_mapping = {
     "low": 1000,

@@ -44,15 +41,19 @@ thinking_budget_mapping = {
     "high": 24000
 }
 
+COOKIE = """
+cna=KP9DIEqqyjUCATrw/+LjJV8F; _bl_uid=LXmp28z7dwezpmyejeXL9wh6U1Rb; cnaui=310cbdaf-3754-461c-a3ff-9ec8005329c9; aui=310cbdaf-3754-461c-a3ff-9ec8005329c9; x-ap=ap-southeast-1; sca=43897cb0; acw_tc=0a03e53417483123807755658e597c5e3685457054f2ca60a0a8d87b657874; _gcl_au=1.1.106229673.1748312382; xlly_s=1; token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjMxMGNiZGFmLTM3NTQtNDYxYy1hM2ZmLTllYzgwMDUzMjljOSIsImV4cCI6MTc1MDkwNDU2MH0.nV7I1sp6rAE0QnqXYKNm4I0vLZmA-TdOKWEHg_37_tw; SERVERID=1e5b6792fa61468bae321990103ad502|1748312579|1748312380; atpsida=0450727e7c9d8a7299a0b2bd_1748312579_5; ssxmod_itna=iqGxRDuQqWqxgDUxeKYI5q=xBDeMDWK07DzxC5750CDmxjKidKDUGQN0bFP=jhWOGxDkYCA+UQSD0HPKmDA5DnGx7YDtr4FN4SeKhDwIa78YRwwHQiGoh=LTX90w=3qflZqcj1T3xoD==hYDmeDUxD1GDDkS0PDxOPD5xDTDWeDGDD3t4DCCfYYD0RpiboIVxRhTABpDYPYPR4YDY56DAqEz4SpVaxXTDDzw4iaPf4vwDi8D7FRG0RpD7P6fmQDXaYDEAWmFk=Dv6Lh+QwI1/oYOyyDc=DoCO0Km0DTVj2qPGGiU5eiBdnNC4490i+yte+in2MWYHDDW=4=5rzqDxKGe1qC+LimweRk5yxmLhdYY4KGYqOqIheUk5ZB5x2QgohQBxN7spmxFezNiDD; ssxmod_itna2=iqGxRDuQqWqxgDUxeKYI5q=xBDeMDWK07DzxC5750CDmxjKidKDUGQN0bFP=jhWOGxDkYCA+UQmDDpU3qY6obeDLWr7pfFyiDDsa7QaIhEVl4FKFerBUoQiRQlSzS+caiWpDOqz2HrsO6xFOwpKsSOYH0Q0WwhDs0Ye1mah+b99w34Im3AYwt4oztFoQ7xBhThYqatKQWcgkRDBDCOiLViK62z51rnpzpbHH7pFpS=Y4zfHiWfozYCf+9FcGmRMsMEYFGRP+TgG9EbEi3Khm0lQmt2AL=quK6RomKnFmmpjzYzxHQ/QEK0AAa3qGPOl3crGu7DDpQnxjfEEgWD/oEHaE4l6jOpKp6DI6P=vQa39gN6h5i3R5eInP2Gob9pY7DKDr3lYiTZrC3R2Yiz/rsFIG1j5n=2=DD3=obLGPQsWnOZWSinlZ=KjKGpIwRPKn3jpohCU+2PciIEehVxTSnioeIx6xdp91zK29tHtN2Zcgi0clGNaY4jIenYlnw+/Qlapjg6Qho=3E/2v3mIjaYygren01ebwPI4EKgDY+4NyiTXYmU+o/lGSogI=GT/38rPOnA5mjuDg2Ub=+HtTjKnpYoEaTvwj3K0GI7FKARVAv=Wojxe0nHRBwhrQIa1vMpv5pfQ8LGCXGp=lZ3Q8v+6=lSfexroPjNP9MvyNVAXQhnKvyAwT/KEsgh/eOdqx0YiHaP1kwxzsu54i4eGiQDshEOQoiRlPBqiDiSRDQay2k1x4xWDhpBTjqZ0Neer0qlDK0YbDpBYKxSGDD; tfstk=gy8S5jal7pL47z_LOgc4laG38cQQOjuaeW1psBUz9aQRdW9e6MCzzbxBcpJ5eU7-ZoUBQpYrZMdRRipBCgUKqMCvlpJBzkd82-QvQ1KUpv4LH-C1yMxPLTWCRBvsgAuZ7QAl-akZQVyg4ccA76BdpJPAkw5IYeMoXQAl-SDk9fRkZW1NLUsRJpQAD1CRJkB8yjsAs1bLyMU8D-BcHwCdvTQAk61dpkIJpIhfttBdJgpdkj1n_yV1O06kNbzD-uH4ifA5hyUplsHhpQig5_8lNgW9wUwzUXf5VOdRhAjyDMIBFgTqPRfWDC9PNp0graIB2hQJRAg5kCx2es9KCk6vfUYCbUM_jTLlKd_JcxU5JaK95wQIf7f2PILGAUD7kT9DMh7kWx4BBIRwriYIC-BH41bANnGLDTId4azNCak3ASsgRs6ZGjZ3xSw1l2pVr0vc2sf50jGbeHjRis1mGjZ3xgCcN_GjG8Kh.; isg=BOrqXB6_dpCyTPX0tTuBOG9yO1aMW261hQXS_3ShLD3Op4xhWOtyxWGRN9O7V-ZN
+""".strip()
+
 
 @retrying()
-async def to_file(file, api_key):
+async def to_file(file, api_key, cookie: Optional[str] = None):
     qwen_client = AsyncOpenAI(
         base_url="https://all.chatfire.cn/qwen/v1",
         api_key=api_key,
         default_headers={
             'User-Agent': ua.random,
-            'Cookie': cookie
+            'Cookie': cookie or COOKIE
         }
     )
     filename = Path(file).name if isinstance(file, str) else 'untitled'

@@ -64,7 +65,8 @@ async def to_file(file, api_key):
     return file_object
 
 
-async def create(request: CompletionRequest, token: Optional[str] = None
+async def create(request: CompletionRequest, token: Optional[str] = None, cookie: Optional[str] = None):
+    cookie = cookie or COOKIE
 
     if request.temperature > 1:
         request.temperature = 1

@@ -79,14 +81,21 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
         default_headers={
             'User-Agent': ua.random,
             'Cookie': cookie,
-            "bx-v": "2.5.28",
-            "bx-umidtoken": "T2gAeo1Mqj6q05L65Ro3Hjf9KHuOsB63ttVeMP_-13M2-R82AvHnHe-o9nAbz7J8Si4=",
-            "x-request-id": "08cb2785-53d2-4c6d-a24c-069cf0303d1a"
         }
     )
     # qwen结构
     model = request.model.lower()
-    if any(i in model for i in ("
+    if any(i in model for i in ("research",)):  # 遇到错误 任意切换
+        request.model = np.random.choice({""})
+        request.messages[-1]['chat_type'] = "deep_research"
+
+        # request.messages["extra"] = {
+        #     "meta": {
+        #         "subChatType": "deep_thinking"
+        #     }
+        # }
+
+    elif any(i in model for i in ("search",)):
         request.model = "qwen-max-latest"
         request.messages[-1]['chat_type'] = "search"
 

@@ -120,13 +129,13 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
     for i, content in enumerate(user_content):
         if content.get("type") == 'file_url':  # image_url file_url video_url
             url = content.get(content.get("type")).get("url")
-            file_object = await to_file(url, token)
+            file_object = await to_file(url, token, cookie)
 
             user_content[i] = {"type": "file", "file": file_object.id}
 
         elif content.get("type") == 'image_url':
             url = content.get(content.get("type")).get("url")
-            file_object = await to_file(url, token)
+            file_object = await to_file(url, token, cookie)
 
             user_content[i] = {"type": "image", "image": file_object.id}
 

@@ -135,7 +144,7 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
 
         user_content = [{"type": "text", "text": user_content}]
 
-        file_object = await to_file(file_url, token)
+        file_object = await to_file(file_url, token, cookie)
 
         content_type = file_object.meta.get("content_type", "")
         if content_type.startswith("image"):

@@ -262,11 +271,11 @@ if __name__ == '__main__':
             'role': 'user',
             # 'content': '今天南京天气',
             # 'content': "9.8 9.11哪个大",
-
+            'content': 'https://oss.ffire.cc/files/AIGC.pdf 总结下',
 
-            # "chat_type": "search",
+            # "chat_type": "search", deep_research
 
-            'content': user_content,
+            # 'content': user_content,
 
             # "content": [
             #     {

@@ -332,8 +341,6 @@ if __name__ == '__main__':
 
     token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjMxMGNiZGFmLTM3NTQtNDYxYy1hM2ZmLTllYzgwMDUzMjljOSIsImV4cCI6MTc0ODQ3OTE0M30.oAIE1K0XA0YYqlxB8Su-u0UJbY_BBZa4_tvZpFJKxGY"
 
-    # token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEwNzY1N2Y1LTgxN2ItNDg5Yi1iNjk4LWFhZjAyM2EwZTE4MyIsImV4cCI6MTc0NjI5NTAwNH0.D1uJN44NHiEt6URce4upbHvs7v73_Vd0V1s3T_JzclI"
     arun(create(request, token))
-    # token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEwNzY1N2Y1LTgxN2ItNDg5Yi1iNjk4LWFhZjAyM2EwZTE4MyIsImV4cCI6MTc0NjI5NTAwNH0.D1uJN44NHiEt6URce4upbHvs7v73_Vd0V1s3T_JzclI"
 
     # arun(to_file("https://oss.ffire.cc/files/kling_watermark.png", token))
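A simplified, hypothetical illustration of the model-name routing added to create above: a model containing "research" tags the last message with chat_type="deep_research", while "search" swaps the model for qwen-max-latest and tags the message with chat_type="search" (the np.random.choice fallback from the diff is omitted here):

def route_chat_type(model: str, messages: list) -> tuple:
    model = model.lower()
    if "research" in model:
        messages[-1]['chat_type'] = "deep_research"
    elif "search" in model:
        model = "qwen-max-latest"
        messages[-1]['chat_type'] = "search"
    return model, messages


print(route_chat_type("qwen-deep-research", [{"role": "user", "content": "hi"}]))
print(route_chat_type("qwen-max-search", [{"role": "user", "content": "hi"}]))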
@@ -72,7 +72,7 @@ class Completions(object):
             self.client = zhipuai_client
 
         elif "deepseek-r" in request.model:
-            request.separate_reasoning = True
+            request.separate_reasoning = True  # pp
         ###########################################################################
 
         data = to_openai_params(request)

@@ -82,7 +82,7 @@ class Completions(object):
             data.pop("frequency_penalty", None)
             data.pop("extra_body", None)
 
-            if not request.reasoning_effort:  # 默认关闭思考
+            if '2.5' in request.model and not request.reasoning_effort:  # 默认关闭思考
                 data['reasoning_effort'] = "none"
 
             if "thinking" in request.model:
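The reasoning_effort gate above is now conditional on the model name containing '2.5' (presumably the gemini-2.5 family, given the "thinking" branch that follows); other models no longer have reasoning disabled by default. A minimal illustration of the gating:

def default_reasoning_effort(model: str, reasoning_effort=None) -> dict:
    data = {}
    if '2.5' in model and not reasoning_effort:  # disable thinking by default, only for 2.5-series models
        data['reasoning_effort'] = "none"
    return data


print(default_reasoning_effort("gemini-2.5-flash"))  # {'reasoning_effort': 'none'}
print(default_reasoning_effort("gemini-2.0-flash"))  # {}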
meutils/office_automation/pdf.py CHANGED
@@ -56,10 +56,46 @@ def doc2text(filename):
     pass
 
 
+def extract_images_from_pdf(file, output: Optional[str] = None):
+    import fitz
+
+    # 打开PDF文件
+
+    pdf_document = fitz.open(file)
+
+    # 遍历每一页
+
+    for page_number in range(pdf_document.page_count):
+
+        page = pdf_document.load_page(page_number)
+
+        image_list = page.get_images(full=True)
+
+        # 遍历每个图像
+
+        for image_index, img in enumerate(image_list):
+            xref = img[0]
+
+            base_image = pdf_document.extract_image(xref)
+
+            image_bytes = base_image["image"]
+
+            image_ext = base_image["ext"]
+
+            image_filename = f"{output or ''}/image{page_number + 1}_{image_index + 1}.{image_ext}"
+            Path(image_filename).parent.mkdir(parents=True, exist_ok=True)
+
+            # 将图像写入文件
+
+            with open(image_filename, "wb") as image_file:
+                image_file.write(image_bytes)
+
+
 if __name__ == '__main__':
     with timer():
-
+        # r = extract_text('x.pdf')
 
+        r = extract_images_from_pdf('《锋利的jQuery》(高清扫描版-有书签)_副本_加水印.pdf', 'images')
 
     # import tiktoken
-    # print(tiktoken.encoding_for_model('gpt-3.5-turbo'))
+    # print(tiktoken.encoding_for_model('gpt-3.5-turbo'))
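Usage sketch for the new extract_images_from_pdf helper: it depends on PyMuPDF (imported as fitz) and writes every embedded image to the output directory as image<page>_<index>.<ext>, creating the directory if needed. The file names below are placeholders:

from meutils.office_automation.pdf import extract_images_from_pdf

# requires `pip install PyMuPDF`
extract_images_from_pdf("scanned_book.pdf", output="images")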
meutils/pipe.py CHANGED

meutils/schemas/dreamfaceapp_types.py ADDED
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : dreamfaceapp_types
+# @Time : 2025/5/23 13:42
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+
+from meutils.pipe import *
+
meutils/schemas/metaso_types.py CHANGED
@@ -15,15 +15,20 @@ FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?shee
 
 
 class MetasoRequest(BaseModel):
-    model: Optional[Literal["ds-r1",]] = None
+    model: Optional[Literal["ds-r1", "fast_thinking"]] = None
 
-    """search-mini search search-pro
+    """search-mini search search-pro
+
+    model-mode
+
+    """
     mode: Union[str, Literal["concise", "detail", "research", "strong-research"]] = "detail"  # concise detail research
 
     question: str = "Chatfire"
 
-    """全网 文库 学术 图片 播客"""
+    """全网 文库 学术 图片 播客 视频"""
     scholarSearchDomain: str = "all"
+    engineType: Optional[Literal["pdf", "scholar", "image", "podcast", "video"]] = None
 
     url: str = "https://metaso.cn/"
     lang: str = "zh"

@@ -35,6 +40,7 @@ class MetasoRequest(BaseModel):
     newEngine: str = 'true'
     enableImage: str = 'true'
 
+    #
 
     # question: hi
     # mode: detail