MeUtils 2025.3.23.13.35.22__py3-none-any.whl → 2025.3.31.19.40.39__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- {MeUtils-2025.3.23.13.35.22.dist-info → MeUtils-2025.3.31.19.40.39.dist-info}/METADATA +263 -263
- {MeUtils-2025.3.23.13.35.22.dist-info → MeUtils-2025.3.31.19.40.39.dist-info}/RECORD +34 -31
- examples/_openaisdk/openai_google.py +114 -0
- meutils/apis/common.py +12 -10
- meutils/apis/dashscope/__init__.py +11 -0
- meutils/apis/dashscope/audio.py +37 -0
- meutils/apis/images/recraft.py +11 -6
- meutils/apis/jina/common.py +3 -2
- meutils/apis/siliconflow/videos.py +40 -21
- meutils/apis/textin_apis/common.py +5 -3
- meutils/caches/common.py +1 -2
- meutils/data/VERSION +1 -1
- meutils/data/oneapi/NOTICE.html +4 -1
- meutils/io/files_utils.py +4 -2
- meutils/io/openai_files.py +4 -4
- meutils/io/x.py +12 -58
- meutils/llm/check_utils.py +2 -2
- meutils/llm/clients.py +1 -4
- meutils/llm/completions/chat_gemini.py +2 -2
- meutils/llm/completions/chat_plus.py +24 -11
- meutils/llm/completions/chat_spark.py +4 -2
- meutils/llm/completions/deep2x.py +16 -6
- meutils/llm/completions/qwenllm.py +37 -35
- meutils/llm/completions/yuanbao.py +12 -6
- meutils/notice/feishu.py +3 -0
- meutils/request_utils/crawler.py +3 -2
- meutils/schemas/image_types.py +1 -5
- meutils/schemas/oneapi/common.py +49 -8
- meutils/schemas/siliconflow_types.py +24 -2
- meutils/serving/fastapi/exceptions/http_error.py +29 -9
- {MeUtils-2025.3.23.13.35.22.dist-info → MeUtils-2025.3.31.19.40.39.dist-info}/LICENSE +0 -0
- {MeUtils-2025.3.23.13.35.22.dist-info → MeUtils-2025.3.31.19.40.39.dist-info}/WHEEL +0 -0
- {MeUtils-2025.3.23.13.35.22.dist-info → MeUtils-2025.3.31.19.40.39.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.3.23.13.35.22.dist-info → MeUtils-2025.3.31.19.40.39.dist-info}/top_level.txt +0 -0
meutils/data/oneapi/NOTICE.html
CHANGED
@@ -7,6 +7,9 @@
 </head>
 <body>
 <!-- <h1>Embedded Webpage using iframe</h1>-->
-<iframe src="https://api-notice.chatfire.cn" width="100%" height="600px" frameborder="0"></iframe>
+<!-- <iframe src="https://api-notice.chatfire.cn" width="100%" height="600px" frameborder="0"></iframe>-->
+
+<iframe src="https://api.chatfire.cn/docs/5141179m0" width="100%" height="600px" frameborder="0"></iframe>
+
 </body>
 </html>
meutils/io/files_utils.py
CHANGED
@@ -256,8 +256,10 @@ if __name__ == '__main__':
     # content_type=None))
 
     # arun(to_url_fal(url))
-
+    print(guess_mime_type("base64xxxxxxxxxxxxxxxxxx.mp4"))
 
     # arun(to_url(Path('img_1.png').read_bytes()))
 
-    arun(markdown_base64_to_url(""))
+    # arun(markdown_base64_to_url(""))
+
+
meutils/io/openai_files.py
CHANGED
@@ -7,10 +7,6 @@
 # @WeChat   : meutils
 # @Software : PyCharm
 # @Description : https://bigmodel.cn/dev/activities/freebie/fileextracion
-import asyncio
-
-import httpx
-import shortuuid
 
 from meutils.pipe import *
 from meutils.io.files_utils import to_bytes, guess_mime_type
@@ -154,7 +150,11 @@ if __name__ == '__main__':
 
     with timer():
         file = "https://top.baidu.com/board?tab=realtime"
+        file = "http://admin.ilovechatgpt.top/file/yuzhicaizaibutongnianlingquntixiaofeixingweijishichangdiaochawenjuanweishanjianbanpptx_59787479.pptx"
         file = "https://oss.ffire.cc/files/百炼系列手机产品介绍.docx"
+        # file = "https://app.yinxiang.com/fx/8b8bba1e-b254-40ff-81e1-fa3427429efe"
+
+        print(guess_mime_type(file))
 
         arun(file_extract(file))
 
meutils/io/x.py
CHANGED
@@ -11,65 +11,19 @@
 from meutils.pipe import *
 # from minio import Minio
 from meutils.oss.minio_oss import Minio
+import os
+from openai import OpenAI
 
-
-
-
-
-#     secret_key="your-secret-key",
-#     secure=True  # set to False if not using HTTPS
-# )
-minio_client = Minio()
-
-
-async def download_and_upload(video_url, bucket_name, object_name):
-    buffer_size = 5 * 1024 * 1024  # 5MB buffer to meet MinIO's minimum part size
-
-    async with httpx.AsyncClient() as client:
-        try:
-            async with client.stream("GET", video_url) as response:
-                response.raise_for_status()
-
-                total_size = int(response.headers.get('content-length', 0))
-                buffer = io.BytesIO()
-                uploaded = 0
-
-                async for chunk in response.aiter_bytes(chunk_size=buffer_size):
-                    buffer.write(chunk)
-                    buffer_size = buffer.tell()
-                    buffer.seek(0)
-
-                    if buffer_size >= 5 * 1024 * 1024 or response.is_closed:
-                        try:
-                            minio_client.put_object(
-                                bucket_name,
-                                object_name,
-                                buffer,
-                                length=buffer_size,
-                                part_size=5 * 1024 * 1024,
-                                content_type='video/mp4'
-                            )
-                            uploaded += buffer_size
-                            print(f"Uploaded {uploaded}/{total_size} bytes")
-
-                        except Exception as upload_error:
-                            print(f"Unexpected upload error: {upload_error}")
-                            raise
-
-                        buffer = io.BytesIO()  # Reset buffer after upload
-
-                print("Upload completed")
-        except httpx.HTTPStatusError as http_error:
-            print(f"HTTP error occurred: {http_error}")
-        except Exception as e:
-            print(f"An unexpected error occurred: {e}")
+client = OpenAI(
+    api_key=os.getenv("DASHSCOPE_API_KEY"),
+    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
+)
 
+file = client.files.create(
+    file=open("/Users/betterme/PycharmProjects/AI/docker-compose.yml", "rb"),
+    purpose='file-extract'
+)
+client.files.content(file.id)
 
 if __name__ == '__main__':
-
-    url = "https://s22-def.ap4r.com/bs2/upload-ylab-stunt-sgp/se/ai_portal_sgp_queue_m2v_txt2video_camera/b7eded0c-452c-4282-ad0a-02d96bd97f3e/0.mp4"
-    bucket_name = "videos"
-    object_name = "video11.mp4"
-
-    with timer():
-        arun(download_and_upload(url, bucket_name, object_name))
+    print(file)
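
The rewritten meutils/io/x.py drops the streaming MinIO upload in favor of DashScope's OpenAI-compatible files endpoint. A minimal sketch of that flow, assuming a `DASHSCOPE_API_KEY` environment variable and a local file of your own (the file path and the `.text` read-back are illustrative, not taken from the diff):

```python
import os

from openai import OpenAI  # openai>=1.x style client

# OpenAI-compatible DashScope endpoint, as used in the new meutils/io/x.py
client = OpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
)

# Upload a local file for extraction, then read the parsed content back.
uploaded = client.files.create(file=open("example.docx", "rb"), purpose="file-extract")
print(uploaded.id)
print(client.files.content(uploaded.id).text)
```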
meutils/llm/check_utils.py
CHANGED
@@ -146,7 +146,7 @@ if __name__ == '__main__':
 
     # arun(check_token_for_jina(["jina_c8da77fed9704d558c8def39837960edplTLkNYrsPTJHBF1HcYg_RkRVh0X"]*10))
 
-
+    arun(check_token_for_siliconflow("sk-vpeietyomqjvizlzfuztzthggcqvutowgbmhjggsmwuhsomg"))
     "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=79272d"
 
-    arun(check_token_for_moonshot("sk-Qnr87vtf2Q6MEfc2mVNkVZ4qaoZg3smH9527I25QgcFe7HrT"))
+    # arun(check_token_for_moonshot("sk-Qnr87vtf2Q6MEfc2mVNkVZ4qaoZg3smH9527I25QgcFe7HrT"))
meutils/llm/clients.py
CHANGED
@@ -36,10 +36,7 @@ zhipuai_sdk_client = ZhipuAI(
 
 
 
-
-    base_url="https://all.chatfire.cn/qwen/v1",
-    api_key="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjMxMGNiZGFmLTM3NTQtNDYxYy1hM2ZmLTllYzgwMDUzMjljOSIsImV4cCI6MTc0MzAzNTk4OH0.GVAoSFtK94a9CgxqHCEnxzAnRi7gafIvYyH9mIJUh4s"
-)
+
 
 # zhipuai_client = OpenAI(
 #     api_key=os.getenv("ZHIPUAI_API_KEY"),
meutils/llm/completions/chat_gemini.py
CHANGED
@@ -69,7 +69,7 @@ class Completions(object):
             message["content"] += [{"type": "text", "text": request.last_user_content}]
 
         # 调用模型
-        logger.debug(request.model_dump_json(indent=4))
+        # logger.debug(request.model_dump_json(indent=4))
 
         data = to_openai_params(request)
         response = await self.client.chat.completions.create(**data)
@@ -97,7 +97,7 @@ class Completions(object):
                     yield chunk
                 else:
                     yield chunk
-                    logger.debug(str(chunk))
+                    # logger.debug(str(chunk))
 
 
 if __name__ == '__main__':
meutils/llm/completions/chat_plus.py
CHANGED
@@ -46,6 +46,8 @@ class Completions(object):
         :param request:
         :return:
         """
+        request.model = request.model.removesuffix("-all").removesuffix("-plus")  ### 避免死循环
+
         if request.last_user_content.startswith(("画",)):  # 画画
             return await self.create_images(request)  # str
 
@@ -58,7 +60,9 @@ class Completions(object):
             return metaso.create(request)
 
         elif request.last_user_content.startswith(("http",)):
-
+
+            file_url, *texts = request.last_user_content.split(maxsplit=1) + ["总结下"]  # application/octet-stream
+            text = texts[0]
 
             if guess_mime_type(file_url).startswith("image"):  # 识图
                 request.model = "glm-4v-flash"
@@ -81,7 +85,7 @@ class Completions(object):
                     }
                 ]
                 data = to_openai_params(request)
-                return await
+                return await zhipuai_client.chat.completions.create(**data)
 
             elif guess_mime_type(file_url).startswith(("video", "audio")):  # 音频 视频
                 request.model = "gemini"  # 果果
@@ -106,26 +110,31 @@ class Completions(object):
                 return await self.client.chat.completions.create(**data)
 
             else:
-
+                # logger.debug(f"file_url: {file_url}")
                 file_content = await file_extract(file_url)  # 文件问答-单轮
 
+                logger.debug(file_content)
+
                 request.messages = [
                     {
                         'role': 'user',
                         'content': f"""{json.dumps(file_content, ensure_ascii=False)}\n\n{text}"""
                     }
                 ]
+                # logger.debug(request)
                 data = to_openai_params(request)
                 return await self.client.chat.completions.create(**data)
 
         if image_urls := request.last_urls.get("image_url"):  # 识图
             request.model = "glm-4v-flash"
             data = to_openai_params(request)
-            return await
+            return await zhipuai_client.chat.completions.create(**data)
 
         elif file_urls := request.last_urls.get("file_url"):
             return await self.chat_files(request)
 
+        # todo 标准格式的audio_url video_url
+
         data = to_openai_params(request)
         return await self.client.chat.completions.create(**data)
 
@@ -189,15 +198,19 @@ if __name__ == '__main__':
         'content': [
             {
                 "type": "text",
-                "text": "总结下"
+                # "text": "总结下"
+                "text": "https://app.yinxiang.com/fx/8b8bba1e-b254-40ff-81e1-fa3427429efe 总结下",
+
+                # "text": "https://app.yinxiang.com/fx/8b8bba1e-b254-40ff-81e1-fa3427429efe"
+
             },
 
-            {
-
-
-
-
-            }
+            # {
+            #     "type": "file_url",
+            #     "file_url": {
+            #         "url": "https://oss.ffire.cc/files/招标文件备案表(第二次).pdf"
+            #     }
+            # }
         ]
     }])
 
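
Several handlers above now split a leading URL from the trailing instruction with `split(maxsplit=1) + ["总结下"]`, which also supplies a default "summarize" prompt when the message is only a URL. A standalone illustration of that idiom (the helper name is hypothetical, not part of the package):

```python
def split_url_and_prompt(content: str, default: str = "总结下"):
    """Split 'URL [prompt]' into (url, prompt), falling back to a default prompt."""
    file_url, *texts = content.split(maxsplit=1) + [default]
    return file_url, texts[0]


print(split_url_and_prompt("https://oss.ffire.cc/files/AIGC.pdf 提取关键结论"))
# ('https://oss.ffire.cc/files/AIGC.pdf', '提取关键结论')
print(split_url_and_prompt("https://oss.ffire.cc/files/AIGC.pdf"))
# ('https://oss.ffire.cc/files/AIGC.pdf', '总结下')
```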
meutils/llm/completions/chat_spark.py
CHANGED
@@ -26,13 +26,15 @@ class Completions(object):
 
     async def create(self, request: CompletionRequest):
         if request.last_user_content.startswith("http"):  # 文件问答-单轮
-            file_url,
+            file_url, *texts = request.last_user_content.split(maxsplit=1) + ["总结下"]
+            text = texts[0]
+
             file_content = await file_extract(file_url)
 
             request.messages = [
                 {
                     'role': 'user',
-                    'content': f"""{json.dumps(file_content, ensure_ascii=False)}\n\n{
+                    'content': f"""{json.dumps(file_content, ensure_ascii=False)}\n\n{text}"""
                 }
             ]
         elif image_urls := request.last_urls.get("image_url"):  # 长度为1
meutils/llm/completions/deep2x.py
CHANGED
@@ -20,7 +20,6 @@ from meutils.io.files_utils import to_bytes
 from meutils.io.openai_files import file_extract, guess_mime_type
 from meutils.str_utils.json_utils import json_path
 from meutils.apis.search import metaso
-# from meutils.apis.chatglm import glm_video_api
 
 from meutils.llm.clients import chatfire_client, zhipuai_client, AsyncOpenAI
 from meutils.llm.openai_utils import to_openai_params
@@ -28,7 +27,12 @@ from meutils.llm.openai_utils import to_openai_params
 from meutils.schemas.openai_types import ChatCompletionRequest
 from meutils.schemas.openai_types import chat_completion, chat_completion_chunk, CompletionRequest, ImageRequest
 
-
+"""
+    delta = chunk.choices[0].delta
+            │    └ []
+            └ ChatCompletionChunk(id='02174299556532927e9140493fc1cd076b4fe1b883ff101a83257', choices=[], created=1742995565, model='deep-d...
+
+"""
 class Completions(object):
 
     def __init__(self, api_key: Optional[str] = None):
@@ -47,10 +51,14 @@ class Completions(object):
             reasoning_content = ""
             completions = await chatfire_client.chat.completions.create(**data)
             async for chunk in completions:
-
-
-
-
+                if chunk.choices:  # 自定义没问题,todo:openai通道报错
+                    # logger.debug(chunk)
+                    yield chunk
+                    delta = chunk.choices[0].delta
+                    if hasattr(delta, "reasoning_content"):
+                        reasoning_content += delta.reasoning_content
+                else:
+                    logger.error(chunk)
 
             request.messages = [
                 {
@@ -58,8 +66,10 @@ class Completions(object):
                     'content': f"""<think>\n\n{reasoning_content}\n\n</think>\n\n{request.last_user_content}"""
                 }
             ]
+            logger.debug(request)
             data = to_openai_params(request)
             async for chunk in await self.client.chat.completions.create(**data):
+                # logger.debug(chunk)
                 yield chunk
         else:
             reasoning_content = ""
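
The deep2x change guards on `chunk.choices` before reading `choices[0].delta`, since some OpenAI-compatible channels emit chunks whose `choices` list is empty (the failure captured in the docstring above). A minimal sketch of that guard with stand-in chunk objects (the dataclasses are illustrative, not the package's types):

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class Delta:
    content: str = ""
    reasoning_content: str = ""


@dataclass
class Choice:
    delta: Delta = field(default_factory=Delta)


@dataclass
class Chunk:
    choices: List[Choice] = field(default_factory=list)


def collect_reasoning(chunks) -> str:
    """Accumulate reasoning_content, skipping chunks whose choices list is empty."""
    reasoning = ""
    for chunk in chunks:
        if not chunk.choices:  # empty choices: nothing to index into
            continue
        delta = chunk.choices[0].delta
        if getattr(delta, "reasoning_content", ""):
            reasoning += delta.reasoning_content
    return reasoning


print(collect_reasoning([Chunk(), Chunk([Choice(Delta(reasoning_content="思考中…"))])]))
```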
meutils/llm/completions/qwenllm.py
CHANGED
@@ -21,7 +21,6 @@ from meutils.decorators.retry import retrying
 from meutils.io.files_utils import to_bytes, guess_mime_type
 from meutils.caches import rcache
 
-from meutils.llm.clients import qwen_client
 from meutils.llm.openai_utils import to_openai_params
 
 from meutils.config_utils.lark_utils import get_next_token_for_polling
@@ -38,8 +37,15 @@ cookie = "_gcl_au=1.1.1784604298.1740443944;xlly_s=1;isg=BNHREY3fUiqB6r5JMj56XbB
 
 
 @retrying()
-
-
+async def to_file(file, api_key):
+    qwen_client = AsyncOpenAI(
+        base_url="https://all.chatfire.cn/qwen/v1",
+        api_key=api_key,
+        default_headers={
+            'User-Agent': ua.random,
+            'Cookie': cookie
+        }
+    )
     filename = Path(file).name if isinstance(file, str) else 'untitled'
     mime_type = guess_mime_type(file)
     file_bytes: bytes = await to_bytes(file)
@@ -71,7 +77,7 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
         request.model = "qwen-max-latest"
         request.messages[-1]['chat_type'] = "search"
 
-    if any(i in model for i in ("qwq", "think")):  # qwq-max-search
+    if any(i in model for i in ("qwq", "qvq", "think", "thinking")):  # qwq-max-search
         request.model = "qwen-max-latest"
         request.messages[-1]['feature_config'] = {"thinking_enabled": True}
 
@@ -87,13 +93,13 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
         for i, content in enumerate(user_content):
             if content.get("type") == 'file_url':  # image_url file_url video_url
                 url = content.get(content.get("type")).get("url")
-                file_object = await to_file(url)
+                file_object = await to_file(url, client)
 
                 user_content[i] = {"type": "file", "file": file_object.id}
 
             elif content.get("type") == 'image_url':
                 url = content.get(content.get("type")).get("url")
-                file_object = await to_file(url)
+                file_object = await to_file(url, client)
 
                 user_content[i] = {"type": "image", "image": file_object.id}
 
@@ -102,7 +108,7 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
 
         user_content = [{"type": "text", "text": user_content}]
 
-        file_object = await to_file(file_url)
+        file_object = await to_file(file_url, client)
 
         content_type = file_object.meta.get("content_type", "")
         if content_type.startswith("image"):
@@ -130,13 +136,6 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
     if not isinstance(response, str):
         yield response.choices[0].message.content  # isinstance(response, str)
 
-    # for i in range(3):
-    #     if not isinstance(response, str):  # 报错
-    #         yield response.choices[0].message.content
-    #         break
-    #     else:
-    #         logger.warning(f"重试 {i}\n{response}")
-
 
 if __name__ == '__main__':
     # [
@@ -150,36 +149,39 @@ if __name__ == '__main__':
     # "qwen2.5-32b-instruct"
     # ]
 
-    # user_content = [
-    #     {
-    #         "type": "text",
-    #         "text": "解读图片"
-    #     },
-    #     {
-    #         "type": "image_url",
-    #         "image_url": {
-    #             "url": "https://fyb-pc-static.cdn.bcebos.com/static/asset/homepage@2x_daaf4f0f6cf971ed6d9329b30afdf438.png"
-    #         }
-    #     }
-    # ]
-
     user_content = [
         {
            "type": "text",
-           "text": "
+           "text": "一句话总结"
         },
         {
-           "type": "
-           "
-           "url": "https://
+           "type": "image_url",
+           "image_url": {
+               "url": "https://fyb-pc-static.cdn.bcebos.com/static/asset/homepage@2x_daaf4f0f6cf971ed6d9329b30afdf438.png"
            }
        }
-
    ]
 
+    # user_content = [
+    #     {
+    #         "type": "text",
+    #         "text": "总结下"
+    #     },
+    #     {
+    #         "type": "file_url",
+    #         "file_url": {
+    #             "url": "https://oss.ffire.cc/files/AIGC.pdf"
+    #         }
+    #     }
+    #
+    # ]
+
     request = CompletionRequest(
         # model="qwen-turbo-2024-11-01",
-        model="qwen-max-latest",
+        # model="qwen-max-latest",
+        # model="qvq-max-2025-03-25",
+        model="qvq-72b-preview-0310",
+
         # model="qwen-max-latest-search",
         # model="qwq-max",
         # model="qwq-32b-preview",
@@ -195,12 +197,12 @@ if __name__ == '__main__':
         {
             'role': 'user',
             # 'content': '今天南京天气',
-            'content': "9.8 9.11哪个大",
+            # 'content': "9.8 9.11哪个大",
             # 'content': 'https://oss.ffire.cc/files/AIGC.pdf 总结下',
 
             # "chat_type": "search",
 
-
+            'content': user_content,
 
             # "content": [
             #     {
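
`to_file` now receives the credential explicitly and builds a short-lived AsyncOpenAI client against the Qwen-compatible gateway instead of importing a module-level `qwen_client`. A rough sketch of that per-call pattern under the same assumptions (gateway URL and cookie header come from the diff; the helper name, purpose string, and simplified User-Agent handling are placeholders):

```python
import asyncio
from pathlib import Path

from openai import AsyncOpenAI


async def upload_for_extraction(file_path: str, api_key: str, cookie: str = ""):
    """Create a client per call (as the new to_file does), then upload the file."""
    qwen_client = AsyncOpenAI(
        base_url="https://all.chatfire.cn/qwen/v1",  # gateway used in the diff
        api_key=api_key,
        default_headers={"Cookie": cookie},
    )
    return await qwen_client.files.create(
        file=(Path(file_path).name, Path(file_path).read_bytes()),
        purpose="file-extract",
    )


# asyncio.run(upload_for_extraction("example.pdf", api_key="sk-..."))
```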
meutils/llm/completions/yuanbao.py
CHANGED
@@ -7,9 +7,7 @@
 # @WeChat   : meutils
 # @Software : PyCharm
 # @Description :
-import asyncio
 
-import pandas as pd
 from aiostream import stream
 
 from meutils.pipe import *
@@ -52,6 +50,9 @@ class Completions(object):
 
         prompt = request and oneturn2multiturn(request.messages) or image_request.prompt
 
+        if isinstance(prompt, list):
+            prompt = prompt[-1].get("text", "")  # [{'type': 'text', 'text': 'hi'}]
+
         payload = {
             "model": "gpt_175B_0404",
             "chatModelId": request.model,
@@ -101,13 +102,14 @@ class Completions(object):
             }
         )
 
+        # logger.debug(bjson(payload))
         headers = {
            'cookie': token
        }
        async with httpx.AsyncClient(base_url=YUANBAO_BASE_URL, headers=headers, timeout=300) as client:
            # chatid = (await client.post(API_GENERATE_ID)).text
            chatid = uuid.uuid4()
-
+           # https://yuanbao.tencent.com/api/chat/90802631-22dc-4d5d-9d3f-f27f57d5fec8'
            async with client.stream(method="POST", url=f"{API_CHAT}/{chatid}", json=payload) as response:
                logger.debug(response.status_code)
                response.raise_for_status()
@@ -138,8 +140,10 @@ class Completions(object):
 
                        for i, ref in enumerate(df['title'], 1):
                            references.append(f"[^{i}]: {ref}\n")
+                   if sse.content:
+                       yield sse.content
 
-
+                   # logger.debug(sse.content)
                if references:
                    yield '\n\n'
                    for ref in references:
@@ -195,13 +199,15 @@ if __name__ == '__main__':
     # model = 'deep_seek_v3-search'
     # model = 'deep_seek-search'
     model = 'deep_seek'
-    model = 'hunyuan_t1'
+    # model = 'hunyuan_t1'
     model = 'hunyuan_t1-search'
+    # model = 'deep_seek-search'
 
     arun(Completions().create(
         CompletionRequest(
             model=model,
-            messages=[{'role': 'user', 'content': '南京天气如何'}]
+            messages=[{'role': 'user', 'content': '南京天气如何'}],
+            stream=True
         ),
         # image_request=request,
         # token=token
meutils/notice/feishu.py
CHANGED
@@ -106,6 +106,9 @@ def catch(
 
 send_message_for_images = partial(send_message, url=IMAGES)
 
+httpexception_feishu_url = "https://open.feishu.cn/open-apis/bot/v2/hook/d1c7b67d-b0f8-4067-a2f5-109f20eeb696"
+send_message_for_httpexception = partial(send_message, url=httpexception_feishu_url)
+
 if __name__ == '__main__':
     # send_message("xxx", title=None)
     send_message(None, title=None)
meutils/request_utils/crawler.py
CHANGED
@@ -56,10 +56,11 @@ if __name__ == '__main__':
     # print(Crawler(url).xpath('//*[@id="houselist"]/li/a//text()'))
 
     url = "https://chat.tune.app/?id=7f268d94-d2d4-4bd4-a732-f196aa20dceb"
+    url = "https://app.yinxiang.com/fx/8b8bba1e-b254-40ff-81e1-fa3427429efe"
 
-
+    print(Crawler(url).xpath('//script//text()'))
 
-    html_content = httpx.get(url).text
+    # html_content = httpx.get(url).text
 
 
     # # 正则表达式匹配以 "/_next/static/chunks/7116-" 开头的 JS 文件
meutils/schemas/image_types.py
CHANGED
@@ -343,11 +343,6 @@ class RecraftImageRequest(ImageRequest):
         if self.image_type in {"natural", }:
             self.image_type = "any"
 
-        if "halloween" in self.model:
-            self.user_controls = self.user_controls or {
-                "special_event": "halloween"
-            }
-
 
 class ImageProcessRequest(BaseModel):
     task: Optional[str] = None
@@ -508,3 +503,4 @@ if __name__ == '__main__':
     # print(ImagesResponse(data=[{'url': 1}]))
 
     print(RecraftImageRequest(prompt="").model_dump_json())
+    print(RecraftImageRequest(prompt=""))
|