MeUtils-2025.3.5.20.22.10-py3-none-any.whl → MeUtils-2025.3.6.18.55.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2025.3.5.20.22.10.dist-info → MeUtils-2025.3.6.18.55.2.dist-info}/METADATA +262 -262
- {MeUtils-2025.3.5.20.22.10.dist-info → MeUtils-2025.3.6.18.55.2.dist-info}/RECORD +20 -20
- examples/_openaisdk/_openai_chatfire.py +23 -8
- examples/_openaisdk/openai_chatfire.py +2 -4
- meutils/apis/hailuoai/yy.py +26 -24
- meutils/apis/jimeng/common.py +2 -2
- meutils/apis/jimeng/files.py +49 -7
- meutils/apis/jimeng/images.py +43 -23
- meutils/apis/search/metaso.py +49 -67
- meutils/caches/common.py +1 -1
- meutils/data/VERSION +1 -1
- meutils/llm/completions/qwenllm.py +6 -3
- meutils/llm/completions/yuanbao.py +16 -9
- meutils/notice/feishu.py +4 -0
- meutils/schemas/metaso_types.py +5 -4
- meutils/schemas/oneapi/common.py +16 -6
- {MeUtils-2025.3.5.20.22.10.dist-info → MeUtils-2025.3.6.18.55.2.dist-info}/LICENSE +0 -0
- {MeUtils-2025.3.5.20.22.10.dist-info → MeUtils-2025.3.6.18.55.2.dist-info}/WHEEL +0 -0
- {MeUtils-2025.3.5.20.22.10.dist-info → MeUtils-2025.3.6.18.55.2.dist-info}/entry_points.txt +0 -0
- {MeUtils-2025.3.5.20.22.10.dist-info → MeUtils-2025.3.6.18.55.2.dist-info}/top_level.txt +0 -0
meutils/apis/search/metaso.py
CHANGED

@@ -6,7 +6,7 @@
 # @Author       : betterme
 # @WeChat       : meutils
 # @Software     : PyCharm
-# @Description  :
+# @Description  : todo 重构

 from meutils.pipe import *
 from meutils.schemas.metaso_types import FEISHU_URL, BASE_URL, MetasoRequest, MetasoResponse
@@ -16,7 +16,7 @@ from meutils.apis.proxy.ips import FEISHU_URL_METASO, get_one_proxy, get_proxies
 from meutils.schemas.openai_types import ChatCompletionRequest
 from meutils.notice.feishu import send_message

-from
+from urllib.parse import quote_plus

 token = "wr8+pHu3KYryzz0O2MaBSNUZbVLjLUYC1FR4sKqSW0p19vmcZAoEmHC72zPh/fHtOhYhCcR5GKXrxQs9QjN6dulxfOKfQkLdVkLMahMclPPjNVCPE8bLQut3zBABECLaSqpI0fVWBrdbJptnhASrSw=="

@@ -26,10 +26,23 @@ MODELS = {
     "ai-search-pro": "research",

     "ai-search:scholar": "detail",
-    "ai-search-pro:scholar": "research"
+    "ai-search-pro:scholar": "research",
+
+    "deepseek-r1-metasearch": "strong-research",
+
+    "meta-research": "strong-research",
+    "meta-deepresearch": "strong-research",
+    "deepseek-r1-metaresearch": "strong-research",
 }

 ""
+# pattern = re.compile('\[\[(\d+)\]\]')
+pattern = re.compile(r'\[\[(\d+)\]\]')
+
+
+def replace_ref(match):
+    ref_num = match.group(1)
+    return f'[^{ref_num}]'  # [[1]] -> [^1]


 async def get_session_id(request: MetasoRequest, headers: Optional[dict] = None, proxy: Optional[str] = None):
@@ -51,8 +64,7 @@ async def get_session_id(request: MetasoRequest, headers: Optional[dict] = None,
         response.raise_for_status()
         data = response.json()

-
-
+        logger.debug(bjson(data))
         # {
         #     "errCode": 4001,
         #     "errMsg": "搜索次数超出限制"
@@ -83,7 +95,7 @@ async def get_access_token(session_id: Optional[str] = None):
     return tokens and tokens[0]


-async def create(request: ChatCompletionRequest
+async def create(request: ChatCompletionRequest):
     system_fingerprint = request.system_fingerprint

     engine_type = ''
@@ -91,7 +103,7 @@ async def create(request: ChatCompletionRequest, response_format: Optional[str]
         _, engine_type = request.model.split(':')

     model = None
-    if request.model.startswith("deepseek"):
+    if request.model.startswith(("meta", "deepseek")):
         model = "ds-r1"
         system_fingerprint = "deepseek-r1"

@@ -99,26 +111,22 @@ async def create(request: ChatCompletionRequest, response_format: Optional[str]
         model=model,
         mode=MODELS.get(request.model, "detail"),
         question=request.last_content,
-        response_format=response_format
     )

     logger.debug(request.model_dump_json(indent=4))

     headers = {}
-    if request.mode
+    if "research" in request.mode:  # 登录
         cookie = await get_next_token_for_polling(FEISHU_URL)
         headers["cookie"] = cookie
+        logger.debug(cookie)

     proxy = None
     # proxies = await get_proxies()
     session_id = await get_session_id(request, headers=headers, proxy=proxy)
     # session_id = None
     if session_id is None:  # 走代理: 随机轮询
-
-        # proxies = {
-        #     "http://": f"http://{ip}:8443",  # 自建 8443
-        #     "https://": f"http://{ip}:8443",
-        # }
+
         proxy = await get_one_proxy(feishu_url=FEISHU_URL_METASO)
         session_id = await get_session_id(request, headers=headers, proxy=proxy)

@@ -127,13 +135,11 @@ async def create(request: ChatCompletionRequest, response_format: Optional[str]
     params = request.model_dump(exclude_none=True)
     params['token'] = token

-
-    async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, params=params, proxy=proxy) as client:
+    async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, params=params, proxy=proxy, timeout=100) as client:
         async with client.stream(method="GET", url="/api/searchV2") as response:
             response.raise_for_status()
-            logger.debug(response.status_code)

-
+            references = []
             async for chunk in response.aiter_lines():

                 if (chunk := chunk.strip()) and chunk != "data:[DONE]":
@@ -141,64 +147,28 @@ async def create(request: ChatCompletionRequest, response_format: Optional[str]

                     try:
                         response = MetasoResponse(chunk=chunk)
-
-
-                        if not response.content and response.data.get("infoType") == 'weather':
-                            # yield response.data
-
-                            ################################################################ todo: 外部搜索
-                            chatcompletionchunks = await zhipuai_client.chat.completions.create(
-                                model="glm-4-flash",
-                                messages=[
-                                    {"role": "user", "content": response.data}
-                                ],
-                                stream=True
-                            )
-                            async for chatcompletionchunk in chatcompletionchunks:
-                                yield chatcompletionchunk.choices[0].delta.content
-                            ################################################################
-                            break
+                        references += response.references

                         if len(response.content) == 1 and response.content.startswith('秘'):  # 替换 模型水印
                             response.content = f"{system_fingerprint} AI搜索,它是一款能够深入理解您的问题的AI搜索引擎。"
                             yield response.content
                             break

-                        if request.response_format:  # 返回原始内容,方便二次加工或者debug
-                            yield response.data
-                            continue
-
-                        if response.type in {"query", "set-reference", "update-reference"}:
-                            logger.debug(bjson(response.data))
-
-                            reference_mapping.update(
-                                {
-                                    str(i): (
-                                        reference.get("link")
-                                        or reference.get("url")
-                                        or reference.get("file_meta", {}).get("url")
-                                    )
-                                    for i, reference in
-                                    enumerate(response.data.get('list', []), len(reference_mapping) + 1)
-                                }
-                            )
-
-                            # logger.debug(bjson(reference_mapping))
-
-                            def replace_ref(match):
-                                ref_num = match.group(1)
-
-                                return f"[[{ref_num}]({reference_mapping.get(str(ref_num))})]"
-
                         _ = pattern.sub(replace_ref, response.content)
-                        # print(_)
                         yield _

                     except Exception as e:
                         logger.error(e)
                         logger.debug(response)
+            if references:
+                for i, ref in enumerate(references, 1):
+                    title = ref.get("title")
+                    url = ref.get("link") or ref.get("url") or ref.get("file_meta", {}).get("url", "")
+                    url = quote_plus(url)

-
+                    yield f"\n[^{i}]: [{title}]({url})\n"
+
+            # logger.debug(bjson(references))


 if __name__ == '__main__':
@@ -211,10 +181,14 @@ if __name__ == '__main__':

     # arun(get_session_id(request))
     # arun(get_access_token(request))
+    """
+    metasearch-
+    """

     request = ChatCompletionRequest(
         # model="deepseek-search",
-        model="deepseek-r1-search",
+        # model="deepseek-r1-search",
+        model="meta-deepresearch",
         # model="ai-search",
         # model="ai-search:scholar",
         # model="ai-search-pro:scholar",
@@ -222,15 +196,23 @@ if __name__ == '__main__':
         # model="search-pro",

         # messages=[{'role': 'user', 'content': '今天南京天气怎么样'}]
-        messages=[{'role': 'user', 'content': '
+        # messages=[{'role': 'user', 'content': '1+1'}]
+        messages=[{'role': 'user', 'content': '周杰伦'}]

     )

     arun(create(request))

     # with timer():
-    #
+    #     request = MetasoRequest(
+    #         model='ds-r1',
+    #         mode="research",
+    #         question="南京今天天气",
+    #
+    #     )
+    #     arun(get_session_id(request))
+    #     session_id = "8544840144331366400"
     #
-    #
+    #     arun(get_access_token(session_id))

     # wr8+pHu3KYryzz0O2MaBSNUZbVLjLUYC1FR4sKqSW0p19vmcZAoEmHC72zPh/fHtOhYhCcR5GKXrxQs9QjN6dulxfOKfQkLdVkLMahMclPPjNVCPE8bLQut3zBABECLaSqpI0fVWBrdbJptnhASrSw==
meutils/caches/common.py
CHANGED

@@ -17,7 +17,7 @@ cache = memory_cache = cached


 def rcache(**kwargs):
-    """
+    """serializer="pickle"
     :param endpoint: str with the endpoint to connect to. Default is "127.0.0.1".
     :param port: int with the port to connect to. Default is 6379.
     :param db: int indicating database to use. Default is 0.
meutils/data/VERSION
CHANGED

@@ -1 +1 @@
-2025.03.
+2025.03.06.18.55.02
meutils/llm/completions/qwenllm.py
CHANGED

@@ -49,6 +49,8 @@ async def to_file(file):


 async def create(request: CompletionRequest, token: Optional[str] = None):  # ChatCompletionRequest 重构
+    if request.temperature > 2:
+        request.temperature = 1

     token = token or await get_next_token_for_polling(feishu_url=FEISHU_URL)

@@ -164,9 +166,10 @@ if __name__ == '__main__':

     request = CompletionRequest(
         # model="qwen-turbo-2024-11-01",
-        model="qwen-max-latest",
+        # model="qwen-max-latest",
         # model="qwen-max-latest-search",
         # model="qwq-max",
+        model="qwq-32b-preview",
         # model="qwq-max-search",

         # model="qwen2.5-vl-72b-instruct",
@@ -177,8 +180,8 @@ if __name__ == '__main__':
         {
             'role': 'user',
             # 'content': '今天南京天气',
-
-            'content': 'https://oss.ffire.cc/files/AIGC.pdf 总结下',
+            'content': "9.8 9.11哪个大",
+            # 'content': 'https://oss.ffire.cc/files/AIGC.pdf 总结下',

             # "chat_type": "search",

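The only functional change in `qwenllm.create` is the guard that resets an out-of-range sampling temperature before the request is forwarded. A small sketch of the same rule, using a hypothetical `clamp_temperature` helper in place of the in-place mutation:

```python
# Sketch of the temperature guard added to qwenllm.create; the helper name and
# its defaults are illustrative, only the ">2 falls back to 1" rule is from the diff.
def clamp_temperature(temperature: float, upper: float = 2.0, fallback: float = 1.0) -> float:
    """Mirror `if request.temperature > 2: request.temperature = 1`."""
    return fallback if temperature > upper else temperature


assert clamp_temperature(0.7) == 0.7
assert clamp_temperature(5.0) == 1.0
```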
meutils/llm/completions/yuanbao.py
CHANGED

@@ -112,6 +112,7 @@ class Completions(object):
             logger.debug(response.status_code)
             response.raise_for_status()

+            references = []
             reasoning = "<think>\n"  # </think>
             async for chunk in response.aiter_lines():
                 sse = SSEData(chunk=chunk)
@@ -129,16 +130,20 @@ class Completions(object):
                     yield reasoning

                 if sse.search_content:
-
-                    df
-                    df['
-                    df = df[
-                    df
-
-
-
+                    # references
+                    df = pd.DataFrame(sse.search_content).fillna('')
+                    df['icon'] = "![" + df['sourceName'] + "](" + df['icon_url'] + ")"
+                    df['web_site_name'] = df['icon'] + df['web_site_name'] + ": "
+                    df['title'] = df['web_site_name'] + "[" + df['title'] + "](" + df['url'] + ")"
+
+                    for i, ref in enumerate(df['title'], 1):
+                        references.append(f"[^{i}]: {ref}\n")

                 yield sse.content
+            if references:
+                yield '\n\n'
+                for ref in references:
+                    yield ref

     def generate_id(self, random: bool = True):
         if random:
@@ -188,7 +193,9 @@ if __name__ == '__main__':
     # request = HunyuanImageRequest(prompt='画条狗', size='16:9')
     # deep_seek deep_seek_v3 hunyuan_t1 hunyuan_gpt_175B_0404
     # model = 'deep_seek_v3-search'
-    model = 'deep_seek-search'
+    # model = 'deep_seek-search'
+    model = 'deep_seek'
+    model = 'hunyuan_t1'

     arun(Completions().create(
         CompletionRequest(
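The yuanbao.py stream handler now turns `sse.search_content` into Markdown footnotes with pandas and flushes them after the answer text. A sketch under the assumption that each search hit carries the columns the diff references (sourceName, icon_url, web_site_name, title, url); the sample row below is invented:

```python
# Sketch of the reference formatting added to yuanbao's Completions.create.
# Column names follow the diff; the sample search hit is made up.
import pandas as pd

search_content = [{
    "sourceName": "新闻",
    "icon_url": "https://example.com/icon.png",
    "web_site_name": "Example Site",
    "title": "示例标题",
    "url": "https://example.com/article",
}]

df = pd.DataFrame(search_content).fillna('')
df['icon'] = "![" + df['sourceName'] + "](" + df['icon_url'] + ")"
df['web_site_name'] = df['icon'] + df['web_site_name'] + ": "
df['title'] = df['web_site_name'] + "[" + df['title'] + "](" + df['url'] + ")"

references = [f"[^{i}]: {ref}\n" for i, ref in enumerate(df['title'], 1)]
print("".join(references))
# [^1]: ![新闻](https://example.com/icon.png)Example Site: [示例标题](https://example.com/article)
```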
meutils/notice/feishu.py
CHANGED

@@ -104,10 +104,14 @@ def catch(
         return r


+send_message_for_images = partial(send_message, url=IMAGES)
+
 if __name__ == '__main__':
     # send_message("xxx", title=None)
     send_message(None, title=None)

+    send_message_for_images("xxxxxxxx", title=None)
+
     # @catch(task_name='这是一个任务名')
     # def f():
     #     time.sleep(3)
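`send_message_for_images` is simply `functools.partial` binding the images webhook to the existing `send_message` helper. A sketch of the pattern with stand-ins for both `send_message` and `IMAGES` (placeholders, not the real feishu helpers):

```python
# Sketch of the partial() pattern behind send_message_for_images.
from functools import partial

IMAGES = "https://open.feishu.cn/open-apis/bot/v2/hook/<images-webhook>"  # placeholder URL


def send_message(content, title=None, url=None):
    # placeholder for meutils.notice.feishu.send_message
    print(f"POST {url} title={title!r} content={content!r}")


send_message_for_images = partial(send_message, url=IMAGES)
send_message_for_images("xxxxxxxx", title=None)
```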
meutils/schemas/metaso_types.py
CHANGED

@@ -18,7 +18,7 @@ class MetasoRequest(BaseModel):
     model: Optional[Literal["ds-r1",]] = None

     """search-mini search search-pro"""
-    mode: Literal["concise", "detail", "research"] = "detail"  # concise detail research
+    mode: Union[str, Literal["concise", "detail", "research", "strong-research"]] = "detail"  # concise detail research

     question: str = "Chatfire"

@@ -35,9 +35,6 @@ class MetasoRequest(BaseModel):
     newEngine: str = 'true'
     enableImage: str = 'true'

-    # 自定义字段
-    response_format: Optional[str] = None  # 原生内容
-

     # question: hi
     # mode: detail
@@ -55,6 +52,7 @@ class MetasoResponse(BaseModel):  # sse
     content: str = ""

     data: Optional[dict] = None
+    references: list = []

     # 原生内容
     chunk: str
@@ -74,6 +72,9 @@ class MetasoResponse(BaseModel):  # sse
             self.data.pop("debugId", None)
             self.content = f"""> 🚀AISearch\n```json\n{self.data}\n```\n\n"""

+        if self.type in {"set-reference", "update-reference"}:
+            self.references = self.data.get("list", [])
+

 if __name__ == '__main__':
     chunk = """data:{"type":"heartbeat"}"""
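With the new `references` field, `MetasoResponse` collects the `list` payload of `set-reference`/`update-reference` events so the caller can emit footnotes once the stream ends. A sketch of that parsing step, assuming an SSE chunk shaped like the heartbeat example above; only the `type` check and `data.list` lookup come from the diff, the sample chunk is invented:

```python
# Sketch of how set-reference / update-reference chunks feed the new
# MetasoResponse.references field.
import json

chunk = 'data:{"type":"set-reference","data":{"list":[{"title":"示例","url":"https://example.com"}]}}'

payload = json.loads(chunk[len("data:"):])
references = []
if payload.get("type") in {"set-reference", "update-reference"}:
    references = payload.get("data", {}).get("list", [])

print(references)  # [{'title': '示例', 'url': 'https://example.com'}]
```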
meutils/schemas/oneapi/common.py
CHANGED

@@ -16,6 +16,7 @@ BASE_URL = "https://api.chatfire.cn"
 FREE = 0.001
 MJ_RELAX = 1
 STEP = 2
+MINIMAX_VIDEO = 3

 MODEL_PRICE = {
     "o1:free": FREE,
@@ -32,12 +33,19 @@ MODEL_PRICE = {
     "kling_extend": 1.2,
     "kling_lip_sync": 1.2,

-    "
+    "minimax_files_retrieve": 0.01,

-    "
-
-    "minimax_i2v-01
-    "
+    "minimax_s2v-01": MINIMAX_VIDEO * 1.5,
+
+    "minimax_i2v-01": MINIMAX_VIDEO,
+    "minimax_i2v-01-live": MINIMAX_VIDEO,
+    "minimax_i2v-01-director": MINIMAX_VIDEO,
+
+    "minimax_t2v-01": MINIMAX_VIDEO,
+    "minimax_t2v-01-director": MINIMAX_VIDEO,
+
+    "minimax_video-01": MINIMAX_VIDEO,
+    "minimax_video-01-live2d": MINIMAX_VIDEO,

     # free
     "google/gemini-2.0-flash-thinking-exp:free": 0.00001,
@@ -546,7 +554,7 @@ MODEL_RATIO = {
     "doubao-1.5-pro-32k": 0.8,
     "doubao-1.5-pro-256k": 5,

-    "doubao-1.5-vision-pro-32k":
+    "doubao-1.5-vision-pro-32k": 1.5,
     "doubao-vision-lite-32k": 0.75,
     "doubao-vision-pro-32k": 1.5,

@@ -679,6 +687,8 @@ MODEL_RATIO = {
     "chatgpt-4o-latest": 2.5,
     "gpt-4o-realtime-preview": 2.5,
     "gpt-4o-realtime-preview-2024-10-01": 2.5,
+    "gpt-4o-audio-preview": 2.5 / 2,
+    "gpt-4o-mini-audio-preview": 0.15 / 2,

     "gpt-4o": 1.25,
     "gpt-4o-all": 2.5,  # 逆向
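The pricing block now derives all MiniMax video prices from a single `MINIMAX_VIDEO` constant, with `minimax_s2v-01` at a 1.5x premium. A condensed sketch of that lookup; the constants come from the diff, while `price_for` is an illustrative helper, not part of meutils:

```python
# Condensed sketch of the MINIMAX_VIDEO-based pricing added to MODEL_PRICE.
MINIMAX_VIDEO = 3

MODEL_PRICE = {
    "minimax_files_retrieve": 0.01,
    "minimax_s2v-01": MINIMAX_VIDEO * 1.5,  # 4.5
    "minimax_i2v-01": MINIMAX_VIDEO,
    "minimax_t2v-01": MINIMAX_VIDEO,
    "minimax_video-01": MINIMAX_VIDEO,
}


def price_for(model: str, default: float = 0.0) -> float:
    """Illustrative lookup with a default for unknown models."""
    return MODEL_PRICE.get(model, default)


assert price_for("minimax_s2v-01") == 4.5
assert price_for("minimax_video-01") == 3
```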