MeUtils-2025.1.23.10.16.28-py3-none-any.whl → MeUtils-2025.1.29.10.0.47-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {MeUtils-2025.1.23.10.16.28.dist-info → MeUtils-2025.1.29.10.0.47.dist-info}/METADATA +29 -30
  2. {MeUtils-2025.1.23.10.16.28.dist-info → MeUtils-2025.1.29.10.0.47.dist-info}/RECORD +34 -32
  3. examples/_openaisdk/4v.py +3 -4
  4. examples/_openaisdk/openai_baichuan.py +7 -3
  5. examples/_openaisdk/openai_chatfire.py +15 -4
  6. examples/_openaisdk/openai_deepinfra.py +2 -2
  7. examples/_openaisdk/openai_modelscope.py +2 -1
  8. examples/_openaisdk/openai_together.py +2 -1
  9. examples/_openaisdk/openai_zhipu.py +9 -5
  10. examples/json_jmespath.py +13 -13
  11. meutils/apis/hunyuan/image_tools.py +6 -2
  12. meutils/apis/images/edits.py +70 -2
  13. meutils/apis/jimeng/common.py +0 -144
  14. meutils/apis/jimeng/files.py +25 -12
  15. meutils/apis/jimeng/images.py +166 -4
  16. meutils/apis/jimeng/videos.py +176 -107
  17. meutils/apis/vidu/vidu_video.py +3 -3
  18. meutils/data/VERSION +1 -1
  19. meutils/data/oneapi/NOTICE.md +25 -8
  20. meutils/jwt_utils/common.py +46 -0
  21. meutils/llm/clients.py +4 -1
  22. meutils/llm/completions/agents/search.py +115 -0
  23. meutils/parsers/fileparser/mineru.py +48 -0
  24. meutils/schemas/image_types.py +12 -1
  25. meutils/schemas/oneapi/common.py +17 -5
  26. meutils/schemas/oneapi/models.py +1 -1
  27. meutils/schemas/openai_types.py +1 -1
  28. meutils/schemas/video_types.py +9 -1
  29. meutils/schemas/vidu_types.py +1 -1
  30. meutils/request_utils/jwt_utils/common.py +0 -42
  31. {MeUtils-2025.1.23.10.16.28.dist-info → MeUtils-2025.1.29.10.0.47.dist-info}/LICENSE +0 -0
  32. {MeUtils-2025.1.23.10.16.28.dist-info → MeUtils-2025.1.29.10.0.47.dist-info}/WHEEL +0 -0
  33. {MeUtils-2025.1.23.10.16.28.dist-info → MeUtils-2025.1.29.10.0.47.dist-info}/entry_points.txt +0 -0
  34. {MeUtils-2025.1.23.10.16.28.dist-info → MeUtils-2025.1.29.10.0.47.dist-info}/top_level.txt +0 -0
  35. /meutils/{request_utils/jwt_utils → jwt_utils}/__init__.py +0 -0
meutils/apis/jimeng/videos.py CHANGED
@@ -13,13 +13,15 @@
 3. 上传音频+创建任务
 
 """
+import asyncio
+
 from meutils.pipe import *
 from meutils.str_utils.json_utils import json_path
 
 from meutils.schemas.jimeng_types import BASE_URL, MODELS_MAP, FEISHU_URL
 from meutils.schemas.video_types import LipsyncVideoRquest
 from meutils.schemas.task_types import TaskResponse
-from meutils.apis.jimeng.common import create_draft_content, get_headers, check_token
+from meutils.apis.jimeng.common import get_headers, check_token
 from meutils.apis.jimeng.files import upload_for_image, upload_for_video
 
 from meutils.config_utils.lark_utils import get_next_token_for_polling
@@ -29,50 +31,52 @@ from fake_useragent import UserAgent
 ua = UserAgent()
 
 
-async def create_realman_avatar(image_uri, token: str):
+async def create_realman_avatar(image_url: str, token: str):
+    if image_url.startswith("http"):
+        image_url = await upload_for_image(image_url, token)
+
     url = "/mweb/v1/create_realman_avatar"
     headers = get_headers(url, token)
 
     payload = {
         "input_list": [
             {
-                "image_uri": image_uri,
+                "image_uri": image_url,
                 "submit_id": str(uuid.uuid4()),
                 "mode": 0
             },
             {
-                "image_uri": image_uri,
+                "image_uri": image_url,
                 "submit_id": str(uuid.uuid4()),
                 "mode": 1
             }
         ]
     }
+
     async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, timeout=60) as client:
         response = await client.post(url, json=payload)
         response.raise_for_status()
         data = response.json()
         logger.debug(bjson(data))  # 1914628189186
 
-        if task_ids := json_path(data, "$..task_id"):
-            return task_ids
-        else:  # task_id 解析失败
-            raise Exception("create_realman_avatar failed: task_id 解析失败")
+        response = TaskResponse(metadata=data, system_fingerprint=token)
+        if task_ids := json_path(data, "$..task_id"):  # 返回 imageurl vid
+            response.task_id = ' '.join(task_ids)
+            return response
 
-        # mget_generate_task
-        # todo: 从任务结果解析 resource_id_std, resource_id_loopy
+        else:
+            response.message = str(json_path(data, "$..message"))
+            response.status = "fail"
+            return response
 
 
-async def get_task(task_ids):
+async def get_task(task_id: str, token: str = "916fed81175f5186a2c05375699ea40d"):
     """
     $..image_to_avatar 成功: 先检测图片or视频
     :param task_ids:
     :return:
     """
-    if isinstance(task_ids, str):
-        task_ids = [task_ids]
-
-    token = "916fed81175f5186a2c05375699ea40d"
-
+    task_ids = task_id.split()
 
     url = "/mweb/v1/mget_generate_task"
     headers = get_headers(url, token)
@@ -84,19 +88,42 @@ async def get_task(task_ids):
         data = response.json()
         logger.debug(bjson(data))
 
-    if messages := json_path(data, "$..image_to_avatar.message"):
-        if message := "".join(messages):  # and "fail" in message
-            logger.error(message)
-        else:
-            logger.info("image_to_avatar success")
+    if json_path(data, "$..image_to_avatar"):  # 角色检测 create_realman_avatar
+        resource_id_std = resource_id_loopy = ""
+        if resource_id_stds := json_path(data, "$..resource_id_std"):
+            resource_id_std = "".join(resource_id_stds)
+
+        if resource_id_loopys := json_path(data, "$..resource_id_loopy"):
+            resource_id_loopy = "".join(resource_id_loopys)
+
+        task_data = {
+            "resource_id_std": resource_id_std,
+            "resource_id_loopy": resource_id_loopy
+        }
+        response = TaskResponse(task_id=task_id, data=task_data, metadata=data)
+        if resource_id_std and resource_id_loopy:
+            response.status = "success"
 
-    if resource_id_stds := json_path(data, "$..resource_id_std"):
-        resource_id_std = "".join(resource_id_stds)
+        if (message := json_path(data, "$..image_to_avatar.message")) and "fail" in str(message).lower():
+            response.message = str(message)
+            response.status = "fail"
 
-    if resource_id_loopys := json_path(data, "$..resource_id_loopy"):
-        resource_id_loopy = "".join(resource_id_loopys)
-        # return TaskResponse()
+        return response
 
+    else:
+        response = TaskResponse(task_id=task_id, metadata=data)
+        if (message := json_path(data, "$..fail_msg")) and "success" not in str(message).lower():
+            response.message = str(message)
+            response.status = "fail"
+            return response
+
+        if will_cost := json_path(data, "$..will_cost"):
+            response.will_cost = will_cost[0]
+
+        if video_urls := json_path(data, "$..[360p,480p,720p].video_url"):
+            response.data = [{"video": _} for _ in video_urls]
+
+        return response
 
 
 async def create_task(request: LipsyncVideoRquest, token: Optional[str] = None):
@@ -108,43 +135,121 @@ async def create_task(request: LipsyncVideoRquest, token: Optional[str] = None):
 
     headers = get_headers(url, token)
 
-    i2v_opt = {}
-    v2v_opt = {}
+    model = request.model
+    scene = "lip_sync_image"
+    image_url = await upload_for_image(request.image_url, token)
+
+    # 角色检测
+    realman_avatar_response = await create_realman_avatar(image_url, token)
+    if realman_avatar_response.status == "fail":
+        return realman_avatar_response
+
+    else:
+        for _ in range(10):
+            task_response = await get_task(realman_avatar_response.task_id, token)
+            if task_response.status == "fail":
+                logger.debug("fail")
+                return task_response
+            elif task_response.status == "success":
+                logger.debug("success")
+
+                realman_avatar_response = task_response
+                break
+            else:
+                await asyncio.sleep(3)
+                continue
+
+    audio_vid, audio_url = await upload_for_video(request.audio_url, token)
+
+    resource_id_std = realman_avatar_response.data.get("resource_id_std")
+    resource_id_loopy = realman_avatar_response.data.get("resource_id_loopy")
+
+    i2v_opt = v2v_opt = {}
     if request.video_url:
         v2v_opt = {}
 
+    # payload = {
+    #     "submit_id": "",
+    #     "task_extra": "{\"promptSource\":\"photo_lip_sync\",\"generateTimes\":1,\"lipSyncInfo\":{\"sourceType\":\"local-file\",\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\"},\"isUseAiGenPrompt\":false,\"batchNumber\":1}",
+    #     "http_common_info": {
+    #         "aid": 513695
+    #     },
+    #     "input": {
+    #         "seed": 3112889115,
+    #         "video_gen_inputs": [
+    #             {
+    #                 "v2v_opt": {},
+    #                 "i2v_opt": {
+    #                     "realman_avatar": {
+    #                         "enable": True,
+    #                         "origin_image": {
+    #                             # "width": 800,
+    #                             # "height": 1200,
+    #                             "image_uri": "tos-cn-i-tb4s082cfz/4dead1bfc8e84572a91f2e047016a351",
+    #                             "image_url": ""
+    #                         },
+    #                         "origin_audio": {
+    #                             # "duration": 9.976625,
+    #                             "vid": "v02870g10004cu8d4r7og65j2vr5opb0"
+    #                         },
+    #
+    #                         "resource_id_std": "381c534f-bcef-482e-8f17-5b30b64e41a1",
+    #                         "resource_id_loopy": "b9ac51cb-e26c-4b63-81d9-34ed24053032",
+    #                         #
+    #                         # "tts_info": "{\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\",\"source_type\":\"local-file\"}"
+    #                     }
+    #                 },
+    #                 "audio_vid": "v02870g10004cu8d4r7og65j2vr5opb0",
+    #                 "video_mode": 4
+    #             }
+    #         ]
+    #     },
+    #     "mode": "workbench",
+    #     "history_option": {},
+    #     "commerce_info": {
+    #         "resource_id": "generate_video",
+    #         "resource_id_type": "str",
+    #         "resource_sub_type": "aigc",
+    #         "benefit_type": "lip_sync_avatar_std",  # 5积分
+    #         # "benefit_type": "lip_sync_avatar_lively"  # 10积分
+    #     },
+    #     "scene": "lip_sync_image",
+    #     "client_trace_data": {},
+    #     "submit_id_list": [
+    #         str(uuid.uuid4())
+    #     ]
+    # }
+
+    if request.image_url:
+        i2v_opt = {
+            "realman_avatar": {
+                "enable": True,
+                "origin_image": {
+                    "image_uri": image_url,
+                    "image_url": ""
+                },
+                "resource_id_loopy": resource_id_loopy,
+                "resource_id_std": resource_id_std,
+                "origin_audio": {
+                    "vid": audio_vid
+                },
+                # "tts_info": "{\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\",\"source_type\":\"local-file\"}"
+            }
+        }
+
     payload = {
         "submit_id": "",
-        "task_extra": "{\"promptSource\":\"photo_lip_sync\",\"generateTimes\":1,\"lipSyncInfo\":{\"sourceType\":\"local-file\",\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\"},\"isUseAiGenPrompt\":false,\"batchNumber\":1}",
+        # "task_extra": "{\"promptSource\":\"photo_lip_sync\",\"generateTimes\":1,\"lipSyncInfo\":{\"sourceType\":\"local-file\",\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\"},\"isUseAiGenPrompt\":false,\"batchNumber\":1}",
         "http_common_info": {
             "aid": 513695
         },
         "input": {
-            "seed": 3112889115,
+            "seed": 2032846910,
             "video_gen_inputs": [
                 {
-                    "v2v_opt": {},
-                    "i2v_opt": {
-                        "realman_avatar": {
-                            "enable": True,
-                            "origin_image": {
-                                # "width": 800,
-                                # "height": 1200,
-                                "image_uri": "tos-cn-i-tb4s082cfz/4dead1bfc8e84572a91f2e047016a351",
-                                "image_url": ""
-                            },
-                            "origin_audio": {
-                                # "duration": 9.976625,
-                                "vid": "v02870g10004cu8d4r7og65j2vr5opb0"
-                            },
-
-                            "resource_id_std": "381c534f-bcef-482e-8f17-5b30b64e41a1",
-                            "resource_id_loopy": "b9ac51cb-e26c-4b63-81d9-34ed24053032",
-                            #
-                            # "tts_info": "{\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\",\"source_type\":\"local-file\"}"
-                        }
-                    },
-                    "audio_vid": "v02870g10004cu8d4r7og65j2vr5opb0",
+                    "v2v_opt": v2v_opt,
+                    "i2v_opt": i2v_opt,
+                    "audio_vid": audio_vid,
                     "video_mode": 4
                 }
             ]
@@ -155,67 +260,17 @@ async def create_task(request: LipsyncVideoRquest, token: Optional[str] = None):
             "resource_id": "generate_video",
             "resource_id_type": "str",
             "resource_sub_type": "aigc",
-            "benefit_type": "lip_sync_avatar_std",  # 5积分
+            "benefit_type": model,
             # "benefit_type": "lip_sync_avatar_lively"  # 10积分
         },
-        "scene": "lip_sync_image",
+        "scene": scene,
         "client_trace_data": {},
         "submit_id_list": [
             str(uuid.uuid4())
         ]
     }
 
-    # if request.image_url:
-    #     i2v_opt = {
-    #         "realman_avatar": {
-    #             "enable": True,
-    #             "origin_image": {
-    #                 "width": 800,
-    #                 "height": 1200,  ######## 没必要把
-    #                 "image_uri": request.image_url,
-    #                 "image_url": ""
-    #             },
-    #             "resource_id_loopy": "9c397499-a59f-47b5-9bfd-e1397ec62f61",
-    #             "resource_id_std": "0a8c8d72-5543-4e9e-8843-c03fe5b3a8c7",
-    #             "origin_audio": {
-    #                 "duration": 9.976625,
-    #                 "vid": "v03870g10004cu6vpgfog65nc9ivupg0"
-    #             },
-    #             "tts_info": "{\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\",\"source_type\":\"local-file\"}"
-    #         }
-    #     }
-    #
-    # payload = {
-    #     "submit_id": "",
-    #     "task_extra": "{\"promptSource\":\"photo_lip_sync\",\"generateTimes\":1,\"lipSyncInfo\":{\"sourceType\":\"local-file\",\"name\":\"vyFWygmZsIZlUO4s0nr2n.wav\"},\"isUseAiGenPrompt\":false,\"batchNumber\":1}",
-    #     "http_common_info": {
-    #         "aid": 513695
-    #     },
-    #     "input": {
-    #         "seed": 2032846910,
-    #         "video_gen_inputs": [
-    #             {
-    #                 "v2v_opt": v2v_opt,
-    #                 "i2v_opt": i2v_opt,
-    #                 "audio_vid": "v03870g10004cu6vpgfog65nc9ivupg0",
-    #                 "video_mode": 4
-    #             }
-    #         ]
-    #     },
-    #     "mode": "workbench",
-    #     "history_option": {},
-    #     "commerce_info": {
-    #         "resource_id": "generate_video",
-    #         "resource_id_type": "str",
-    #         "resource_sub_type": "aigc",
-    #         "benefit_type": "lip_sync_avatar_std"
-    #     },
-    #     "scene": "lip_sync_image",
-    #     "client_trace_data": {},
-    #     "submit_id_list": [
-    #         "4717038e-f4fd-4c1c-b5a5-39ae4118099c"
-    #     ]
-    # }
+    logger.debug(bjson(payload))
 
     async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, timeout=60) as client:
         response = await client.post(url, json=payload)
@@ -229,8 +284,22 @@ async def create_task(request: LipsyncVideoRquest, token: Optional[str] = None):
 
 
 if __name__ == '__main__':
+    token = "916fed81175f5186a2c05375699ea40d"
+
+    request = LipsyncVideoRquest(
+        model="lip_sync_avatar_std",
+        image_url="https://oss.ffire.cc/files/kling_watermark.png",
+        video_url="",
+        audio_url="https://oss.ffire.cc/files/lipsync.mp3"
+    )
+
+    # with timer():
+    #     r = arun(create_realman_avatar(request.image_url, token))
+    #     arun(get_task(r.task_id))
+
     # image_uri = "tos-cn-i-tb4s082cfz/387649a361e546f89549bd3510ab926d"
     # task_ids = arun(create_realman_avatar(image_uri, token="7c5e148d9fa858e3180c42f843c20454"))
     # arun(mget_generate_task(task_ids))
-    r = arun(create_task(LipsyncVideoRquest()))
-    arun(get_task(r.task_id))
+    with timer():
+        r = arun(create_task(request))
+        # arun(get_task(r.task_id))
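
The reworked meutils/apis/jimeng/videos.py turns lip-sync generation into a submit-then-poll flow: create_task now uploads the image, runs the realman-avatar check, uploads the audio and submits the job, while get_task reports status and the rendered 360p/480p/720p video URLs. A minimal driver sketch under those assumptions (it presumes create_task returns a TaskResponse whose task_id can be fed back into get_task; the retry cap and sleep interval are illustrative, not taken from the diff):

    import asyncio

    from meutils.schemas.video_types import LipsyncVideoRquest
    from meutils.apis.jimeng.videos import create_task, get_task


    async def run_lipsync(image_url: str, audio_url: str, token: str):
        # create_task uploads the image, runs the realman-avatar check and
        # uploads the audio before submitting the lip-sync job.
        request = LipsyncVideoRquest(
            model="lip_sync_avatar_std",
            image_url=image_url,
            video_url="",
            audio_url=audio_url,
        )
        task = await create_task(request, token)
        if task.status == "fail":
            return task

        # Poll until get_task either fails or attaches the rendered video URLs.
        for _ in range(60):          # illustrative cap
            task = await get_task(task.task_id, token)
            if task.status == "fail" or task.data:
                break
            await asyncio.sleep(2)   # illustrative interval
        return task
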
meutils/apis/vidu/vidu_video.py CHANGED
@@ -195,7 +195,7 @@ if __name__ == '__main__':
     # token = '_GRECAPTCHA=09AA5Y-DKEaPCPtfl_s0o9z-HKEP5Tkfrn7CsmZfUj5MUYAFZiW7ELincbr2c2baFkM5Vu_KDPJ11l_N_DJhHTx_A; HMACCOUNT_BFESS=4189E6AE98589913; Hm_lvt_a3c8711bce1795293b1793d35916c067=1722407159; Hm_lpvt_a3c8711bce1795293b1793d35916c067=1722407159; HMACCOUNT=4189E6AE98589913; io=FKh7a-dvlK-UnBmOAB7M; JWT=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjM3MDMxNjgsImlhdCI6MTcyMjQwNzE2OCwiaXNzIjoiaWFtIiwic3ViIjoiMjM2ODY0MDkxNDEwMTMxOCJ9.mbNkzI6piihIVDsGSshSCiLlakAAG_Hxh0xAnPkx_vs; Shunt=; Hm_lvt_a3c8711bce1795293b1793d35916c067=1753943158594|1722407159; shortid=parsvjrji; debug=undefined; _grecaptcha=09AA5Y-DJauC2Mo_KPc5_R5OUPR__wsqLYpOjUViIajU8hpDPOpg6LcH1xECbelcZwXl_BSHnZlCWRGxGWvRvMyQr7QGnCCJam75DKvw; VIDU_SELECTED_LOCALE=zh; VIDU_TOUR="v1"'
     # token = "JWT=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjM3MDIyNjAsImlhdCI6MTcyMjQwNjI2MCwiaXNzIjoiaWFtIiwic3ViIjoiMjM2ODYyNjA0MzE4MTg1NCJ9.8P8WuktyjFM0utFbGcxLQptzfv43ugmMcE31RXR9JJQ"
     file = Path('/Users/betterme/PycharmProjects/AI/cover.jpeg').read_bytes()
-    url = arun(upload(file, vip=True)).url
+    # url = arun(upload(file, vip=True)).url
 
     # print(arun(get_next_token_for_polling(FEISHU_URL)) == token)
 
@@ -208,13 +208,13 @@ if __name__ == '__main__':
     # arun(get_credits(token))
     #
     d = {
-        "model": 'vidu-1.5',
+        "model": 'vidu-2.0',
         "prompt": "这个女人笑起来",
         "url": "https://oss.ffire.cc/files/kling_watermark.png"  # failed to save uploads
     }
     token = None
     # print(bjson(ViduRequest(**d).payload))
-    # arun(create_task(ViduRequest(**d)))
+    arun(create_task(ViduRequest(**d)))
     # arun(create_task(ViduRequest(**d), vip=False))
     # # pass
     # token = "sensorsdata2015jssdkcross=dfm-enc-%7B%22Va28a6y8_aV%22%3A%22sSsAAnHAInAGEtIG%22%2C%22gae28_aV%22%3A%22EGEuAnststSEirt-ARSAigSVHIiHVs-EtHsHnIR-sARInAA-EGEuAnststHsIti%22%2C%22OemO2%22%3A%7B%22%24ki8r28_8eiggay_2mbeyr_8cOr%22%3A%22%E7%9B%B4%E6%8E%A5%E6%B5%81%E9%87%8F%22%2C%22%24ki8r28_2rieyz_lrcMmeV%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC_%E7%9B%B4%E6%8E%A5%E6%89%93%E5%BC%80%22%2C%22%24ki8r28_ergreere%22%3A%22%22%7D%2C%22aVr68a8ar2%22%3A%22rc3liZ7ku67OV5kgPsGCiskkDskl3qmawFlJPq0swqfcpXNJPZKSBF0IQXLzWq7lQFQzQZNcBF1SQF3EQqwIBF3MQhwswX08wFlJPq0swqfcpXKcwhzz3aMax9klWZHAiD7HDsJCWskbDskl3qmawqNcwX0sQF0hQq0HwFfhp4xG%22%2C%22za28mec_kmfa6_aV%22%3A%7B%226ior%22%3A%22%24aVr68a8c_kmfa6_aV%22%2C%22Cikbr%22%3A%22sSsAAnHAInAGEtIG%22%7D%7D;_ga=GA1.1.2058758439.1724312077;_ga_ZJBV7VYP55=GS1.1.1727080335.38.1.1727080510.0.0.0;JWT=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjgzNTAzODAsImlhdCI6MTcyNzA1NDM4MCwiaXNzIjoiaWFtIiwic3ViIjoiMjQyMDA2NTAzNjA5MTgzOSJ9.PkjQqjYB56vYetYwmlagnWn_6bSCwoxCjI7BjfelBOU;Shunt="
meutils/data/VERSION CHANGED
@@ -1 +1 @@
-2025.01.23.10.16.28
+2025.01.29.10.00.47
meutils/data/oneapi/NOTICE.md CHANGED
@@ -32,13 +32,35 @@
 </details>
 
 <details markdown="1">
-<summary><b>大额充值优惠</b></summary>
+<summary><b>大额对公,请联系客服</b></summary>
+</details>
 
-- 充值1000-2000 增送10%
+## 2025-01-27(预祝大家新年快乐)
 
-</details>
+### 充值活动
+
+- 充值200增送 5%
+
+- 充值500增送 10%
+- 充值1000增送 20%
+- 充值2000增送 25%
+
+### 新上线模型Deepseek系列
+
+- deepseek-v3
+- deepseek-r1
+- deepseek-r1:7b
+
+
+
+
+---
+
+<details markdown="1">
+<summary><b>历史更新</b></summary>
 
 ## 2025-01-04
+
 - 增加模型配额 gemini-2.0-flash-exp、gemini-2.0-flash-thinking-exp-1219
 
 ## 2024-12-31
@@ -87,11 +109,6 @@
 - Chat模式:`sora-1:1-480p-5s`
 - 异步任务接口在路上
 
----
-
-<details markdown="1">
-<summary><b>历史更新</b></summary>
-
 ## 2024-12-13
 
 - 新增模型 混元视频(支持高并发,非逆向可商用,限时特价1毛)[接口文档](https://api.chatfire.cn/docs/api-244309840)
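
The notice announces deepseek-v3, deepseek-r1 and deepseek-r1:7b as newly available chat models. A minimal call through an OpenAI-compatible client might look like the sketch below; the base_url and the CHATFIRE_API_KEY variable are illustrative assumptions, not values taken from this diff:

    import os

    from openai import OpenAI

    # Assumed OpenAI-compatible endpoint and key; adjust both to your deployment.
    client = OpenAI(
        base_url="https://api.chatfire.cn/v1",  # assumption
        api_key=os.getenv("CHATFIRE_API_KEY"),  # assumption
    )

    completion = client.chat.completions.create(
        model="deepseek-r1",  # one of the newly listed models
        messages=[{"role": "user", "content": "用一句话介绍你自己"}],
    )
    print(completion.choices[0].message.content)
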
meutils/jwt_utils/common.py ADDED
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : common
+# @Time : 2024/10/28 20:57
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+
+from meutils.pipe import *
+
+import jwt
+import time
+import datetime
+
+# Header and payload
+header = {
+    "alg": "HS512",
+    "type": "JWT"
+}
+
+
+payload = {
+    "jti": "80004477",
+    "rol": "ROLE_REGISTER",
+    "iss": "OpenXLab",
+    "clientId": "lkzdx57nvy22jkpq9x2w",
+    "phone": "",
+    "uuid": "73a8d9b0-8bbf-4973-9b71-4b687ea23a78",
+    "email": "313303303@qq.com",
+
+    "iat": int(time.time()),
+    "exp": int(time.time()) + 3600
+}
+
+# Your secret key
+secret = ""
+
+# Create the JWT
+token = jwt.encode(payload, secret, algorithm="HS512", headers=header)
+
+print(token)
+
+
+
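
The new meutils/jwt_utils/common.py signs an HS512 token with PyJWT, so the same library can round-trip and inspect it. A minimal check, reusing the empty secret the script above signs with (the payload here is trimmed for brevity):

    import time

    import jwt  # PyJWT, the same library the new module uses

    secret = ""  # the module above signs with an empty secret
    payload = {"jti": "80004477", "iat": int(time.time()), "exp": int(time.time()) + 3600}

    token = jwt.encode(payload, secret, algorithm="HS512")

    # Decoding verifies the HS512 signature and the exp claim.
    claims = jwt.decode(token, secret, algorithms=["HS512"])
    print(claims["jti"])

    # The header can be inspected without verification.
    print(jwt.get_unverified_header(token))
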
meutils/llm/clients.py CHANGED
@@ -32,7 +32,10 @@ if __name__ == '__main__':
 
     # web-search-pro
 
-    arun(zhipuai_client.chat.completions.create(
+    r = arun(zhipuai_client.chat.completions.create(
         messages=[{"role": "user", "content": "中国队奥运会拿了多少奖牌"}],
         model='web-search-pro')
     )
+
+    r.model_dump_json()
+
meutils/llm/completions/agents/search.py ADDED
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : search
+# @Time : 2025/1/27 13:41
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description : todo: 格式匹配
+
+from meutils.pipe import *
+from meutils.llm.clients import AsyncOpenAI, zhipuai_client, moonshot_client
+from meutils.llm.openai_utils import to_openai_params
+from meutils.schemas.openai_types import chat_completion, chat_completion_chunk, ChatCompletionRequest, CompletionUsage
+
+
+class Completions(object):
+
+    def __init__(self, api_key: Optional[str] = None):
+        self.api_key = api_key
+
+    async def create(self, request: ChatCompletionRequest):
+        request.model = request.model.removesuffix("-search")
+
+        if request.model.startswith("baichuan"):
+            base_url = os.getenv("BAICHUAN_BASE_URL")
+            api_key = self.api_key or os.getenv("BAICHUAN_API_KEY")
+
+            request.tools = [
+                {
+                    "type": "web_search",
+                    "web_search": {
+                        "enable": True,
+                        "search_mode": "performance_first"
+                    }
+                }
+            ]
+            data = to_openai_params(request)
+            client = AsyncOpenAI(base_url=base_url, api_key=api_key)
+            completion = await client.chat.completions.create(**data)
+            return completion
+
+        elif request.model.startswith(("moonshot", "kimi")):
+            tool_call_name = "$web_search"
+            request.tools = [
+                {
+                    "type": "builtin_function",  # <-- 我们使用 builtin_function 来表示 Kimi 内置工具,也用于区分普通 function
+                    "function": {
+                        "name": "$web_search",
+                    },
+                },
+            ]
+
+            data = to_openai_params(request)
+            completion = await moonshot_client.chat.completions.create(**data)
+
+            tool_call = completion.choices[0].message.tool_calls[0]
+            tool_call_arguments = tool_call.function.arguments
+            print(tool_call_arguments)
+
+            request.messages.append({
+                "role": "tool",
+                "tool_call_id": tool_call.id,
+                "name": tool_call_name,
+                "content": tool_call_arguments,
+            })
+            logger.debug(request.messages)
+            data = to_openai_params(request)
+            logger.debug(data)
+            completion = await moonshot_client.chat.completions.create(**data)
+            return completion
+
+        elif request.model.startswith("doubao"):
+            base_url = os.getenv("DOUBAO_BASE_URL")
+            api_key = self.api_key or os.getenv("DOUBAO_API_KEY")
+
+            request.model = "bot-20250127143547-c8q8m"
+            request.tools = [
+                {
+                    "type": "web_search",
+                    "web_search": {
+                        "enable": True,
+                        "search_mode": "performance_first"
+                    }
+                }
+            ]
+            data = to_openai_params(request)
+            client = AsyncOpenAI(base_url=base_url, api_key=api_key)
+
+            completion = await client.chat.completions.create(**data)
+            return completion
+
+        else:  # glm
+            request.model = "web-search-pro"
+            data = to_openai_params(request)
+            completion = await zhipuai_client.chat.completions.create(**data)
+            logger.debug(completion)
+            return completion
+
+
+if __name__ == '__main__':
+    request = ChatCompletionRequest(
+        # model="baichuan4-turbo",
+        model="xx",
+        # model="moonshot-v1-8k",
+        # model="doubao",
+
+        messages=[
+            {"role": "user", "content": "南京天气如何"}
+        ],
+
+        stream=False
+    )
+
+    arun(Completions().create(request))
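
Completions.create strips a trailing "-search" from the model name before routing to the Baichuan, Moonshot/Kimi, Doubao, or GLM branch, so callers keep their usual model id and only append the suffix. A minimal calling sketch under that assumption (it presumes the module path shown in the file list above and that the relevant API keys are set in the environment):

    import asyncio

    from meutils.schemas.openai_types import ChatCompletionRequest
    from meutils.llm.completions.agents.search import Completions

    # "moonshot-v1-8k-search": the suffix is stripped, then the moonshot/kimi
    # branch runs the builtin $web_search tool before the final completion.
    request = ChatCompletionRequest(
        model="moonshot-v1-8k-search",
        messages=[{"role": "user", "content": "南京天气如何"}],
        stream=False,
    )

    completion = asyncio.run(Completions().create(request))
    print(completion.choices[0].message.content)
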