MeUtils 2025.4.1.18.47.48__py3-none-any.whl → 2025.4.8.19.54.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. {MeUtils-2025.4.1.18.47.48.dist-info → MeUtils-2025.4.8.19.54.40.dist-info}/METADATA +262 -262
  2. {MeUtils-2025.4.1.18.47.48.dist-info → MeUtils-2025.4.8.19.54.40.dist-info}/RECORD +36 -29
  3. examples/_openaisdk/openai_ark_bots.py +4 -22
  4. examples/_openaisdk/openai_audio_gpt.py +67 -0
  5. examples/_openaisdk/openai_google.py +128 -77
  6. examples/_openaisdk/zhipu_智能体.py +3 -3
  7. meutils/apis/google_apis/__init__.py +11 -0
  8. meutils/apis/google_apis/audios.py +115 -0
  9. meutils/apis/google_apis/common.py +251 -0
  10. meutils/apis/google_apis/files.py +19 -0
  11. meutils/apis/google_apis/gemini_sdk.py +170 -0
  12. meutils/{llm/completions → apis/google_apis}/google2openai.py +22 -8
  13. meutils/apis/google_apis/images.py +11 -0
  14. meutils/apis/google_apis/search.py +46 -0
  15. meutils/apis/hailuoai/videos.py +1 -1
  16. meutils/apis/jimeng/images.py +1 -1
  17. meutils/apis/search/ark_web_search.py +8 -4
  18. meutils/apis/search/zhipu_web_search.py +6 -3
  19. meutils/apis/textin_apis/common.py +1 -1
  20. meutils/apis/tripo3d/images.py +1 -1
  21. meutils/caches/common.py +2 -2
  22. meutils/data/VERSION +1 -1
  23. meutils/io/files_utils.py +27 -22
  24. meutils/llm/completions/assistants/ppt.py +17 -13
  25. meutils/llm/completions/qwenllm.py +8 -2
  26. meutils/notice/feishu.py +2 -0
  27. meutils/oss/minio_oss.py +38 -8
  28. meutils/oss/minio_utils.py +2 -2
  29. meutils/schemas/jimeng_types.py +1 -1
  30. meutils/schemas/oneapi/common.py +23 -6
  31. meutils/schemas/openai_types.py +70 -40
  32. meutils/str_utils/regular_expression.py +6 -3
  33. meutils/apis/kling/api.py +0 -62
  34. {MeUtils-2025.4.1.18.47.48.dist-info → MeUtils-2025.4.8.19.54.40.dist-info}/LICENSE +0 -0
  35. {MeUtils-2025.4.1.18.47.48.dist-info → MeUtils-2025.4.8.19.54.40.dist-info}/WHEEL +0 -0
  36. {MeUtils-2025.4.1.18.47.48.dist-info → MeUtils-2025.4.8.19.54.40.dist-info}/entry_points.txt +0 -0
  37. {MeUtils-2025.4.1.18.47.48.dist-info → MeUtils-2025.4.8.19.54.40.dist-info}/top_level.txt +0 -0
@@ -376,7 +376,7 @@ if __name__ == '__main__':
     # arun(generate(ImageRequest(**data)))
 
     # arun(generate(ImageRequest(prompt="fuck you")))
-    prompt = "A plump Chinese beauty wearing a wedding dress revealing her skirt and underwear is swinging on the swing,Happy smile,cleavage,Exposed thighs,Spread your legs open,Extend your leg,panties,upskirt,Barefoot,sole"
+    prompt = "A plump Chinese beauty wearing a wedding dress revealing her skirt and underwear is swinging on the swing,Happy smile,cleavage,Exposed thighs,Spread your legs open,Extend your leg,panties,upskirt,Barefoot,sole"
     request = ImageRequest(prompt=prompt)
     # task = arun(create_task(ImageRequest(**data), token=token))
 
meutils/apis/search/ark_web_search.py CHANGED
@@ -37,12 +37,12 @@ class Completions(object):
             messages=[
                 {"role": "user", "content": q},
             ],
-
-            max_tokens=1,
+            temperature=0,
+            max_tokens=10,
             stream=False,
-
         )
         completion = await self.create(request)
+        logger.debug(completion)
         # print(completion.choices[0].message.content)
 
         data = {"data": []}
@@ -61,5 +61,9 @@ if __name__ == '__main__':
    #     ],
    #     stream=True,
    # ))
+    q = "今天有什么热点新闻?"
+    q = "今日热点"
+
+    # arun(c.query("今天南京天气如何?"))
 
-    arun(c.query("今天南京天气如何?"))
+    arun(c.query(q=q))
meutils/apis/search/zhipu_web_search.py CHANGED
@@ -48,6 +48,9 @@ class Completions(object):
 
     def create(self, request: Union[CompletionRequest, str]):
 
+        q = request.last_user_content if isinstance(request, CompletionRequest) else request
+        q = f"{q} 【最新动态、相关信息或新闻】"
+
         chunks = zhipuai_sdk_client.assistant.conversation(
 
             assistant_id="659e54b1b8006379b4b2abd6",  # 搜索智能体
@@ -60,7 +63,7 @@ class Completions(object):
                     "type": "text",
                     # "text": "北京未来七天气温,做个折线图",
                     # "text": "画条狗"
-                    "text": request.last_user_content if isinstance(request, CompletionRequest) else request,
+                    "text": q,
 
                 }]
             }
@@ -116,5 +119,5 @@ if __name__ == '__main__':
 
         stream=True
     )
-    # arun(Completions().search('周杰伦'))
-    arun(Completions().query(request.last_user_content))
+    # arun(Completions().create(request))
+    # arun(Completions().query(request.last_user_content))
meutils/apis/textin_apis/common.py CHANGED
@@ -57,7 +57,7 @@ if __name__ == '__main__':
     # image = "doc_watermark.jpg"
 
     # image = "https://oss.ffire.cc/files/nsfw.jpg"
-    image = "https://oss.ffire.cc/files/kling_watermark.png"  # 无水印
+    image = "https://oss.ffire.cc/files/kling_watermark.png"  # 无水印
 
     request = WatermarkRemove(
         image=image,
meutils/apis/tripo3d/images.py CHANGED
@@ -103,4 +103,4 @@ if __name__ == '__main__':
     request = ImageRequest(
         prompt="一只活泼的柴犬,戴着红白相间的头巾,叼着一根魔法棒,眼睛闪烁着星星,正在表演马戏团特技",
     )
-    arun(create_task(request))
+    arun(create_task(request))  # 60*100
meutils/caches/common.py CHANGED
@@ -58,8 +58,8 @@ if __name__ == '__main__':
         return a
 
 
-    @cache(ttl=3)
-    @rcache(ttl=2)
+    # @cache(ttl=3)
+    @rcache(ttl=10)
     async def mfn(a):
         logger.debug(a)
         return a
meutils/data/VERSION CHANGED
@@ -1 +1 @@
-2025.04.01.18.47.48
+2025.04.08.19.54.40
meutils/io/files_utils.py CHANGED
@@ -14,7 +14,8 @@ import shortuuid
 
 from meutils.pipe import *
 from meutils.decorators.retry import retrying
-from meutils.caches.redis_cache import cache
+from meutils.caches import rcache, cache
+from meutils.oss.minio_oss import Minio
 
 # from fastapi import UploadFile 有点区别
 from starlette.datastructures import UploadFile
@@ -49,6 +50,7 @@ def base64_to_bytes(base64_image_string):
 
 
 @retrying()
+@rcache(ttl=300, serializer='pickle')  # todo: UploadFile不一定兼容
 async def to_bytes(
         file: Union[UploadFile, str, bytes],
         headers: Optional[dict] = None
@@ -73,7 +75,7 @@ async def to_bytes(
    elif isinstance(file, str) and file.startswith('http'):  # url
        logger.debug(f"FileType: HTTP")
 
-        async with AsyncClient(headers=headers or {}, timeout=60) as cilent:
+        async with AsyncClient(headers=headers or {}, timeout=100) as cilent:
            resp = await cilent.get(file)
            file_bytes = resp.content
 
@@ -139,33 +141,34 @@ async def to_url_fal(
 
 
 async def to_url(
-        file: Union[UploadFile, str, bytes],
+        file: Union[str, bytes, List],
        filename: Optional[str] = None,
        headers: Optional[dict] = None,
-        content_type: Optional[str] = "image/png"  # todo: 适配 mime_type = "application/octet-stream"
+
+        content_type: Optional[str] = None,
+        mime_type: Optional[str] = None
 ):
-    """对象存储"""
-    if not file: return
+    if isinstance(file, list):
+        tasks = [to_url(_, f"{shortuuid.random()}_{filename}", headers) for _ in file]
+        urls = await asyncio.gather(*tasks)
+        return urls
 
-    from openai.types.file_object import FileObject
-    from meutils.oss.minio_oss import Minio
-    uploads = [partial(Minio().put_object_for_openai, filename=filename, content_type=content_type)]
+    if not file: return
 
-    for upload in uploads:
-        try:
-            file = await to_bytes(file, headers=headers)
-            url = await upload(file)
-            if isinstance(url, FileObject):
-                url = url.filename
-            return url
-        except Exception as e:
-            logger.error(e)
+    content_type = content_type or mime_type
+    file = await to_bytes(file, headers=headers)
+    file_url = await Minio().upload(file, filename, content_type=content_type)
+    return file_url
 
 
-async def to_base64(file: Union[UploadFile, str, bytes, list]):
+async def to_base64(file: Union[UploadFile, str, bytes, list], content_type: Optional[str] = None):
    if not file: return
 
    _ = base64.b64encode(await to_bytes(file)).decode('utf-8')
+
+    if content_type:  # "image/png"
+        _ = f"data:{content_type};base64,{_}"
+
    return _
 
 
@@ -256,10 +259,12 @@ if __name__ == '__main__':
    #                 content_type=None))
 
    # arun(to_url_fal(url))
-    print(guess_mime_type("base64xxxxxxxxxxxxxxxxxx.mp4"))
+    # print(guess_mime_type(b"base64xxxxxxxxxxxxxxxxxx.mp4"))
 
-    # arun(to_url(Path('img_1.png').read_bytes()))
+    # arun(to_url([Path('img_1.png').read_bytes()], filename='x.png'))
+    file = "/Users/betterme/PycharmProjects/AI/ppt.txt"
+    # arun(to_url(Path(file).read_bytes(), filename='ppt.txt'))
 
    # arun(markdown_base64_to_url("![Image_0](data:image)"))
 
-
+    arun(to_bytes("https://oss.ffire.cc/files/kling_watermark.png"))
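To make the refactored file helpers concrete, here is a minimal usage sketch, assuming the new signatures shown in the hunks above (`to_url` accepting str/bytes/list and delegating to `Minio().upload`, `to_base64` optionally returning a data URI); the file names below are placeholders.

# Sketch only: exercises the assumed post-refactor API of meutils.io.files_utils.
import asyncio
from pathlib import Path

from meutils.io.files_utils import to_url, to_base64


async def demo():
    # Single payload: bytes go through to_bytes() and Minio().upload(), returning a public URL.
    url = await to_url(Path("img_1.png").read_bytes(), filename="x.png", content_type="image/png")

    # List payload: each element is uploaded concurrently under a shortuuid-prefixed filename.
    urls = await to_url([Path("a.png").read_bytes(), Path("b.png").read_bytes()], filename="batch.png")

    # With an explicit content_type, to_base64 now wraps the base64 string in a data URI.
    data_uri = await to_base64("https://oss.ffire.cc/files/kling_watermark.png", content_type="image/png")
    return url, urls, data_uri


if __name__ == '__main__':
    asyncio.run(demo())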
meutils/llm/completions/assistants/ppt.py CHANGED
@@ -49,6 +49,8 @@ class Completions(object):
             }
         ]
 
+        logger.debug(request)
+
         data = to_openai_params(request)
         return await self.client.chat.completions.create(**data)
 
@@ -59,19 +61,21 @@ if __name__ == '__main__':
         "messages": [
             {
                 "role": "user",
-                "content": [
-                    {
-                        "type": "file",
-                        "file_url": {
-                            "url": "https://mj101-1317487292.cos.ap-shanghai.myqcloud.com/ai/test.pdf"
-                        }
-                    },
-                    {
-                        "type": "text",
-                        # "text": "基于内容写个ppt给我"
-                        "text": "生成PPT"
-                    }
-                ]
+
+                "content": "https://s3.ffire.cc/cdn/20250403/6MxhHmxeX7Z7WYMb8QWqfp_ppt 基于文件做个ppt"
+                # "content": [
+                #     {
+                #         "type": "file",
+                #         "file_url": {
+                #             "url": "https://mj101-1317487292.cos.ap-shanghai.myqcloud.com/ai/test.pdf"
+                #         }
+                #     },
+                #     {
+                #         "type": "text",
+                #         # "text": "基于内容写个ppt给我"
+                #         "text": "生成PPT"
+                #     }
+                # ]
             }
         ],
         # "stream": false
meutils/llm/completions/qwenllm.py CHANGED
@@ -60,7 +60,9 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
     if request.temperature > 1:
         request.temperature = 1
 
-    token = token or await get_next_token_for_polling(feishu_url=FEISHU_URL)
+    token = token or await get_next_token_for_polling(feishu_url=FEISHU_URL, from_redis=True)
+
+    logger.debug(token)
 
     client = AsyncOpenAI(
         base_url=base_url,
@@ -81,6 +83,9 @@ async def create(request: CompletionRequest, token: Optional[str] = None):  # Ch
         request.model = "qwen-max-latest"
         request.messages[-1]['feature_config'] = {"thinking_enabled": True}
 
+    if "omni" in model:
+        request.max_tokens = 2048
+
     # 多模态: todo
     # if any(i in request.model.lower() for i in ("-vl", "qvq")):
     #     # await to_file
@@ -180,7 +185,8 @@ if __name__ == '__main__':
        # model="qwen-turbo-2024-11-01",
        # model="qwen-max-latest",
        # model="qvq-max-2025-03-25",
-        model="qvq-72b-preview-0310",
+        # model="qvq-72b-preview-0310",
+        model="qwen2.5-omni-7b",
 
        # model="qwen-max-latest-search",
        # model="qwq-max",
meutils/notice/feishu.py CHANGED
@@ -20,6 +20,7 @@ Vison = ""
 # AUDIOS_TTS = "https://open.feishu.cn/open-apis/bot/v2/hook/ff7d4b86-d238-436c-9447-f88cf603454d"
 AUDIO = "https://open.feishu.cn/open-apis/bot/v2/hook/80c2a700-adfa-4b9b-8e3f-00b78f2f5c8b"
 FILES = "https://open.feishu.cn/open-apis/bot/v2/hook/075fb2fa-a559-4a7e-89ac-3ab9934ff15c"
+KLING = "https://open.feishu.cn/open-apis/bot/v2/hook/e9a850c2-d171-4637-b976-ee93f7654c40"
 
 
 @background_task
@@ -105,6 +106,7 @@ def catch(
 
 
 send_message_for_images = partial(send_message, url=IMAGES)
+send_message_for_kling = partial(send_message, url=KLING)
 
 httpexception_feishu_url = "https://open.feishu.cn/open-apis/bot/v2/hook/d1c7b67d-b0f8-4067-a2f5-109f20eeb696"
 send_message_for_httpexception = partial(send_message, url=httpexception_feishu_url)
meutils/oss/minio_oss.py CHANGED
@@ -11,6 +11,7 @@ import datetime
 import mimetypes
 
 from meutils.pipe import *
+
 from minio import Minio as _Minio
 from openai.types.file_object import FileObject
 from fastapi import APIRouter, File, UploadFile, Query, Form, BackgroundTasks, Depends, HTTPException, Request, status
@@ -23,7 +24,7 @@ class Minio(_Minio):
                  access_key: Optional[str] = None,
                  secret_key: Optional[str] = None,
                  **kwargs):
-        self.endpoint = endpoint or os.getenv('MINIO_ENDPOINT', 'oss.ffire.cc')  # 默认国内 oss.ffire.cn
+        self.endpoint = endpoint or os.getenv('MINIO_ENDPOINT', 's3.ffire.cc')
         access_key = access_key or os.getenv('MINIO_ACCESS_KEY', 'minio')
         secret_key = secret_key or os.getenv('MINIO_SECRET_KEY')
 
@@ -35,6 +36,35 @@ class Minio(_Minio):
    #     super().list_buckets()
    #     super().list_objects('中职职教高考政策解读.pdf')
    #     return super().list_buckets()
+    async def upload(
+            self,
+            file: bytes,
+            filename: Optional[str] = None,
+
+            content_type: Optional[str] = None,
+
+            bucket_name: str = "cdn",
+
+    ):
+        file_name = filename or shortuuid.random()
+
+        content_type = (
+                content_type
+                or mimetypes.guess_type(file_name)[0]
+                or "application/octet-stream"
+        )
+
+        object_name = f"""{datetime.datetime.now().strftime("%Y%m%d")}/{file_name}"""
+        _ = await self.aput_object(
+            bucket_name,
+            object_name=object_name,
+            content_type=content_type,
+
+            data=io.BytesIO(file),
+            length=len(file),
+        )
+
+        return f"https://{self.endpoint}/{bucket_name}/{object_name}"
 
     async def put_object_for_openai(
             self,
@@ -89,7 +119,7 @@ class Minio(_Minio):
         )
 
         logger.debug(f"content_type: {content_type}")
-        object_name = f"""{datetime.datetime.now().strftime("%Y-%m-%d")}/{file_name}"""
+        object_name = f"""{datetime.datetime.now().strftime("%Y%m%d")}/{file_name}"""
         _ = await self.aput_object(
             bucket_name,
             object_name=object_name,
@@ -177,10 +207,10 @@ if __name__ == '__main__':
    # )
    # print(arun(_, debug=True))
 
-    _ = client.put_object_for_openai(
-        url,
-        filename='cff.png'
-    )
-    arun(_)
-
+    # _ = client.put_object_for_openai(
+    #     url,
+    #     filename='cff.png'
+    # )
+    # arun(_)
 
+    f = client.upload(Path("/Users/betterme/PycharmProjects/AI/qun.png").read_bytes(), filename='x.png')
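Since the new `upload` method is async, the `__main__` line above only builds the coroutine; a runnable call looks roughly like the sketch below. Bucket, endpoint default and URL shape are taken from this diff; the local path is a placeholder.

# Sketch only: calling the new Minio.upload added in this version.
import asyncio
from pathlib import Path

from meutils.oss.minio_oss import Minio


async def main():
    client = Minio()  # endpoint falls back to MINIO_ENDPOINT or "s3.ffire.cc" per this diff
    data = Path("qun.png").read_bytes()  # placeholder file

    # Stored as cdn/<YYYYMMDD>/x.png; the return value is the public URL,
    # e.g. https://s3.ffire.cc/cdn/20250408/x.png
    url = await client.upload(data, filename="x.png", content_type="image/png")
    print(url)


if __name__ == '__main__':
    asyncio.run(main())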
meutils/oss/minio_utils.py CHANGED
@@ -14,7 +14,7 @@ minioClient = Minio(
     endpoint=os.getenv('MINIO_ENDPOINT'),
     access_key=os.getenv('MINIO_ACCESS_KEY'),
     secret_key=os.getenv('MINIO_SECRET_KEY'),
-    # secure=False
+    # secure=False,
 )
 
 # Make a bucket with the make_bucket API call.
@@ -31,7 +31,7 @@ print(minioClient.list_buckets())
 # minioClient.fput_object(bucket_name, 'file.py', 'file.py')
 
 
-minioClient.fput_object(bucket_name, 'x/img','img.png', content_type='image/png')
+# minioClient.fput_object(bucket_name, 'x/img','img.png', content_type='image/png')
 
 
 # url = minioClient.get_presigned_url(
meutils/schemas/jimeng_types.py CHANGED
@@ -28,5 +28,5 @@ MODELS_MAP = {
     "jimeng-1.4": "high_aes_general_v14:general_v1.4",
     "jimeng-xl-pro": "text2img_xl_sft",
 
-    "default": "high_aes_general_v21_L:general_v2.1_L",
+    "default": "high_aes_general_v30l:general_v3.0_18b"
 }
meutils/schemas/oneapi/common.py CHANGED
@@ -633,6 +633,7 @@ MODEL_RATIO = {
     "anthropic/claude-3.5-sonnet:beta": 4,  # 1022
 
     "claude-3-7-sonnet-thinking": 1.5,
+    "claude-3-7-sonnet-20250219-thinking": 1.5,
     "claude-3-7-sonnet-latest": 1.5,
     "claude-3-7-sonnet-20250219": 1.5,
 
@@ -719,9 +720,12 @@ MODEL_RATIO = {
     "chatgpt-4o-latest": 2.5,
     "gpt-4o-realtime-preview": 2.5,
     "gpt-4o-realtime-preview-2024-10-01": 2.5,
-    "gpt-4o-audio-preview": 2.5 / 2,
+
     "gpt-4o-mini-audio-preview": 0.15 / 2,
+
+    "gpt-4o-audio-preview": 2.5 / 2,
     "gpt-4o-audio-preview-2024-12-17": 2.5 / 2,
+    "gemini-2.0-flash-audio": 0.15 / 2,
 
     "gpt-4o": 1.25,
     "gpt-4o-all": 2.5,  # 逆向
@@ -735,9 +739,10 @@ MODEL_RATIO = {
     "o1": 7.5,
     "o1-2024-12-17": 7.5,
 
-    "o1-mini": 1.5,
+    "o1-mini": 0.55,
+    "o1-mini-2024-09-12": 0.55,
+
     "o1-preview": 7.5,
-    "o1-mini-2024-09-12": 1.5,
     "o1-preview-2024-09-12": 7.5,
     "o3-mini": 0.55,
 
@@ -752,7 +757,8 @@ MODEL_RATIO = {
     "meta-llama/Llama-3.3-70B-Instruct": 2,
     "llama-3.3-70b-instruct": 2,
 
-    "hunyuan-a52b-instruct": 2.5,  # 10.5
+    "meta-llama/Llama-4-Scout-17B-16E-Instruct": 0.1,
+    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": 0.2,
 
     # groq https://console.groq.com/docs/models
     "llama3-8b-8192": 0.01,
@@ -828,6 +834,12 @@ COMPLETION_RATIO = {
     "gpt-4o-realtime-preview-2024-10-01": 4,
     "gpt-4o-2024-11-20": 4,
 
+    "gpt-4o-mini-audio-preview": 4,
+
+    "gpt-4o-audio-preview": 4,
+    "gpt-4o-audio-preview-2024-12-17": 4,
+    "gemini-2.0-flash-audio": 4,
+
     # claude
     "claude-3-opus-20240229": 5,
     "anthropic/claude-3-opus:beta": 5,  # openrouter
@@ -845,6 +857,9 @@ COMPLETION_RATIO = {
     "llama-3.1-405b-instruct": 6,
     "meta-llama/Meta-Llama-3.1-405B-Instruct": 6,
 
+    "meta-llama/Llama-4-Scout-17B-16E-Instruct": 3,
+    "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": 3,
+
     "llama-3.3-70b-instruct": 4,
 
     "command": 4,
@@ -1212,5 +1227,7 @@ if __name__ == '__main__':
     from meutils.apis.oneapi import option, channel
 
     option()
-    #
-    arun(channel.edit_channel(MODEL_PRICE))
+    # #
+    # arun(channel.edit_channel(MODEL_PRICE))
+
+    print(bjson({k: v * 6 for k, v in MODEL_RATIO.items() if k.startswith('claude')}))
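For context on how these ratio tables are usually consumed (an assumption based on the common one-api convention of ratio 1 ≈ $0.002 per 1K prompt tokens, not something this diff states), a rough sketch of the implied per-token pricing:

# Sketch only: assumed one-api pricing convention, not code from this package.
MODEL_RATIO = {"gpt-4o-audio-preview": 2.5 / 2}   # value copied from the hunk above
COMPLETION_RATIO = {"gpt-4o-audio-preview": 4}


def usd_per_million_tokens(model: str):
    prompt = MODEL_RATIO[model] * 0.002 * 1000             # assumed: ratio 1 == $0.002 per 1K prompt tokens
    completion = prompt * COMPLETION_RATIO.get(model, 1)   # completion price scales by COMPLETION_RATIO
    return prompt, completion


print(usd_per_million_tokens("gpt-4o-audio-preview"))  # (2.5, 10.0)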
meutils/schemas/openai_types.py CHANGED
@@ -127,30 +127,43 @@ class CompletionRequest(BaseModel):
 
     @cached_property
     def last_user_content(self) -> str:
+        """text"""
         for i, message in enumerate(self.messages[::-1], 1):
             if message.get("role") == "user":
                 contents = message.get("content")
                 if isinstance(contents, list):
                     for content in contents:
-                        return content.get('text', "")
+                        if content.get("type") == "text":
+                            return content.get('text', "")
+
                 else:
                     return str(contents)
+        return ""
 
     @cached_property
     def last_assistant_content(self) -> str:
+        """text"""
         for i, message in enumerate(self.messages[::-1], 1):
             if message.get("role") == "assistant":
                 contents = message.get("content")
                 if isinstance(contents, list):
                     for content in contents:
-                        return content.get('text', "")
+                        if content.get("type") == "text":
+                            return content.get('text', "")
                 else:
                     return str(contents)
+        return ""
 
     @cached_property
     def last_urls(self):  # file_url 多轮对话需要 sum(request.last_urls.values(), [])
         """最新一轮的 user url 列表"""
-        content_types = {"image_url", "file", "file_url", "video_url", "audio_url"}
+        content_types = {
+            "image_url",
+            "file", "file_url",
+            "audio_url", "input_audio",
+
+            "video_url",
+        }
         for i, message in enumerate(self.messages[::-1], 1):
             data = {}
             if message.get("role") == "user":  # 每一轮还要处理
@@ -159,11 +172,12 @@ class CompletionRequest(BaseModel):
                 for content in user_contents:
                     content_type = content.get("type")
                     if content_type in content_types:
+                        # logger.debug(content)
                         if _url := content.get(content_type, {}):  # {"type": "file", "file": fileid}
                             if isinstance(_url, str):  # 兼容了spark qwenai
                                 url = _url
                             else:
-                                url = _url.get("url")
+                                url = _url.get("url") or _url.get("data")
                             url and data.setdefault(content_type, []).append(url)
 
         if data: return data
@@ -523,39 +537,52 @@ if __name__ == '__main__':
                {},
                {'role': 'user', 'content': '总结'}]
 
-    mesages = [{'role': 'system',
-                'content': 'undefined\n Current date: 2025-03-13'},
-               {'role': 'user',
-                'content': [{'type': 'text', 'text': '一句话总结'},
-                            {'type': 'image_url',
-                             'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
-               {},
-               {'role': 'user',
-                'content': [{'type': 'text', 'text': '一句话总结'},
-                            {'type': 'image_url',
-                             'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
-               {},
-               {'role': 'user',
-                'content': [{'type': 'text', 'text': '一句话总结'},
-                            {'type': 'image_url',
-                             'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
-               {},
-               {'role': 'user',
-                'content': [{'type': 'text', 'text': '一句话总结'},
-                            {'type': 'image_url',
-                             'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
-               {},
-               {'role': 'user',
-                'content': [{'type': 'text', 'text': '一句话总结'},
-                            {'type': 'image_url',
-                             'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
-               {},
-               {'role': 'user',
-                'content': [{'type': 'text', 'text': '一句话总结'},
-                            {'type': 'image_url',
-                             'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
-               {},
-               {'role': 'user', 'content': '总结'}]
+    messages = [{'role': 'system',
+                 'content': 'undefined\n Current date: 2025-03-13'},
+                {'role': 'user',
+                 'content': [{'type': 'text', 'text': '一句话总结'},
+                             {'type': 'image_url',
+                              'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
+                {},
+                {'role': 'user',
+                 'content': [{'type': 'text', 'text': '一句话总结'},
+                             {'type': 'image_url',
+                              'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
+                {},
+                {'role': 'assistant',
+                 'content': [{'type': 'text', 'text': '一句话总结'},
+                             {'type': 'image_url',
+                              'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
+                {},
+                {'role': 'user',
+                 'content': [{'type': 'text', 'text': '一句话总结'},
+                             {'type': 'image_url',
+                              'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
+                {},
+                {'role': 'user',
+                 'content': [{'type': 'text', 'text': '一句话总结'},
+                             {'type': 'image_url',
+                              'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'}]},
+                {},
+                {'role': 'user',
+                 'content': [
+                     {'type': 'image_url',
+                      'image_url': 'https://ai.chatfire.cn/files/images/uniacess删除-1741853464016-b176842ad.txt'},
+                     {
+                         "type": "input_audio",
+                         "input_audio": {
+                             "data": "base64_audio",
+                             "format": "wav"
+                         }
+                     },
+                     {'type': 'text', 'text': '一句话总结'},
+
+                 ],
+
+                 },
+                {},
+                # {'role': 'user', 'content': '总结'}
+                ]
     #
     # r = ChatCompletionRequest(model="gpt-3.5-turbo", messages=messages)
     # r.messages[-1]['content'] = [{"type": "image_url", "image_url": {"url": r.urls[-1]}}]
@@ -566,7 +593,10 @@ if __name__ == '__main__':
     # print(chat_completion_chunk_stop)
 
     # print(CompletionRequest(messages=messages).last_urls)
-    # print(CompletionRequest(messages=messages).last_user_content)
+    # print(CompletionRequest(messages=messages).last_urls)
 
-    print(chat_completion_chunk)
-    print(chat_completion)
+    # print(mesages)
+    print(CompletionRequest(messages=messages).last_assistant_content)
+
+    # print(chat_completion_chunk)
+    # print(chat_completion)
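To illustrate what the `CompletionRequest` changes buy in practice, a hedged sketch follows (message content is illustrative, the URL is a placeholder): `last_user_content` now skips non-text parts instead of returning the first part blindly, and `last_urls` recognizes `input_audio` parts via their `data` field.

# Sketch only: behaviour implied by the openai_types.py hunks above.
from meutils.schemas.openai_types import CompletionRequest

messages = [
    {"role": "user",
     "content": [
         {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
         {"type": "input_audio", "input_audio": {"data": "base64_audio", "format": "wav"}},
         {"type": "text", "text": "一句话总结"},
     ]},
]

request = CompletionRequest(messages=messages)

# Before this change the image part was hit first and an empty 'text' came back;
# with the type check the actual text part is returned.
print(request.last_user_content)  # -> 一句话总结

# "input_audio" is now in content_types, and _url.get("data") covers its payload.
print(request.last_urls)  # -> {'image_url': ['https://example.com/cat.png'], 'input_audio': ['base64_audio']}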