MeUtils 2025.2.25.19.48.56__py3-none-any.whl → 2025.3.1.12.23.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -17,8 +17,10 @@ AttributeError: 'str' object has no attribute 'choices'
  from openai import AsyncOpenAI

  from meutils.pipe import *
- from meutils.llm.clients import qwen_client
+ from meutils.io.files_utils import to_bytes
+ from meutils.decorators.retry import retrying

+ from meutils.llm.clients import qwen_client
  from meutils.llm.openai_utils import to_openai_params

  from meutils.config_utils.lark_utils import get_next_token_for_polling
@@ -33,7 +35,16 @@ from fake_useragent import UserAgent
  ua = UserAgent()


+ @retrying()
+ async def to_file(file, mime_type: str = "application/octet-stream"):
+     file_bytes = await to_bytes(file)
+     file = ("filename", file_bytes, mime_type)
+     file_object = await qwen_client.files.create(file=file, purpose="file-extract")
+     return file_object
+
+
  async def create(request: ChatCompletionRequest, token: Optional[str] = None):  # ChatCompletionRequest refactor
+
      token = token or await get_next_token_for_polling(feishu_url=FEISHU_URL)

      client = AsyncOpenAI(
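
Note on the hunk above: the new `to_file` helper wraps the Qwen file upload (`qwen_client.files.create(..., purpose="file-extract")`) in the project's `retrying` decorator so transient upload failures are retried. A minimal usage sketch, assuming `arun` is in scope via `from meutils.pipe import *`; the local path and MIME type are illustrative:

    # sketch only: upload a local PDF and read back the server-side file id
    file_object = arun(to_file("/path/to/example.pdf", mime_type="application/pdf"))
    print(file_object.id)
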
@@ -44,18 +55,35 @@ async def create(request: ChatCompletionRequest, token: Optional[str] = None):

      # qwen structure
      if any(i in request.model.lower() for i in ("search",)):
-         request.model = "qwen-plus-latest"
+         request.model = "qwen-max-latest"
          request.messages[-1]['chat_type'] = "search"

      if any(i in request.model.lower() for i in ("qwq", "think")):
-         request.model = "qwen-plus-latest"
+         request.model = "qwen-max-latest"
          request.messages[-1]['feature_config'] = {"thinking_enabled": True}

      # multimodal: todo
-     # file_object = await qwen_client.files.create(file=(file.filename, file.file), purpose="file-extract")
+     # if any(i in request.model.lower() for i in ("-vl", "qvq")):
+     #     # await to_file
+     last_message = request.messages[-1]
+     logger.debug(last_message)
+     if last_message.get("role") == "user":
+         user_content = last_message.get("content")
+         if isinstance(user_content, list):
+             for i, content in enumerate(user_content):
+                 if content.get("type") == 'file_url':  # image_url file_url video_url
+                     url = content.get(content.get("type")).get("url")
+                     file_object = await to_file(url)
+
+                     user_content[i] = {"type": "file", "file": file_object.id}
+
+                 elif content.get("type") == 'image_url':
+                     url = content.get(content.get("type")).get("url")
+                     file_object = await to_file(url, "image/png")
+
+                     user_content[i] = {"type": "image", "image": file_object.id}

      data = to_openai_params(request)
-
      if request.stream:
          _chunk = ""
          async for chunk in await client.chat.completions.create(**data):
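
Note on the hunk above: the new multimodal branch walks the last user message and, for `file_url` / `image_url` parts, uploads the referenced URL through `to_file` and swaps the part for Qwen's id-based form. A hedged sketch of the rewrite applied to one content part; the URL is illustrative and the id is copied from the example request further down:

    # before: URL-style part as sent by the caller
    part = {"type": "file_url", "file_url": {"url": "https://example.com/report.pdf"}}

    # after: the same slot once `await to_file(url)` has returned a file object
    part = {"type": "file", "file": "2d677df1-45b2-4f30-829f-0d42b2b07136"}
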
@@ -83,10 +111,40 @@ if __name__ == '__main__':
      #     "qwen2.5-72b-instruct",
      #     "qwen2.5-32b-instruct"
      # ]
+
+     # user_content = [
+     #     {
+     #         "type": "text",
+     #         "text": "解读图片"
+     #     },
+     #     {
+     #         "type": "image_url",
+     #         "image_url": {
+     #             "url": "https://fyb-pc-static.cdn.bcebos.com/static/asset/homepage@2x_daaf4f0f6cf971ed6d9329b30afdf438.png"
+     #         }
+     #     }
+     # ]
+
+     user_content = [
+         {
+             "type": "text",
+             "text": "总结下"
+         },
+         {
+             "type": "file_url",
+             "file_url": {
+                 "url": "https://oss.ffire.cc/files/%E6%8B%9B%E6%A0%87%E6%96%87%E4%BB%B6%E5%A4%87%E6%A1%88%E8%A1%A8%EF%BC%88%E7%AC%AC%E4%BA%8C%E6%AC%A1%EF%BC%89.pdf"
+             }
+         }
+
+     ]
+
      request = ChatCompletionRequest(
          # model="qwen-turbo-2024-11-01",
-         # model="qwen-max-latest",
-         model="qwen-max-latest-search",
+         model="qwen-max-latest",
+         # model="qwen-max-latest-search",
+         # model="qwq-max",
+         # model="qwen2.5-vl-72b-instruct",

          # model="qwen-plus-latest",

@@ -94,24 +152,45 @@ if __name__ == '__main__':
              {
                  'role': 'user',
                  # 'content': '今天南京天气',
+                 # 'content': "9.8 9.11哪个大",
                  # 'content': '总结下',

                  # "chat_type": "search",

-                 "content": [
-                     {
-                         "type": "text",
-                         "text": "总结下",
-                         "chat_type": "t2t",
-                         "feature_config": {
-                             "thinking_enabled": False
-                         }
-                     },
-                     {
-                         "type": "file",
-                         "file": "2d677df1-45b2-4f30-829f-0d42b2b07136"
-                     }
-                 ]
+                 'content': user_content,
+
+                 # "content": [
+                 #     {
+                 #         "type": "text",
+                 #         "text": "总结下",
+                 #         "chat_type": "t2t",
+                 #         "feature_config": {
+                 #             "thinking_enabled": False
+                 #         }
+                 #     },
+                 #     {
+                 #         "type": "file",
+                 #         "file": "2d677df1-45b2-4f30-829f-0d42b2b07136"
+                 #     }
+                 # ]
+
+                 # "content": [
+                 #     {
+                 #         "type": "text",
+                 #         "text": "总结下",
+                 #         # "chat_type": "t2t"
+                 #
+                 #     },
+                 #     {
+                 #         "type": "image",
+                 #         "image": "703dabac-b0d9-4357-8a85-75b9456df1dd"
+                 #     },
+                 #     {
+                 #         "type": "image",
+                 #         "image": "https://oss.ffire.cc/files/kling_watermark.png"
+                 #
+                 #     }
+                 # ]

              },

@@ -120,3 +199,5 @@ if __name__ == '__main__':

      )
      arun(create(request))
+
+     # arun(to_file("/Users/betterme/PycharmProjects/AI/MeUtils/meutils/llm/completions/yuanbao.py"))
@@ -81,19 +81,6 @@ MODEL_PRICE = {
      "api-images-cogview-3": 0.01,
      "api-images-stable-diffusion": 0.01,

-     "api-images-flux1.0-turbo": 0.02,
-     "api-images-flux-schnell": 0.01,
-     "api-images-flux1.0-schnell": 0.01,
-     "api-images-flux1.0-dev": 0.05,
-     "api-images-flux.1-dev": 0.05,
-
-     "api-images-flux-pro-max": 0.1,
-     "api-images-flux1.0-pro": 0.1,
-     "api-images-flux.1-pro": 0.1,
-     "api-images-flux1.1-pro": 0.1,
-     "api-images-flux.1.1-pro": 0.1,
-     "api-images-recraftv3": 0.03,
-
      "api-images-seededit": 0.1,
      "seededit": 0.1,
      "chat-seededit": 0.1,
@@ -102,9 +89,14 @@ MODEL_PRICE = {

      # images / audio / video
      "recraftv3": 0.1,  # official
+     "recraft-v3": 0.1,  # d3
      "recraft-api": 0.1,
      "chat-recraftv3": 0.1,

+     "flux-pro-1.1-ultra": 0.2,
+     "ideogram-ai/ideogram-v2": 0.2,
+     "ideogram-ai/ideogram-v2-turbo": 0.1,
+
      "api-asr": 0.01,
      "api-stt": 0.01,
      "api-tts": 0.01,
@@ -112,8 +104,6 @@ MODEL_PRICE = {
      "kolors": 0.02,
      "kling": 0.02,

-     "ideogram": 0.3,
-
      "api-hunyuan-video": 0.1,

      "deepseek-ai/Janus-Pro-7B": 0.01,
@@ -219,11 +209,14 @@ MODEL_PRICE = {
      "api-translator": 0.0001,
      "api-voice-clone": 0.01,

+     # suno
      "suno_music": 0.3,
      "suno_lyrics": 0.01,
      "suno_uploads": 0.01,
      "suno_upload": 0.01,
      "suno_concat": 0.01,
+     "chirp-v3-5": 0.5,
+     "chat-suno": 0.5,

      # all
      "o1-plus": 0.2,
@@ -638,11 +631,11 @@ MODEL_RATIO = {
      "gemini-2.0-flash": 0.0625,
      "gemini-2.0-flash-001": 0.0625,
      "gemini-2.0-flash-lite-preview-02-05": 0.0625,
+     "gemini-2.0-flash-exp": 0.0625,

      "gemini-2.0-pro": 1.25,
      "gemini-2.0-pro-exp-02-05": 1.25,

-     "gemini-2.0-flash-exp": 0.5,
      "gemini-2.0-flash-thinking-exp": 1,
      "gemini-2.0-flash-thinking-exp-1219": 1,
      "gemini-2.0-flash-thinking-exp-01-21": 1,
@@ -689,6 +682,7 @@ MODEL_RATIO = {
      "gpt-4o-mini-2024-07-18": 0.075,
      "gpt-4o-2024-08-06": 1.25,
      "gpt-4o-2024-11-20": 1.25,
+     "gpt-4.5-preview-2025-02-27": 37.5,

      "o1": 7.5,
      "o1-2024-12-17": 7.5,
@@ -716,14 +710,12 @@ MODEL_RATIO = {
      "llama3-8b-8192": 0.01,
      "llama3-70b-8192": 0.01,
      "mixtral-8x7b-32768": 0.01,
-     "gemma-7b-it": 0.01,
      "llama-3.1-8b-instant": 0.01,
      "llama-3.1-70b-versatile": 3,

      # sili
-     "gemma2-9b-it": 0.01,
-     "gemma2-27b-it": 0.63,
-     "google/gemma-2-27b-it": 0.09,
+     "gemma2-9b-it": 0.1,
+     "gemma2-27b-it": 0.5,

      "internlm2_5-7b-chat": 0.01,
      'internlm2_5-20b-chat': 0.5,
@@ -774,6 +766,7 @@ COMPLETION_RATIO = {
      "gpt-4-all": 4,
      "gpt-4-gizmo-*": 4,
      "gpt-4o-all": 4,
+     "gpt-4.5-preview-2025-02-27": 2,

      "o1-mini": 4,
      "o1-preview": 4,
@@ -844,6 +837,9 @@ COMPLETION_RATIO = {
      "gemini-2.0-pro": 4,
      "gemini-2.0-pro-exp-02-05": 4,

+     "gemma2-9b-it": 4,
+     "gemma2-27b-it": 4,
+
      "hunyuan-a52b-instruct": 5,
      "qwen2.5-coder-32b-instruct": 3,

@@ -123,7 +123,6 @@ class CompletionRequest(BaseModel):

      system_messages: Optional[list] = None
      last_content: Optional[Any] = None
-     urls: List[str] = []

      def __init__(self, **kwargs):
          super().__init__(**kwargs)