MeUtils 2024.12.2.11.50.27__py3-none-any.whl → 2024.12.5.13.7.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -61,13 +61,21 @@ async def get_access_token(token: str):
     token, last_active_session_id = await get_refresh_token(token)  # last_active_token is not really used

     headers = {
-        "Cookie": f"__client={token}"
+        "Cookie": f"__client={token}",
+    }
+
+    params = {
+        "__clerk_api_version": "2021-02-05",
+        "_clerk_js_version": "5.35.1"
     }
+
     async with httpx.AsyncClient(base_url=CLIENT_BASE_URL, headers=headers, timeout=60) as client:
-        response = await client.post(f"/sessions/{last_active_session_id}/tokens")
-        if response.is_success:
-            return response.json().get('jwt')
+        response = await client.post(f"/sessions/{last_active_session_id}/tokens", params=params)
+        response.raise_for_status()
+        logger.debug(bjson(response.json()))
+        return response.json().get('jwt')

+# /tokens?__clerk_api_version=2021-02-05&_clerk_js_version=5.35.1


 @retrying(predicate=lambda r: not r)
 async def create_task(request: SunoAIRequest, token: Optional[str] = None):
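For reference, the updated token exchange amounts to the standalone request below. This is a minimal sketch, assuming CLIENT_BASE_URL points at the Clerk client API host configured elsewhere in the module (the constant's value is not shown in this hunk), with the logging helpers omitted:

import httpx

async def fetch_jwt(client_cookie: str, session_id: str, base_url: str) -> str:
    # Exchange the rotating __client cookie plus the active session id for a short-lived JWT.
    headers = {"Cookie": f"__client={client_cookie}"}
    params = {"__clerk_api_version": "2021-02-05", "_clerk_js_version": "5.35.1"}
    async with httpx.AsyncClient(base_url=base_url, headers=headers, timeout=60) as client:
        response = await client.post(f"/sessions/{session_id}/tokens", params=params)
        response.raise_for_status()  # surface 4xx/5xx instead of silently returning None
        return response.json().get("jwt")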
@@ -314,9 +322,9 @@ async def create_task_for_cover(cover_from_url, lyrics):
 if __name__ == '__main__':
     # token = os.getenv("SUNO_API_KEY")
     # token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8yaGlHSTlCZFVwOUdZcUlGM3ZmTU1IT25SNFAiLCJyb3RhdGluZ190b2tlbiI6ImltOWMzOGJ4bnV2OThiZXplMW8yOG1zd2Y2c3lrdzd6YnM2ejJubHkifQ.SnC8-G2LVQztTiA2davFS413mQIaBmRFDzIw1JmvHg4UOMXq95z0CgbfK8Gx8Zv-FXdpKVqkamiNTzZP9qsLOSgREqCSSq5bmA6SPIWx-R6dj1PMDFRX-qv5qGyyPe4sadF6wnr45MS9859148gRmr_Go8rAT_7Hu0DKySextl-Xbs6ClDaYYUyyV3HudWQh4F8jwvxkyer05AgN6smQH5eZI-NRKVgZn_i6Mtl8IJz8R1fzD2YNIcvH4QC4qGhrg9n74ljIeORCMsoJzW2SBZa4QWWDx_0VYs-tA_Z43bqwN_2ojMGM63fm2hLOZmwf6S1LQy9_O6UdcUQiEs__OA"
-
+    token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8ybEIwRFlXTWYya3NxNWlxazl4S2dZbmQ0eVciLCJyb3RhdGluZ190b2tlbiI6Im5vejdpMzlvY2YzMTEzaHp6aDZwY2N5MzltZm5xNmZvdnhtOG9oNngifQ.qUeBLl-NxKzhpoUBpo_EwkHH0qwIsG0nMSD_yvHv5EK7YfybupoPWw8SpSKDhmZu5K_KsgdOF0RQH22jll4U-x0BwfVu1ze-GBxjnNEoSerUB7hu1cfvmg7xMH8rHJQig2TWE2h0hzP6dMajPHQWRTltbb5MMkKHgBFj0CiAFqaGwzSSvAtERwwHBIK3KalbaV1oyd6DJYG4FrVgQLubkp7VXj11LszxD6qXklRhsc9h55kvYASDPHhnZJi9u2QfIbiKVkraXb6ShqDmNtqXbj22p6g2R9fwMEB-m68S7QSZyAWRArWzsSZujzhxmNuMGbuVX1v7op7F3hA2zFphYw"
     # print(arun(get_refresh_token(token)))
-    # print(arun(get_access_token("token")))
+    print(arun(get_access_token(token)))

     # arun(generate_lyrics(prompt=''))

@@ -339,9 +347,9 @@ if __name__ == '__main__':
     # token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8yajM1dkNXaTE1b0VQWnlUU0RMMnBsU3RiMVQiLCJyb3RhdGluZ190b2tlbiI6Ijd5MDV0cG1nMWJpbjBmaGIxZW1kYzZoazQzbDRuaG84bmJ4Yzc1dzgifQ.LsEfuPgwXu33f_UD2pRY4HjHIwU_rPG2rvG46BVDKqXcPmhRTWl5LjKgFrSzU51tgxfG-wMopVJhRxgS6YZUMtKVojDFtV_ZImyJ30u6LYA5nSbrkhqUrBdU4P5WmkL9irvh4sGPtvv8ML_pyXzsittsDrNnDCtm_isacOD-Fy3VKOCOjWj4W3qUUdnmBvTeeUbrnepQqurAjYPg6Ug-WkR43xZ5tWtxxTQ4ebtZglgRyCbyF3TM9XMKa67FTwHQsja8cLo2CyGRzb89e3uuF8Na_CY17ZlxJyQ2p_FZmL0egWr0EveZFeVzIUUs704pkhKd-RC9Q47Jqcg8qdiRYA"
     # arun(generate_lyrics('hi', token))
     # arun(generate_lyrics())
-    task_id = "0b017d4e-c559-4cc6-9339-9cd53aa25af4"
-    token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8ybEIwRFlXTWYya3NxNWlxazl4S2dZbmQ0eVciLCJyb3RhdGluZ190b2tlbiI6Im5vejdpMzlvY2YzMTEzaHp6aDZwY2N5MzltZm5xNmZvdnhtOG9oNngifQ.qUeBLl-NxKzhpoUBpo_EwkHH0qwIsG0nMSD_yvHv5EK7YfybupoPWw8SpSKDhmZu5K_KsgdOF0RQH22jll4U-x0BwfVu1ze-GBxjnNEoSerUB7hu1cfvmg7xMH8rHJQig2TWE2h0hzP6dMajPHQWRTltbb5MMkKHgBFj0CiAFqaGwzSSvAtERwwHBIK3KalbaV1oyd6DJYG4FrVgQLubkp7VXj11LszxD6qXklRhsc9h55kvYASDPHhnZJi9u2QfIbiKVkraXb6ShqDmNtqXbj22p6g2R9fwMEB-m68S7QSZyAWRArWzsSZujzhxmNuMGbuVX1v7op7F3hA2zFphYw"
-    arun(get_task(task_id=task_id, token=token))
+    # task_id = "0b017d4e-c559-4cc6-9339-9cd53aa25af4"
+    # token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8ybEIwRFlXTWYya3NxNWlxazl4S2dZbmQ0eVciLCJyb3RhdGluZ190b2tlbiI6Im5vejdpMzlvY2YzMTEzaHp6aDZwY2N5MzltZm5xNmZvdnhtOG9oNngifQ.qUeBLl-NxKzhpoUBpo_EwkHH0qwIsG0nMSD_yvHv5EK7YfybupoPWw8SpSKDhmZu5K_KsgdOF0RQH22jll4U-x0BwfVu1ze-GBxjnNEoSerUB7hu1cfvmg7xMH8rHJQig2TWE2h0hzP6dMajPHQWRTltbb5MMkKHgBFj0CiAFqaGwzSSvAtERwwHBIK3KalbaV1oyd6DJYG4FrVgQLubkp7VXj11LszxD6qXklRhsc9h55kvYASDPHhnZJi9u2QfIbiKVkraXb6ShqDmNtqXbj22p6g2R9fwMEB-m68S7QSZyAWRArWzsSZujzhxmNuMGbuVX1v7op7F3hA2zFphYw"
+    # arun(get_task(task_id=task_id, token=token))

     # arun(get_credits(token))
     # arun(check_token(token))
meutils/apis/textin.py CHANGED
@@ -112,6 +112,7 @@ if __name__ == '__main__':

     # response = requests.request("POST", url, data=data)
     data = open("/Users/betterme/PycharmProjects/AI/qun.png", 'rb').read()
+    data = open("img.png", 'rb').read()
     # data = arun(to_bytes("https://cdn.meimeiqushuiyin.cn/ori/tmp_e8d6329e1b2c1bc541ca530fcbae14e3ec12f65d8d4ec97d.jpg"))

     from meutils.schemas.task_types import Purpose
@@ -121,6 +122,8 @@ if __name__ == '__main__':

     # service = "pdf_to_markdown"

+    service = 'dewarp'
+
     async def main(n=1):
         for i in tqdm(range(n)):
             try:
@@ -99,13 +99,21 @@ async def get_task(
             data=data
         )

-        if remote_get_task:  # fetch the remote task, todo: check the callable type
+        if remote_get_task:
+            if inspect.iscoroutinefunction(remote_get_task):
+                remote_task_response = await remote_get_task(remote_task_id, token)
+            else:
+                remote_task_response = remote_get_task(remote_task_id, token)

-            remote_task_response = await remote_get_task(remote_task_id, token)
             if not isinstance(remote_task_response, dict):
                 remote_task_response = remote_task_response.model_dump()

+            # logger.debug(response)
+            # logger.debug(remote_task_response)
+
             response.__dict__.update(remote_task_response)  # update response
+            # logger.debug(response)
+

     else:
         response = TaskResponse(
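The change above lets remote_get_task be either a plain function or a coroutine function. A minimal standalone sketch of that dispatch pattern (the names here are illustrative, not the module's own):

import asyncio
import inspect

async def call_getter(getter, task_id: str, token: str):
    # Await coroutine functions, call plain functions directly.
    if inspect.iscoroutinefunction(getter):
        return await getter(task_id, token)
    return getter(task_id, token)

# usage sketch with a plain callable
print(asyncio.run(call_getter(lambda task_id, token: {"status": "SUCCESS"}, "task-1", "tok")))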
@@ -147,13 +155,13 @@ async def update_oneapi_from_response(task: OneapiTask, task_response: TaskRespo
     These fields are needed: user_id

     """
-    if task.status in {"SUCCESS", "FAILURE"}: return False  # stop polling, no further updates
+    # if task.status in {"SUCCESS", "FAILURE"}: return False  # stop polling, no further updates

     task.data = task_response.model_dump(exclude={"system_fingerprint"})
     task.status = task_response.status
     task.progress = time.time() // 10 % 100

-    if task.status == "SUCCESS":
+    if task.status == "SUCCESS":  ###### todo: align statuses
         task.progress = "100%"
     elif task.status == "FAILURE":
         task.fail_reason = "查看详情"
@@ -167,7 +175,7 @@ if __name__ == '__main__':

     from meutils.apis.kling import kolors_virtual_try_on

-    task_id = "03f8990f-a196-4af3-88cb-727750e462d21"
+    task_id = "31b9bb1b-db59-4a94-9027-2ded2e4f24aa"
     # filter_kwargs = {
     #     "task_id": task_id,  ######### in theory only this is needed
     #     "user_id": 1,
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : config_manager
+# @Time : 2024/12/4 12:07
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+
+import nacos
+import yaml
+
+from meutils.pipe import *
+
+class ConfigManager:
+    _instance = None
+    _config: Dict[str, Any] = {}
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self):
+        # Nacos client configuration
+        self.client = nacos.NacosClient(
+            server_addresses="nacos.chatfire.cc",  # Nacos server address
+            namespace="test",  # namespace
+            username="chatfire",  # username
+            password="chatfirechatfire"  # password
+        )
+
+    def init_config(self, data_id: str, group: str = "DEFAULT_GROUP"):
+        """Initialize the config and register a listener"""
+        # fetch the initial config
+        config = self.client.get_config(data_id, group)
+        logger.debug(config)
+
+        if config:
+            self._config = yaml.safe_load(config)
+
+        # register a config-change watcher
+        self.client.add_config_watcher(
+            data_id,
+            group,
+            self._config_changed_callback
+        )
+
+    def _config_changed_callback(self, args):
+        """Config-change callback"""
+        print(f"配置发生变更: {args}")
+        try:
+            self._config = yaml.safe_load(args['content'])
+            print(f"最新配置: {self._config}")
+        except Exception as e:
+            print(f"配置更新失败: {e}")
+
+    @property
+    def config(self) -> Dict[str, Any]:
+        """Return the current config"""
+        return self._config
+
+
+if __name__ == '__main__':
+    # initialize the config manager
+
+    data_id = "testdata"
+    group = "DEFAULT_GROUP"
+
+    config_manager = ConfigManager()
+    config_manager.init_config(
+        data_id=data_id,  # config ID
+    )
+
+    # yaml.safe_load("- 1")
@@ -10,22 +10,21 @@
 # https://baijiahao.baidu.com/s?id=1774464887530962175&wfr=spider&for=pc

 from meutils.pipe import *
-import nacos
+import nacos  # nacos-sdk-python

 # Both HTTP/HTTPS protocols are supported, if not set protocol prefix default is HTTP, and HTTPS with no ssl check(verify=False)
 # "192.168.3.4:8848" or "https://192.168.3.4:443" or "http://192.168.3.4:8848,192.168.3.5:8848" or "https://192.168.3.4:443,https://192.168.3.5:443"
-SERVER_ADDRESSES = "server addresses split by comma"
-NAMESPACE = "namespace id"
+server_addresses = "nacos.chatfire.cc"
+NAMESPACE = "test"

 # no auth mode
-client = nacos.NacosClient(SERVER_ADDRESSES, namespace=NAMESPACE)
+client = nacos.NacosClient(server_addresses=server_addresses, namespace=NAMESPACE, username='chatfire', password='chatfirechatfire')
 # auth mode
 # client = nacos.NacosClient(SERVER_ADDRESSES, namespace=NAMESPACE, ak="{ak}", sk="{sk}")

 # get config
-data_id = "config.nacos"
-group = "group"
+data_id = "testdata"
+group = "DEFAULT_GROUP"
 print(client.get_config(data_id, group))


-
meutils/data/VERSION CHANGED
@@ -1 +1 @@
-2024.12.02.11.50.27
+2024.12.05.13.07.04
@@ -114,12 +114,12 @@ async def get_access_token():

 @alru_cache(ttl=60)
 @retrying(predicate=lambda r: r is None)
-async def create_conversation_id(token: Optional[str] = None):
-    token = token or await get_access_token()
+async def create_conversation_id(token: str):
     headers = {
         "authorization": token
     }
     conversation_id = str(uuid.uuid4())  # shortuuid.random()
+    # conversation_id = "af306c40-8f85-47a7-a027-185da084c6cc"
     params = {
         "conversation_id": conversation_id,
         "organization_id": "undefined",
@@ -128,34 +128,36 @@ async def create_conversation_id(token: Optional[str] = None):
     }
     async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, timeout=30) as client:
         response = await client.post("/api/new", params=params)
-
-        response = await client.get(f"/?id={conversation_id}", headers=headers)
-        html_content = response.text
-        pattern = r'(/_next/static/chunks/7116-[^"]+\.js)'
-        js = re.findall(pattern, html_content)[-1]
-
-        logger.debug(js)
-
-        response = await client.get(js)
-
-        logger.debug(response.status_code)
-        next_action = response.text.split(',A=(0,a.$)("')[1][:40]
-        logger.debug(next_action)  # 2bc738a7215e149dbd4601a440f3b6df45089338 expiry time
-
-        headers = {
-            'next-action': next_action,
-            'Cookie': f"AccessToken={token}",
-
-            'content-type': 'text/plain;charset=UTF-8',
-        }
-        payload = "[]"
-        response = await client.post(f"/?id={conversation_id}", headers=headers, content=payload)
-
         logger.debug(response.status_code)
         logger.debug(response.text)
-
-        if response.is_success:
-            return conversation_id
+        return conversation_id
+
+        # response = await client.get(f"/?id={conversation_id}", headers=headers)
+        # html_content = response.text
+        # pattern = r'(/_next/static/chunks/7116-[^"]+\.js)'
+        # js = re.findall(pattern, html_content)[-1]
+        #
+        # logger.debug(js)
+        #
+        # response = await client.get(js)
+        #
+        # logger.debug(response.status_code)
+        # next_action = response.text.split(',A=(0,a.$)("')[1][:40]
+        # logger.debug(next_action)  # 2bc738a7215e149dbd4601a440f3b6df45089338 expiry time
+        #
+        # headers = {
+        #     'next-action': next_action,
+        #     'Cookie': f"AccessToken={token}",
+        #
+        #     'content-type': 'text/plain;charset=UTF-8',
+        # }
+        # payload = "[]"
+        # response = await client.post(f"/?id={conversation_id}", headers=headers, content=payload)
+        #
+        # logger.debug(response.status_code)
+        # logger.debug(response.text)
+        #
+        # return conversation_id


 @retrying(max_retries=3)
@@ -168,6 +170,8 @@ async def create(request: ChatCompletionRequest, token: Optional[str] = None, vi
     token = token or await get_access_token()
     conversation_id = await create_conversation_id(token)

+    logger.debug(conversation_id)
+
     use_search = False
     if request.messages[0].get('role') != 'system':  # restore the system message
         request.messages.insert(0, {'role': 'system', 'content': f'You are {request.model}'})
@@ -181,19 +185,19 @@ async def create(request: ChatCompletionRequest, token: Optional[str] = None, vi

     headers = {
         "authorization": token,
-        'Cookie': f"AccessToken={token};",
+        # 'Cookie': f"AccessToken={token};",
         "content-type": "text/plain;charset=UTF-8",

     }
     params = {
-        "organization_id": "undefined",
+        # "organization_id": "undefined",
         # "organization_id": "eb0fb996-2317-467b-9847-15f6c40000b7",
         "retry": 2,
     }
     payload = {
         # "query": request.last_content,
         "query": oneturn2multiturn(request.messages),
-        "images": request.urls,  # todo: support base64
+        # "images": request.urls,  # todo: support base64

         "conversation_id": conversation_id,
         "model_id": request.model,  # "kaushikaakash04/tune-blob"
@@ -202,6 +206,8 @@ async def create(request: ChatCompletionRequest, token: Optional[str] = None, vi
         "attachment_name": "",
         # "messageId": "4a33e497-efb7-4d8f-ae45-9aa7d2c1c5af1726811555410",
         # "prevMessageId": "4a33e497-efb7-4d8f-ae45-9aa7d2c1c5af1726811555410",
+
+        "check": "286600"
     }

     yield "\n"  # speed up the first token
@@ -254,9 +260,9 @@ if __name__ == '__main__':
     # model = "net-anthropic/claude-3.5-sonnet"
     # model = "all"

-    # model = "kaushikaakash04/tune-blob"
+    model = "kaushikaakash04/tune-blob"
     # model = "openai/o1-mini"
-    model = "o1-mini-0609"
+    # model = "o1-mini-0609"

     # model = "openai/gpt-4o-mini"

@@ -21,6 +21,14 @@ FEISHU_URL_OSS = "https://xchatllm.feishu.cn/sheets/MekfsfVuohfUf1tsWV0cCvTmn3c?

 class VideoRequest(BaseModel):
     """
+    23000 text-to-video
+    "T2V-01": "23000"
+    "I2V-01": "23001"
+    "I2V-01-live": "23011"
+
+    {"desc":"飞起来","useOriginPrompt":false,"fileList":[{"id":"320650134834716679","name":"s.jpg","type":"jpg"}],"modelID":"23001"}  # old model
+    {"desc":"飞起来","useOriginPrompt":false,"fileList":[{"id":"320650134834716679","name":"s.jpg","type":"jpg"}],"modelID":"23011"}  # new model
+
     {"desc":"跳动","useOriginPrompt":true,"fileList":[{"id":"304987062153912323","name":"3a71b0bb-3cab-4e69-b1f0-592976d0897b_00001_.png","type":"png"}]}
     """
     model: str = "video-01"
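The docstring added above pairs the public model names with the upstream modelID values and shows two example payloads. A small sketch of how that mapping could be applied when building a request body; the helper and its defaults are hypothetical, only the IDs and field names come from the docstring:

# Model-name to upstream modelID mapping, taken from the docstring above.
MODEL_IDS = {
    "T2V-01": "23000",       # text-to-video
    "I2V-01": "23001",       # image-to-video, old model
    "I2V-01-live": "23011",  # image-to-video, new model
}

def build_video_payload(desc: str, file_id: str, file_name: str, model: str = "I2V-01-live") -> dict:
    # Mirrors the example payloads shown in the docstring.
    return {
        "desc": desc,
        "useOriginPrompt": False,
        "fileList": [{"id": file_id, "name": file_name, "type": file_name.rsplit(".", 1)[-1]}],
        "modelID": MODEL_IDS[model],
    }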
@@ -35,6 +43,10 @@ class VideoRequest(BaseModel):
     The input image must meet the following requirements: format JPG/JPEG/PNG; aspect ratio greater than 2:5 and less than 5:2; short side larger than 300px; file size no more than 20MB."""
     first_frame_image: Optional[str] = None

+    def __init__(self, /, **data: Any):
+        super().__init__(**data)
+
+


 class BaseResponse(BaseModel):
     """
@@ -110,6 +122,7 @@ class VideoResponse(BaseModel):
         if self.file_id is None and self.videos:
             self.file_id = self.videos[0].videoURL

+
 #
 # "data": {
 #     "videos": [
@@ -138,3 +151,51 @@ class VideoResponse(BaseModel):
 #     "cycleTime": 10,
 #     "hasMore": false
 # }
+
+"""
+refer_voice
+Enter your reference voice ID
+refer_instrumental
+Enter your reference instrumental ID
+lyrics
+##在无垠的星空下\n\n梦开始飞翔\n月光洒在心上\n\n温柔的想象\n在这片宁静中\n\n我们自由歌唱##
+model
+music-01
+audio_setting
+{"sample_rate":44100,"bitrate":256000,"format":"mp3"}
+
+"""
+
+
+class MusicRequet(BaseModel):
+    model: str = "music-01"
+
+    lyrics: str
+
+    refer_voice: Optional[str] = None
+    refer_instrumental: Optional[str] = None
+    audio_setting: dict = {
+        "sample_rate": 44100,
+        "bitrate": 256000,
+        "format": "mp3"
+    }
+
+
+"""
+{
+  "data":{
+    "audio":"hex-encoded audio data",
+    "status":2
+  },
+  "trace_id":"02cb3e6a7f8ada5886fdc11e09452353",
+  "base_resp":{
+    "status_code":0,
+    "status_msg":"success"
+  }
+}
+"""
+
+
+class MusicResponse(BaseModel):
+    trace_id: str
+    base_resp: BaseResponse
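A short usage sketch for the new music schema; it assumes the module above is importable as-is (note the class name is spelled MusicRequet in the source), and the hex-decoding step is an assumption based on the example response, not something shown in the diff:

# Hypothetical usage of the classes defined above.
request = MusicRequet(lyrics="##在无垠的星空下\n梦开始飞翔##")
print(request.model_dump())  # model, lyrics and the default audio_setting (44100 Hz, 256 kbps, mp3)

# A successful response carries hex-encoded audio, e.g. response["data"]["audio"];
# bytes.fromhex(...) would turn it into raw mp3 bytes before writing it to a file.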
@@ -117,11 +117,6 @@ class FluxImageRequest(ImageRequest):
     def __init__(self, /, **data: Any):
         super().__init__(**data)
         self.image_size = self.size
-        if "dev" in self.model:
-            self.model = 'flux-schnell'
-
-        if "pro" in self.model:
-            self.model = 'flux-pro'


 class TogetherImageRequest(ImageRequest):  # together
@@ -238,8 +233,7 @@ class RecraftImageRequest(ImageRequest):
     Literal[
         'any',
         'digital_illustration',
-        'illustration_3d',
-        'digital_illustration_seamless',
+        'digital_illustration',
         'digital_illustration_pixel_art',
         'digital_illustration_3d',
         'digital_illustration_psychedelic',
@@ -259,6 +253,7 @@ class RecraftImageRequest(ImageRequest):
         'digital_illustration_hand_drawn_outline',
         'digital_illustration_handmade_3d',
         'digital_illustration_stickers_drawings',
+
         'realistic_image',
         'realistic_image_mockup',
         'realistic_image_b_and_w',
@@ -268,6 +263,7 @@ class RecraftImageRequest(ImageRequest):
         'realistic_image_natural_light',
         'realistic_image_studio_portrait',
         'realistic_image_motion_blur',
+
         'vector_illustration',
         'vector_illustration_seamless',
         'vector_illustration_line_art',
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : napkin_types
+# @Time : 2024/12/3 17:23
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+
+from meutils.pipe import *
+
+BASE_URL = "https://nlp-california-api.napkin.ai/api/v1"
+
+ASSETS_BASE_URL = "https://assets.napkin.ai/assets/db"
+# https://assets.napkin.ai/assets/db/24px/coffee-mug--food-drinks--24x24.svg
+
+"https://assets.napkin.ai/assets/db/families/lens-circles-v7--family--3.svg"
+
+class IconsSimilarRequest(BaseModel):
+    caption: str
+
+
+class Icon(BaseModel):
+    file: str
+    list: str
+    name: str
+    score: float
+
+
+class IconsSimilarResponse(BaseModel):
+    data: List[Icon]
+    metadata: Dict = {}
+
+    def __init__(self, /, **data: Any):
+        super().__init__(**data)
+
+        for icon in self.data:
+            icon.file = f"{ASSETS_BASE_URL}/{icon.list}/{icon.file}.svg"
+
+    class Config:
+        extra = "allow"
+
+#
+# {'data': [{'file': 'coffee-read--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'coffee-read',
+#            'score': 0.48007068037986755},
+#           {'file': 'coffee-mug--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'coffee-mug',
+#            'score': 0.4368043541908264},
+#           {'file': 'coffee-cup--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'coffee',
+#            'score': 0.4181402325630188},
+#           {'file': 'time-coffee-time-2--interface-essential--24x24',
+#            'list': '24px',
+#            'name': 'coffee-time-2',
+#            'score': 0.408485472202301},
+#           {'file': 'coffee-jar--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'coffee-jar',
+#            'score': 0.4083172082901001},
+#           {'file': 'time-coffee-time-3--interface-essential--24x24',
+#            'list': '24px',
+#            'name': 'coffee-time-3',
+#            'score': 0.40657299757003784},
+#           {'file': 'coffee-coldbrew-1--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'coldbrew',
+#            'score': 0.40559959411621094},
+#           {'file': 'coffee-straw--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'straw',
+#            'score': 0.4047240614891052},
+#           {'file': 'coffee-to-go--food-drinks--24x24',
+#            'list': '24px',
+#            'name': 'go',
+#            'score': 0.39064520597457886},
+#           {'file': 'dating-cup--romance--24x24',
+#            'list': '24px',
+#            'name': 'dating-cup',
+#            'score': 0.38993847370147705}],
+#  'metadata': {}}
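A brief sketch of how the captured example maps onto the new schema; the payload below is trimmed from the comment block above, and the printed URL follows the rewriting done in IconsSimilarResponse.__init__ (a usage sketch, assuming the module above is importable):

# Hypothetical usage of the models defined above.
payload = {
    "data": [
        {"file": "coffee-mug--food-drinks--24x24", "list": "24px", "name": "coffee-mug", "score": 0.4368043541908264},
    ],
    "metadata": {},
}
resp = IconsSimilarResponse(**payload)
print(resp.data[0].file)
# -> https://assets.napkin.ai/assets/db/24px/coffee-mug--food-drinks--24x24.svg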
@@ -51,10 +51,12 @@ MODEL_PRICE = {
     "api-images-stable-diffusion": 0.01,

     "api-images-flux1.0-turbo": 0.02,
+    "api-images-flux-schnell": 0.01,
     "api-images-flux1.0-schnell": 0.01,
     "api-images-flux1.0-dev": 0.05,
     "api-images-flux1.0-pro": 0.1,
     "api-images-flux1.1-pro": 0.1,
+    "api-images-recraftv3": 0.03,

     "api-tripo3d": 0.1,

@@ -234,8 +236,23 @@ MODEL_PRICE = {
 }

 MODEL_RATIO = {
-    # rerank
-    "rerank-multilingual-v3.0": 1,
+    # embedding & rerank
+    "rerank-multilingual-v2.0": 0.1,
+    "rerank-multilingual-v3.0": 0.1,
+    "BAAI/bge-reranker-v2-m3": 0.1,
+    "jina-reranker-v2-base-multilingual": 0.1,
+    "netease-youdao/bce-reranker-base_v1": 0.1,
+    "BAAI/bge-m3": 0.1,
+    "bge-m3": 0.1,
+
+    "bge-large-zh-v1.5": 0.1,
+    "BAAI/bge-large-zh-v1.5": 0.1,
+    "bge-large-en-v1.5": 0.1,
+    "BAAI/bge-large-en-v1.5": 0.1,
+
+    "text-embedding-3-large": 0.5,
+    "text-embedding-3-small": 0.5,
+    "text-embedding-ada-002": 0.5,

     # Baichuan
     'baichuan4-turbo': 7.5,
@@ -274,9 +291,9 @@ MODEL_RATIO = {
     "glm-4v-plus": 1,  # 2.5

     # Moonshot https://platform.moonshot.cn/docs/price/chat#%E4%BA%A7%E5%93%81%E5%AE%9A%E4%BB%B7
-    "moonshot-v1-8k": 6 / 5,  # special price
-    "moonshot-v1-32k": 12 / 5,  # special price
-    "moonshot-v1-128k": 60 / 5,  # special price
+    "moonshot-v1-8k": 6 / 2,  # special price
+    "moonshot-v1-32k": 12 / 2,  # special price
+    "moonshot-v1-128k": 60 / 2,  # special price

     # Alibaba Qwen https://dashscope.console.aliyun.com/billing
     "qwen-long": 0.25,
@@ -389,10 +406,7 @@ MODEL_RATIO = {
     "text-ada-001": 0.2,
     "text-babbage-001": 0.25,
     "text-davinci-edit-001": 10,
-    "text-embedding-3-large": 1,
-    "text-embedding-3-small": 1,
-    "text-embedding-ada-002": 1,
-    "text-embedding-v1": 1,
+
     "text-moderation-latest": 0.1,
     "text-moderation-stable": 0.1,
     "tts-1": 7.5,
@@ -514,16 +528,6 @@ MODEL_RATIO = {
     "internlm2_5-7b-chat": 0.01,
     'internlm2_5-20b-chat': 0.5,

-    # embedding
-    "bge-large-zh-v1.5": 1,
-    "bge-large-en-v1.5": 1,
-    "BAAI/bge-large-zh-v1.5": 1,
-    "BAAI/bge-large-en-V1.5": 1,
-    "BAAI/bge-m3": 1,
-    "bge-large-zh-v1.5-q4": 1,
-    "bge-small-zh-v1.5-q4": 1,
-    "chatfire/bge-m3:q8_0": 1,
-
     "acge_text_embedding": 1,
     "dmeta-embedding-zh-q4": 1,
