MeUtils 2024.12.2.11.1.18__py3-none-any.whl → 2024.12.5.12.28.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {MeUtils-2024.12.2.11.1.18.dist-info → MeUtils-2024.12.5.12.28.40.dist-info}/METADATA +28 -28
  2. {MeUtils-2024.12.2.11.1.18.dist-info → MeUtils-2024.12.5.12.28.40.dist-info}/RECORD +30 -23
  3. examples/_openaisdk/4v.py +8 -6
  4. examples/_openaisdk/openai_step.py +9 -6
  5. examples/demo.py +15 -1
  6. meutils/apis/hailuoai/music.py +11 -0
  7. meutils/apis/hailuoai/videos.py +11 -1
  8. meutils/apis/images/recraft.py +4 -3
  9. meutils/apis/kling/kolors_virtual_try_on_web.py +126 -0
  10. meutils/apis/napkin/__init__.py +11 -0
  11. meutils/apis/napkin/icons.py +42 -0
  12. meutils/apis/sunoai/suno.py +17 -9
  13. meutils/apis/textin.py +3 -0
  14. meutils/async_task/celery_config.py +2 -0
  15. meutils/async_task/utils.py +25 -14
  16. meutils/config_utils/config_manager.py +76 -0
  17. meutils/config_utils/{nacos.py → nacosx.py} +6 -7
  18. meutils/data/VERSION +1 -1
  19. meutils/db/redis_db.py +2 -2
  20. meutils/llm/completions/tune.py +39 -33
  21. meutils/schemas/hailuo_types.py +61 -0
  22. meutils/schemas/image_types.py +3 -7
  23. meutils/schemas/napkin_types.py +85 -0
  24. meutils/schemas/oneapi/common.py +23 -19
  25. meutils/schemas/oneapi/重定向.py +132 -0
  26. meutils/str_utils/__init__.py +16 -2
  27. {MeUtils-2024.12.2.11.1.18.dist-info → MeUtils-2024.12.5.12.28.40.dist-info}/LICENSE +0 -0
  28. {MeUtils-2024.12.2.11.1.18.dist-info → MeUtils-2024.12.5.12.28.40.dist-info}/WHEEL +0 -0
  29. {MeUtils-2024.12.2.11.1.18.dist-info → MeUtils-2024.12.5.12.28.40.dist-info}/entry_points.txt +0 -0
  30. {MeUtils-2024.12.2.11.1.18.dist-info → MeUtils-2024.12.5.12.28.40.dist-info}/top_level.txt +0 -0
meutils/apis/napkin/icons.py ADDED
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : icons
+# @Time : 2024/12/3 16:54
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description : https://app.napkin.ai/api/v1/auth/sessions
+import json
+
+from meutils.pipe import *
+from meutils.schemas.napkin_types import BASE_URL, ASSETS_BASE_URL, IconsSimilarRequest, Icon, IconsSimilarResponse
+
+
+async def icons_similar(request: IconsSimilarRequest):
+    headers = {
+
+        "cookie": "napkin.identity.visitor=visitor%409e2a59af-19ad-4dc3-96c8-9efbca71c547; napkin.analytics.device_id=d_950e4683-ae41-4941-b9a7-7e8deba8e669_1733214660768; _ga=GA1.1.511154647.1733214661; _fbp=fb.1.1733214661606.54464059277067597; _tt_enable_cookie=1; _ttp=SFxRhdE5PO5Rslohwet46IwiaB3.tt.1; _clck=16odoyg%7C2%7Cfre%7C0%7C1798; intercom-id-zrfc296i=6eb3740a-9c79-4681-87ec-532e4334816f; intercom-device-id-zrfc296i=f282621f-dcec-429b-90c0-fc98bd9dbb71; _ga_NM0S4FZ9JH=GS1.1.1733214661.1.0.1733214667.0.0.0; napkin.app.identity.session=NGJlMjdiZGEtZGEwNi00ZWE5LTkwMjEtN2U4ODAyNjE5NGJm.5092981dc495fe0041ed99fc68dcefa4ba26a0f6; intercom-session-zrfc296i=MGpOUDEvRlE3ZlRyUkhmaUdsVmZjYXNQT2lJYSsxYitaWDhTT0tMWlJFVkVuVjBwTno5emJydVd3VUhvKzlvRC0tLys4ZEp5a0F5M1YzNkFOazJUV2Nudz09--d6be7442da979794afe682b2e044bf51bd19ac88; _clsk=yficge%7C1733214772932%7C9%7C1%7Cr.clarity.ms%2Fcollect; _ga_L149GZ61DV=GS1.1.1733214675.1.1.1733215282.58.0.0",
+        "origin": "https://app.napkin.ai",
+
+        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
+    }
+
+    payload = request.model_dump()
+
+    async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, timeout=30) as client:
+        response = await client.post(
+            "/features/text/icons_similar",
+            json=payload,
+        )
+        response.raise_for_status()
+        data = response.json()
+        logger.debug(data)
+        response = IconsSimilarResponse(**data)
+
+    return response.model_dump()
+
+
+if __name__ == '__main__':
+    request = IconsSimilarRequest(caption="一杯咖啡与灵感的邂逅")
+    arun(icons_similar(request))
meutils/apis/sunoai/suno.py CHANGED
@@ -61,13 +61,21 @@ async def get_access_token(token: str):
     token, last_active_session_id = await get_refresh_token(token) # last_active_token 没啥用

     headers = {
-        "Cookie": f"__client={token}"
+        "Cookie": f"__client={token}",
+    }
+
+    params = {
+        "__clerk_api_version": "2021-02-05",
+        "_clerk_js_version": "5.35.1"
     }
+
     async with httpx.AsyncClient(base_url=CLIENT_BASE_URL, headers=headers, timeout=60) as client:
-        response = await client.post(f"/sessions/{last_active_session_id}/tokens")
-        if response.is_success:
-            return response.json().get('jwt')
+        response = await client.post(f"/sessions/{last_active_session_id}/tokens", params=params)
+        response.raise_for_status()
+        logger.debug(bjson(response.json()))
+        return response.json().get('jwt')

+    # /tokens?__clerk_api_version=2021-02-05&_clerk_js_version=5.35.1

 @retrying(predicate=lambda r: not r)
 async def create_task(request: SunoAIRequest, token: Optional[str] = None):
@@ -314,9 +322,9 @@ async def create_task_for_cover(cover_from_url, lyrics):
 if __name__ == '__main__':
     # token = os.getenv("SUNO_API_KEY")
     # token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8yaGlHSTlCZFVwOUdZcUlGM3ZmTU1IT25SNFAiLCJyb3RhdGluZ190b2tlbiI6ImltOWMzOGJ4bnV2OThiZXplMW8yOG1zd2Y2c3lrdzd6YnM2ejJubHkifQ.SnC8-G2LVQztTiA2davFS413mQIaBmRFDzIw1JmvHg4UOMXq95z0CgbfK8Gx8Zv-FXdpKVqkamiNTzZP9qsLOSgREqCSSq5bmA6SPIWx-R6dj1PMDFRX-qv5qGyyPe4sadF6wnr45MS9859148gRmr_Go8rAT_7Hu0DKySextl-Xbs6ClDaYYUyyV3HudWQh4F8jwvxkyer05AgN6smQH5eZI-NRKVgZn_i6Mtl8IJz8R1fzD2YNIcvH4QC4qGhrg9n74ljIeORCMsoJzW2SBZa4QWWDx_0VYs-tA_Z43bqwN_2ojMGM63fm2hLOZmwf6S1LQy9_O6UdcUQiEs__OA"
-
+    token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8ybEIwRFlXTWYya3NxNWlxazl4S2dZbmQ0eVciLCJyb3RhdGluZ190b2tlbiI6Im5vejdpMzlvY2YzMTEzaHp6aDZwY2N5MzltZm5xNmZvdnhtOG9oNngifQ.qUeBLl-NxKzhpoUBpo_EwkHH0qwIsG0nMSD_yvHv5EK7YfybupoPWw8SpSKDhmZu5K_KsgdOF0RQH22jll4U-x0BwfVu1ze-GBxjnNEoSerUB7hu1cfvmg7xMH8rHJQig2TWE2h0hzP6dMajPHQWRTltbb5MMkKHgBFj0CiAFqaGwzSSvAtERwwHBIK3KalbaV1oyd6DJYG4FrVgQLubkp7VXj11LszxD6qXklRhsc9h55kvYASDPHhnZJi9u2QfIbiKVkraXb6ShqDmNtqXbj22p6g2R9fwMEB-m68S7QSZyAWRArWzsSZujzhxmNuMGbuVX1v7op7F3hA2zFphYw"
     # print(arun(get_refresh_token(token)))
-    # print(arun(get_access_token("token")))
+    print(arun(get_access_token(token)))

     # arun(generate_lyrics(prompt=''))

@@ -339,9 +347,9 @@ if __name__ == '__main__':
     # token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8yajM1dkNXaTE1b0VQWnlUU0RMMnBsU3RiMVQiLCJyb3RhdGluZ190b2tlbiI6Ijd5MDV0cG1nMWJpbjBmaGIxZW1kYzZoazQzbDRuaG84bmJ4Yzc1dzgifQ.LsEfuPgwXu33f_UD2pRY4HjHIwU_rPG2rvG46BVDKqXcPmhRTWl5LjKgFrSzU51tgxfG-wMopVJhRxgS6YZUMtKVojDFtV_ZImyJ30u6LYA5nSbrkhqUrBdU4P5WmkL9irvh4sGPtvv8ML_pyXzsittsDrNnDCtm_isacOD-Fy3VKOCOjWj4W3qUUdnmBvTeeUbrnepQqurAjYPg6Ug-WkR43xZ5tWtxxTQ4ebtZglgRyCbyF3TM9XMKa67FTwHQsja8cLo2CyGRzb89e3uuF8Na_CY17ZlxJyQ2p_FZmL0egWr0EveZFeVzIUUs704pkhKd-RC9Q47Jqcg8qdiRYA"
     # arun(generate_lyrics('hi', token))
     # arun(generate_lyrics())
-    task_id = "0b017d4e-c559-4cc6-9339-9cd53aa25af4"
-    token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8ybEIwRFlXTWYya3NxNWlxazl4S2dZbmQ0eVciLCJyb3RhdGluZ190b2tlbiI6Im5vejdpMzlvY2YzMTEzaHp6aDZwY2N5MzltZm5xNmZvdnhtOG9oNngifQ.qUeBLl-NxKzhpoUBpo_EwkHH0qwIsG0nMSD_yvHv5EK7YfybupoPWw8SpSKDhmZu5K_KsgdOF0RQH22jll4U-x0BwfVu1ze-GBxjnNEoSerUB7hu1cfvmg7xMH8rHJQig2TWE2h0hzP6dMajPHQWRTltbb5MMkKHgBFj0CiAFqaGwzSSvAtERwwHBIK3KalbaV1oyd6DJYG4FrVgQLubkp7VXj11LszxD6qXklRhsc9h55kvYASDPHhnZJi9u2QfIbiKVkraXb6ShqDmNtqXbj22p6g2R9fwMEB-m68S7QSZyAWRArWzsSZujzhxmNuMGbuVX1v7op7F3hA2zFphYw"
-    arun(get_task(task_id=task_id, token=token))
+    # task_id = "0b017d4e-c559-4cc6-9339-9cd53aa25af4"
+    # token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImNsaWVudF8ybEIwRFlXTWYya3NxNWlxazl4S2dZbmQ0eVciLCJyb3RhdGluZ190b2tlbiI6Im5vejdpMzlvY2YzMTEzaHp6aDZwY2N5MzltZm5xNmZvdnhtOG9oNngifQ.qUeBLl-NxKzhpoUBpo_EwkHH0qwIsG0nMSD_yvHv5EK7YfybupoPWw8SpSKDhmZu5K_KsgdOF0RQH22jll4U-x0BwfVu1ze-GBxjnNEoSerUB7hu1cfvmg7xMH8rHJQig2TWE2h0hzP6dMajPHQWRTltbb5MMkKHgBFj0CiAFqaGwzSSvAtERwwHBIK3KalbaV1oyd6DJYG4FrVgQLubkp7VXj11LszxD6qXklRhsc9h55kvYASDPHhnZJi9u2QfIbiKVkraXb6ShqDmNtqXbj22p6g2R9fwMEB-m68S7QSZyAWRArWzsSZujzhxmNuMGbuVX1v7op7F3hA2zFphYw"
+    # arun(get_task(task_id=task_id, token=token))

     # arun(get_credits(token))
     # arun(check_token(token))
meutils/apis/textin.py CHANGED
@@ -112,6 +112,7 @@ if __name__ == '__main__':

     # response = requests.request("POST", url, data=data)
     data = open("/Users/betterme/PycharmProjects/AI/qun.png", 'rb').read()
+    data = open("img.png", 'rb').read()
     # data = arun(to_bytes("https://cdn.meimeiqushuiyin.cn/ori/tmp_e8d6329e1b2c1bc541ca530fcbae14e3ec12f65d8d4ec97d.jpg"))

     from meutils.schemas.task_types import Purpose
@@ -121,6 +122,8 @@ if __name__ == '__main__':

     # service = "pdf_to_markdown"

+    service = 'dewarp'
+
     async def main(n=1):
         for i in tqdm(range(n)):
             try:
meutils/async_task/celery_config.py CHANGED
@@ -19,6 +19,8 @@ broker_url = os.getenv('REDIS_URL', broker_url)
 # result_backend = f"redis://localhost:6379/1" # 结果存储
 result_backend = f"{broker_url.replace('/0', '')}/1" # 结果存储

+# logger.debug(result_backend)
+
 # 添加以下配置增加可靠性
 broker_connection_retry = True
 broker_connection_max_retries = 5
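
For reference, a minimal sketch (with a hypothetical REDIS_URL value) of how the result backend above is derived from the broker URL: the trailing "/0" database suffix is stripped and database 1 is used for results.

import os

# hypothetical broker URL; in the real config it comes from the REDIS_URL env var
broker_url = os.getenv("REDIS_URL", "redis://:pwd@localhost:6379/0")

# same expression as in celery_config.py: results go to Redis db 1
result_backend = f"{broker_url.replace('/0', '')}/1"
print(result_backend)  # redis://:pwd@localhost:6379/1
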
meutils/async_task/utils.py CHANGED
@@ -63,23 +63,26 @@ async def get_task(
     }
     :return:
     """
-    await redis_aclient.select(1) # 切到 celery
-    if not await redis_aclient.get(f"celery-task-meta-{task_id}"):
-        raise HTTPException(status_code=404, detail="TaskID not found")
-
     filter_kwargs = filter_kwargs or {}
     filter_kwargs["task_id"] = task_id

     result = AsyncResult(id=task_id)
-    # logger.debug(bjson(result._get_task_meta()))
-    # logger.debug(bjson(result.get()))
+    logger.debug(bjson(result._get_task_meta()))
+    # logger.debug(bjson(result.get(timeout=30)))
+    logger.debug(result)
+    logger.debug(result.ready())
+    logger.debug(result.state)

-    data = result.get().copy() # 创建任务时:remote task的返回结果 ####### copy避免丢失字段
-    logger.debug(bjson(data))
-    token = data.pop("system_fingerprint", None) # 远程任务 token/apikey
+    # if result.status=="PENDING": # worker可能还未启动,会阻塞
+    # if await redis_aclient.select(1) and not await redis_aclient.exists(f"celery-task-meta-{task_id}"):
+    #     raise HTTPException(status_code=404, detail="TaskID not found")

     if result.ready():
         if result.successful():
+            data = result.get(timeout=30).copy() # 创建任务时:remote task的返回结果 ####### copy避免丢失字段
+            logger.debug(bjson(data))
+            token = data.pop("system_fingerprint", None) # 远程任务 token/apikey
+
             remote_task_id = (
                     data.get("task_id") # 提前写
                     or data.get("data", {}).get("task_id")
@@ -96,13 +99,21 @@ async def get_task(
                 data=data
             )

-            if remote_get_task: # 获取远程任务, todo: 判断函数类型
+            if remote_get_task:
+                if inspect.iscoroutinefunction(remote_get_task):
+                    remote_task_response = await remote_get_task(remote_task_id, token)
+                else:
+                    remote_task_response = remote_get_task(remote_task_id, token)

-                remote_task_response = await remote_get_task(remote_task_id, token)
                 if not isinstance(remote_task_response, dict):
                     remote_task_response = remote_task_response.model_dump()

+                # logger.debug(response)
+                # logger.debug(remote_task_response)
+
                 response.__dict__.update(remote_task_response) # 更新 response
+                # logger.debug(response)
+

         else:
             response = TaskResponse(
@@ -144,13 +155,13 @@ async def update_oneapi_from_response(task: OneapiTask, task_response: TaskRespo
    需要获取这几个信息 user_id

    """
-    if task.status in {"SUCCESS", "FAILURE"}: return False # 跳出轮询,不再更新
+    # if task.status in {"SUCCESS", "FAILURE"}: return False # 跳出轮询,不再更新

    task.data = task_response.model_dump(exclude={"system_fingerprint"})
    task.status = task_response.status
    task.progress = time.time() // 10 % 100

-    if task.status == "SUCCESS":
+    if task.status == "SUCCESS": ###### todo: 状态对齐
        task.progress = "100%"
    elif task.status == "FAILURE":
        task.fail_reason = "查看详情"
@@ -164,7 +175,7 @@ if __name__ == '__main__':

    from meutils.apis.kling import kolors_virtual_try_on

-    task_id = "d91e39a9-eff9-4e98-8412-a5c40371479e"
+    task_id = "31b9bb1b-db59-4a94-9027-2ded2e4f24aa"
    # filter_kwargs = {
    #     "task_id": task_id, #########理论上只需这个
    #     "user_id": 1,
meutils/config_utils/config_manager.py ADDED
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI. @by PyCharm
+# @File : config_manager
+# @Time : 2024/12/4 12:07
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+
+import nacos
+import yaml
+
+from meutils.pipe import *
+
+class ConfigManager:
+    _instance = None
+    _config: Dict[str, Any] = {}
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self):
+        # Nacos客户端配置
+        self.client = nacos.NacosClient(
+            server_addresses="nacos.chatfire.cc",  # Nacos服务器地址
+            namespace="test",  # 命名空间
+            username="chatfire",  # 用户名
+            password="chatfirechatfire"  # 密码
+        )
+
+    def init_config(self, data_id: str, group: str = "DEFAULT_GROUP"):
+        """初始化配置并添加监听器"""
+        # 获取初始配置
+        config = self.client.get_config(data_id, group)
+        logger.debug(config)
+
+        if config:
+            self._config = yaml.safe_load(config)
+
+        # 添加配置变更监听器
+        self.client.add_config_watcher(
+            data_id,
+            group,
+            self._config_changed_callback
+        )
+
+    def _config_changed_callback(self, args):
+        """配置变更回调函数"""
+        print(f"配置发生变更: {args}")
+        try:
+            self._config = yaml.safe_load(args['content'])
+            print(f"最新配置: {self._config}")
+        except Exception as e:
+            print(f"配置更新失败: {e}")
+
+    @property
+    def config(self) -> Dict[str, Any]:
+        """获取当前配置"""
+        return self._config
+
+
+if __name__ == '__main__':
+    # 初始化配置管理器
+
+    data_id = "testdata"
+    group = "DEFAULT_GROUP"
+
+    config_manager = ConfigManager()
+    config_manager.init_config(
+        data_id=data_id,  # 配置ID
+    )
+
+    # yaml.safe_load("- 1")
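
A hedged usage sketch of the new ConfigManager (the import path follows the file list above; the data_id/group values and the nested redis.url key are illustrative assumptions, not part of the diff):

from meutils.config_utils.config_manager import ConfigManager

cm = ConfigManager()  # singleton: a second ConfigManager() returns the same instance
cm.init_config(data_id="testdata", group="DEFAULT_GROUP")

# .config is refreshed in place whenever the Nacos watcher fires
redis_url = cm.config.get("redis", {}).get("url")  # hypothetical nested key
print(redis_url)
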
meutils/config_utils/{nacos.py → nacosx.py} RENAMED
@@ -10,22 +10,21 @@
 # https://baijiahao.baidu.com/s?id=1774464887530962175&wfr=spider&for=pc

 from meutils.pipe import *
-import nacos
+import nacos # nacos-sdk-python

 # Both HTTP/HTTPS protocols are supported, if not set protocol prefix default is HTTP, and HTTPS with no ssl check(verify=False)
 # "192.168.3.4:8848" or "https://192.168.3.4:443" or "http://192.168.3.4:8848,192.168.3.5:8848" or "https://192.168.3.4:443,https://192.168.3.5:443"
-SERVER_ADDRESSES = "server addresses split by comma"
-NAMESPACE = "namespace id"
+server_addresses = "nacos.chatfire.cc"
+NAMESPACE = "test"

 # no auth mode
-client = nacos.NacosClient(SERVER_ADDRESSES, namespace=NAMESPACE)
+client = nacos.NacosClient(server_addresses=server_addresses, namespace=NAMESPACE, username='chatfire', password='chatfirechatfire')
 # auth mode
 # client = nacos.NacosClient(SERVER_ADDRESSES, namespace=NAMESPACE, ak="{ak}", sk="{sk}")

 # get config
-data_id = "config.nacos"
-group = "group"
+data_id = "testdata"
+group = "DEFAULT_GROUP"
 print(client.get_config(data_id, group))


-
meutils/data/VERSION CHANGED
@@ -1 +1 @@
-2024.12.02.11.01.18
+2024.12.05.12.28.40
meutils/db/redis_db.py CHANGED
@@ -66,7 +66,7 @@ if __name__ == '__main__':
     # _ = redis_client.lrange(feishu, 0, -1)
     # print(len(eval(_)))

-    task_id = "celery-task-meta-72d59447-1f88-4727-8067-8244c2268faa"
+    task_id = "celery-task-meta-ca94c602-a2cc-4db5-afe4-763f30df8a18"


     # arun(redis_aclient.get('celery-task-meta-72d59447-1f88-4727-8067-8244c2268faa'))
@@ -75,7 +75,7 @@ if __name__ == '__main__':

     async def main():
         r = await redis_aclient.select(1)
-        print(await redis_aclient.get('celery-task-meta-72d59447-1f88-4727-8067-8244c2268faa'))
+        return await redis_aclient.get(task_id)


     arun(main())
meutils/llm/completions/tune.py CHANGED
@@ -114,12 +114,12 @@ async def get_access_token():

 @alru_cache(ttl=60)
 @retrying(predicate=lambda r: r is None)
-async def create_conversation_id(token: Optional[str] = None):
-    token = token or await get_access_token()
+async def create_conversation_id(token: str):
     headers = {
         "authorization": token
     }
     conversation_id = str(uuid.uuid4()) # shortuuid.random()
+    # conversation_id = "af306c40-8f85-47a7-a027-185da084c6cc"
     params = {
         "conversation_id": conversation_id,
         "organization_id": "undefined",
@@ -128,34 +128,36 @@ async def create_conversation_id(token: Optional[str] = None):
     }
     async with httpx.AsyncClient(base_url=BASE_URL, headers=headers, timeout=30) as client:
         response = await client.post("/api/new", params=params)
-
-        response = await client.get(f"/?id={conversation_id}", headers=headers)
-        html_content = response.text
-        pattern = r'(/_next/static/chunks/7116-[^"]+\.js)'
-        js = re.findall(pattern, html_content)[-1]
-
-        logger.debug(js)
-
-        response = await client.get(js)
-
-        logger.debug(response.status_code)
-        next_action = response.text.split(',A=(0,a.$)("')[1][:40]
-        logger.debug(next_action) # 2bc738a7215e149dbd4601a440f3b6df45089338 过期时间
-
-        headers = {
-            'next-action': next_action,
-            'Cookie': f"AccessToken={token}",
-
-            'content-type': 'text/plain;charset=UTF-8',
-        }
-        payload = "[]"
-        response = await client.post(f"/?id={conversation_id}", headers=headers, content=payload)
-
         logger.debug(response.status_code)
         logger.debug(response.text)
-
-        if response.is_success:
-            return conversation_id
+        return conversation_id
+
+        # response = await client.get(f"/?id={conversation_id}", headers=headers)
+        # html_content = response.text
+        # pattern = r'(/_next/static/chunks/7116-[^"]+\.js)'
+        # js = re.findall(pattern, html_content)[-1]
+        #
+        # logger.debug(js)
+        #
+        # response = await client.get(js)
+        #
+        # logger.debug(response.status_code)
+        # next_action = response.text.split(',A=(0,a.$)("')[1][:40]
+        # logger.debug(next_action) # 2bc738a7215e149dbd4601a440f3b6df45089338 过期时间
+        #
+        # headers = {
+        #     'next-action': next_action,
+        #     'Cookie': f"AccessToken={token}",
+        #
+        #     'content-type': 'text/plain;charset=UTF-8',
+        # }
+        # payload = "[]"
+        # response = await client.post(f"/?id={conversation_id}", headers=headers, content=payload)
+        #
+        # logger.debug(response.status_code)
+        # logger.debug(response.text)
+        #
+        # return conversation_id


 @retrying(max_retries=3)
@@ -168,6 +170,8 @@ async def create(request: ChatCompletionRequest, token: Optional[str] = None, vi
     token = token or await get_access_token()
     conversation_id = await create_conversation_id(token)

+    logger.debug(conversation_id)
+
     use_search = False
     if request.messages[0].get('role') != 'system': # 还原系统信息
         request.messages.insert(0, {'role': 'system', 'content': f'You are {request.model}'})
@@ -181,19 +185,19 @@

     headers = {
         "authorization": token,
-        'Cookie': f"AccessToken={token};",
+        # 'Cookie': f"AccessToken={token};",
         "content-type": "text/plain;charset=UTF-8",

     }
     params = {
-        "organization_id": "undefined",
+        # "organization_id": "undefined",
         # "organization_id": "eb0fb996-2317-467b-9847-15f6c40000b7",
         "retry": 2,
     }
     payload = {
         # "query": request.last_content,
         "query": oneturn2multiturn(request.messages),
-        "images": request.urls, # todo: 兼容base64
+        # "images": request.urls, # todo: 兼容base64

         "conversation_id": conversation_id,
         "model_id": request.model, # "kaushikaakash04/tune-blob"
@@ -202,6 +206,8 @@
         "attachment_name": "",
         # "messageId": "4a33e497-efb7-4d8f-ae45-9aa7d2c1c5af1726811555410",
         # "prevMessageId": "4a33e497-efb7-4d8f-ae45-9aa7d2c1c5af1726811555410",
+
+        "check": "286600"
     }

     yield "\n" # 提升首字速度
@@ -254,9 +260,9 @@ if __name__ == '__main__':
     # model = "net-anthropic/claude-3.5-sonnet"
     # model = "all"

-    # model = "kaushikaakash04/tune-blob"
+    model = "kaushikaakash04/tune-blob"
     # model = "openai/o1-mini"
-    model = "o1-mini-0609"
+    # model = "o1-mini-0609"

     # model = "openai/gpt-4o-mini"

@@ -21,6 +21,14 @@ FEISHU_URL_OSS = "https://xchatllm.feishu.cn/sheets/MekfsfVuohfUf1tsWV0cCvTmn3c?
21
21
 
22
22
  class VideoRequest(BaseModel):
23
23
  """
24
+ 23000 文生视频
25
+ "T2V-01": "23000"
26
+ "I2V-01": "23001"
27
+ "I2V-01-live": "23011"
28
+
29
+ {"desc":"飞起来","useOriginPrompt":false,"fileList":[{"id":"320650134834716679","name":"s.jpg","type":"jpg"}],"modelID":"23001"} # 老模型
30
+ {"desc":"飞起来","useOriginPrompt":false,"fileList":[{"id":"320650134834716679","name":"s.jpg","type":"jpg"}],"modelID":"23011"} # 新模型
31
+
24
32
  {"desc":"跳动","useOriginPrompt":true,"fileList":[{"id":"304987062153912323","name":"3a71b0bb-3cab-4e69-b1f0-592976d0897b_00001_.png","type":"png"}]}
25
33
  """
26
34
  model: str = "video-01"
@@ -35,6 +43,10 @@ class VideoRequest(BaseModel):
35
43
  传入图片需要满足以下条件:格式为JPG/JPEG/PNG;长宽比大于2:5、小于5:2;短边像素大于300px;体积不大于20MB。"""
36
44
  first_frame_image: Optional[str] = None
37
45
 
46
+ def __init__(self, /, **data: Any):
47
+ super().__init__(**data)
48
+
49
+
38
50
 
39
51
  class BaseResponse(BaseModel):
40
52
  """
@@ -110,6 +122,7 @@ class VideoResponse(BaseModel):
110
122
  if self.file_id is None and self.videos:
111
123
  self.file_id = self.videos[0].videoURL
112
124
 
125
+
113
126
  #
114
127
  # "data": {
115
128
  # "videos": [
@@ -138,3 +151,51 @@ class VideoResponse(BaseModel):
138
151
  # "cycleTime": 10,
139
152
  # "hasMore": false
140
153
  # }
154
+
155
+ """
156
+ refer_voice
157
+ 请输入您的参考音色ID
158
+ refer_instrumental
159
+ 请输入您的参考伴奏ID
160
+ lyrics
161
+ ##在无垠的星空下\n\n梦开始飞翔\n月光洒在心上\n\n温柔的想象\n在这片宁静中\n\n我们自由歌唱##
162
+ model
163
+ music-01
164
+ audio_setting
165
+ {"sample_rate":44100,"bitrate":256000,"format":"mp3"}
166
+
167
+ """
168
+
169
+
170
+ class MusicRequet(BaseModel):
171
+ model: str = "music-01"
172
+
173
+ lyrics: str
174
+
175
+ refer_voice: Optional[str] = None
176
+ refer_instrumental: Optional[str] = None
177
+ audio_setting: dict = {
178
+ "sample_rate": 44100,
179
+ "bitrate": 256000,
180
+ "format": "mp3"
181
+ }
182
+
183
+
184
+ """
185
+ {
186
+ "data":{
187
+ "audio":"hex编码的音频数据",
188
+ "status":2
189
+ },
190
+ "trace_id":"02cb3e6a7f8ada5886fdc11e09452353",
191
+ "base_resp":{
192
+ "status_code":0,
193
+ "status_msg":"success"
194
+ }
195
+ }
196
+ """
197
+
198
+
199
+ class MusicResponse(BaseModel):
200
+ trace_id: str
201
+ base_resp: BaseResponse
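
A minimal sketch of building a music request with the schema added above (the class name is kept as spelled in the diff, MusicRequet; the lyrics value is illustrative):

from meutils.schemas.hailuo_types import MusicRequet

req = MusicRequet(lyrics="##在无垠的星空下\n梦开始飞翔##")
payload = req.model_dump()
print(payload["model"])          # music-01
print(payload["audio_setting"])  # {'sample_rate': 44100, 'bitrate': 256000, 'format': 'mp3'}
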
meutils/schemas/image_types.py CHANGED
@@ -117,11 +117,6 @@ class FluxImageRequest(ImageRequest):
     def __init__(self, /, **data: Any):
         super().__init__(**data)
         self.image_size = self.size
-        if "dev" in self.model:
-            self.model = 'flux-schnell'
-
-        if "pro" in self.model:
-            self.model = 'flux-pro'


 class TogetherImageRequest(ImageRequest): # together
@@ -238,8 +233,7 @@ class RecraftImageRequest(ImageRequest):
     Literal[
         'any',
         'digital_illustration',
-        'illustration_3d',
-        'digital_illustration_seamless',
+        'digital_illustration',
         'digital_illustration_pixel_art',
         'digital_illustration_3d',
         'digital_illustration_psychedelic',
@@ -259,6 +253,7 @@
         'digital_illustration_hand_drawn_outline',
         'digital_illustration_handmade_3d',
         'digital_illustration_stickers_drawings',
+
         'realistic_image',
         'realistic_image_mockup',
         'realistic_image_b_and_w',
@@ -268,6 +263,7 @@
         'realistic_image_natural_light',
         'realistic_image_studio_portrait',
         'realistic_image_motion_blur',
+
         'vector_illustration',
         'vector_illustration_seamless',
         'vector_illustration_line_art',