MeUtils 2025.8.26.12.41.0__py3-none-any.whl → 2025.8.27.11.59.36__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those package versions.
- examples/_openaisdk/openai_edits.py +35 -18
- meutils/apis/fal/images.py +23 -8
- meutils/apis/images/generations.py +9 -6
- meutils/apis/jimeng/images.py +1 -4
- meutils/apis/oneapi/tasks.py +13 -3
- meutils/apis/utils.py +17 -1
- meutils/apis/volcengine_apis/videos.py +71 -15
- meutils/data/VERSION +1 -1
- meutils/db/id2redis.py +12 -0
- meutils/db/redis_db.py +5 -3
- meutils/db/sync_data.py +45 -0
- meutils/llm/check_utils.py +6 -4
- meutils/llm/openai_utils/adapters.py +4 -4
- meutils/llm/openai_utils/common.py +2 -1
- meutils/schemas/image_types.py +10 -1
- meutils/schemas/oneapi/common.py +4 -2
- meutils/schemas/openai_types.py +1 -1
- {meutils-2025.8.26.12.41.0.dist-info → meutils-2025.8.27.11.59.36.dist-info}/METADATA +259 -259
- {meutils-2025.8.26.12.41.0.dist-info → meutils-2025.8.27.11.59.36.dist-info}/RECORD +23 -21
- {meutils-2025.8.26.12.41.0.dist-info → meutils-2025.8.27.11.59.36.dist-info}/WHEEL +0 -0
- {meutils-2025.8.26.12.41.0.dist-info → meutils-2025.8.27.11.59.36.dist-info}/entry_points.txt +0 -0
- {meutils-2025.8.26.12.41.0.dist-info → meutils-2025.8.27.11.59.36.dist-info}/licenses/LICENSE +0 -0
- {meutils-2025.8.26.12.41.0.dist-info → meutils-2025.8.27.11.59.36.dist-info}/top_level.txt +0 -0
examples/_openaisdk/openai_edits.py
CHANGED
@@ -16,8 +16,11 @@ from meutils.schemas.image_types import ImageRequest, ImageEditRequest
 from openai import OpenAI
 
 client = OpenAI(
-    base_url=os.getenv("GITEE_BASE_URL"),
-    api_key=os.getenv("GITEE_API_KEY"),
+    # base_url=os.getenv("GITEE_BASE_URL"),
+    # api_key=os.getenv("GITEE_API_KEY"),
+    base_url="http://0.0.0.0:8000/v1",
+    api_key="null",
+
 )
 
 # r = client.images.edit(
@@ -27,26 +30,40 @@ client = OpenAI(
 # response_format="url"
 # )
 
+model = "FLUX.1-Kontext-dev"
+# model ="qwen-image"
+model = "fal-ai/flux-pro/kontext"
+# model = "fal-ai/flux-pro/kontext/max"
+
+# model = "fal-ai/gemini-25-flash-image"
+
 data = {
-
-
-
-
-
-
-
-
-
+    'model': model,
+    'prompt': "将小黄鸭放在衣服上",
+    'size': "1024x1024",
+    'steps': "20",
+    'guidance_scale': "2.5",
+    'return_image_quality': "80",
+    'return_image_format': "PNG",
+    # 'lora_weights': json.dumps(lora_weights_data),  # 将字典转换为 JSON 字符串
+    'lora_scale': "1",
 
-
-
+    # 'image': open('/Users/betterme/PycharmProjects/AI/MeUtils/meutils/apis/images/image2.webp', 'rb').read(),
+    'image': [
 
-
+        # open('/Users/betterme/PycharmProjects/AI/MeUtils/meutils/apis/images/image1.webp', 'rb'),
+        # open('/Users/betterme/PycharmProjects/AI/MeUtils/meutils/apis/images/image2.webp', 'rb')
+        open('/Users/betterme/PycharmProjects/AI/MeUtils/meutils/apis/images/image1.webp', 'rb').read(),
+        open('/Users/betterme/PycharmProjects/AI/MeUtils/meutils/apis/images/image2.webp', 'rb').read()
+    ]
 
-
-
-
+}
+
+data = to_openai_params(ImageEditRequest(**data))
 
+r = client.images.edit(
+    **data
+)
 
 # import requests
 #
@@ -71,4 +88,4 @@ data = to_openai_params(ImageEditRequest(**data))
 # })
 #
 # with open("output.png", "wb") as file:
-#     file.write(output)
+#     file.write(output)

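The reworked example points the OpenAI SDK at a locally hosted MeUtils gateway instead of Gitee and sends two reference images through `ImageEditRequest` and `to_openai_params`. A condensed sketch of the same call, assuming the gateway is running on `http://0.0.0.0:8000/v1`, that the installed `openai` client accepts a list of reference images for `images.edit`, and that the image files exist locally (paths are placeholders taken from the diff):

```python
# Condensed sketch of the updated example; base_url, api_key and the image
# paths are placeholders, and multi-image edit support depends on the gateway.
from openai import OpenAI

client = OpenAI(base_url="http://0.0.0.0:8000/v1", api_key="null")

with open("image1.webp", "rb") as f1, open("image2.webp", "rb") as f2:
    result = client.images.edit(
        model="fal-ai/flux-pro/kontext",
        prompt="将小黄鸭放在衣服上",       # "put the little yellow duck on the clothes"
        image=[f1.read(), f2.read()],      # multiple reference images, as raw bytes
        size="1024x1024",
    )
print(result)
```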
meutils/apis/fal/images.py
CHANGED
@@ -96,13 +96,10 @@ async def generate(request: ImageRequest, api_key: Optional[str] = None):
         }
     elif "kontext" in request.model:  # https://fal.ai/models/fal-ai/flux-pro/kontext/max
 
-        if image_urls := parse_url(request.prompt):
+        if (image_urls := request.image_urls) or (image_urls := parse_url(request.prompt)):
             if not validate_url(image_urls):
                 raise Exception(f"Invalid image url: {image_urls}")
 
-            if request.user:  # 转存
-                image_urls = await to_url_fal(image_urls)
-
             for image_url in image_urls:
                 request.prompt = request.prompt.replace(image_url, "")
 
@@ -202,6 +199,17 @@ async def generate(request: ImageRequest, api_key: Optional[str] = None):
 
         }
 
+    elif request.model.startswith(("fal-ai/nano-banana", "fal-ai/gemini-25-flash-image")):
+        arguments = {
+            "prompt": request.prompt,
+            "image_urls": request.image_urls,
+            "num_images": request.n or 1,
+        }
+        if request.image_urls:
+            request.model = f"""{request.model.removesuffix("/edit")}/edit"""
+
+        logger.debug(bjson(arguments))
+
     try:
 
         data = await AsyncClient(key=token).run(
@@ -245,9 +253,10 @@ if __name__ == '__main__':
     model = "fal-ai/imagen4/preview"
     model = "fal-ai/flux-pro/kontext"
     # model = "fal-ai/flux-pro/kontext/max"
+
     #
     prompt = "https://v3.fal.media/files/penguin/XoW0qavfF-ahg-jX4BMyL_image.webp https://v3.fal.media/files/tiger/bml6YA7DWJXOigadvxk75_image.webp Put the little duckling on top of the woman's t-shirt."
-    prompt = '把小鸭子放在女人的T恤上面。\nhttps://s3.ffire.cc/cdn/20250530/tEzZKkhp3tKbNzva6mgC2T\nhttps://s3.ffire.cc/cdn/20250530/AwHJpuJuNg5w3sVbH4PZdv'
+    # prompt = '把小鸭子放在女人的T恤上面。\nhttps://s3.ffire.cc/cdn/20250530/tEzZKkhp3tKbNzva6mgC2T\nhttps://s3.ffire.cc/cdn/20250530/AwHJpuJuNg5w3sVbH4PZdv'
     request = ImageRequest(prompt=prompt, model=model)
 
     data = {
@@ -256,14 +265,20 @@ if __name__ == '__main__':
         "size": 'auto'
     }
 
+    data = {
+        "model": "fal-ai/gemini-25-flash-image",
+        "prompt": "make a photo of the man driving the car down the california coastline",
+        "image": [
+            "https://storage.googleapis.com/falserverless/example_inputs/nano-banana-edit-input.png",
+            "https://storage.googleapis.com/falserverless/example_inputs/nano-banana-edit-input-2.png"
+        ],
+        "num_images": 1
+    }
 
     request = ImageRequest(**data)
 
-    print(request)
-
     arun(generate(request))
 
-
     # request = ImageRequest(prompt='https://oss.ffire.cc/files/kling_watermark.png The woman smiled', model=model)
     #
     # request = ImageRequest(prompt="https://oss.ffire.cc/files/kling_watermark.png 让这个女人哭起来", model=model)

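The new `fal-ai/nano-banana` / `fal-ai/gemini-25-flash-image` branch builds a small `arguments` payload and, whenever reference images are present, normalizes the model id to its `/edit` variant exactly once. A standalone sketch of that routing rule (the helper name below is illustrative, not part of MeUtils):

```python
# Sketch of the new fal routing rule, isolated from the module.
from typing import List, Optional, Tuple


def build_nano_banana_call(model: str, prompt: str,
                           image_urls: Optional[List[str]] = None,
                           n: int = 1) -> Tuple[str, dict]:
    arguments = {
        "prompt": prompt,
        "image_urls": image_urls or [],
        "num_images": n or 1,
    }
    if image_urls:
        # switch to the edit endpoint, without doubling "/edit" if it is already there
        model = f'{model.removesuffix("/edit")}/edit'
    return model, arguments


print(build_nano_banana_call("fal-ai/gemini-25-flash-image", "add a duck",
                             ["https://example.com/a.png"]))
# -> ('fal-ai/gemini-25-flash-image/edit', {...})
```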
meutils/apis/images/generations.py
CHANGED
@@ -34,16 +34,17 @@ async def generate(
         **request.model_dump(exclude_none=True, exclude={"extra_fields", "aspect_ratio"}),
         **(request.extra_fields or {})
     }
-
+    request = ImageRequest(**data)
     if request.model.startswith("doubao"):
-
-
-
+        request.watermark = False
+        if request.image and isinstance(request.image, list):
+            request.image = request.image[0]
 
+        data = to_openai_params(request)
         client = AsyncClient(api_key=api_key, base_url=base_url)
         return await client.images.generate(**data)
 
-    if request.model.startswith("fal-ai"):
+    if request.model.startswith("fal-ai"):  # 主要 request.image
         return await fal_generate(request, api_key)
 
     if request.model.startswith(("recraft",)):
@@ -62,7 +63,9 @@ async def generate(
     if request.model in {"Qwen-Image", "FLUX_1-Krea-dev"} and request.model.endswith(("lora",)):
         return await gitee_images_generate(request, api_key)
 
-    if request.model.startswith("qwen-image"):
+    if request.model.startswith("qwen-image"):  # 仅支持单图
+        if request.image and isinstance(request.image, list):
+            request.image = request.image[-1]
         return await QwenCompletions(api_key=api_key).generate(request)
 
 

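`generate()` now rebuilds the `ImageRequest` after merging `extra_fields` and normalizes multi-image input per backend: doubao models get `watermark=False` and keep only the first reference image, while `qwen-image` keeps only the last one. A simplified standalone sketch of that normalization (the dataclass below only mimics the relevant `ImageRequest` fields):

```python
# Standalone sketch of the per-backend image normalization added here.
from dataclasses import dataclass
from typing import List, Optional, Union


@dataclass
class Req:
    model: str
    image: Optional[Union[str, List[str]]] = None
    watermark: Optional[bool] = None


def normalize(req: Req) -> Req:
    if req.model.startswith("doubao"):
        req.watermark = False
        if req.image and isinstance(req.image, list):
            req.image = req.image[0]        # doubao: single image, keep the first
    elif req.model.startswith("qwen-image"):
        if req.image and isinstance(req.image, list):
            req.image = req.image[-1]       # qwen-image: single image, keep the last
    return req


print(normalize(Req("doubao-seededit", image=["a.png", "b.png"])))
```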
meutils/apis/jimeng/images.py
CHANGED
@@ -218,9 +218,6 @@ async def create_task(request: ImageRequest, token: Optional[str] = None): # to
 
     headers = get_headers(url, token)
 
-    if "http" in request.prompt:  # 图生
-        request.model = "high_aes_general_v20_L:general_v2.0_L"
-
     draft_content = await create_draft_content(request, token)
 
     logger.debug(json.dumps(draft_content))
@@ -663,7 +660,7 @@ if __name__ == '__main__':
     # arun(generate(ImageRequest(prompt="fuck you")))
     prompt = "A plump Chinese beauty wearing a wedding dress revealing her skirt and underwear is swinging on the swing,Happy smile,cleavage,Exposed thighs,Spread your legs open,Extend your leg,panties,upskirt,Barefoot,sole"
     # prompt = "a dog cat in the same room"
-    prompt = "https://oss.ffire.cc/files/kling_watermark.png
+    prompt = "https://oss.ffire.cc/files/kling_watermark.png 让这个女人带上墨镜,衣服换个颜色.. . ! "
     request = ImageRequest(prompt=prompt, size="1328x1328")
     # request = ImageRequest(prompt=prompt, size="1024x1024")
 

meutils/apis/oneapi/tasks.py
CHANGED
@@ -14,6 +14,7 @@ from meutils.apis.utils import make_request_httpx
 from meutils.apis.oneapi.log import get_logs
 from meutils.apis.oneapi.user import update_user_for_refund, get_user
 
+
 # headers
 ACTIONS = {
     # 按量计费的异步任务
@@ -36,7 +37,7 @@ ACTIONS = {
 }
 
 
-async def get_tasks(platform: str = "flux", action: str = "", status: str = "NOT_START"):
+async def get_tasks(platform: str = "flux", action: str = "", status: str = "NOT_START", return_ids: bool = False):
     base_url = "https://api.chatfire.cn"
     path = "/api/task/"
     headers = {
@@ -66,6 +67,11 @@ async def get_tasks(platform: str = "flux", action: str = "", status: str = "NOT
         params=params,
         headers=headers
     )
+    if return_ids:
+        # from meutils.str_utils.json_utils import json_path
+
+        return [item['task_id'] for item in response['data']['items']] | xUnique
+
     return response
 
 
@@ -74,7 +80,7 @@ async def polling_tasks(platform: str = "flux", action: str = "", status: str =
     if items := response['data']['items']:
         tasks = []
         model = ''
-        for item in items[:
+        for item in items[:128]:  # 批量更新
            task_id = item['task_id']
            action = item['action'].split('-', maxsplit=1)[0]  # 模糊匹配
            if 'fal-' in item['action']:
@@ -139,7 +145,11 @@ async def refund_tasks(task_id: Optional[str] = None): # 只补偿一次
 
 
 if __name__ == '__main__':
+
     pass
     arun(polling_tasks())
-    # arun(get_tasks(action="
+    # arun(get_tasks(action="jimeng-video-3.0", status="FAILURE"))
+    # arun(get_tasks(action="jimeng-video-3.0", return_ids=True))
+
+
     # arun(refund_tasks())

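`get_tasks()` gains a `return_ids` flag that short-circuits the response into a de-duplicated list of task ids via meutils' `xUnique` pipe operator. A plain-Python equivalent of that branch (order-preserving de-duplication is assumed here; the real pipe operator may behave slightly differently):

```python
# Plain-Python equivalent of the new return_ids branch; dict.fromkeys stands
# in for the `| xUnique` pipe used in the actual module.
def extract_task_ids(response: dict) -> list:
    ids = [item["task_id"] for item in response["data"]["items"]]
    return list(dict.fromkeys(ids))  # de-duplicate while keeping order


sample = {"data": {"items": [{"task_id": "cgt-1"}, {"task_id": "cgt-2"}, {"task_id": "cgt-1"}]}}
print(extract_task_ids(sample))  # ['cgt-1', 'cgt-2']
```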
meutils/apis/utils.py
CHANGED
@@ -7,6 +7,8 @@
 # @WeChat : meutils
 # @Software : PyCharm
 # @Description :
+import os
+
 import httpx
 
 from meutils.pipe import *
@@ -193,6 +195,20 @@ async def make_request_with_cache(
     )
 
 
+def get_base_url(base_url: Optional[str], headers: Optional[dict] = None):
+    headers = headers or {}
+    mapping = {
+        "volc": os.getenv("VOLC_BASE_URL"),
+    }
+    base_url = (
+            mapping.get(base_url, base_url)
+            or headers.get("base-url") or headers.get("x-base-url")
+            or "https://api.siliconflow.cn/v1"
+    )
+
+    return base_url
+
+
 if __name__ == '__main__':
     from meutils.io.files_utils import to_bytes
 
@@ -237,7 +253,7 @@ if __name__ == '__main__':
     }
     FAL_KEY = "aa5c047f-2621-4be2-9cee-9857a630aa11:b06782c97dffb50bfd6eebb63f49c624"
 
-    headers ={"Authorization": f"key {FAL_KEY}"}
+    headers = {"Authorization": f"key {FAL_KEY}"}
     arun(make_request(
         base_url=base_url,
         path=path,

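The new `get_base_url` helper resolves a base URL in a fixed order: an alias map (currently only `volc`, read from the `VOLC_BASE_URL` environment variable), then the `base-url` / `x-base-url` request headers, then a SiliconFlow default. The same resolution order, reproduced as a standalone sketch:

```python
# The resolution order of the new helper, reproduced standalone; the alias
# name, env var and default URL come straight from the diff above.
import os
from typing import Optional


def get_base_url(base_url: Optional[str], headers: Optional[dict] = None) -> str:
    headers = headers or {}
    mapping = {"volc": os.getenv("VOLC_BASE_URL")}
    return (
        mapping.get(base_url, base_url)        # 1. alias (e.g. "volc") or explicit URL
        or headers.get("base-url")             # 2. base-url header
        or headers.get("x-base-url")           # 3. x-base-url header
        or "https://api.siliconflow.cn/v1"     # 4. fallback default
    )


print(get_base_url(None, {"x-base-url": "https://ark.cn-beijing.volces.com/api/v3"}))
```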
meutils/apis/volcengine_apis/videos.py
CHANGED
@@ -28,18 +28,29 @@ from fastapi import APIRouter, File, UploadFile, Query, Form, Depends, Request,
 FEISHU_URL = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2knEd?sheet=8W6kk8"  # 超刷
 
 
-async def get_valid_token(tokens: Optional[list] = None):
+async def get_valid_token(tokens: Optional[list] = None, force_update: bool = True):
     tokens = tokens or await get_series(FEISHU_URL)
 
+    if token := await redis_aclient.get("volc-token"):
+        token = token.decode()
+
+        if force_update:  # 强制检测更新
+            if await check(token):
+                return token
+            else:
+                await redis_aclient.delete("volc-token")  # 删除无效
+        else:
+            return token
+
     for token in tokens:
         if await check(token):
-
-
+            if not await redis_aclient.get("volc-token"):
+                await redis_aclient.set("volc-token", token, ex=2 * 3600 - 10 * 60)
 
             return token
-
-
-    _ = f"{time.ctime()}\n\n{FEISHU_URL}\n\n所有token
+
+        logger.debug(f"无效 {token}")
+    _ = f"{time.ctime()}\n\n{FEISHU_URL}\n\n所有token无效\n\n{token}"
     logger.error(_)
     send_message(_, n=3)
 
@@ -47,18 +58,18 @@ async def get_valid_token(tokens: Optional[list] = None):
 # check_image_and_video = partial(check, purpose='video and image')
 
 
-@retrying(
+@retrying()
 async def create_task(request: Union[CompletionRequest, VideoRequest], api_key: Optional[str] = None):
     # api_key = api_key or await get_next_token_for_polling(feishu_url=FEISHU_URL, check_token=check)
-
+    api_key = api_key or await get_valid_token(force_update=False)
 
-    feishu_url = "https://xchatllm.feishu.cn/sheets/Z59Js10DbhT8wdt72LachSDlnlf?sheet=rcoDg7"
-    api_key = api_key or await get_next_token_for_polling(
-
-
-
-
-    )
+    # feishu_url = "https://xchatllm.feishu.cn/sheets/Z59Js10DbhT8wdt72LachSDlnlf?sheet=rcoDg7"
+    # api_key = api_key or await get_next_token_for_polling(
+    #     feishu_url=feishu_url,
+    #     from_redis=True,
+    #     ttl=24 * 3600,
+    #     check_token=check
+    # )
 
     logger.debug(f"api_key: {api_key}")
     if isinstance(request, VideoRequest):  # 兼容jimeng
@@ -129,6 +140,9 @@ async def create_task(request: Union[CompletionRequest, VideoRequest], api_key:
         payload["content"] = request.content
 
     elif image_urls := request.last_urls.get("image_url"):
+        if payload["model"] not in {"doubao-seedance-1-0-lite-i2v-250428", "doubao-seedance-1-0-pro-250528"}:
+            payload["model"] = "doubao-seedance-1-0-lite-i2v-250428"
+
         payload["content"] = [
             {
                 "type": "text",
@@ -181,6 +195,34 @@ async def get_task(task_id: str):
     return response
 
 
+async def get_task_from_feishu(task_id: Union[str, list], tokens: Optional[list] = None):  # todo: 定时校验
+    feishu_url = "https://xchatllm.feishu.cn/sheets/Z59Js10DbhT8wdt72LachSDlnlf?sheet=rcoDg7"
+    tokens = tokens or await get_series(feishu_url)
+
+    if isinstance(task_id, str):
+        task_ids = [task_id]
+    else:
+        task_ids = task_id
+
+    for task_id in tqdm(set(task_ids)):
+        if not await redis_aclient.get(task_id):
+            for api_key in tqdm(tokens):
+                client = AsyncClient(base_url="https://ark.cn-beijing.volces.com/api/v3", api_key=api_key)
+                try:
+                    response = await client.get(
+                        path=f"/contents/generations/tasks/{task_id}",
+                        cast_to=object,
+                    )
+
+                    await redis_aclient.set(task_id, api_key, ex=7 * 24 * 3600)
+                    logger.debug(f"{task_id} => {api_key}")
+                    break
+
+                except Exception as e:
+                    # logger.error(e)
+                    continue
+
+
 # 执行异步函数
 if __name__ == "__main__":
     # api_key = "07139a08-e360-44e2-ba31-07f379bf99ed"  # {'id': 'cgt-20250611164343-w2bzq'} todo 过期调用get
@@ -211,6 +253,20 @@ if __name__ == "__main__":
 
     # arun(get_task("cgt-20250707160713-j8kll"))
 
+    tokens = """
+
+    7499ffeb-dbb1-4016-b178-98407c701f09
+    c4356b58-4aa3-4a52-b907-b40c4dd2e502
+
+    """.split()
+
+    ids = [
+        "cgt-20250826185409-d6vb2", "cgt-20250826185409-h8dsf"
+    ]
+
+    # arun(get_task_from_feishu(ids, tokens))
+    # arun(get_task_from_feishu(ids, ))
+
     arun(get_valid_token())
 
     """

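`get_valid_token()` now caches a working token in Redis under the `volc-token` key (TTL just under two hours). When `force_update` is false it returns the cached value without re-checking it; otherwise it re-validates, drops a stale entry, and falls back to scanning the token pool. The decision flow, sketched with an in-memory dict standing in for Redis and a stubbed `check()` (not the production code):

```python
# Decision flow of the Redis-backed token cache, with a dict standing in for
# redis and a trivial validity stub; structure mirrors the diff above.
import asyncio

CACHE: dict = {}  # stands in for redis: "volc-token" -> token


async def check(token: str) -> bool:
    return token.startswith("ok-")  # stubbed validity test


async def get_valid_token(tokens: list, force_update: bool = True):
    if token := CACHE.get("volc-token"):
        if not force_update:
            return token                      # trust the cached token
        if await check(token):
            return token                      # cache still valid
        CACHE.pop("volc-token", None)         # drop the stale entry

    for token in tokens:                      # fall back to scanning the pool
        if await check(token):
            CACHE.setdefault("volc-token", token)
            return token
    return None


print(asyncio.run(get_valid_token(["bad-1", "ok-2"], force_update=False)))
```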
meutils/data/VERSION
CHANGED
@@ -1 +1 @@
-2025.08.
+2025.08.27.11.59.36

meutils/db/id2redis.py
ADDED
meutils/db/redis_db.py
CHANGED
@@ -88,8 +88,8 @@ if __name__ == '__main__':
     #
     # arun(main())
 
-    async def main():
-
+    # async def main():
+    #     return await redis_aclient.lpop("redis_key")
 
 
     # arun(main())
@@ -98,4 +98,6 @@ if __name__ == '__main__':
     # r = redis_client.sadd('set1', 'd')
     # k="meutils.config_utils.lark_utils.commonaget_spreadsheet_values()[('feishu_url', 'https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2knEd?sheet=Gvm9dt'), ('to_dataframe', True)] "
     # redis_client.delete(k)
-    print(redis_client.get('test'))
+    print(redis_client.get('test'))
+
+    print(redis_client.delete("k"))

meutils/db/sync_data.py
ADDED
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Project : AI.  @by PyCharm
+# @File : to_db
+# @Time : 2025/8/26 17:08
+# @Author : betterme
+# @WeChat : meutils
+# @Software : PyCharm
+# @Description :
+import json
+
+from meutils.pipe import *
+from meutils.db.redis_db import redis_client
+import os
+from redis import Redis, ConnectionPool
+from redis.asyncio import Redis as AsyncRedis, ConnectionPool as AsyncConnectionPool
+
+# kwargs = {
+#     "retry_on_timeout": True,
+#     # "db": 6
+# }
+# REDIS_URL="redis://:chatfirechatfire@110.sdsd.51.201:6379"  # 'redis://localhost:10/1?pool_max_size=1'
+# # logger.debug(REDIS_URL)
+#
+# pool = ConnectionPool.from_url(REDIS_URL, **kwargs)
+# redis_client_main = Redis.from_pool(pool)
+
+data = json.loads(open('FeHelper-20250826164705.json').read())
+
+
+
+
+
+ids = list(set([item['task_id'] for item in data['data']['items']]))
+
+#
+#
+# for id in ids:
+#     if v := redis_client.get(id):
+#         redis_client_main.set(id, v)
+#
+#
+# redis_client_main.get("cgt-20250826155618-nrzxm")
+
+

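The new `sync_data.py` is a one-off maintenance script: it loads a FeHelper JSON export of task records, collects the unique `task_id`s, and (in the commented-out block) mirrors the matching keys from one Redis instance to another. A sketch of that copy loop, with both connection URLs as placeholders and the JSON filename taken from the diff:

```python
# Sketch of the copy loop that sync_data.py keeps commented out; both Redis
# URLs are placeholders, not the real deployment endpoints.
import json
from redis import Redis

src = Redis.from_url("redis://localhost:6379/0")       # placeholder source
dst = Redis.from_url("redis://other-host:6379/0")      # placeholder destination

data = json.loads(open("FeHelper-20250826164705.json").read())
ids = {item["task_id"] for item in data["data"]["items"]}

for task_id in ids:
    if value := src.get(task_id):
        dst.set(task_id, value)                         # mirror the key as-is
```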
meutils/llm/check_utils.py
CHANGED
@@ -213,7 +213,7 @@ async def check_token_for_sophnet(api_key, threshold: float = 1):
 
 #
 @retrying()
-@rcache(ttl=
+# @rcache(ttl=7 * 24 * 3600, skip_cache_func=skip_cache_func)
 async def check_token_for_volc(api_key, threshold: float = 1, purpose: Optional[str] = None):
     if not isinstance(api_key, str):
         return await check_tokens(api_key, check_token_for_volc)
@@ -226,8 +226,8 @@ async def check_token_for_volc(api_key, threshold: float = 1, purpose: Optional[
     url = "https://ark.cn-beijing.volces.com/api/v3/contents/generations/tasks"
 
     payload = {
-        "model": "doubao-seedance-1-0-pro-250528",
-
+        # "model": "doubao-seedance-1-0-pro-250528",
+        "model":"doubao-seedance-1-0-lite-i2v-250428",
         "content": [
             {
                 "type": "text",
@@ -267,6 +267,7 @@ async def check_token_for_volc(api_key, threshold: float = 1, purpose: Optional[
             messages=[{"role": "user", "content": "hi"}],
             max_tokens=1
         )
+        # logger.debug(response.json())
 
         return True
     except TimeoutException as e:
@@ -399,8 +400,9 @@ if __name__ == '__main__':
 
     # arun(check_token_for_ppinfra("sk_F0kgPyCMTzmOH_-VCEJucOK8HIrbnLGYm_IWxBToHZQ"))
 
-    arun(check_token_for_volc("
+    # arun(check_token_for_volc("c720b2fb-e2be-42a7-a9e5-0a42c29d5766"))
     # arun(check_token_for_volc("279749bd-ba5e-4962-9c65-eb6604b65594"))
+    arun(check_token_for_volc("8a5af7cb-42a3-4391-ac40-9d0f4502acde", purpose='seedance'))
 
     # arun(check_token_for_ppinfra("sk_mCb5sRGTi6GXkSRp5F679Rbs0V_Hfee3p85lccGXCOo"))
 

meutils/llm/openai_utils/adapters.py
CHANGED
@@ -55,12 +55,12 @@ async def chat_for_image(
     # return response
     image = None
     prompt = request.last_user_content
-    if request.last_urls:  # image_url
+    if image_urls := request.last_urls.get("image_url"):  # image_url
         if request.model.startswith('fal'):
-            urls = await to_url_fal(
-
+            urls = await to_url_fal(image_urls, content_type="image/png")  # 国外友好
+            image = urls
         else:
-            urls = await to_url(
+            urls = await to_url(image_urls, content_type="image/png")  # 数组
             image = urls[-1]
 
         request = ImageRequest(

meutils/llm/openai_utils/common.py
CHANGED
@@ -42,7 +42,8 @@ def to_openai_params(
         request: Union[dict, CompletionRequest, ChatCompletionRequest, ImageRequest, ImageEditRequest, TTSRequest, STTRequest],
         redirect_model: Optional[str] = None,
 ) -> dict:
-
+    if not isinstance(request, ImageEditRequest):
+        data = copy.deepcopy(request)
     if not isinstance(request, dict):
         data = request.model_dump(exclude_none=True)
 

meutils/schemas/image_types.py
CHANGED
@@ -116,7 +116,7 @@ class ImageRequest(BaseModel): # openai
 
     user: Optional[str] = None  # to_url_fal
 
-    image: Optional[str] = None  # url b64
+    image: Optional[Union[str, List[str]]] = None  # url b64
     watermark: Optional[bool] = None
 
     def __init__(self, /, **data: Any):
@@ -133,6 +133,15 @@ class ImageRequest(BaseModel): # openai
 
         self.size = self.size if 'x' in self.size else '512x512'
 
+    @cached_property
+    def image_urls(self):
+        if self.image:
+            if isinstance(self.image, str):
+                return [self.image]
+            else:
+                return self.image
+        return []
+
     @cached_property
     def image_and_prompt(self):  # image prompt 目前是单图
         if self.prompt.startswith('http') and (prompts := self.prompt.split(maxsplit=1)):

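`ImageRequest.image` now accepts either a single URL/base64 string or a list, and the new `image_urls` property always hands callers a list. The same normalization, shown as a standalone function (not the actual model class):

```python
# The normalization the new image_urls property performs, shown standalone.
from typing import List, Optional, Union


def as_image_urls(image: Optional[Union[str, List[str]]]) -> List[str]:
    if not image:
        return []
    return [image] if isinstance(image, str) else image


print(as_image_urls("https://example.com/a.png"))   # ['https://example.com/a.png']
print(as_image_urls(["a.png", "b.png"]))            # ['a.png', 'b.png']
print(as_image_urls(None))                          # []
```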
meutils/schemas/oneapi/common.py
CHANGED
@@ -479,9 +479,11 @@ MODEL_PRICE = {
 
     "deepseek-search": 0.01,
     'deepseek-r1-search': 0.01,
-    "deepseek-r1-search-pro": 0.
-    "deepseek-r1-search-pro-thinking": 0.
+    "deepseek-r1-search-pro": 0.01,
+    "deepseek-r1-search-pro-thinking": 0.01,
     'deepseek-reasoner-search': 0.01,
+    "doubao-1.5-search": 0.01,
+    "deepseek-v3.1-search": 0.01,
 
     # MJ
     "mj-chat": 0.3,

meutils/schemas/openai_types.py
CHANGED