MeUtils 2024.3.4.13.4.45__py3-none-any.whl → 2025.1.16.17.15.52__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MeUtils-2024.3.4.13.4.45.dist-info → MeUtils-2025.1.16.17.15.52.dist-info}/METADATA +38 -32
- MeUtils-2025.1.16.17.15.52.dist-info/RECORD +864 -0
- {MeUtils-2024.3.4.13.4.45.dist-info → MeUtils-2025.1.16.17.15.52.dist-info}/WHEEL +1 -1
- {MeUtils-2024.3.4.13.4.45.dist-info → MeUtils-2025.1.16.17.15.52.dist-info}/entry_points.txt +1 -0
- apps/spider.py +24 -8
- examples/_openaisdk/4v.py +110 -0
- examples/_openaisdk/__init__.py +11 -0
- examples/_openaisdk/baichuan.py +38 -0
- examples/_openaisdk/bpo.py +138 -0
- examples/_openaisdk/chat_latex.py +95 -0
- examples/_openaisdk/chattts.py +85 -0
- examples/_openaisdk/copilot.py +48 -0
- examples/_openaisdk/dalle3.py +48 -0
- examples/_openaisdk/deeplx.py +31 -0
- examples/_openaisdk/demo.py +77 -0
- examples/_openaisdk/embeddings.py +34 -0
- examples/_openaisdk/gpt4all.py +69 -0
- examples/_openaisdk/gpt_fc.py +23 -0
- examples/_openaisdk/gr_vl.py +46 -0
- examples/_openaisdk/json_mode.py +12 -0
- examples/_openaisdk/kimi.py +91 -0
- examples/_openaisdk/kimi_qa.py +57 -0
- examples/_openaisdk/minimax.py +75 -0
- examples/_openaisdk/open_router.py +48 -0
- examples/_openaisdk/openai_aiplus.py +54 -0
- examples/_openaisdk/openai_audio.py +20 -0
- examples/_openaisdk/openai_baichuan.py +59 -0
- examples/_openaisdk/openai_cache.py +37 -0
- examples/_openaisdk/openai_chatfire.py +228 -0
- examples/_openaisdk/openai_chatfire_all.py +166 -0
- examples/_openaisdk/openai_deepinfra.py +55 -0
- examples/_openaisdk/openai_deepseek.py +29 -0
- examples/_openaisdk/openai_doubao.py +43 -0
- examples/_openaisdk/openai_embeddings.py +36 -0
- examples/_openaisdk/openai_files.py +57 -0
- examples/_openaisdk/openai_gitee.py +33 -0
- examples/_openaisdk/openai_god.py +45 -0
- examples/_openaisdk/openai_groq.py +240 -0
- examples/_openaisdk/openai_images.py +203 -0
- examples/_openaisdk/openai_json.py +78 -0
- examples/_openaisdk/openai_lingyi.py +59 -0
- examples/_openaisdk/openai_modelscope.py +54 -0
- examples/_openaisdk/openai_moon.py +55 -0
- examples/_openaisdk/openai_oi.py +61 -0
- examples/_openaisdk/openai_ppu.py +47 -0
- examples/_openaisdk/openai_qwen.py +58 -0
- examples/_openaisdk/openai_search.py +42 -0
- examples/_openaisdk/openai_sensenova.py +81 -0
- examples/_openaisdk/openai_siliconflow.py +52 -0
- examples/_openaisdk/openai_step.py +45 -0
- examples/_openaisdk/openai_test.py +66 -0
- examples/_openaisdk/openai_together.py +57 -0
- examples/_openaisdk/openai_tune.py +38 -0
- examples/_openaisdk/openai_zhipu.py +59 -0
- examples/_openaisdk/ppu.py +28 -0
- examples/_openaisdk/rag.py +54 -0
- examples/_openaisdk/rag_.py +26 -0
- examples/_openaisdk/test.py +52 -0
- examples/_openaisdk/x.py +32 -0
- examples/_openaisdk/xx.py +29 -0
- examples/_openaisdk/zhipu_files.py +47 -0
- examples/_openaisdk/zhipu_/346/231/272/350/203/275/344/275/223.py +45 -0
- examples/_openaisdk//345/205/234/345/272/225/346/265/213/350/257/225.py +50 -0
- examples/_openaisdk//345/244/232/346/250/241/346/200/201/346/265/213/350/257/225.py +76 -0
- examples/_openaisdk//345/244/232/346/250/241/346/200/201/346/265/213/350/257/225_.py +56 -0
- examples/_openaisdk//346/226/207/344/273/266/351/227/256/347/255/224.py +36 -0
- examples/_openaisdk//346/226/207/346/241/243/350/247/243/346/236/220.py +34 -0
- examples/_openaisdk//346/250/241/345/236/213/346/265/213/350/257/225.py +53 -0
- examples/_openaisdk//351/230/277/351/207/214.py +80 -0
- {meutils/serving/jina/__demo → examples/ann}/__init__.py +1 -1
- examples/ann/main.py +31 -0
- examples/apis/kl.py +28 -0
- examples/apis/x.py +17 -0
- examples/apis/xx.py +17 -0
- examples/arq_demo/demo.py +3 -0
- examples/backgroundtasks.py +25 -0
- examples/bserver.py +513 -21
- examples/cache_demo/HermesCache_demo.py +81 -0
- examples/cache_demo/acacge.py +26 -0
- examples/cache_demo/x.py +31 -0
- {meutils/docarray_utils → examples/caches}/__init__.py +1 -1
- examples/caches/llmcache.py +18 -0
- examples/celery_demo/product_task.py +2 -0
- examples/demo.py +17 -1
- examples/fastapi_caching.py +59 -0
- {meutils/dependencies → examples/gr}/__init__.py +1 -1
- examples/gr/d.py +22 -0
- examples/gr/demo.py +30 -0
- examples/ip2/345/234/260/345/214/272.py +16 -0
- examples/jinja2_demo/j2_demo.py +20 -1
- examples/json/346/240/207/345/207/206/345/214/226.py +54 -0
- examples/md.py +29 -0
- {meutils/serving/jina → examples/nesc}/__init__.py +1 -1
- examples/nesc/main.py +76 -0
- examples/orm/mysql_orm.py +113 -0
- examples/orm/sql_creater.py +57 -0
- examples/orm/sqlm.py +134 -0
- examples/rq_demo/fns.py +18 -0
- examples/rq_demo/redis/351/230/237/345/210/227.py +14 -7
- examples/rq_demo/redis/351/230/237/345/210/227_add_chatfire.py +30 -0
- examples/size_map.py +43 -0
- examples/test.py +59 -0
- examples/webs/__init__.py +11 -0
- examples/webs/main.py +34 -0
- examples/x.py +13 -0
- examples//345/216/273/346/260/264/345/215/260.py +20 -0
- examples//346/226/207/346/241/243/346/231/272/350/203/275/__init__.py +11 -0
- meutils/_utils.py +15 -6
- meutils/ai_audio/asr/__init__.py +3 -2
- meutils/ai_audio/asr/cf_asr.py +53 -0
- meutils/ai_audio/asr/de.py +11 -0
- meutils/ai_audio/asr/fast_asr.py +15 -7
- meutils/ai_audio/asr/openai_asr.py +83 -6
- meutils/ai_audio/fast_asr.py +8 -4
- meutils/ai_audio/tts/EdgeTTS.py +33 -7
- meutils/ai_audio/tts/openai_tts.py +24 -20
- meutils/ai_audio/tts/tts_ui.py +1 -0
- meutils/ai_audio/utils.py +9 -0
- meutils/ai_cv/__init__.py +0 -1
- meutils/ai_cv/ocr.py +3 -2
- meutils/ai_cv/utils.py +154 -0
- meutils/ai_video/avmerge.py +6 -0
- meutils/ai_video/video.py +11 -2
- meutils/{api → apis}/__init__.py +1 -1
- meutils/apis/ali_apis.py +60 -0
- meutils/apis/audio/__init__.py +10 -0
- meutils/apis/audio/deepinfra.py +59 -0
- meutils/apis/audio/fish.py +248 -0
- meutils/apis/baidu/__init__.py +9 -0
- meutils/apis/baidu/bdaitpzs.py +229 -0
- meutils/apis/baidu/test.py +78 -0
- meutils/apis/chatglm/__init__.py +11 -0
- meutils/apis/chatglm/glm_video.py +273 -0
- meutils/apis/chatglm/glm_video_api.py +116 -0
- meutils/apis/chatglm/images.py +63 -0
- meutils/apis/chatglm/temp.py +259 -0
- meutils/apis/chatglm/x.py +31 -0
- meutils/{api → apis}/common.py +10 -6
- meutils/apis/fal/__init__.py +11 -0
- meutils/apis/fal/files.py +53 -0
- meutils/apis/fal/images.py +57 -0
- meutils/apis/fal/images_.py +72 -0
- meutils/apis/fal/videos.py +77 -0
- meutils/apis/firecrawl.py +45 -0
- meutils/apis/gitee/__init__.py +11 -0
- meutils/apis/gitee/images/__init__.py +9 -0
- meutils/apis/gitee/images/kolors.py +99 -0
- meutils/apis/hailuoai/__init__.py +11 -0
- meutils/apis/hailuoai/demo.py +34 -0
- meutils/apis/hailuoai/hasha_new.py +248 -0
- meutils/apis/hailuoai/music.py +11 -0
- meutils/apis/hailuoai/upload.py +116 -0
- meutils/apis/hailuoai/videos.py +460 -0
- meutils/apis/hailuoai/yy.py +242 -0
- meutils/apis/hf/__init__.py +11 -0
- meutils/apis/hf/got_ocr.py +64 -0
- meutils/apis/hf/gradio.py +34 -0
- meutils/apis/hf/hivisionidphotos.py +80 -0
- meutils/apis/hf/kolors.py +68 -0
- meutils/apis/hf/kolors_virtual_try_on.py +107 -0
- meutils/apis/hf/r.py +53 -0
- meutils/apis/hf/x.py +26 -0
- meutils/apis/hf//350/257/201/344/273/266/347/205/247.py +41 -0
- meutils/apis/hunyuan/__init__.py +11 -0
- meutils/apis/hunyuan/image_tools.py +84 -0
- meutils/apis/images/__init__.py +11 -0
- meutils/apis/images/deepinfra.py +92 -0
- meutils/apis/images/demo.py +150 -0
- meutils/apis/images/eidt.py +36 -0
- meutils/apis/images/flux/__init__.py +11 -0
- meutils/apis/images/flux/fluxpro.py +108 -0
- meutils/apis/images/flux/mystic.py +116 -0
- meutils/apis/images/ideogram/__init__.py +10 -0
- meutils/apis/images/ideogram/ideogram_images.py +193 -0
- meutils/apis/images/prodia/__init__.py +12 -0
- meutils/apis/images/prodia/faceswap.py +76 -0
- meutils/apis/images/recraft.py +152 -0
- meutils/apis/images/virtual_try_on/__init__.py +11 -0
- meutils/apis/images/virtual_try_on/images.py +65 -0
- meutils/apis/jiema/24mail.py +96 -0
- meutils/apis/jiema/__init__.py +11 -0
- meutils/apis/jiema/yezi.py +97 -0
- meutils/apis/jimeng/__init__.py +11 -0
- meutils/apis/jimeng/common.py +328 -0
- meutils/apis/jimeng/doubao.py +68 -0
- meutils/apis/jimeng/doubao_utils.py +175 -0
- meutils/apis/jimeng/files.py +263 -0
- meutils/apis/jimeng/images.py +140 -0
- meutils/apis/jimeng/lip_sync.py +11 -0
- meutils/apis/jina.py +55 -0
- meutils/apis/kling/__init__.py +11 -0
- meutils/apis/kling/api.py +60 -0
- meutils/apis/kling/images.py +174 -0
- meutils/apis/kling/kolors_virtual_try_on.py +111 -0
- meutils/apis/kling/kolors_virtual_try_on_web.py +126 -0
- meutils/apis/kling/videos.py +67 -0
- meutils/apis/kling//351/211/264/346/235/203.py +34 -0
- meutils/apis/kuaidi.py +32 -0
- meutils/apis/kuaishou/__init__.py +10 -0
- meutils/apis/kuaishou/klingai.py +523 -0
- meutils/apis/kuaishou/klingai_video.py +197 -0
- meutils/apis/kuaishou/kolors.py +189 -0
- meutils/apis/llm_qa.py +55 -0
- meutils/apis/luma/__init__.py +11 -0
- meutils/apis/luma/luma.py +123 -0
- meutils/apis/minicpm/__init__.py +9 -0
- meutils/apis/minicpm/luca.py +137 -0
- meutils/apis/monica/__init__.py +11 -0
- meutils/apis/monica/llm.py +11 -0
- meutils/apis/napkin/__init__.py +11 -0
- meutils/apis/napkin/icons.py +42 -0
- meutils/apis/niutrans.py +73 -0
- meutils/apis/oneapi/__init__.py +11 -0
- meutils/apis/oneapi/channel.py +68 -0
- meutils/apis/oneapi/common.py +135 -0
- meutils/apis/oneapi/log.py +47 -0
- meutils/apis/oneapi/token.py +48 -0
- meutils/apis/oneapi/token_.py +112 -0
- meutils/apis/oneapi/user.py +100 -0
- meutils/apis/oneapi/utils.py +47 -0
- meutils/apis/pixverse/__init__.py +11 -0
- meutils/apis/pixverse/pixverse.py +150 -0
- meutils/apis/proxy/__init__.py +11 -0
- meutils/apis/proxy/ips.py +178 -0
- meutils/apis/remini/__init__.py +11 -0
- meutils/apis/remini/remini.py +89 -0
- meutils/apis/replicateai/__init__.py +11 -0
- meutils/apis/replicateai/images.py +79 -0
- meutils/apis/replicateai/raw.py +53 -0
- meutils/apis/runwayml/__init__.py +10 -0
- meutils/apis/runwayml/gen.py +143 -0
- meutils/apis/search/__init__.py +11 -0
- meutils/apis/search/baichuan.py +11 -0
- meutils/apis/search/metaso.py +218 -0
- meutils/apis/search/metaso_.py +77 -0
- meutils/apis/search/n.py +99 -0
- meutils/apis/search/searxng.py +42 -0
- meutils/apis/search_music.py +39 -0
- meutils/apis/siliconflow/__init__.py +9 -0
- meutils/apis/siliconflow/audio.py +63 -0
- meutils/apis/siliconflow/image_to_image.py +116 -0
- meutils/apis/siliconflow/images.py +154 -0
- meutils/apis/siliconflow/rerankers.py +40 -0
- meutils/apis/siliconflow/text_to_image.py +132 -0
- meutils/apis/siliconflow/utils.py +66 -0
- meutils/apis/siliconflow/videos.py +102 -0
- meutils/apis/sunoai/__init__.py +10 -0
- meutils/apis/sunoai/haimian.py +135 -0
- meutils/apis/sunoai/suno.py +373 -0
- meutils/apis/textcard/__init__.py +11 -0
- meutils/apis/textcard/demo.py +25 -0
- meutils/apis/textcard/hanyuxinjie.py +81 -0
- meutils/apis/textin.py +159 -0
- meutils/apis/to_image/__init__.py +11 -0
- meutils/apis/to_image/html2image.py +29 -0
- meutils/apis/to_image/md.py +29 -0
- meutils/apis/to_image/url2image.py +41 -0
- meutils/apis/together/__init__.py +11 -0
- meutils/apis/together/images.py +80 -0
- meutils/apis/translator/__init__.py +9 -0
- meutils/apis/translator/deeplx.py +55 -0
- meutils/apis/tripo3d/__init__.py +11 -0
- meutils/apis/tripo3d/images.py +106 -0
- meutils/apis/ts.py +60 -0
- meutils/apis/uptime_kuma/__init__.py +11 -0
- meutils/apis/uptime_kuma/common.py +56 -0
- meutils/apis/uptime_kuma//345/233/275/344/272/247/345/210/206/347/273/204.py +68 -0
- meutils/apis/utils.py +47 -0
- meutils/apis/videos/__init__.py +11 -0
- meutils/apis/videos/sora.py +16 -0
- meutils/apis/vidu/__init__.py +9 -0
- meutils/apis/vidu/vidu_video.py +254 -0
- meutils/apis/vidu/x.py +14 -0
- meutils/apis/voice_clone/__init__.py +10 -0
- meutils/apis/voice_clone/fish.py +236 -0
- meutils/apis/voice_clone/fish_api.py +16 -0
- meutils/apis/web_search.py +31 -0
- meutils/apis/yezi.py +97 -0
- meutils/async_task/__init__.py +13 -0
- meutils/async_task/celery_config.py +106 -0
- meutils/async_task/common.py +37 -0
- meutils/async_task/demo_create_tasks.py +73 -0
- meutils/async_task/tasks/__init__.py +11 -0
- meutils/async_task/tasks/_all.py +20 -0
- meutils/async_task/tasks/hailuo.py +24 -0
- meutils/async_task/tasks/kling.py +30 -0
- meutils/async_task/tasks/replicateai.py +24 -0
- meutils/async_task/tasks/test.py +124 -0
- meutils/async_task/tasks/vidu.py +28 -0
- meutils/async_task/utils.py +191 -0
- meutils/async_task//351/200/232/347/224/250/350/256/276/350/256/241.py +119 -0
- meutils/async_utils/asyncer_.py +37 -0
- meutils/async_utils/background.py +68 -0
- meutils/async_utils/common.py +136 -16
- meutils/async_utils/test.py +47 -0
- meutils/cache_utils.py +29 -23
- meutils/caches/__init__.py +9 -0
- meutils/caches/acache.py +45 -0
- meutils/caches/redis_cache.py +63 -0
- meutils/clis/check_api.py +66 -0
- meutils/clis/cli.py +1 -1
- meutils/common.py +56 -17
- meutils/config_utils/__init__.py +11 -0
- meutils/config_utils/lark_utils/__init__.py +11 -0
- meutils/config_utils/lark_utils/common.py +385 -0
- meutils/config_utils/lark_utils/demo.py +13 -0
- meutils/config_utils/lark_utils/x.py +50 -0
- meutils/config_utils/manager.py +108 -0
- meutils/crawlers/__init__.py +11 -0
- meutils/data/VERSION +1 -1
- meutils/data/cowboy-hat-face.webp +0 -0
- meutils/data/oneapi/FOOTER.md +7 -0
- meutils/data/oneapi/NOTICE.md +138 -0
- meutils/data/oneapi/__init__.py +15 -0
- meutils/db/orm.py +179 -0
- meutils/db/redis_db.py +87 -0
- meutils/decorators/cache.py +1 -1
- meutils/decorators/common.py +84 -5
- meutils/decorators/contextmanagers.py +17 -6
- meutils/decorators/fastapi_decorator.py +77 -3
- meutils/decorators/polling.py +46 -0
- meutils/decorators/retry.py +150 -26
- meutils/fastapi_utils/__init__.py +11 -0
- meutils/fastapi_utils/exceptions/http_error.py +72 -0
- meutils/fastapi_utils/exceptions/validation_error.py +44 -0
- meutils/hash_utils.py +9 -4
- meutils/hooks/__init__.py +11 -0
- meutils/hooks/hook_test.py +174 -0
- meutils/hooks/wechat.py +162 -0
- meutils/hooks/wechat_channel.py +303 -0
- meutils/init/evn.py +1 -1
- meutils/io/files_utils.py +232 -0
- meutils/io/image.py +148 -10
- meutils/io/x.py +75 -0
- meutils/llm/__init__.py +10 -0
- meutils/llm/check_api.py +109 -0
- meutils/llm/check_utils.py +106 -0
- meutils/llm/clients.py +38 -0
- meutils/llm/completions/__init__.py +11 -0
- meutils/llm/completions/agents/__init__.py +11 -0
- meutils/llm/completions/agents/file.py +125 -0
- meutils/llm/completions/cp.py +112 -0
- meutils/llm/completions/delilegal.py +135 -0
- meutils/llm/completions/dify.py +81 -0
- meutils/llm/completions/kimi.py +47 -0
- meutils/llm/completions/modelscope.py +11 -0
- meutils/{fileparser/filetype.py → llm/completions/oi.py} +5 -3
- meutils/llm/completions/rag/__init__.py +11 -0
- meutils/llm/completions/rag/fire.py +157 -0
- meutils/llm/completions/rag/qwen.py +11 -0
- meutils/llm/completions/rag/rag.py +41 -0
- meutils/llm/completions/rag.py +38 -0
- meutils/llm/completions/tryblend.py +201 -0
- meutils/llm/completions/tune.py +284 -0
- meutils/llm/completions/x.py +26 -0
- meutils/llm/completions/xx.py +61 -0
- meutils/llm/completions/yuanbao.py +176 -0
- meutils/llm/demo.py +114 -0
- meutils/llm/functions/__init__.py +11 -0
- meutils/llm/mappers.py +15 -0
- meutils/llm/openai_utils/__init__.py +11 -0
- meutils/llm/openai_utils/common.py +284 -0
- meutils/llm/openai_utils/tool_outputs.py +45 -0
- meutils/llm/output_parsers/__init__.py +80 -0
- meutils/llm/prompts/__init__.py +244 -0
- meutils/llm/prompts/demo.py +155 -0
- meutils/llm/prompts/html2image_test.py +19 -0
- meutils/llm/utils.py +133 -0
- meutils/llm/x.py +75 -0
- meutils/notice/feishu.py +40 -9
- meutils/notice/wechat.py +23 -21
- meutils/np_utils.py +10 -1
- meutils/office_automation/pdf.py +6 -1
- meutils/oss/__init__.py +20 -0
- meutils/oss/minio_oss.py +184 -0
- meutils/oss/minio_utils.py +48 -0
- meutils/other/__demo.py +6 -4
- meutils/pandas_utils/__init__.py +1 -0
- meutils/pandas_utils/common.py +31 -0
- meutils/pandas_utils/pd_utils.py +10 -6
- meutils/parsers/__init__.py +10 -0
- meutils/parsers/file_parsers.py +110 -0
- meutils/parsers/fileparser/demo.py +41 -0
- meutils/parsers/fileparser/filetype.py +41 -0
- meutils/pay.py +37 -0
- meutils/pipe.py +37 -4
- meutils/playwright_utils/common.py +20 -12
- meutils/plots/common.py +35 -34
- meutils/queues/demo.py +56 -0
- meutils/queues/smooth_queue.py +120 -0
- meutils/queues/uniform_queue.py +3 -1
- meutils/request_utils/__init__.py +26 -2
- meutils/request_utils/ark.py +47 -0
- meutils/request_utils/crawler.py +34 -5
- meutils/request_utils/jwt_utils/__init__.py +11 -0
- meutils/request_utils/jwt_utils/common.py +42 -0
- meutils/request_utils/volc.py +160 -0
- meutils/schemas/__init__.py +0 -1
- meutils/schemas/baidu_types.py +70 -0
- meutils/schemas/batch_types.py +450 -0
- meutils/schemas/celery_types.py +64 -0
- meutils/schemas/chatfire_types.py +15 -0
- meutils/schemas/chatglm_types.py +197 -0
- meutils/schemas/db/__init__.py +11 -0
- meutils/schemas/db/oneapi_types.py +117 -0
- meutils/schemas/dify_types.py +40 -0
- meutils/schemas/embedding.py +31 -0
- meutils/schemas/fal_types.py +13 -0
- meutils/schemas/fish_types.py +11 -0
- meutils/schemas/hailuo_types.py +208 -0
- meutils/schemas/haimian_types.py +51 -0
- meutils/schemas/idphoto_types.py +43 -0
- meutils/schemas/image_types.py +476 -0
- meutils/schemas/jimeng_types.py +28 -0
- meutils/schemas/jina_types.py +67 -0
- meutils/schemas/kimi_types.py +86 -0
- meutils/schemas/kling_types.py +235 -0
- meutils/schemas/kuaishou_types.py +328 -0
- meutils/schemas/luma_types.py +59 -0
- meutils/schemas/message_types.py +165 -0
- meutils/schemas/message_types_.py +219 -0
- meutils/schemas/metaso_types.py +64 -0
- meutils/schemas/napkin_types.py +85 -0
- meutils/schemas/ocr_types.py +37 -0
- meutils/schemas/oneapi/__init__.py +11 -0
- meutils/schemas/oneapi/_types.py +49 -0
- meutils/schemas/oneapi/common.py +883 -0
- meutils/schemas/oneapi/icons.py +30 -0
- meutils/schemas/oneapi/model_group_info.py +48 -0
- meutils/schemas/oneapi/model_info.py +34 -0
- meutils/schemas/oneapi/models.py +26 -0
- meutils/schemas/oneapi/x.py +26 -0
- meutils/schemas/oneapi//351/207/215/345/256/232/345/220/221.py +132 -0
- meutils/schemas/openai_api_protocol.py +411 -0
- meutils/schemas/openai_types.py +366 -0
- meutils/schemas/pixverse_types.py +88 -0
- meutils/schemas/playwright_types.py +57 -0
- meutils/schemas/prodia_types.py +19 -0
- meutils/schemas/replicate_types.py +112 -0
- meutils/schemas/request_types.py +20 -0
- meutils/schemas/runwayml_types.py +190 -0
- meutils/schemas/siliconflow_types.py +80 -0
- meutils/schemas/step_types.py +19 -0
- meutils/schemas/suno_types.py +319 -0
- meutils/schemas/task_types.py +192 -0
- meutils/schemas/translator_types.py +29 -0
- meutils/schemas/tripo3d_types.py +57 -0
- meutils/schemas/tryblend_types.py +51 -0
- meutils/schemas/video_types.py +62 -0
- meutils/schemas/vidu_types.py +350 -0
- meutils/schemas/wechat_types.py +316 -0
- meutils/schemas/yuanbao_types.py +260 -0
- meutils/serving/celery/__init__.py +8 -0
- meutils/serving/celery/config.py +115 -0
- meutils/serving/celery/router.py +4 -6
- meutils/serving/celery/tasks.py +6 -4
- meutils/serving/celery//351/200/232/347/224/250/350/256/276/350/256/241.py +119 -0
- meutils/serving/fastapi/common.py +27 -31
- meutils/serving/fastapi/dependencies/__init__.py +0 -1
- meutils/serving/fastapi/dependencies/auth.py +55 -2
- meutils/serving/fastapi/exceptions/http_error.py +67 -2
- meutils/serving/fastapi/exceptions/validation_error.py +18 -2
- meutils/serving/fastapi/lifespans.py +73 -0
- meutils/serving/fastapi/routers/scheduler.py +12 -0
- meutils/serving/fastapi/routers/screenshot.py +47 -0
- meutils/serving/fastapi/routers/spider.py +8 -3
- meutils/serving/fastapi/routers/task.py +48 -0
- meutils/serving/fastapi/utils.py +48 -1
- meutils/serving/streamlit/common.py +1 -1
- meutils/smooth_utils.py +3 -0
- meutils/str_utils/__init__.py +22 -3
- meutils/str_utils/json_utils.py +7 -0
- meutils/str_utils/regular_expression.py +102 -10
- meutils/templates/xx.html +21 -0
- meutils/templates/xxx.html +117 -0
- meutils/todo.py +12 -0
- meutils/tools/token_monitor.py +33 -0
- MeUtils-2024.3.4.13.4.45.dist-info/RECORD +0 -540
- meutils/ai_audio/asr/subtitle.srt +0 -40
- meutils/ai_audio/demo.ipynb +0 -1215
- meutils/ai_audio/example.srt +0 -348
- meutils/ai_audio/new.srt +0 -179
- meutils/ai_audio/subtitles.srt +0 -696
- meutils/ai_audio/tts/new.srt +0 -179
- meutils/ai_audio//350/247/206/351/242/221/345/220/210/345/271/266.sh +0 -32
- meutils/ai_cv/1.jpg +0 -0
- meutils/ai_cv/197.jpg +0 -0
- meutils/ai_cv/2.jpg +0 -0
- meutils/ai_cv/img.png +0 -0
- meutils/ai_cv/invoice.jpg +0 -0
- meutils/ai_cv/tbl.png +0 -0
- meutils/ai_cv/test.png +0 -0
- meutils/ann/README.md +0 -33
- meutils/ann/README_gensim.md +0 -47
- meutils/ann/examples/client.py +0 -59
- meutils/ann/examples/demo.py +0 -24
- meutils/api/deeplx.py +0 -29
- meutils/api/qr.png +0 -0
- meutils/clis/README.md +0 -29
- meutils/clis/__test.sh +0 -17
- meutils/clis/deepseek.txt +0 -8
- meutils/clis/deepseek_13003330042.json +0 -1
- meutils/clis/deepseek_13003872192.json +0 -1
- meutils/clis/deepseek_13852263862.json +0 -1
- meutils/clis/deepseek_13913898681.json +0 -1
- meutils/clis/deepseek_13962978617.json +0 -1
- meutils/clis/deepseek_15251801790.json +0 -1
- meutils/clis/deepseek_15720826383.json +0 -1
- meutils/clis/deepseek_18395563611.json +0 -1
- meutils/clis/deepseek_313303303@qq.com.json +0 -1
- meutils/clis/kimi_state.json +0 -1
- meutils/cmds/README.md +0 -55
- meutils/coding/__init__.py +0 -11
- meutils/coding/find132.py +0 -40
- meutils/db/README.md +0 -51
- meutils/decorators/README.md +0 -17
- meutils/docarray_utils/demo_es.py +0 -34
- meutils/docarray_utils/demo_hnsw.py +0 -55
- meutils/docarray_utils/in_memory.py +0 -38
- meutils/docarray_utils//346/224/271/351/200/240/344/270/213hnsw.py +0 -43
- meutils/io/file.py +0 -20
- meutils/io/img.png +0 -0
- meutils/io/x.yml +0 -1
- meutils/notice/img.png +0 -0
- meutils/notice/todo.md +0 -10
- meutils/office_automation//346/212/225/350/265/204/347/256/241/347/220/206/347/263/273/347/273/237O3.2_/344/272/244/346/230/223/347/273/204.pdm +0 -22469
- meutils/playwright_utils/__test.sh +0 -2
- meutils/playwright_utils/kimi1_cookies.json +0 -1
- meutils/playwright_utils/kimi2_cookies.json +0 -1
- meutils/playwright_utils/kimi_cookies.json +0 -93
- meutils/serving/README.md +0 -1
- meutils/serving/celery/_run.sh +0 -10
- meutils/serving/gui/run.sh +0 -9
- meutils/serving/jina/__demo/client.py +0 -42
- meutils/serving/jina/__demo/flow.svg +0 -1
- meutils/serving/jina/__demo/s.py +0 -34
- meutils/serving/jina/__demo/s2.py +0 -37
- meutils/serving/jina/__demo/server.py +0 -83
- meutils/serving/jina/__demo/test.py +0 -40
- meutils/serving/jina/executors/SentenceEncoder.py +0 -62
- meutils/serving/jina/executors/SentenceEncoder_.py +0 -63
- meutils/serving/jina/executors/__init__.py +0 -46
- meutils/serving/jina/executors/base.py +0 -40
- meutils/serving/jina/nlp_serving/__init__.py +0 -11
- meutils/serving/jina/nlp_serving/word_segmentation.py +0 -40
- meutils/serving/streamlit/conf.yaml +0 -5
- meutils/serving/streamlit/ocr.png +0 -0
- meutils/serving/streamlit/run.sh +0 -17
- meutils/serving/webui/.streamlit/_config.toml +0 -186
- meutils/serving/webui/.streamlit/config.toml +0 -26
- meutils/serving/webui/pages/_1_/345/210/206/350/257/215.py +0 -56
- meutils/serving/webui/pages/_2_/350/257/215/346/200/247/346/240/207/346/263/250/344/270/216/345/256/236/344/275/223/350/257/206/345/210/253.py +0 -54
- meutils/serving/webui/pages/_3_/346/226/207/346/234/254/345/214/271/351/205/215.py +0 -64
- meutils/serving/webui/run.sh +0 -9
- meutils/spark/__init__.py +0 -26
- meutils/tools/monitor.yml +0 -29
- {MeUtils-2024.3.4.13.4.45.dist-info → MeUtils-2025.1.16.17.15.52.dist-info}/LICENSE +0 -0
- {MeUtils-2024.3.4.13.4.45.dist-info → MeUtils-2025.1.16.17.15.52.dist-info}/top_level.txt +0 -0
- {meutils → examples}/comp_utils/__init__.py +0 -0
- {meutils → examples}/comp_utils/reverse_metric.py +0 -0
- /meutils/{fileparser/README.md → fastapi_utils/exceptions/__init__.py} +0 -0
- /meutils/{fileparser → parsers/fileparser}/PDF/346/212/275/345/217/226.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/__init__.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/common.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/__init__.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/__main__.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/filetype.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/helpers.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/match.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/__init__.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/application.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/archive.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/audio.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/base.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/document.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/font.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/image.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/isobmff.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/types/video.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/filetype/utils.py +0 -0
- /meutils/{fileparser → parsers/fileparser}/pdf.py +0 -0
- /meutils/{fileparser → parsers/fileparser}//350/241/250/346/240/274/346/212/275/345/217/226.py +0 -0
meutils/schemas/task_types.py
@@ -0,0 +1,192 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : AI. @by PyCharm
# @File : task_types
# @Time : 2024/5/31 15:47
# @Author : betterme
# @WeChat : meutils
# @Software : PyCharm
# @Description :
from enum import Enum

from meutils.pipe import *

# "NOT_START", "SUBMITTED", "QUEUED", "IN_PROGRESS", "FAILURE", "SUCCESS", "UNKNOWN"

STATUSES = {
    "not_start": "NOT_START",

    "submitted": "SUBMITTED",

    "starting": "QUEUED",
    "queued": "QUEUED",
    "STARTED": "QUEUED",
    "started": "QUEUED",
    "pending": "QUEUED",
    "PENDING": "QUEUED",

    "processing": "IN_PROGRESS",
    "in_progress": "IN_PROGRESS",
    "received": "IN_PROGRESS",
    "inprogress": "IN_PROGRESS",

    "succeed": "SUCCESS",
    "success": "SUCCESS",
    "succeeded": "SUCCESS",

    "failed": "FAILURE",
    "canceled": "FAILURE",
    "FAILURE": "FAILURE",
    "failure": "FAILURE",

    "unknown": "UNKNOWN",
}


class TaskResponse(BaseModel):
    """Generic response body for async tasks"""
    task_id: Optional[str] = None

    code: Optional[int] = 0
    message: Optional[str] = None
    status: Optional[str] = "submitted"
    data: Optional[Any] = None

    # System watermark: can be used to store a token
    system_fingerprint: Optional[str] = None

    model: Optional[str] = None

    # created_at: int = Field(default_factory=lambda: int(time.time()))
    created_at: Union[str, int] = Field(default_factory=lambda: datetime.datetime.today().isoformat())

    def __init__(self, /, **data: Any):
        super().__init__(**data)
        self.status = STATUSES.get((self.status or '').lower(), "UNKNOWN")

    class Config:
        # Allow extra fields for flexibility
        extra = 'allow'


class TaskType(str, Enum):
    # Storage
    oss = "oss"

    # Baidu assistant
    pcedit = "pcedit"

    # Images / audio / video
    kling = "kling"
    kling_vip = "kling@vip"
    # api
    kling_image = "kling-image"
    kling_video = "kling-video"

    vidu = "vidu"
    vidu_vip = "vidu@vip"

    suno = "suno"
    haimian = "haimian"
    lyrics = "lyrics"

    runwayml = "runwayml"
    fish = 'fish'
    cogvideox = "cogvideox"
    cogvideox_vip = "cogvideox@vip"

    faceswap = "faceswap"

    # Document intelligence
    file_extract = "file-extract"
    moonshot_fileparser = "moonshot-fileparser"
    textin_fileparser = "textin-fileparser"

    watermark_remove = "watermark-remove"

    # Voice cloning / TTS
    tts = "tts"
    voice_clone = "voice-clone"

    # OCR
    ocr_pro = "ocr-pro"

    # todo
    assistants = "assistants"
    fine_tune = "fine-tune"


Purpose = TaskType


class Task(BaseModel):
    id: Optional[Union[str, int]] = Field(default_factory=lambda: shortuuid.random())
    status: Optional[Union[str, int]] = "success"  # pending, running, success, failed

    status_code: Optional[int] = None

    data: Optional[Any] = None
    metadata: Optional[Any] = None
    # metadata: Optional[Dict[str, str]] = None

    system_fingerprint: Optional[str] = None  # encrypted api-key / token / cookie

    created_at: int = Field(default_factory=lambda: int(time.time()))
    description: Optional[str] = None


class FileTask(BaseModel):
    id: Union[str, int] = Field(default_factory=lambda: shortuuid.random())
    status: Optional[str] = None  # pending, running, success, failed
    status_code: Optional[int] = None

    data: Optional[Any] = None
    metadata: Optional[Any] = None

    system_fingerprint: Optional[str] = None  # encrypted api-key / token / cookie

    created_at: int = Field(default_factory=lambda: int(time.time()))

    url: Optional[str] = None


if __name__ == '__main__':
    # print(TaskType("kling").name)
    # print(TaskType("kling") == 'kling')

    # print(Task(id=1, status='failed', system_fingerprint='xxx').model_dump(exclude={"system_fingerprint"}))

    # print("kling" == TaskType.kling)
    # print("kling" == Purpose.kling)

    # print(Purpose('kling').value)
    # print(Purpose.vidu.startswith('vidu'))

    # print('vidu' in Purpose.vidu)

    # print('kling_vip' in {TaskType.kling, TaskType.kling_vip})

    # print('kling_vip'.startswith(TaskType.kling))

    # print(Purpose.__members__)
    # print(list(Purpose))
    # print(Purpose.oss in Purpose.__members__)

    # , ** {"a": 1, "system_fingerprint": 1}
    response = TaskResponse(system_fingerprint="121")

    # print(response.model_dump())
    # response.__dict__.update({"a": 1, "system_fingerprint": 1})
    # print(response.model_dump())

    response.user_id = 1

    print(response)
meutils/schemas/translator_types.py
@@ -0,0 +1,29 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : AI. @by PyCharm
# @File : translator_types
# @Time : 2024/7/18 14:14
# @Author : betterme
# @WeChat : meutils
# @Software : PyCharm
# @Description :

from meutils.pipe import *


class DeeplxRequest(BaseModel):
    text: str

    source_lang: str = "auto"
    target_lang: str = "ZH"

    class Config:
        frozen = True

        json_schema_extra = {
            "example": {
                "text": "火哥AI是最棒的",
                "source_lang": "auto",
                "target_lang": "EN"
            }
        }
meutils/schemas/tripo3d_types.py
@@ -0,0 +1,57 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : AI. @by PyCharm
# @File : tripo3d_types
# @Time : 2024/10/28 15:19
# @Author : betterme
# @WeChat : meutils
# @Software : PyCharm
# @Description :

from meutils.pipe import *

BASE_URL = "https://api.tripo3d.ai"
FEISHU_URL = "https://xchatllm.feishu.cn/sheets/GYCHsvI4qhnDPNtI4VPcdw2knEd?sheet=KU4zY6"


class ImageRequest(BaseModel):
    model_version: Optional[str] = "v2.0-20240919"

    prompt: str

    render_sequence: Optional[bool] = True
    client_id: Optional[str] = "web"

    isPrivate: bool = False
    type: str = "text_to_model"
    name: str = ""

    def __init__(self, /, **data: Any):
        super().__init__(**data)
        self.name = self.name or self.prompt


class TaskResponse(BaseModel):
    """
    {
        "code": 0,
        "data": {
            "task_ids": [
                "e327f716-6300-44ac-baf1-bdd57dd774a9",
                "3320b387-e049-4645-bce3-64636b454a3e",
                "c95fb37a-b766-4e3e-8b97-badf145d7a51",
                "41df5dd5-19c2-4df4-9f4f-71d0cf63d83e"
            ]
        }
    }
    """
    code: Optional[int] = None
    data: Optional[dict] = None
    task_ids: Optional[list] = None
    system_fingerprint: Optional[str] = None

    def __init__(self, /, **data: Any):
        super().__init__(**data)

        if self.data:
            self.task_ids = self.data.get('task_ids', [])
meutils/schemas/tryblend_types.py
@@ -0,0 +1,51 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : AI. @by PyCharm
# @File : tryblend_types
# @Time : 2024/9/4 08:54
# @Author : betterme
# @WeChat : meutils
# @Software : PyCharm
# @Description :

BASE_URL = "https://www.tryblend.ai/"
FEISHU_URL = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=hxVlQw"
FEISHU_URL_VIP = "https://xchatllm.feishu.cn/sheets/Bmjtst2f6hfMqFttbhLcdfRJnNf?sheet=LYcY2c"

GPT_4O = [{"requestId":"4aee81b2-2379-4bd2-ae7a-99c9c6867c86","selectedModel":{"id":"openai:gpt-4o","active":True,"name":"GPT 4o","provider":"openai","maker":"openai","makerHumanName":"OpenAI","fallbackIcon":None,"providerModelId":"gpt-4o","info":{"description":"GPT-4o is OpenAI's most advanced multimodal model that’s faster and cheaper than GPT-4 Turbo with stronger vision capabilities.","fundationModel":"GPT-4","fundationModelMaker":"OpenAI","releaseDate":"2024","knowledgeCutoff":"October, 2023","context":"128000","predictionTimeSecond":None,"usableAsAssistant":True,"pricing":{"inputImagePrice":0.003,"inputPrice":5,"outputPrice":15,"inputPriceUnit":"1m tokens","outputPriceUnit":"1m tokens"},"inputType":["text","image"],"outputType":["text"],"type":["text","vision"],"keywords":["trending","vision"],"website":"https://openai.com/research/gpt-4","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".jpg",".jpeg",".webp",".png",".gif"],"fileExtensionName":"image","maxFileSize":20,"minFileCount":0,"maxFileCount":20},{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"bb06bb82-708e-4c98-b9db-3771d654195e","userMessageId":"4b1a58ac-55f9-4655-ab3e-1d7cff788b70","assistantMessageId":"4c7eb0ce-69b3-42d9-bf7c-3692f3c04e47"}]
GPT_4O_MINI = [{"requestId":"9146047d-387a-4894-b64a-879401a0bed0","selectedModel":{"id":"openai:gpt-4o-mini","active":True,"name":"GPT 4o Mini","provider":"openai","maker":"openai","makerHumanName":"OpenAI","fallbackIcon":None,"providerModelId":"gpt-4o-mini","info":{"description":"GPT-4o mini is OpenAI's most cost-efficient small model that’s smarter and cheaper than GPT-3.5 Turbo, and has vision capabilities.","fundationModel":"GPT-4","fundationModelMaker":"OpenAI","releaseDate":"2024","knowledgeCutoff":"October, 2023","context":"128000","predictionTimeSecond":None,"usableAsAssistant":True,"pricing":{"inputImagePrice":0.003,"inputPrice":0.15,"outputPrice":0.6,"inputPriceUnit":"1m tokens","outputPriceUnit":"1m tokens"},"inputType":["text","image"],"outputType":["text"],"type":["text","vision"],"keywords":["free","vision"],"website":"https://openai.com/research/gpt-4","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".jpg",".jpeg",".webp",".png",".gif"],"fileExtensionName":"image","maxFileSize":5,"minFileCount":0,"maxFileCount":20},{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"7f27c6b6-1e98-4bb4-9e90-6d68d759632e","userMessageId":"e63eadcf-6715-40a5-805f-ce9442c4518c","assistantMessageId":"ab0971c4-03fa-4c96-9932-b4019d1ded46"}]
CLAUDE_3_HAIKU = [{"requestId":"86ce06bd-f76c-4cc4-91af-062907091af9","selectedModel":{"id":"openrouter:anthropic/claude-3-haiku","active":True,"name":"Claude 3 Haiku","provider":"openrouter","maker":"anthropic","makerHumanName":"Anthropic","fallbackIcon":None,"providerModelId":"anthropic/claude-3-haiku","info":{"description":"Claude 3 Haiku is Anthropic's fastest and most compact model for near-instant responsiveness. Quick and accurate targeted performance.","fundationModel":None,"fundationModelMaker":None,"releaseDate":"March 4, 2024","knowledgeCutoff":None,"context":"200000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":0.0004,"inputPrice":0.25,"outputPrice":1.25,"inputPriceUnit":"1m tokens","outputPriceUnit":"1m tokens"},"inputType":["text","image"],"outputType":["text"],"type":["text","vision"],"keywords":["free","vision"],"website":"https://www.anthropic.com/news/claude-3-family","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".jpg",".jpeg",".webp",".png",".gif"],"fileExtensionName":"image","maxFileSize":5,"minFileCount":0,"maxFileCount":20},{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]},{"role":"assistant","content":[{"type":"text","text":"Hello! It looks like you entered \"1.\" How can I assist you today? If you have a question or need information, feel free to let me know!"}]},{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"bb06bb82-708e-4c98-b9db-3771d654195e","userMessageId":"313e734a-fd15-404a-8132-92548aa783b5","assistantMessageId":"5610ede0-3da9-4c0e-bfab-c83fb282312f"}]
CLAUDE_3_SONNET = [{"requestId":"00e9b684-29c1-42b4-b2c5-2d7eb6c0be25","selectedModel":{"id":"openrouter:anthropic/claude-3-sonnet","active":True,"name":"Claude 3 Sonnet","provider":"openrouter","maker":"anthropic","makerHumanName":"Anthropic","fallbackIcon":None,"providerModelId":"anthropic/claude-3-sonnet","info":{"description":"Claude 3 Sonnet is an ideal balance of intelligence and speed for enterprise workloads. Maximum utility at a lower price, dependable, balanced for scaled deployments.","fundationModel":None,"fundationModelMaker":None,"releaseDate":"March 4, 2024","knowledgeCutoff":None,"context":"200000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":0.0048,"inputPrice":3,"outputPrice":15,"inputPriceUnit":"1m tokens","outputPriceUnit":"1m tokens"},"inputType":["text","image"],"outputType":["text"],"type":["text","vision"],"keywords":["vision"],"website":"https://www.anthropic.com/news/claude-3-family","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".jpg",".jpeg",".webp",".png",".gif"],"fileExtensionName":"image","maxFileSize":5,"minFileCount":0,"maxFileCount":20},{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"ea188159-85ce-49da-8bf3-c8364f497e1a","userMessageId":"ab21e0dd-993d-44f0-b7be-a80baf2460f1","assistantMessageId":"cf5bbf51-1c8d-4465-a35f-0cf5ced9e7ef"}]
CLAUDE_3_OPUS = [{"requestId":"ee6e9a61-4546-499a-aa00-03115a088206","selectedModel":{"id":"openrouter:anthropic/claude-3-opus","active":True,"name":"Claude 3 Opus","provider":"openrouter","maker":"anthropic","makerHumanName":"Anthropic","fallbackIcon":None,"providerModelId":"anthropic/claude-3-opus","info":{"description":"Claude 3 Opus is Anthropic's most powerful model for highly complex tasks. It boasts top-level performance, intelligence, fluency, and understanding. It beats GPT-4 on most benchmarks","fundationModel":None,"fundationModelMaker":None,"releaseDate":"March 4, 2024","knowledgeCutoff":None,"context":"200000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":0.024,"inputPrice":15,"outputPrice":75,"inputPriceUnit":"1m tokens","outputPriceUnit":"1m tokens"},"inputType":["text","image"],"outputType":["text"],"type":["text","vision"],"keywords":["vision"],"website":"https://www.anthropic.com/news/claude-3-family","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".jpg",".jpeg",".webp",".png",".gif"],"fileExtensionName":"image","maxFileSize":5,"minFileCount":0,"maxFileCount":20},{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"f112a636-bda4-47e1-9e35-8fea5a12cbf3","userMessageId":"1a80572b-c7cb-4407-a301-ba8471cdc23c","assistantMessageId":"d2e275af-4648-4d60-a292-38766f5a0425"}]
CLAUDE_35_SONNET = [{"requestId":"e1da0fc4-ec34-4784-a336-0dce0f7152d1","selectedModel":{"id":"openrouter:anthropic/claude-3.5-sonnet","active":True,"name":"Claude 3.5 Sonnet","provider":"openrouter","maker":"anthropic","makerHumanName":"Anthropic","fallbackIcon":None,"providerModelId":"anthropic/claude-3.5-sonnet","info":{"description":"Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at: Coding: Autonomously writes, edits, and runs code with reasoning and troubleshooting Data science: Augments human data science expertise; navigates unstructured data while using multiple tools for insights Visual processing: excelling at interpreting charts, graphs, and images, accurately transcribing text to derive insights beyond just the text alone Agentic tasks: exceptional tool use, making it great at agentic tasks (i.e. complex, multi-step problem solving tasks that require engaging with other systems)","fundationModel":None,"fundationModelMaker":None,"releaseDate":"2024","knowledgeCutoff":"2024","context":"200000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":0.0048,"inputPrice":3,"outputPrice":15,"inputPriceUnit":"1m tokens","outputPriceUnit":"1m tokens"},"inputType":["text","image"],"outputType":["text"],"type":["text","vision"],"keywords":["trending","vision"],"website":"https://www.anthropic.com/news/claude-3-family","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".jpg",".jpeg",".webp",".png",".gif"],"fileExtensionName":"image","maxFileSize":5,"minFileCount":0,"maxFileCount":20},{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"ea24b58e-6132-45ab-a46e-b8ff7bd235e2","userMessageId":"74f40c8a-bc94-4fb4-96cc-4b118309cf61","assistantMessageId":"79f48170-71ad-45bd-b895-a1dc4a17ffa3"}]
PERPLEXITY_SONAR_SMALL = [{"requestId":"a3338192-e46f-4ae3-83b2-8e428935c8ea","selectedModel":{"id":"perplexity/llama-3.1-sonar-small-128k-online","active":True,"name":"Perplexity Sonar Small","provider":"perplexity","maker":"perplexity","makerHumanName":"Perplexity","fallbackIcon":None,"providerModelId":"llama-3.1-sonar-small-128k-online","info":{"description":"The smaller, internet-connected chat model by Perplexity Labs, based on Llama 3.1. The online models are focused on delivering helpful, up-to-date, and factual responses.","fundationModel":"LLama 3 8B","fundationModelMaker":"Meta","releaseDate":"August 2024","knowledgeCutoff":"Today","context":"128000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":None,"inputPrice":0.2,"outputPrice":0.2,"inputPriceUnit":"request","outputPriceUnit":"1m tokens"},"inputType":["text"],"outputType":["text"],"type":["text"],"keywords":["free","online search"],"website":"https://blog.perplexity.ai/blog/introducing-pplx-online-llms","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]},{"role":"assistant","content":[{"type":"text","text":"It appears you might have entered a number without providing context. Here are a few possible interpretations:\n\n1. **Numerical Value**: If you simply entered \"1\" as a number, it could represent various things depending on the context, such as a quantity, a ranking, or a specific item in a list.\n2. **Question or Request**: If you are asking for information or assistance related to the number \"1,\" please provide more details so I can help you better.\n3. **List or Sequence**: If you are starting a list or sequence, \"1\" would be the first item.\n\nTo provide a more accurate response, could you please clarify what you are looking for?"}]},{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"430642e3-05e7-42cf-ac09-860202456dd5","userMessageId":"89a39d94-2d7b-4c89-b588-721d93a18bb6","assistantMessageId":"085f5380-5054-4260-be0f-2333486156af"}]
PERPLEXITY_SONAR_LARGE = [{"requestId":"ff8e920c-6ec3-4d26-8e47-9c0357bd38d4","selectedModel":{"id":"perplexity/llama-3.1-sonar-large-128k-online","active":True,"name":"Perplexity Sonar Large","provider":"perplexity","maker":"perplexity","makerHumanName":"Perplexity","fallbackIcon":None,"providerModelId":"llama-3.1-sonar-large-128k-online","info":{"description":"The larger, internet-connected chat model by Perplexity Labs, based on Llama 3.1. The online models are focused on delivering helpful, up-to-date, and factual responses.","fundationModel":"LLama 3 70B","fundationModelMaker":"Meta","releaseDate":"August 2024","knowledgeCutoff":"Today","context":"128000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":None,"inputPrice":1,"outputPrice":1,"inputPriceUnit":"request","outputPriceUnit":"1m tokens"},"inputType":["text"],"outputType":["text"],"type":["text"],"keywords":["online search"],"website":"https://blog.perplexity.ai/blog/introducing-pplx-online-llms","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"1"}]}],"sessionId":"430642e3-05e7-42cf-ac09-860202456dd5","userMessageId":"09d1272e-935d-4427-ac79-a206c1ed03bb","assistantMessageId":"65378f38-082c-4ab6-ab40-8a39c741e741"}]
PERPLEXITY_SONAR_HUGE = [{"requestId":"daef312d-818d-4e90-827e-7dd388df2464","selectedModel":{"id":"perplexity/llama-3.1-sonar-huge-128k-online","active":True,"name":"Perplexity Sonar Huge","provider":"perplexity","maker":"perplexity","makerHumanName":"Perplexity","fallbackIcon":None,"providerModelId":"llama-3.1-sonar-huge-128k-online","info":{"description":"The larger, internet-connected chat model by Perplexity Labs, based on Llama 3.1. The online models are focused on delivering helpful, up-to-date, and factual responses.","fundationModel":"LLama 3 405B","fundationModelMaker":"Meta","releaseDate":"August 2024","knowledgeCutoff":"Today","context":"128000","predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":None,"inputPrice":5,"outputPrice":5,"inputPriceUnit":"request","outputPriceUnit":"1m tokens"},"inputType":["text"],"outputType":["text"],"type":["text"],"keywords":["trending","online search"],"website":"https://blog.perplexity.ai/blog/introducing-pplx-online-llms","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"messages":[{"role":"user","content":[{"type":"text","text":"11"}]}],"sessionId":"b9cc2133-5da9-4e28-934d-21877246a830","userMessageId":"af132989-38ce-4375-9b1c-512c55d28fba","assistantMessageId":"617bbeb1-eb61-4e85-b4c9-c85f614ceca6"}]
# Image generation
FLUX_SCHNELL = {"userMessage":{"id":"81439612-aa86-43a3-a127-6fa2a89b7a37","parentId":"db331b85-ce87-4950-9574-baef444511e6","role":"user","content":"a dog","createdAt":"2024-09-06T06:24:55.571Z","modelId":"falai:fal-ai/flux/schnell","info":{"type":"userTextMessage","isSaved":True},"status":{"isLoading":False},"files":[],"parameters":{}},"selectedModel":{"id":"falai:fal-ai/flux/schnell","active":True,"name":"Flux.1 Schnell","provider":"falai","maker":"black forest labs","makerHumanName":"Black Forest Labs","fallbackIcon":None,"providerModelId":"fal-ai/flux/schnell","info":{"description":"The smallest, cheapest and fastest model of the suite of text-to-image models that define a new state-of-the-art in image detail, prompt adherence, style diversity, and scene complexity for text-to-image synthesis.","fundationModel":None,"fundationModelMaker":None,"releaseDate":"August 1, 2024","knowledgeCutoff":None,"context":None,"predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":0,"inputPrice":0,"outputPrice":0.003,"inputPriceUnit":"image","outputPriceUnit":"megapixel"},"inputType":["text"],"outputType":["image"],"type":["image"],"keywords":["free"],"website":"https://blackforestlabs.ai/announcing-black-forest-labs/?ref=blog.fal.ai","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"sessionId":"8f8500aa-0c8c-411c-b27f-0d9febd7c498","userMessageId":"81439612-aa86-43a3-a127-6fa2a89b7a37","assistantMessageId":"5a060fc3-f9d5-4844-baf3-ae5d8715d0b4","requestId":"61a762e2-4781-4c14-bcb0-7cd481e00820"}
FLUX_PRO = {"userMessage":{"id":"67d8d521-955e-4664-b07b-3c4ecf82c214","parentId":None,"role":"user","content":"11","createdAt":"2024-09-06T06:31:31.047Z","modelId":"falai:fal-ai/flux/pro","info":{"type":"userTextMessage","isSaved":True},"status":{"isLoading":False},"files":[],"parameters":{}},"selectedModel":{"id":"falai:fal-ai/flux/pro","active":True,"name":"Flux.1 Pro","provider":"falai","maker":"black forest labs","makerHumanName":"Black Forest Labs","fallbackIcon":None,"providerModelId":"fal-ai/flux-pro","info":{"description":"The largest model of the suite of text-to-image models that define a new state-of-the-art in image detail, prompt adherence, style diversity, and scene complexity for text-to-image synthesis.","fundationModel":None,"fundationModelMaker":None,"releaseDate":"August 1, 2024","knowledgeCutoff":None,"context":None,"predictionTimeSecond":None,"usableAsAssistant":False,"pricing":{"inputImagePrice":0,"inputPrice":0,"outputPrice":0.05,"inputPriceUnit":"image","outputPriceUnit":"megapixel"},"inputType":["text"],"outputType":["image"],"type":["image"],"keywords":["trending"],"website":"https://blackforestlabs.ai/announcing-black-forest-labs/?ref=blog.fal.ai","fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"sessionId":"c1121f53-e40c-4b9e-9321-d54bfe7af39e","userMessageId":"67d8d521-955e-4664-b07b-3c4ecf82c214","assistantMessageId":"02aee5af-474c-496b-b63d-79e86e540ef6","requestId":"7a10bc5c-90c4-494d-9834-17dbab6c3661"}
DALL_E_3 = {"userMessage":{"id":"1753c29d-8a4c-41c7-b389-3e290aab7937","parentId":"02aee5af-474c-496b-b63d-79e86e540ef6","role":"user","content":"1","createdAt":"2024-09-06T06:32:12.717Z","modelId":"openai:dall-e-3","info":{"type":"userTextMessage","isSaved":True},"status":{"isLoading":False},"files":[],"parameters":{}},"selectedModel":{"id":"openai:dall-e-3","active":True,"name":"DALL·E 3","provider":"openai","maker":"openai","makerHumanName":"OpenAI","fallbackIcon":None,"providerModelId":"dall-e-3","info":{"description":"no description","fundationModel":None,"fundationModelMaker":None,"releaseDate":"2023","knowledgeCutoff":None,"context":None,"predictionTimeSecond":25,"usableAsAssistant":False,"pricing":{"inputImagePrice":0,"inputPrice":0,"outputPrice":0.04,"inputPriceUnit":"image","outputPriceUnit":"image"},"inputType":["text"],"outputType":["image"],"type":["image"],"keywords":[],"website":None,"fallbackModel":None},"parameters":[{"name":"prompt","description":"The prompt to use for the request","parameterType":"directInput","defaultValue":""},{"name":"file","description":"Files to use as complimentary information for the prompt.","parameterType":"directInput","defaultValue":[],"fileOptions":[{"fileExtension":[".c",".cpp",".cs",".java",".py",".rb",".js",".ts",".tsx",".jsx",".php",".go",".swift",".kt",".scala",".rs",".lua",".pl",".sh",".bash",".ps1",".html",".css",".scss",".less",".json",".xml",".yaml",".yml",".md",".markdown",".txt",".rtf",".tex",".log",".xlsx",".pdf",".csv",".tsv",".ini",".cfg",".conf",".toml",".sql",".r",".m",".f",".f90",".vb",".bas",".ps",".asm"],"fileExtensionName":"text","maxFileSize":5,"minFileCount":0,"maxFileCount":10}]}]},"sessionId":"c1121f53-e40c-4b9e-9321-d54bfe7af39e","userMessageId":"1753c29d-8a4c-41c7-b389-3e290aab7937","assistantMessageId":"91d4b7d7-065f-400c-8b72-634709e958b7","requestId":"07a4c955-ccef-4c97-ac92-ba5a6d5cc17d"}
# {
#     "files": [
#         {
#             "id": "a8009269-7ac8-409e-b711-b21081ca42aa",
#             "createdAt": "2024-09-06T06:31:49.758Z",
#             "name": "43fa234cc8244815979580895525f451_file.jpg",
#             "extension": "jpg",
#             "mediaType": "image",
#             "type": "jpg",
#             "uploadThingType": "url",
#             "size": 464801,
#             "url": "https://utfs.io/f/2878c70b-7fa9-4996-a3f1-d2cc9a200907-djxsni.jpg"
#         }
#     ]
# }

CHECK_PAYLOAD=[{"id":"9e8e7911-4924-47f7-9a34-c9e846c658a1","createdAt":"$D2024-09-12T09:23:24.194Z","parentId":None,"sessionId":"45be1b8a-ea37-43f5-ab80-7d53e2e7edbd","userId":"7b4943cf-78da-45bc-a38f-427a35aa3b89","role":"user","content":"1","modelId":"openai:gpt-4o-mini","info":{"type":"userTextMessage"},"status":{"isLoading":False},"files":[]}]
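CHECK_PAYLOAD, like the constants above it, is a browser capture pasted into Python. The `__main__` block below imports json_repair, which is typically used to turn such loosely formatted captures back into objects; the diff does not show how the import is used, so the sketch below is a hedged illustration with a made-up input string:

```python
# Hedged sketch: recover a slightly broken capture with json_repair.
# The `raw` string is invented for illustration; only repair_json() is real API.
import json
from json_repair import repair_json

raw = '[{"id": "9e8e7911", "role": "user", "content": "1", "files": [],}]'  # trailing comma: invalid JSON
data = json.loads(repair_json(raw))  # repair_json returns a cleaned JSON string
print(data[0]["role"])               # -> "user"
```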
if __name__ == '__main__':
    import json_repair

    # print("net".startswith(("perplexity", "net")))

    print(list(GPT_4O[0]['selectedModel']))
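The payload constants in this file mirror the JSON bodies the upstream web client sends. A minimal sketch of replaying one of them with httpx; the endpoint URL and the auth header are placeholders for illustration and do not appear anywhere in this diff:

```python
# Minimal replay sketch (illustrative). URL and headers are assumptions.
import httpx

API_BASE = "https://example.com/api/chat"   # hypothetical endpoint
HEADERS = {"Cookie": "<session cookie>"}    # hypothetical auth


def replay(payload) -> str:
    with httpx.Client(timeout=60) as client:
        response = client.post(API_BASE, json=payload, headers=HEADERS)
        response.raise_for_status()
        return response.text

# e.g. replay(PERPLEXITY_SONAR_HUGE) or replay(FLUX_SCHNELL)
```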
@@ -0,0 +1,62 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : AI. @by PyCharm
# @File : video_types
# @Time : 2024/9/13 10:15
# @Author : betterme
# @WeChat : meutils
# @Software : PyCharm
# @Description :

from meutils.pipe import *


class VideoRequest(BaseModel):
    model: Literal["cogvideox-flash", "cogvideox"] = "cogvideox-flash"

    prompt: str = "比得兔开小汽车,游走在马路上,脸上的表情充满开心喜悦。"  # "Peter Rabbit drives a little car down the road, looking delighted."

    """
    Image on which generation is based. If passed, the system uses this image as the starting point.
    The image may be supplied as a URL or as a Base64-encoded string.
    Requirements: .png, .jpeg or .jpg format, no larger than 5 MB. Pass either image_url or prompt, or both.
    """
    image_url: Optional[str] = None

    """
    Output mode, default "quality". "quality": quality first, higher-quality output.
    "speed": speed first, faster generation at somewhat lower quality.
    The cogvideox-flash model does not support choosing an output mode.
    """
    quality: Literal["quality", "speed"] = "speed"

    """Whether to generate AI sound effects. Default: False (no sound effects)."""
    with_audio: bool = True

    """
    Default: if not specified, the short side of the generated video is 1080 and the long side is scaled
    to the source image's aspect ratio. Up to 4K resolution is supported.
    Resolution options: 720x480, 1024x1024, 1280x960, 960x1280, 1920x1080, 1080x1920, 2048x1080, 3840x2160.
    """
    size: Literal[
        '720x480',
        '1024x1024',
        '1280x960',
        '960x1280',
        '1920x1080',
        '1080x1920',
        '2048x1080',
        '3840x2160'] = "1024x1024"

    duration: Literal[5, 10] = 5

    fps: Literal[30, 60] = 30

    class Config:
        frozen = True


class FalVideoRequest(BaseModel):
    model: Union[str, Literal["latentsync", "sync-lipsync", ]] = 'latentsync'
    video_url: Optional[str] = None
    audio_url: Optional[str] = None
    image_url: Optional[str] = None

    sync_mode: Union[str, Literal["cut_off", "loop", "bounce"]] = "cut_off"