g4f 6.9.6.tar.gz → 6.9.8.tar.gz
This diff shows the changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- {g4f-6.9.6/g4f.egg-info → g4f-6.9.8}/PKG-INFO +1 -1
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/PollinationsAI.py +29 -28
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Yupp.py +1 -2
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/LMArena.py +1 -1
- {g4f-6.9.6 → g4f-6.9.8}/g4f/client/__init__.py +27 -56
- {g4f-6.9.6 → g4f-6.9.8/g4f.egg-info}/PKG-INFO +1 -1
- {g4f-6.9.6 → g4f-6.9.8}/LICENSE +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/MANIFEST.in +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/README.md +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/ApiAirforce.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Chatai.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Cloudflare.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Copilot.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/CopilotSession.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/DeepInfra.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/EasyChat.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/GLM.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/GradientNetwork.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/ItalyGPT.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/LambdaChat.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Mintlify.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/OIVSCodeSer.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/OperaAria.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Perplexity.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/PollinationsImage.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Qwen.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Startnest.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/StringableInference.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/TeachAnything.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/WeWordle.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Yqcloud.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/audio/EdgeTTS.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/audio/MarkItDown.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/audio/OpenAIFM.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/audio/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/audio/gTTS.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/base_provider.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/helper.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/BAAI_Ling.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/BlackForestLabs_Flux1Dev.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/BlackForestLabs_Flux1KontextDev.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/CohereForAI_C4AI_Command.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/DeepseekAI_JanusPro7b.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/Microsoft_Phi_4_Multimodal.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/Qwen_Qwen_2_5.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/Qwen_Qwen_2_5M.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/Qwen_Qwen_2_5_Max.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/Qwen_Qwen_2_72B.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/Qwen_Qwen_3.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/StabilityAI_SD35Large.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/hf_space/raise_for_status.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/local/Local.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/local/Ollama.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/local/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/AIBadgr.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Anthropic.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Azure.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/BingCreateImages.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/BlackboxPro.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/CablyAI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Cerebras.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Claude.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Cohere.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/CopilotAccount.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Custom.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/DeepSeek.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/DeepSeekAPI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/FenayAI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Gemini.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/GeminiCLI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/GeminiPro.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/GigaChat.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/GithubCopilot.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/GithubCopilotAPI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/GlhfChat.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Grok.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Groq.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/MetaAI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/MetaAIAccount.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/MicrosoftDesigner.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Nvidia.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/OpenRouter.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/OpenaiAPI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/OpenaiAccount.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/OpenaiChat.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/PerplexityApi.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Pi.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/PuterJS.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Reka.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Replicate.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/ThebApi.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Together.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/Video.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/WhiteRabbitNeo.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/You.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/bing/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/bing/create_images.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/hf/HuggingChat.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/hf/HuggingFaceAPI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/hf/HuggingFaceInference.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/hf/HuggingFaceMedia.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/hf/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/hf/models.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/mini_max/HailuoAI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/mini_max/MiniMax.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/mini_max/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/mini_max/crypt.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/xAI.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/openai/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/openai/crypt.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/openai/har_file.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/openai/models.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/openai/new.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/openai/proofofwork.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/QwenCode.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/cookie_generator.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/fingerprint.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/generate_ua.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/oauthFlow.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/qwenContentGenerator.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/qwenOAuth2.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/sharedTokenManager.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/qwen/stubs.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/search/CachedSearch.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/search/DDGS.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/search/GoogleSearch.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/search/SearXNG.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/search/YouTube.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/search/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/template/BackendApi.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/template/OpenaiTemplate.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/template/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/yupp/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/yupp/models.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/__main__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/api/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/api/_logging.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/api/_tokenizer.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/api/run.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/api/stubs.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/cli/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/cli/__main__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/cli/client.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/client/helper.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/client/models.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/client/service.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/client/stubs.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/client/types.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/config.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/cookies.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/debug.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/errors.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/files.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/gui_parser.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/run.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/api.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/app.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/backend_api.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/config.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/crypto.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/internet.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/js_api.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/server/website.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/gui/webview.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/image/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/image/copy_images.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/langchain.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/_audio_converter.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/_base_converter.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/_image_converter.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/_llm_caption.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/_transcribe_audio.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/markitdown/_youtube_converter.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/integration/pydantic_ai.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/local/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/locals/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/locals/models.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/locals/provider.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/mcp/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/mcp/__main__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/mcp/server.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/mcp/tools.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/models.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/any_model_map.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/any_provider.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/asyncio.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/base_provider.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/create_images.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/helper.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/response.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/retry_provider.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/tool_support.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/providers/types.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/requests/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/requests/aiohttp.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/requests/curl_cffi.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/requests/defaults.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/requests/raise_for_status.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/__init__.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/auth.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/fetch_and_scrape.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/files.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/media.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/run_tools.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/tools/web_search.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/typing.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f/version.py +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f.egg-info/SOURCES.txt +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f.egg-info/dependency_links.txt +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f.egg-info/entry_points.txt +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f.egg-info/requires.txt +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/g4f.egg-info/top_level.txt +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/setup.cfg +0 -0
- {g4f-6.9.6 → g4f-6.9.8}/setup.py +0 -0
{g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/PollinationsAI.py
CHANGED

@@ -24,6 +24,7 @@ from ..providers.response import ImageResponse, Reasoning, VideoResponse, JsonRe
 from ..tools.media import render_messages
 from ..tools.run_tools import AuthManager
 from ..cookies import get_cookies_dir
+from ..tools.files import secure_filename
 from .template.OpenaiTemplate import read_response
 from .. import debug
 
@@ -31,7 +32,6 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     label = "Pollinations AI 🌸"
     url = "https://pollinations.ai"
     login_url = "https://enter.pollinations.ai"
-    api_key = "pk", "_B9YJX5SBohhm2ePq"
     active_by_default = True
     working = True
     supports_system_message = True
@@ -44,7 +44,9 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     gen_text_api_endpoint = "https://gen.pollinations.ai/v1/chat/completions"
     image_models_endpoint = "https://gen.pollinations.ai/image/models"
     text_models_endpoint = "https://gen.pollinations.ai/text/models"
-
+    balance_endpoint = "https://api.gpt4free.workers.dev/api/pollinations/account/balance"
+    worker_api_endpoint = "https://api.gpt4free.workers.dev/api/pollinations/chat/completions"
+    worker_models_endpoint = "https://api.gpt4free.workers.dev/api/pollinations/text/models"
 
     # Models configuration
     default_model = "openai"
@@ -56,8 +58,6 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     image_models = [default_image_model, "turbo", "kontext"]
     audio_models = {}
     vision_models = [default_vision_model]
-    _gen_models_loaded = False
-    _free_models_loaded = False
     model_aliases = {
         "gpt-4.1-nano": "openai-fast",
         "llama-4-scout": "llamascout",
@@ -74,12 +74,15 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     }
     swap_model_aliases = {v: k for k, v in model_aliases.items()}
     balance: Optional[float] = None
+    current_models_endpoint: Optional[str] = None
 
     @classmethod
     def get_balance(cls, api_key: str, timeout: Optional[float] = None) -> Optional[float]:
         try:
-            headers =
-
+            headers = None
+            if api_key:
+                headers = {"authorization": f"Bearer {api_key}"}
+            response = requests.get(cls.balance_endpoint, headers=headers, timeout=timeout)
             response.raise_for_status()
             data = response.json()
             cls.balance = float(data.get("balance", 0.0))
@@ -103,17 +106,18 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
 
         if not api_key:
             api_key = AuthManager.load_api_key(cls)
-        if not api_key or api_key.startswith("g4f_") or api_key.startswith("gfs_"):
-
-
-
-            debug.log(f"
+        if (not api_key or api_key.startswith("g4f_") or api_key.startswith("gfs_")) and cls.balance or cls.balance is None and cls.get_balance(api_key, timeout) and cls.balance > 0:
+            debug.log(f"Authenticated with Pollinations AI using G4F API.")
+            models_url = cls.worker_models_endpoint
+        elif api_key:
+            debug.log(f"Using Pollinations AI with provided API key.")
+            models_url = cls.gen_text_api_endpoint
         else:
             debug.log(f"Using Pollinations AI without authentication.")
-
+            models_url = cls.text_models_endpoint
 
-        if
-            path = Path(get_cookies_dir()) / "models" / datetime.today().strftime('%Y-%m-%d') / f"{
+        if cls.current_models_endpoint != models_url:
+            path = Path(get_cookies_dir()) / "models" / datetime.today().strftime('%Y-%m-%d') / f"{secure_filename(models_url)}.json"
             if path.exists():
                 try:
                     data = path.read_text()
@@ -180,10 +184,7 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
                     cls.swap_model_aliases = {v: k for k, v in cls.model_aliases.items()}
 
                 finally:
-
-                    cls._gen_models_loaded = True
-        else:
-            cls._free_models_loaded = True
+                    cls.current_models_endpoint = models_url
         # Return unique models across all categories
         all_models = cls.text_models.copy()
         all_models.extend(cls.image_models)
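The get_models changes above replace the two `_*_models_loaded` flags with a per-endpoint, per-day JSON cache whose filename is derived from the selected models URL. A minimal standalone sketch of that caching idea (not the g4f source; `secure_filename`, `load_models`, and the cache directory here are simplified stand-ins):

```python
# Sketch of a per-day, per-endpoint model-list cache, as the diff introduces.
import json
import re
from datetime import date
from pathlib import Path
from urllib.request import urlopen

def secure_filename(name: str) -> str:
    # Simplified stand-in for g4f's secure_filename helper (assumption).
    return re.sub(r"[^A-Za-z0-9_.-]", "_", name)

def load_models(models_url: str, cache_dir: Path = Path("./models_cache")) -> list:
    path = cache_dir / date.today().isoformat() / f"{secure_filename(models_url)}.json"
    if path.exists():
        return json.loads(path.read_text())      # reuse today's cached response
    with urlopen(models_url, timeout=15) as response:
        data = json.loads(response.read().decode())
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(data))             # cache for the rest of the day
    return data
```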
{g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/PollinationsAI.py (continued)

@@ -251,7 +252,7 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
         **kwargs
     ) -> AsyncResult:
         if cache is None:
-            cache = kwargs.get("action") != "variant"
+            cache = kwargs.get("action") is None or kwargs.get("action") != "variant"
         if extra_body is None:
             extra_body = {}
         if not model:
@@ -262,7 +263,7 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
                     has_audio = True
                     break
             model = "openai-audio" if has_audio else cls.default_model
-        if cls.get_models(api_key=api_key, timeout=kwargs.get("timeout")):
+        if cls.get_models(api_key=api_key, timeout=kwargs.get("timeout", 15)):
             if model in cls.model_aliases:
                 model = cls.model_aliases[model]
         debug.log(f"Using model: {model}")
@@ -480,17 +481,17 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
             seed=None if "tools" in extra_body else seed,
             **extra_body
         )
+        if (not api_key or api_key.startswith("g4f_") or api_key.startswith("gfs_")) and cls.balance and cls.balance > 0:
+            endpoint = cls.worker_api_endpoint
+        elif api_key:
+            endpoint = cls.gen_text_api_endpoint
+        else:
+            endpoint = cls.text_api_endpoint
         headers = None
-        if api_key
+        if api_key:
             headers = {"authorization": f"Bearer {api_key}"}
-        elif cls.balance > 0:
-            headers = {"authorization": f"Bearer {''.join(cls.api_key)}"}
         yield JsonRequest.from_dict(data)
-
-            url = cls.gen_text_api_endpoint
-        else:
-            url = cls.text_api_endpoint
-        async with session.post(url, json=data, headers=headers) as response:
+        async with session.post(endpoint, json=data, headers=headers) as response:
             if response.status in (400, 500):
                 debug.error(f"Error: {response.status} - Bad Request: {data}")
             async for chunk in read_response(response, stream, format_media_prompt(messages), cls.get_dict(),
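The request path above now picks one of three endpoints up front instead of choosing a URL after the headers are built: the gpt4free worker when a g4f-style key (or no key) still has balance, gen.pollinations.ai when a real key is supplied, and the plain text endpoint otherwise. A hedged sketch of equivalent routing logic (the two first URLs are copied from the diff; the anonymous fallback stands in for `cls.text_api_endpoint` and is an assumption):

```python
# Sketch of the endpoint selection the diff converges on (not the provider code itself).
from typing import Optional, Tuple

WORKER_API = "https://api.gpt4free.workers.dev/api/pollinations/chat/completions"
GEN_TEXT_API = "https://gen.pollinations.ai/v1/chat/completions"
TEXT_API = "https://text.pollinations.ai/openai"  # placeholder for cls.text_api_endpoint (assumption)

def select_endpoint(api_key: Optional[str], balance: Optional[float]) -> Tuple[str, dict]:
    g4f_key = not api_key or api_key.startswith(("g4f_", "gfs_"))
    if g4f_key and balance and balance > 0:
        endpoint = WORKER_API      # route through the gpt4free worker while credit remains
    elif api_key:
        endpoint = GEN_TEXT_API    # a real Pollinations key goes straight to gen.pollinations.ai
    else:
        endpoint = TEXT_API        # anonymous fallback
    headers = {"authorization": f"Bearer {api_key}"} if api_key else {}
    return endpoint, headers
```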
{g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/Yupp.py
CHANGED

@@ -11,7 +11,7 @@ try:
     import cloudscraper
     from cloudscraper import CloudScraper
 except ImportError:
-
+    from typing import Type as CloudScraper
 
 from .helper import get_last_user_message
 from .yupp.models import YuppModelManager
@@ -47,7 +47,6 @@ def create_scraper():
     scraper.headers.update({
         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36 Edg/137.0.0.0",
         "Accept": "text/x-component, */*",
-        "Accept-Encoding": "gzip, deflate, br, zstd",
         "Accept-Language": "en-US,en;q=0.9",
         "Sec-Fetch-Dest": "empty",
         "Sec-Fetch-Mode": "cors",
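The Yupp change fills the previously empty `except ImportError` branch so the `CloudScraper` name still exists for annotations when cloudscraper is not installed, and drops the explicit Accept-Encoding header. A small sketch of that optional-dependency pattern (the `HAS_CLOUDSCRAPER` flag and `describe` helper are illustrative additions, not part of the diff):

```python
# Bind a placeholder to the same name so annotations keep working without cloudscraper.
try:
    import cloudscraper
    from cloudscraper import CloudScraper
    HAS_CLOUDSCRAPER = True
except ImportError:
    from typing import Type as CloudScraper  # placeholder type, mirrors the diff
    HAS_CLOUDSCRAPER = False

def describe(scraper: "CloudScraper") -> str:
    # The string annotation stays valid whether or not the real class was imported.
    return type(scraper).__name__
```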
{g4f-6.9.6 → g4f-6.9.8}/g4f/Provider/needs_auth/LMArena.py
CHANGED

@@ -116,7 +116,7 @@ class LMArena(AsyncGeneratorProvider, ProviderModelMixin, AuthFileMixin):
         "getProxyImage": "60049198d4936e6b7acc63719b63b89284c58683e6"
     }
     @classmethod
-    def get_models(cls, timeout: int = None) -> list[str]:
+    def get_models(cls, timeout: int = None, **kwargs) -> list[str]:
         if not cls._models_loaded and has_curl_cffi:
             cache_file = cls.get_cache_file()
             args = {}
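Adding `**kwargs` lets `LMArena.get_models` accept extra keyword arguments (for example an `api_key` forwarded by shared model-loading code) without raising `TypeError`. A tiny illustration of why the looser signature helps (hypothetical standalone function, not the provider code):

```python
# Accepting **kwargs lets a shared caller pass provider-specific arguments
# to every provider's get_models without raising TypeError.
def get_models(timeout: int = None, **kwargs) -> list[str]:
    # Extra keyword arguments such as api_key are simply ignored here.
    return ["model-a", "model-b"]

print(get_models(timeout=10, api_key="ignored"))  # works; the narrower signature would raise
```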
{g4f-6.9.6 → g4f-6.9.8}/g4f/client/__init__.py
CHANGED

@@ -15,7 +15,7 @@ from typing import Union, AsyncIterator, Iterator, Awaitable, Optional, List, Di
 
 from ..image.copy_images import copy_media, get_media_dir
 from ..typing import Messages, ImageType
-from ..providers.types import ProviderType,
+from ..providers.types import ProviderType, BaseProvider
 from ..providers.response import *
 from ..errors import NoMediaResponseError, ProviderNotFoundError
 from ..providers.retry_provider import IterListProvider
@@ -70,14 +70,15 @@ def iter_response(
     stream: bool,
     response_format: Optional[dict] = None,
     max_tokens: Optional[int] = None,
-    stop: Optional[list[str]] = None
+    stop: Optional[list[str]] = None,
+    provider_info: Optional[ProviderInfo] = None
 ) -> ChatCompletionResponseType:
     content = ""
     reasoning = []
     finish_reason = None
     tool_calls = None
     usage = None
-
+    provider_info: ProviderInfo = None
     conversation: JsonConversation = None
     completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28))
     idx = 0
@@ -100,7 +101,7 @@ def iter_response(
             usage = chunk
             continue
         elif isinstance(chunk, ProviderInfo):
-
+            provider_info = chunk
             continue
         elif isinstance(chunk, Reasoning):
             reasoning.append(chunk)
@@ -122,9 +123,9 @@
 
         if stream:
             chunk = ChatCompletionChunk.model_construct(chunk, None, completion_id, int(time.time()))
-            if
-            chunk.provider =
-            chunk.model =
+            if provider_info is not None:
+                chunk.provider = provider_info.name
+                chunk.model = provider_info.model
             yield chunk
 
     if finish_reason is not None:
@@ -153,29 +154,18 @@
         conversation=None if conversation is None else conversation.get_dict(),
         reasoning=reasoning if reasoning else None
     )
-    if
-    chat_completion.provider =
-    chat_completion.model =
+    if provider_info is not None:
+        chat_completion.provider = provider_info.name
+        chat_completion.model = provider_info.model
     yield chat_completion
 
-# Synchronous iter_append_model_and_provider function
-def iter_append_model_and_provider(response: ChatCompletionResponseType, last_model: str, last_provider: ProviderType) -> ChatCompletionResponseType:
-    if isinstance(last_provider, BaseRetryProvider):
-        yield from response
-        return
-    for chunk in response:
-        if isinstance(chunk, (ChatCompletion, ChatCompletionChunk)):
-            if chunk.provider is None and last_provider is not None:
-                chunk.model = getattr(last_provider, "last_model", last_model)
-                chunk.provider = last_provider.__name__
-        yield chunk
-
 async def async_iter_response(
     response: AsyncIterator[Union[str, ResponseType]],
     stream: bool,
     response_format: Optional[dict] = None,
     max_tokens: Optional[int] = None,
-    stop: Optional[list[str]] = None
+    stop: Optional[list[str]] = None,
+    provider_info: Optional[ProviderInfo] = None
 ) -> AsyncChatCompletionResponseType:
     content = ""
     reasoning = []
@@ -184,7 +174,6 @@ async def async_iter_response(
     idx = 0
     tool_calls = None
     usage = None
-    provider: ProviderInfo = None
     conversation: JsonConversation = None
 
     try:
@@ -203,7 +192,7 @@
             usage = chunk
             continue
         elif isinstance(chunk, ProviderInfo):
-
+            provider_info = chunk
             continue
         elif isinstance(chunk, Reasoning) and not stream:
             reasoning.append(chunk)
@@ -225,9 +214,9 @@
 
         if stream:
             chunk = ChatCompletionChunk.model_construct(chunk, None, completion_id, int(time.time()))
-            if
-            chunk.provider =
-            chunk.model =
+            if provider_info is not None:
+                chunk.provider = provider_info.name
+                chunk.model = provider_info.model
             yield chunk
 
     if finish_reason is not None:
@@ -256,32 +245,13 @@
         conversation=conversation,
         reasoning=reasoning if reasoning else None
     )
-    if
-    chat_completion.provider =
-    chat_completion.model =
+    if provider_info is not None:
+        chat_completion.provider = provider_info.name
+        chat_completion.model = provider_info.model
     yield chat_completion
     finally:
         await safe_aclose(response)
 
-async def async_iter_append_model_and_provider(
-    response: AsyncChatCompletionResponseType,
-    last_model: str,
-    last_provider: ProviderType
-) -> AsyncChatCompletionResponseType:
-    try:
-        if isinstance(last_provider, BaseRetryProvider):
-            async for chunk in response:
-                yield chunk
-            return
-        async for chunk in response:
-            if isinstance(chunk, (ChatCompletion, ChatCompletionChunk)):
-                if chunk.provider is None and last_provider is not None:
-                    chunk.model = getattr(last_provider, "last_model", last_model)
-                    chunk.provider = last_provider.__name__
-            yield chunk
-    finally:
-        await safe_aclose(response)
-
 class Client(BaseClient):
     def __init__(
         self,
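With `iter_append_model_and_provider` and its async twin removed, `iter_response`/`async_iter_response` now receive a seed `ProviderInfo` and stamp `provider` and `model` onto each chunk themselves, letting an in-stream `ProviderInfo` override the seed. A self-contained sketch of that pattern (simplified types, not the g4f classes):

```python
# Sketch: provider/model are filled on each chunk inside the iterator,
# instead of in a second wrapper pass over the finished stream.
from dataclasses import dataclass
from typing import Iterator, Optional, Union

@dataclass
class ProviderInfo:
    name: str
    model: str

@dataclass
class Chunk:
    content: str
    provider: Optional[str] = None
    model: Optional[str] = None

def iter_chunks(stream: Iterator[Union[str, ProviderInfo]],
                provider_info: Optional[ProviderInfo] = None) -> Iterator[Chunk]:
    for item in stream:
        if isinstance(item, ProviderInfo):
            provider_info = item          # the provider announced itself mid-stream
            continue
        chunk = Chunk(content=item)
        if provider_info is not None:
            chunk.provider = provider_info.name
            chunk.model = provider_info.model
        yield chunk

for c in iter_chunks(iter([ProviderInfo("PollinationsAI", "openai"), "Hello"]),
                     provider_info=ProviderInfo("fallback", "gpt-4o-mini")):
    print(c)
```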
{g4f-6.9.6 → g4f-6.9.8}/g4f/client/__init__.py (continued)

@@ -350,9 +320,10 @@ class Completions:
             **kwargs
         )
 
+        provider_info = ProviderInfo(**provider.get_dict(), model=model)
+
         def fallback(response):
-
-            return iter_append_model_and_provider(response, model, provider)
+            return iter_response(response, stream, response_format, max_tokens, stop, provider_info)
 
         if raw:
             def raw_response(response):
@@ -689,8 +660,8 @@ class AsyncCompletions:
         )
 
         def fallback(response):
-
-            return
+            provider_info = ProviderInfo(**provider.get_dict(), model=model)
+            return async_iter_response(response, stream, response_format, max_tokens, stop, provider_info)
 
         if raw:
             async def raw_response(response):
@@ -816,7 +787,7 @@ class ClientFactory:
     )
 
     # Create async client
-    async_client = ClientFactory.
+    async_client = ClientFactory.create_async_client("PollinationsAI")
     """
 
     # Registry of live/custom providers
@@ -850,7 +821,7 @@ class ClientFactory:
         elif provider.startswith("custom:"):
             if provider.startswith("custom:"):
                 serverId = provider[7:]
-                base_url = f"https://
+                base_url = f"https://api.gpt4free.workers.dev/custom/{serverId}"
                 if not base_url:
                     raise ValueError("base_url is required for custom providers")
                 provider = create_custom_provider(base_url, api_key, name=name, **kwargs)
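A hedged usage example built on the public g4f client API and the `ClientFactory` docstring updated above; exact keyword support may vary across 6.9.x releases:

```python
from g4f.client import Client

client = Client(provider="PollinationsAI")
response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Say hello"}],
)
# After this release, response.provider and response.model are filled from the
# ProviderInfo emitted by the provider rather than by a separate wrapper pass.
print(response.provider, response.model)
print(response.choices[0].message.content)
```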