webscout 8.3.6__py3-none-any.whl → 8.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic. Click here for more details.
- webscout/AIutel.py +2 -0
- webscout/Provider/AISEARCH/__init__.py +18 -11
- webscout/Provider/AISEARCH/scira_search.py +3 -1
- webscout/Provider/Aitopia.py +2 -3
- webscout/Provider/Andi.py +3 -3
- webscout/Provider/ChatGPTClone.py +1 -1
- webscout/Provider/ChatSandbox.py +1 -0
- webscout/Provider/Cloudflare.py +1 -1
- webscout/Provider/Cohere.py +1 -0
- webscout/Provider/Deepinfra.py +7 -10
- webscout/Provider/ExaAI.py +1 -1
- webscout/Provider/ExaChat.py +1 -80
- webscout/Provider/Flowith.py +1 -1
- webscout/Provider/Gemini.py +7 -5
- webscout/Provider/GeminiProxy.py +1 -0
- webscout/Provider/GithubChat.py +3 -1
- webscout/Provider/Groq.py +1 -1
- webscout/Provider/HeckAI.py +8 -4
- webscout/Provider/Jadve.py +23 -38
- webscout/Provider/K2Think.py +308 -0
- webscout/Provider/Koboldai.py +8 -186
- webscout/Provider/LambdaChat.py +2 -4
- webscout/Provider/Nemotron.py +3 -4
- webscout/Provider/Netwrck.py +3 -2
- webscout/Provider/OLLAMA.py +1 -0
- webscout/Provider/OPENAI/Cloudflare.py +6 -7
- webscout/Provider/OPENAI/FalconH1.py +2 -7
- webscout/Provider/OPENAI/FreeGemini.py +6 -8
- webscout/Provider/OPENAI/{monochat.py → K2Think.py} +180 -77
- webscout/Provider/OPENAI/NEMOTRON.py +3 -6
- webscout/Provider/OPENAI/PI.py +5 -4
- webscout/Provider/OPENAI/Qwen3.py +2 -3
- webscout/Provider/OPENAI/TogetherAI.py +2 -2
- webscout/Provider/OPENAI/TwoAI.py +3 -4
- webscout/Provider/OPENAI/__init__.py +17 -58
- webscout/Provider/OPENAI/ai4chat.py +313 -303
- webscout/Provider/OPENAI/base.py +9 -29
- webscout/Provider/OPENAI/chatgpt.py +7 -2
- webscout/Provider/OPENAI/chatgptclone.py +4 -7
- webscout/Provider/OPENAI/chatsandbox.py +84 -59
- webscout/Provider/OPENAI/deepinfra.py +6 -6
- webscout/Provider/OPENAI/heckai.py +4 -1
- webscout/Provider/OPENAI/netwrck.py +1 -0
- webscout/Provider/OPENAI/scirachat.py +6 -0
- webscout/Provider/OPENAI/textpollinations.py +3 -11
- webscout/Provider/OPENAI/toolbaz.py +14 -11
- webscout/Provider/OpenGPT.py +1 -1
- webscout/Provider/Openai.py +150 -402
- webscout/Provider/PI.py +1 -0
- webscout/Provider/Perplexitylabs.py +1 -2
- webscout/Provider/QwenLM.py +107 -89
- webscout/Provider/STT/__init__.py +17 -2
- webscout/Provider/{Llama3.py → Sambanova.py} +9 -10
- webscout/Provider/StandardInput.py +1 -1
- webscout/Provider/TTI/__init__.py +18 -12
- webscout/Provider/TTS/__init__.py +18 -10
- webscout/Provider/TeachAnything.py +1 -0
- webscout/Provider/TextPollinationsAI.py +5 -12
- webscout/Provider/TogetherAI.py +86 -87
- webscout/Provider/TwoAI.py +53 -309
- webscout/Provider/TypliAI.py +2 -1
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +1 -1
- webscout/Provider/Venice.py +2 -1
- webscout/Provider/VercelAI.py +1 -0
- webscout/Provider/WiseCat.py +2 -1
- webscout/Provider/WrDoChat.py +2 -1
- webscout/Provider/__init__.py +18 -86
- webscout/Provider/ai4chat.py +1 -1
- webscout/Provider/akashgpt.py +7 -10
- webscout/Provider/cerebras.py +115 -9
- webscout/Provider/chatglm.py +170 -83
- webscout/Provider/cleeai.py +1 -2
- webscout/Provider/deepseek_assistant.py +1 -1
- webscout/Provider/elmo.py +1 -1
- webscout/Provider/geminiapi.py +1 -1
- webscout/Provider/granite.py +1 -1
- webscout/Provider/hermes.py +1 -3
- webscout/Provider/julius.py +1 -0
- webscout/Provider/learnfastai.py +1 -1
- webscout/Provider/llama3mitril.py +1 -1
- webscout/Provider/llmchat.py +1 -1
- webscout/Provider/llmchatco.py +1 -1
- webscout/Provider/meta.py +3 -3
- webscout/Provider/oivscode.py +2 -2
- webscout/Provider/scira_chat.py +51 -124
- webscout/Provider/searchchat.py +1 -0
- webscout/Provider/sonus.py +1 -1
- webscout/Provider/toolbaz.py +15 -12
- webscout/Provider/turboseek.py +31 -22
- webscout/Provider/typefully.py +2 -1
- webscout/Provider/x0gpt.py +1 -0
- webscout/Provider/yep.py +2 -1
- webscout/tempid.py +6 -0
- webscout/version.py +1 -1
- {webscout-8.3.6.dist-info → webscout-8.3.7.dist-info}/METADATA +2 -1
- {webscout-8.3.6.dist-info → webscout-8.3.7.dist-info}/RECORD +103 -129
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -793
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/GptOss.py +0 -207
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/Kimi.py +0 -445
- webscout/Provider/MCPCore.py +0 -322
- webscout/Provider/MiniMax.py +0 -207
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -1045
- webscout/Provider/OPENAI/MiniMax.py +0 -298
- webscout/Provider/OPENAI/autoproxy.py +0 -1067
- webscout/Provider/OPENAI/copilot.py +0 -321
- webscout/Provider/OPENAI/gptoss.py +0 -288
- webscout/Provider/OPENAI/kimi.py +0 -469
- webscout/Provider/OPENAI/mcpcore.py +0 -431
- webscout/Provider/OPENAI/multichat.py +0 -378
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/UNFINISHED/fetch_together_models.py +0 -90
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/copilot.py +0 -441
- webscout/Provider/freeaichat.py +0 -294
- webscout/Provider/koala.py +0 -182
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/monochat.py +0 -275
- webscout/Provider/multichat.py +0 -375
- webscout/Provider/scnet.py +0 -244
- webscout/Provider/talkai.py +0 -194
- /webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +0 -0
- /webscout/Provider/{Qodo.py → UNFINISHED/Qodo.py} +0 -0
- /webscout/Provider/{XenAI.py → UNFINISHED/XenAI.py} +0 -0
- {webscout-8.3.6.dist-info → webscout-8.3.7.dist-info}/WHEEL +0 -0
- {webscout-8.3.6.dist-info → webscout-8.3.7.dist-info}/entry_points.txt +0 -0
- {webscout-8.3.6.dist-info → webscout-8.3.7.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.3.6.dist-info → webscout-8.3.7.dist-info}/top_level.txt +0 -0
|
@@ -1,1067 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Auto-proxy module for OpenAI-compatible providers.
|
|
3
|
-
This module provides automatic proxy injection for HTTP sessions using a remote proxy list.
|
|
4
|
-
"""
|
|
5
|
-
|
|
6
|
-
import random
|
|
7
|
-
import time
|
|
8
|
-
from abc import ABCMeta
|
|
9
|
-
from typing import Dict, List, Optional, Any, Callable, Union
|
|
10
|
-
import requests
|
|
11
|
-
import functools
|
|
12
|
-
from contextlib import contextmanager
|
|
13
|
-
import types
|
|
14
|
-
|
|
15
|
-
# Optional imports for different HTTP clients
|
|
16
|
-
try:
|
|
17
|
-
import httpx
|
|
18
|
-
except ImportError:
|
|
19
|
-
httpx = None
|
|
20
|
-
|
|
21
|
-
try:
|
|
22
|
-
from curl_cffi.requests import Session as CurlSession
|
|
23
|
-
from curl_cffi.requests import AsyncSession as CurlAsyncSession
|
|
24
|
-
except ImportError:
|
|
25
|
-
CurlSession = None
|
|
26
|
-
CurlAsyncSession = None
|
|
27
|
-
|
|
28
|
-
# Global proxy cache
# Module-level cache shared by get_cached_proxies(); refreshed when older
# than 'cache_duration' seconds or when empty.
_proxy_cache = {
    'proxies': [],        # most recently fetched remote proxy list
    'last_updated': 0,    # epoch seconds of the last successful fetch
    'cache_duration': 300 # 5 minutes
}

# Remote source of newline-delimited proxy URLs consumed by fetch_proxies().
PROXY_SOURCE_URL = "https://proxies.typegpt.net/ips.txt"
|
|
36
|
-
|
|
37
|
-
# --- Static Proxy Lists ---
|
|
38
|
-
# NordVPN proxies (format: https://host:port:user:pass)
|
|
39
|
-
STATIC_NORDVPN_PROXIES = [
|
|
40
|
-
"https://pl128.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
41
|
-
"https://be148.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
42
|
-
"https://hu48.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
43
|
-
"https://us5063.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
44
|
-
"https://at86.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
45
|
-
"https://ch217.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
46
|
-
"https://dk152.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
47
|
-
"https://no151.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
48
|
-
"https://ch218.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
49
|
-
"https://uk1784.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
50
|
-
"https://fr555.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
51
|
-
"https://ch219.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
52
|
-
"https://us5064.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
53
|
-
"https://uk765.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
54
|
-
"https://uk812.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
55
|
-
"https://uk813.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
56
|
-
"https://uk814.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
57
|
-
"https://uk871.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
58
|
-
"https://uk873.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
59
|
-
"https://uk875.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
60
|
-
"https://uk877.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
61
|
-
"https://uk879.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
62
|
-
"https://uk884.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
63
|
-
"https://uk886.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
64
|
-
"https://be149.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
65
|
-
"https://uk1806.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
66
|
-
"https://uk888.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
67
|
-
"https://uk890.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
68
|
-
"https://uk892.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
69
|
-
"https://uk894.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
70
|
-
"https://uk896.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
71
|
-
"https://uk898.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
72
|
-
"https://us5055.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
73
|
-
"https://jp429.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
74
|
-
"https://it132.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
75
|
-
"https://us4735.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
76
|
-
"https://pl122.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
77
|
-
"https://cz93.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
78
|
-
"https://at80.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
79
|
-
"https://ro59.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
80
|
-
"https://ch198.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
81
|
-
"https://bg38.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
82
|
-
"https://hu47.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
83
|
-
"https://jp454.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
84
|
-
"https://dk150.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
85
|
-
"https://de750.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
86
|
-
"https://pl125.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
87
|
-
"https://us5057.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
88
|
-
"https://us5058.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
89
|
-
"https://us5059.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
90
|
-
"https://us5060.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJVZdy8KKcEW3ZE5",
|
|
91
|
-
"https://no141.nordvpn.com:89:WZBVNB9MCuZu3FLX3D1rUc8a:XRBU8tofEJ"
|
|
92
|
-
]
|
|
93
|
-
|
|
94
|
-
# Webshare rotating proxies (format: http://user:pass@host:port)
|
|
95
|
-
STATIC_WEBSHARE_PROXIES = [
|
|
96
|
-
"http://kkuafwyh-rotate:kl6esmu21js3@p.webshare.io:80",
|
|
97
|
-
"http://stzaxffz-rotate:ax92ravj1pmm@p.webshare.io:80",
|
|
98
|
-
"http://nfokjhhu-rotate:ez248bgee4z9@p.webshare.io:80",
|
|
99
|
-
"http://fiupzkjx-rotate:0zlrd2in3mrh@p.webshare.io:80",
|
|
100
|
-
"http://xukpnkpr-rotate:hcmwl8cl4iyw@p.webshare.io:80",
|
|
101
|
-
"http://tndgqbid-rotate:qb1cgkl4irh4@p.webshare.io:80",
|
|
102
|
-
"http://nnpnjrmj-rotate:8bj089tzcwhz@p.webshare.io:80",
|
|
103
|
-
]
|
|
104
|
-
|
|
105
|
-
# Combine all static proxies (NordVPN entries first, then Webshare).
# NOTE(review): get_cached_proxies() interleaves the two static pools itself
# and does not read this combined list — STATIC_PROXIES may be unused; verify
# against the rest of the module.
STATIC_PROXIES = STATIC_NORDVPN_PROXIES + STATIC_WEBSHARE_PROXIES
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
def fetch_proxies() -> List[str]:
    """
    Download the remote proxy list from ``PROXY_SOURCE_URL``.

    Returns:
        List[str]: Proxy URLs in the form 'http://user:pass@host:port';
        an empty list on any network or parsing failure.
    """
    try:
        reply = requests.get(PROXY_SOURCE_URL, timeout=10)
        reply.raise_for_status()
        # Keep only non-empty lines that look like plain-HTTP proxy URLs.
        return [
            candidate.strip()
            for candidate in reply.text.strip().split('\n')
            if candidate.strip().startswith('http://')
        ]
    except Exception:
        # Best-effort: callers treat an empty list as "no remote proxies".
        return []
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
def get_cached_proxies() -> List[str]:
    """
    Get proxies from cache, refreshing from the remote source when stale.

    The module-level ``_proxy_cache`` is refreshed when it is empty or older
    than its ``cache_duration`` (seconds). A failed fetch silently keeps the
    previous (possibly empty) cached list.

    Returns:
        List[str]: De-duplicated proxy URLs ordered by priority:
        static Webshare, then remote, then static NordVPN.
    """
    current_time = time.time()

    # Refresh when the cache has expired or has never been populated.
    if (current_time - _proxy_cache['last_updated'] > _proxy_cache['cache_duration'] or
            not _proxy_cache['proxies']):
        new_proxies = fetch_proxies()
        if new_proxies:
            _proxy_cache['proxies'] = new_proxies
            _proxy_cache['last_updated'] = current_time
        # On fetch failure the stale cache is intentionally retained
        # (the original dead `else: pass` branch has been removed).

    # Priority: Webshare -> remote -> NordVPN
    proxies = STATIC_WEBSHARE_PROXIES + _proxy_cache['proxies'] + STATIC_NORDVPN_PROXIES
    return list(dict.fromkeys(proxies))  # Remove duplicates, preserve order
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
def get_auto_proxy() -> Optional[str]:
    """
    Pick one proxy at random, preferring the static Webshare pool.

    Returns:
        Optional[str]: A proxy URL, or None when no proxies are available.
    """
    available = get_cached_proxies()
    # Webshare entries are preferred; fall back to the full list.
    preferred = [candidate for candidate in available if candidate in STATIC_WEBSHARE_PROXIES]
    for pool in (preferred, available):
        if pool:
            return random.choice(pool)
    return None
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
def get_proxy_dict(proxy_url: Optional[str] = None) -> Dict[str, str]:
    """
    Convert a proxy URL into the mapping format used by requests/httpx.

    Args:
        proxy_url: Proxy URL; when None, one is selected via get_auto_proxy().

    Returns:
        Dict[str, str]: Mapping with 'http' and 'https' keys, or an empty
        dict when no proxy could be determined.
    """
    target = proxy_url if proxy_url is not None else get_auto_proxy()
    if target is None:
        return {}
    return {'http': target, 'https': target}
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
def test_proxy(proxy_url: str, timeout: int = 10) -> bool:
    """
    Check whether a proxy works by fetching a well-known test URL through it.

    Args:
        proxy_url: The proxy URL to test
        timeout: Request timeout in seconds

    Returns:
        bool: True when the request via the proxy returns HTTP 200.
    """
    proxy_map = {'http': proxy_url, 'https': proxy_url}
    try:
        reply = requests.get("https://httpbin.org/ip", proxies=proxy_map, timeout=timeout)
    except Exception:
        # Any connection/timeout/protocol failure counts as "not working".
        return False
    return reply.status_code == 200
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
class ProxyAutoMeta(ABCMeta):
    """
    Metaclass to ensure all OpenAICompatibleProvider subclasses automatically get proxy support.
    This will inject proxies into any requests.Session, httpx.Client, or curl_cffi session attributes found on the instance.

    To disable automatic proxy injection, set disable_auto_proxy=True in the constructor or
    set the class attribute DISABLE_AUTO_PROXY = True.
    """

    def __call__(cls, *args, **kwargs):
        # Build the instance normally first; patching happens post-__init__,
        # so any sessions created in __init__ are visible below.
        instance = super().__call__(*args, **kwargs)

        # Check if auto proxy is disabled
        disable_auto_proxy = kwargs.get('disable_auto_proxy', False) or getattr(cls, 'DISABLE_AUTO_PROXY', False)

        # Get proxies from various sources.
        # NOTE(review): `or` means a falsy instance.proxies (e.g. {}) falls
        # through to the kwargs value — confirm this is intended.
        proxies = getattr(instance, 'proxies', None) or kwargs.get('proxies', None)

        if proxies is None and not disable_auto_proxy:
            try:
                proxy_url = get_auto_proxy()
                if proxy_url:
                    proxies = get_proxy_dict(proxy_url)
                else:
                    proxies = {}
            except Exception:
                # Proxy discovery is best-effort; never block construction.
                proxies = {}
        elif proxies is None:
            proxies = {}

        instance.proxies = proxies

        # Set default max proxy attempts for auto-retry functionality
        if not hasattr(instance, '_max_proxy_attempts'):
            instance._max_proxy_attempts = kwargs.get('max_proxy_attempts', 2)

        # Always patch existing sessions (for both proxy and auto-retry functionality)
        _patch_instance_sessions(instance, proxies)

        # Provide helper methods for creating proxied sessions
        _add_proxy_helpers(instance, proxies)

        return instance
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
def _patch_instance_sessions(instance: Any, proxies: Dict[str, str]) -> None:
    """
    Patch existing session objects on the instance with proxy configuration and auto-retry functionality.

    Scans every public attribute of *instance*; for each supported HTTP
    session type found, applies the proxy mapping and installs the matching
    auto-retry wrapper. Attributes whose access raises are skipped silently.

    Args:
        instance: The class instance to patch
        proxies: Proxy dictionary to apply
    """
    for attr_name in dir(instance):
        # Private/dunder attributes are never inspected or patched.
        if attr_name.startswith('_'):
            continue

        try:
            attr_obj = getattr(instance, attr_name)

            # Patch requests.Session objects
            if isinstance(attr_obj, requests.Session):
                if proxies:
                    attr_obj.proxies.update(proxies)
                _add_auto_retry_to_session(attr_obj, instance)

            # Patch httpx.Client objects
            elif httpx and isinstance(attr_obj, httpx.Client):
                try:
                    # httpx uses different proxy format
                    if proxies:
                        attr_obj._proxies = proxies
                    _add_auto_retry_to_httpx_client(attr_obj, instance)
                except Exception:
                    pass

            # Patch curl_cffi Session objects
            elif CurlSession and isinstance(attr_obj, CurlSession):
                try:
                    if proxies:
                        attr_obj.proxies.update(proxies)
                    _add_auto_retry_to_curl_session(attr_obj, instance)
                except Exception:
                    pass

            # Patch curl_cffi AsyncSession objects
            elif CurlAsyncSession and isinstance(attr_obj, CurlAsyncSession):
                try:
                    if proxies:
                        attr_obj.proxies.update(proxies)
                    _add_auto_retry_to_curl_async_session(attr_obj, instance)
                except Exception:
                    pass

        except Exception:
            # dir() can surface properties whose getters raise; ignore them.
            continue
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
def _add_auto_retry_to_session(session: requests.Session, instance: Any) -> None:
    """
    Add auto-retry functionality to a requests.Session object.

    Wraps ``session.request`` so that on any exception it rotates through
    fresh auto-discovered proxies (up to ``instance._max_proxy_attempts``),
    then falls back to a direct (proxyless) attempt, and finally re-raises
    the first error with the original proxy settings restored.

    Args:
        session: The requests.Session to patch
        instance: The provider instance for context
    """
    if hasattr(session, '_auto_retry_patched'):
        return  # Already patched

    original_request = session.request

    def request_with_auto_retry(method, url, **kwargs):
        max_proxy_attempts = getattr(instance, '_max_proxy_attempts', 2)
        original_proxies = session.proxies.copy()
        first_error = None

        # First attempt with current proxy configuration
        try:
            return original_request(method, url, **kwargs)
        except Exception as e:
            first_error = e

        # If we have proxies configured, try different ones
        if original_proxies:
            proxy_attempts = 0

            while proxy_attempts < max_proxy_attempts:
                try:
                    # Get a new proxy
                    new_proxy_url = get_auto_proxy()
                    if new_proxy_url:
                        new_proxies = get_proxy_dict(new_proxy_url)
                        session.proxies.clear()
                        session.proxies.update(new_proxies)

                        # Try the request with new proxy
                        return original_request(method, url, **kwargs)
                    else:
                        break  # No more proxies available

                except Exception:
                    proxy_attempts += 1
                    continue

            # All proxy attempts failed, try without proxy
            try:
                session.proxies.clear()
                return original_request(method, url, **kwargs)
            except Exception:
                # Restore original proxy settings and re-raise the first error
                session.proxies.clear()
                session.proxies.update(original_proxies)
                raise first_error
        else:
            # No proxies were configured, just re-raise the original error
            raise first_error

    # NOTE(review): on a SUCCESSFUL retry the mutated session.proxies are not
    # restored — later requests keep the new (or cleared) proxy settings.
    session.request = request_with_auto_retry
    session._auto_retry_patched = True
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
def _add_auto_retry_to_httpx_client(client, instance: Any) -> None:
    """
    Add auto-retry functionality to an httpx.Client object.

    Same retry scheme as the requests variant, but proxy state is tracked on
    the private ``client._proxies`` attribute.

    NOTE(review): httpx fixes proxy routing at client construction time, so
    mutating ``_proxies`` afterwards presumably has no effect on the actual
    transport — verify; the real benefit here is the plain retry behavior.

    Args:
        client: The httpx.Client to patch
        instance: The provider instance for context
    """
    if not httpx or hasattr(client, '_auto_retry_patched'):
        return  # Not available or already patched

    try:
        original_request = client.request

        def request_with_auto_retry(method, url, **kwargs):
            max_proxy_attempts = getattr(instance, '_max_proxy_attempts', 2)
            original_proxies = getattr(client, '_proxies', {}).copy()
            first_error = None

            # First attempt with current proxy configuration
            try:
                return original_request(method, url, **kwargs)
            except Exception as e:
                first_error = e

            # If we have proxies configured, try different ones
            if original_proxies:
                proxy_attempts = 0

                while proxy_attempts < max_proxy_attempts:
                    try:
                        # Get a new proxy
                        new_proxy_url = get_auto_proxy()
                        if new_proxy_url:
                            new_proxies = get_proxy_dict(new_proxy_url)
                            client._proxies = new_proxies

                            # Try the request with new proxy
                            return original_request(method, url, **kwargs)
                        else:
                            break  # No more proxies available

                    except Exception:
                        proxy_attempts += 1
                        continue

                # All proxy attempts failed, try without proxy
                try:
                    client._proxies = {}
                    return original_request(method, url, **kwargs)
                except Exception:
                    # Restore original proxy settings and re-raise the first error
                    client._proxies = original_proxies
                    raise first_error
            else:
                # No proxies were configured, just re-raise the original error
                raise first_error

        client.request = request_with_auto_retry
        client._auto_retry_patched = True
    except Exception:
        # Patching is best-effort; never fail provider construction.
        pass
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
def _add_auto_retry_to_curl_session(session, instance: Any) -> None:
    """
    Add auto-retry functionality to a curl_cffi.Session object.

    Identical retry scheme to the requests variant: rotate through fresh
    auto proxies, fall back to a proxyless attempt, then restore the
    original proxies and re-raise the first error.

    Args:
        session: The curl_cffi.Session to patch
        instance: The provider instance for context
    """
    if not CurlSession or hasattr(session, '_auto_retry_patched'):
        return  # Not available or already patched

    try:
        original_request = session.request

        def request_with_auto_retry(method, url, **kwargs):
            max_proxy_attempts = getattr(instance, '_max_proxy_attempts', 2)
            original_proxies = session.proxies.copy()
            first_error = None

            # First attempt with current proxy configuration
            try:
                return original_request(method, url, **kwargs)
            except Exception as e:
                first_error = e

            # If we have proxies configured, try different ones
            if original_proxies:
                proxy_attempts = 0

                while proxy_attempts < max_proxy_attempts:
                    try:
                        # Get a new proxy
                        new_proxy_url = get_auto_proxy()
                        if new_proxy_url:
                            new_proxies = get_proxy_dict(new_proxy_url)
                            session.proxies.clear()
                            session.proxies.update(new_proxies)

                            # Try the request with new proxy
                            return original_request(method, url, **kwargs)
                        else:
                            break  # No more proxies available

                    except Exception:
                        proxy_attempts += 1
                        continue

                # All proxy attempts failed, try without proxy
                try:
                    session.proxies.clear()
                    return original_request(method, url, **kwargs)
                except Exception:
                    # Restore original proxy settings and re-raise the first error
                    session.proxies.clear()
                    session.proxies.update(original_proxies)
                    raise first_error
            else:
                # No proxies were configured, just re-raise the original error
                raise first_error

        session.request = request_with_auto_retry
        session._auto_retry_patched = True
    except Exception:
        # Patching is best-effort; never fail provider construction.
        pass
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
def _add_auto_retry_to_curl_async_session(session, instance: Any) -> None:
    """
    Add auto-retry functionality to a curl_cffi.AsyncSession object.

    Async twin of ``_add_auto_retry_to_curl_session``: the wrapper awaits
    each attempt but follows the same proxy-rotation / proxyless-fallback /
    restore-and-re-raise scheme.

    Args:
        session: The curl_cffi.AsyncSession to patch
        instance: The provider instance for context
    """
    if not CurlAsyncSession or hasattr(session, '_auto_retry_patched'):
        return  # Not available or already patched

    try:
        original_request = session.request

        async def request_with_auto_retry(method, url, **kwargs):
            max_proxy_attempts = getattr(instance, '_max_proxy_attempts', 2)
            original_proxies = session.proxies.copy()
            first_error = None

            # First attempt with current proxy configuration
            try:
                return await original_request(method, url, **kwargs)
            except Exception as e:
                first_error = e

            # If we have proxies configured, try different ones
            if original_proxies:
                proxy_attempts = 0

                while proxy_attempts < max_proxy_attempts:
                    try:
                        # Get a new proxy
                        new_proxy_url = get_auto_proxy()
                        if new_proxy_url:
                            new_proxies = get_proxy_dict(new_proxy_url)
                            session.proxies.clear()
                            session.proxies.update(new_proxies)

                            # Try the request with new proxy
                            return await original_request(method, url, **kwargs)
                        else:
                            break  # No more proxies available

                    except Exception:
                        proxy_attempts += 1
                        continue

                # All proxy attempts failed, try without proxy
                try:
                    session.proxies.clear()
                    return await original_request(method, url, **kwargs)
                except Exception:
                    # Restore original proxy settings and re-raise the first error
                    session.proxies.clear()
                    session.proxies.update(original_proxies)
                    raise first_error
            else:
                # No proxies were configured, just re-raise the original error
                raise first_error

        session.request = request_with_auto_retry
        session._auto_retry_patched = True
    except Exception:
        # Patching is best-effort; never fail provider construction.
        pass
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
def _add_proxy_helpers(instance: Any, proxies: Dict[str, str]) -> None:
    """
    Add helper methods to the instance for creating proxied sessions.

    Each helper is a closure over the *proxies* mapping captured at patch
    time (later changes to instance.proxies are not reflected).

    Args:
        instance: The class instance to add methods to
        proxies: Proxy dictionary to use in helper methods
    """

    def get_proxied_session():
        """Get a requests.Session with proxies configured"""
        session = requests.Session()
        session.proxies.update(proxies)
        return session

    def get_proxied_httpx_client(**kwargs):
        """Get an httpx.Client with proxies configured"""
        if httpx:
            return httpx.Client(proxies=proxies, **kwargs)
        else:
            raise ImportError("httpx is not installed")

    def get_proxied_curl_session(impersonate="chrome120", **kwargs):
        """Get a curl_cffi Session with proxies configured"""
        if CurlSession:
            return CurlSession(proxies=proxies, impersonate=impersonate, **kwargs)
        else:
            raise ImportError("curl_cffi is not installed")

    def get_proxied_curl_async_session(impersonate="chrome120", **kwargs):
        """Get a curl_cffi AsyncSession with proxies configured"""
        if CurlAsyncSession:
            return CurlAsyncSession(proxies=proxies, impersonate=impersonate, **kwargs)
        else:
            raise ImportError("curl_cffi is not installed")

    def get_auto_retry_session(max_proxy_attempts: int = 2):
        """Get a requests.Session with automatic proxy retry and fallback functionality"""
        # NOTE(review): create_auto_retry_session is not defined in this
        # chunk — presumably defined later in the module; confirm.
        return create_auto_retry_session(max_proxy_attempts)

    def make_auto_retry_request(method: str, url: str, max_proxy_attempts: int = 2, **kwargs):
        """Make a request with automatic proxy retry and fallback"""
        # NOTE(review): make_request_with_auto_retry is not defined in this
        # chunk — presumably defined later in the module; confirm.
        return make_request_with_auto_retry(
            method=method,
            url=url,
            max_proxy_attempts=max_proxy_attempts,
            **kwargs
        )

    def patch_session_with_auto_retry(session_obj):
        """Patch any session object with auto-retry functionality"""
        if isinstance(session_obj, requests.Session):
            _add_auto_retry_to_session(session_obj, instance)
        elif httpx and isinstance(session_obj, httpx.Client):
            _add_auto_retry_to_httpx_client(session_obj, instance)
        elif CurlSession and isinstance(session_obj, CurlSession):
            _add_auto_retry_to_curl_session(session_obj, instance)
        elif CurlAsyncSession and isinstance(session_obj, CurlAsyncSession):
            _add_auto_retry_to_curl_async_session(session_obj, instance)
        return session_obj

    # Add methods to instance
    instance.get_proxied_session = get_proxied_session
    instance.get_proxied_httpx_client = get_proxied_httpx_client
    instance.get_proxied_curl_session = get_proxied_curl_session
    instance.get_proxied_curl_async_session = get_proxied_curl_async_session
    instance.get_auto_retry_session = get_auto_retry_session
    instance.make_auto_retry_request = make_auto_retry_request
    instance.patch_session_with_auto_retry = patch_session_with_auto_retry
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
def get_working_proxy(max_attempts: int = 5, timeout: int = 10) -> Optional[str]:
    """
    Find a proxy that passes a connectivity test.

    Args:
        max_attempts: Maximum number of proxies to test.
        timeout: Timeout for each individual proxy test.

    Returns:
        Optional[str]: A working proxy URL, or None when none respond.
    """
    candidates = get_cached_proxies()
    if not candidates:
        return None

    # Random sample (without replacement) so we don't always hammer the
    # same proxies at the head of the list.
    sampled = random.sample(candidates, min(max_attempts, len(candidates)))
    return next((p for p in sampled if test_proxy(p, timeout)), None)
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
def refresh_proxy_cache() -> int:
    """
    Invalidate the proxy cache and reload it immediately.

    Returns:
        int: Number of proxies available after the reload.
    """
    # Zeroing the timestamp makes the cache look expired, so the next
    # get_cached_proxies() call refetches from the source.
    _proxy_cache['last_updated'] = 0
    return len(get_cached_proxies())
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
def get_proxy_stats() -> Dict[str, Any]:
    """
    Report diagnostic information about the proxy cache.

    Returns:
        Dict[str, Any]: Proxy count, last update timestamp, cache duration,
        current cache age in seconds, and the configured source URL.
    """
    last_updated = _proxy_cache['last_updated']
    stats = {
        'proxy_count': len(_proxy_cache['proxies']),
        'last_updated': last_updated,
        'cache_duration': _proxy_cache['cache_duration'],
        'cache_age_seconds': time.time() - last_updated,
        'source_url': PROXY_SOURCE_URL,
    }
    return stats
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
def set_proxy_cache_duration(duration: int) -> None:
    """
    Configure how long fetched proxies stay cached.

    Args:
        duration: Cache lifetime in seconds.
    """
    # Mutating the shared cache dict in place; no `global` needed since the
    # name itself is never rebound.
    _proxy_cache['cache_duration'] = duration
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
def auto_retry_with_fallback(max_proxy_attempts: int = 2, timeout: int = 10):
    """
    Decorator that automatically retries requests with different proxies and falls back to no proxy.

    This decorator will:
    1. Try the request with the current proxy
    2. If it fails, try with up to max_proxy_attempts different proxies
    3. If all proxies fail, retry without any proxy

    Args:
        max_proxy_attempts: Maximum number of proxy attempts before falling back to no proxy
        timeout: Timeout for each request attempt

    Returns:
        Decorator function

    NOTE(review): ``timeout`` is accepted but never referenced inside the
    wrapper — confirm whether it should be forwarded to the wrapped call.
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Track the original instance and its proxy settings.
            # Assumes the decorated callable is an instance method, i.e. its
            # first positional argument is the provider instance — TODO confirm
            # against actual usage sites.
            instance = args[0] if args else None
            original_proxies = getattr(instance, 'proxies', {}) if instance else {}

            # First attempt with current proxy configuration
            try:
                return func(*args, **kwargs)
            except Exception as e:
                first_error = e

            # If we have proxies configured, try different ones
            if original_proxies and instance:
                proxy_attempts = 0

                while proxy_attempts < max_proxy_attempts:
                    try:
                        # Get a new proxy
                        new_proxy_url = get_auto_proxy()
                        if new_proxy_url:
                            new_proxies = get_proxy_dict(new_proxy_url)
                            # Mutate the instance so the wrapped call picks up
                            # the replacement proxy configuration.
                            instance.proxies = new_proxies

                            # Update any existing sessions with new proxy
                            _patch_instance_sessions(instance, new_proxies)

                            # Try the request with new proxy
                            return func(*args, **kwargs)
                        else:
                            break  # No more proxies available

                    except Exception:
                        # Both proxy-fetch failures and request failures count
                        # as one consumed attempt.
                        proxy_attempts += 1
                        continue

                # All proxy attempts failed, try without proxy
                try:
                    instance.proxies = {}
                    _patch_instance_sessions(instance, {})
                    return func(*args, **kwargs)
                except Exception:
                    # Restore original proxy settings and re-raise the first error
                    # (the proxy-less failure is deliberately discarded in favor
                    # of the original, more representative error).
                    instance.proxies = original_proxies
                    _patch_instance_sessions(instance, original_proxies)
                    raise first_error
            else:
                # No proxies were configured, just re-raise the original error
                raise first_error

        return wrapper
    return decorator
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
def make_request_with_auto_retry(
    method: str,
    url: str,
    session: Optional[Union[requests.Session, Any]] = None,
    max_proxy_attempts: int = 2,
    timeout: int = 10,
    **kwargs
) -> requests.Response:
    """
    Make an HTTP request with automatic proxy retry and fallback.

    This function will:
    1. Try the request with the current session configuration
    2. If it fails and proxies are configured, try with different proxies
    3. If all proxies fail, retry without any proxy

    Args:
        method: HTTP method (GET, POST, etc.)
        url: Request URL
        session: Optional session object to use
        max_proxy_attempts: Maximum number of proxy attempts before falling back
        timeout: Request timeout
        **kwargs: Additional arguments to pass to the request

    Returns:
        requests.Response: The successful response

    Raises:
        Exception: If all attempts fail
    """
    if session is None:
        session = requests.Session()

    # Snapshot the proxy mapping so it can be restored if every retry fails.
    original_proxies = getattr(session, 'proxies', {}).copy()
    first_error = None

    # First attempt with current configuration
    try:
        return session.request(method, url, timeout=timeout, **kwargs)
    except Exception as e:
        first_error = e

    # If we have proxies configured, try different ones
    if original_proxies:
        proxy_attempts = 0

        while proxy_attempts < max_proxy_attempts:
            try:
                # Get a new proxy
                new_proxy_url = get_auto_proxy()
                if new_proxy_url:
                    new_proxies = get_proxy_dict(new_proxy_url)
                    # Replace (not merge) the session's proxy mapping.
                    session.proxies.clear()
                    session.proxies.update(new_proxies)

                    # Try the request with new proxy
                    return session.request(method, url, timeout=timeout, **kwargs)
                else:
                    break  # No more proxies available

            except Exception:
                proxy_attempts += 1
                continue

        # All proxy attempts failed, try without proxy
        try:
            session.proxies.clear()
            return session.request(method, url, timeout=timeout, **kwargs)
        except Exception:
            # Restore original proxy settings and re-raise the first error
            # (the proxy-less failure is intentionally discarded).
            session.proxies.clear()
            session.proxies.update(original_proxies)
            raise first_error
    else:
        # No proxies were configured, just re-raise the original error
        raise first_error
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
def create_auto_retry_session(max_proxy_attempts: int = 2) -> requests.Session:
    """
    Create a requests.Session with automatic proxy retry functionality.

    Args:
        max_proxy_attempts: Maximum number of proxy attempts before falling back

    Returns:
        requests.Session: Session with auto-retry functionality
    """
    session = requests.Session()

    # Get initial proxy configuration
    proxy_url = get_auto_proxy()
    if proxy_url:
        proxies = get_proxy_dict(proxy_url)
        session.proxies.update(proxies)

    # Store the max_proxy_attempts for use in retry logic
    session._max_proxy_attempts = max_proxy_attempts

    # Keep a reference to the real bound method so the retry helper can use it.
    original_request = session.request

    def request_with_retry(method, url, **kwargs):
        # BUGFIX: make_request_with_auto_retry itself calls session.request().
        # If this wrapper stays installed during that call, it re-enters
        # itself and recurses forever. Temporarily restore the original
        # method for the duration of the helper call, then re-install the
        # wrapper even if the request raises.
        session.request = original_request
        try:
            return make_request_with_auto_retry(
                method=method,
                url=url,
                session=session,
                max_proxy_attempts=max_proxy_attempts,
                **kwargs
            )
        finally:
            session.request = request_with_retry

    session.request = request_with_retry
    return session
|
|
896
|
-
|
|
897
|
-
|
|
898
|
-
def enable_auto_retry_for_provider(provider_instance, max_proxy_attempts: int = 2):
    """
    Retrofit auto-retry behaviour onto an existing provider instance.

    Useful for providers created without auto-retry, or for changing the
    max_proxy_attempts setting after construction.

    Args:
        provider_instance: The provider instance to enable auto-retry for.
        max_proxy_attempts: Maximum number of proxy attempts before falling back.
    """
    provider_instance._max_proxy_attempts = max_proxy_attempts

    # Patch every existing session with the provider's current proxy mapping.
    active_proxies = getattr(provider_instance, 'proxies', {})
    _patch_instance_sessions(provider_instance, active_proxies)

    # Attach the convenience helper methods unless they are already present.
    if not hasattr(provider_instance, 'get_auto_retry_session'):
        _add_proxy_helpers(provider_instance, active_proxies)
|
|
921
|
-
|
|
922
|
-
|
|
923
|
-
def disable_auto_retry_for_provider(provider_instance):
    """
    Best-effort removal of the auto-retry marker from a provider's sessions.

    Only the ``_auto_retry_patched`` flag is removed; the patched request
    methods themselves are not restored, because references to the original
    methods were never stored.

    Args:
        provider_instance: The provider instance to disable auto-retry for.
    """
    # Collect the session classes whose optional backends are importable.
    patchable_types = [requests.Session]
    if httpx:
        patchable_types.append(httpx.Client)
    if CurlSession:
        patchable_types.append(CurlSession)
    if CurlAsyncSession:
        patchable_types.append(CurlAsyncSession)
    patchable_types = tuple(patchable_types)

    for attr_name in dir(provider_instance):
        # Private attributes are left untouched.
        if attr_name.startswith('_'):
            continue
        try:
            candidate = getattr(provider_instance, attr_name)
            if isinstance(candidate, patchable_types) and hasattr(candidate, '_auto_retry_patched'):
                delattr(candidate, '_auto_retry_patched')
        except Exception:
            # Attribute access on arbitrary objects can raise; skip and move on.
            continue
|
|
958
|
-
|
|
959
|
-
|
|
960
|
-
def proxy():
    """
    Return a working proxy dict or None. One-liner for easy use.
    Example:
        proxies = autoproxy.proxy()
        requests.get(url, proxies=proxies)
    """
    working_url = get_working_proxy()
    if not working_url:
        return None
    return get_proxy_dict(working_url)
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
def patch(obj, proxy_url=None):
    """
    Patch a function, class, or object to use proxies automatically.

    - requests.Session: update ``.proxies`` in place.
    - httpx.Client: stash the proxy dict on ``._proxies``.
    - plain function: wrap it so a ``proxies`` kwarg is injected when absent.
    - class: patch every ``get*``/``post*`` method recursively.

    Args:
        obj: The function, class, or session object to patch.
        proxy_url: Optional explicit proxy URL; auto-selected when None.

    Returns:
        The patched object, or a wrapper function for plain functions.
    """
    if isinstance(obj, requests.Session):
        obj.proxies.update(get_proxy_dict(proxy_url))
        return obj
    if httpx and isinstance(obj, httpx.Client):
        obj._proxies = get_proxy_dict(proxy_url)
        return obj
    if isinstance(obj, types.FunctionType):
        # functools.wraps preserves the wrapped function's name/docstring,
        # which the original wrapper lost.
        @functools.wraps(obj)
        def wrapper(*args, **kwargs):
            if 'proxies' not in kwargs:
                kwargs['proxies'] = get_proxy_dict(proxy_url)
            return obj(*args, **kwargs)
        return wrapper
    if isinstance(obj, type):  # class
        for attr in dir(obj):
            # str.startswith accepts a tuple — one test instead of an `or` chain.
            if attr.startswith(('get', 'post')):
                method = getattr(obj, attr)
                if callable(method):
                    setattr(obj, attr, patch(method, proxy_url))
        return obj
    # fallback: return as is
    return obj
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
@contextmanager
def use_proxy(proxy_url=None):
    """
    Context manager to temporarily patch requests and httpx to use a proxy globally.
    Example:
        with autoproxy.use_proxy():
            requests.get(url)  # uses proxy automatically
    """
    saved_requests_request = requests.Session.request

    def _requests_with_proxy(self, method, url, **kwargs):
        # Only inject when the caller has not set proxies explicitly; the
        # lookup is deferred until a proxy is actually needed.
        if 'proxies' not in kwargs:
            kwargs['proxies'] = get_proxy_dict(proxy_url)
        return saved_requests_request(self, method, url, **kwargs)

    requests.Session.request = _requests_with_proxy

    # Optionally patch httpx if available
    saved_httpx_request = None
    if httpx:
        saved_httpx_request = httpx.Client.request

        def _httpx_with_proxy(self, method, url, **kwargs):
            if 'proxies' not in kwargs:
                kwargs['proxies'] = get_proxy_dict(proxy_url)
            return saved_httpx_request(self, method, url, **kwargs)

        httpx.Client.request = _httpx_with_proxy

    try:
        yield
    finally:
        # Always restore the originals, even if the body raised.
        requests.Session.request = saved_requests_request
        if httpx and saved_httpx_request:
            httpx.Client.request = saved_httpx_request
|
|
1030
|
-
|
|
1031
|
-
|
|
1032
|
-
def proxyify(func):
    """
    Decorator to auto-inject proxies into any function.

    A proxy is looked up lazily on each call, and only when the caller has
    not supplied a ``proxies`` keyword argument themselves.

    Example:
        @autoproxy.proxyify
        def my_request(...): ...
    """
    # functools.wraps keeps the wrapped function's name and docstring,
    # which the original wrapper discarded.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if 'proxies' not in kwargs:
            kwargs['proxies'] = proxy()
        return func(*args, **kwargs)
    return wrapper
|
|
1044
|
-
|
|
1045
|
-
|
|
1046
|
-
def list_proxies():
    """
    List all available proxies (Webshare, remote, NordVPN).
    """
    available = get_cached_proxies()
    return available
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
def test_all_proxies(timeout=5):
    """
    Test all proxies and return a dict of proxy_url: True/False.

    Args:
        timeout: Per-proxy test timeout in seconds.

    Returns:
        dict: Mapping of proxy URL to whether it passed the test.
    """
    # Dict comprehension replaces the manual accumulate-in-a-loop pattern.
    return {p: test_proxy(p, timeout=timeout) for p in get_cached_proxies()}
|
|
1061
|
-
|
|
1062
|
-
|
|
1063
|
-
def current_proxy():
    """
    Return a random proxy that would be used now (Webshare preferred).
    """
    chosen = get_auto_proxy()
    return chosen
|