swiftshadow 2.0.1__py3-none-any.whl → 2.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- swiftshadow/__init__.py +3 -2
- swiftshadow/cache.py +1 -1
- swiftshadow/classes.py +114 -20
- swiftshadow/helpers.py +11 -0
- swiftshadow/models.py +2 -0
- swiftshadow/providers.py +69 -41
- swiftshadow/validator.py +29 -8
- {swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/METADATA +3 -1
- swiftshadow-2.2.0.dist-info/RECORD +14 -0
- swiftshadow-2.0.1.dist-info/RECORD +0 -14
- {swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/WHEEL +0 -0
- {swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/entry_points.txt +0 -0
- {swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/licenses/LICENSE +0 -0
swiftshadow/__init__.py
CHANGED
swiftshadow/cache.py
CHANGED
swiftshadow/classes.py
CHANGED
@@ -1,17 +1,20 @@
+from asyncio import run
 from datetime import datetime
+from logging import DEBUG, INFO, FileHandler, Formatter, StreamHandler, getLogger
+from pathlib import Path
+from pickle import dump, dumps, load, loads
 from random import choice
+from sys import stdout
 from typing import Literal
-
+
+import aiofiles
 from appdirs import user_cache_dir
-from logging import FileHandler, getLogger, Formatter, StreamHandler, INFO, DEBUG
-from sys import stdout
-from pickle import load, dump
-from swiftshadow.cache import checkExpiry, getExpiry
-from swiftshadow.models import CacheData, Proxy as Proxy
 
+from swiftshadow.cache import checkExpiry, getExpiry
 from swiftshadow.exceptions import UnsupportedProxyProtocol
+from swiftshadow.models import CacheData
+from swiftshadow.models import Proxy as Proxy
 from swiftshadow.providers import Providers
-from asyncio import run
 
 logger = getLogger("swiftshadow")
 logger.setLevel(INFO)
@@ -33,6 +36,7 @@ class ProxyInterface:
         protocol (Literal['https', 'http']): Proxy protocol to use. Defaults to 'http'.
         maxproxies (int): Maximum number of proxies to collect from providers. Defaults to 10.
         autorotate (bool): Whether to automatically rotate proxy on each get() call. Defaults to False.
+        autoUpdate (bool): Whether to automatically update proxies upon class initalisation. Defaults to True.
         cachePeriod (int): Number of minutes before cache is considered expired. Defaults to 10.
         cacheFolderPath (Path): Filesystem path for cache storage. Uses system cache dir by default.
         proxies (list[Proxy]): List of available proxy objects.
@@ -60,6 +64,7 @@ class ProxyInterface:
         protocol: Literal["https", "http"] = "http",
         maxProxies: int = 10,
         autoRotate: bool = False,
+        autoUpdate: bool = True,
         cachePeriod: int = 10,
         cacheFolderPath: Path | None = None,
         debug: bool = False,
@@ -72,6 +77,7 @@ class ProxyInterface:
             protocol: Proxy protocol to retrieve. Choose between 'http' or 'https'.
             maxProxies: Maximum proxies to collect from all providers combined.
             autoRotate: Enable automatic proxy rotation on every get() call.
+            autoUpdate (bool): Whether to automatically update proxies upon class initalisation.
             cachePeriod: Cache validity duration in minutes.
             cacheFolderPath: Custom path for cache storage. Uses system cache dir if None.
             debug: Enable debug logging level when True.
@@ -82,13 +88,16 @@ class ProxyInterface:
 
         if protocol not in ["https", "http"]:
             raise UnsupportedProxyProtocol(
-                f"Protocol {protocol} is not supported by swiftshadow, please choose between HTTP or HTTPS"
+                f"Protocol {
+                    protocol
+                } is not supported by swiftshadow, please choose between HTTP or HTTPS"
             )
         self.protocol: Literal["https", "http"] = protocol
 
         self.maxproxies: int = maxProxies
         self.autorotate: bool = autoRotate
         self.cachePeriod: int = cachePeriod
+        self.configString: str = f"{maxProxies}{''.join(protocol)}{''.join(countries)}"
 
         if debug:
             logger.setLevel(DEBUG)
@@ -108,8 +117,82 @@ class ProxyInterface:
         self.proxies: list[Proxy] = []
         self.current: Proxy | None = None
         self.cacheExpiry: datetime | None = None
+        self.autoUpdate = autoUpdate
+
+        if self.autoUpdate:
+            self.update()
+
+    async def async_update(self):
+        """
+        Updates proxy list from providers or cache in async.
+
+        First attempts to load valid proxies from cache. If cache is expired/missing,
+        fetches fresh proxies from registered providers that match country and protocol filters.
+        Updates cache file with new proxies if fetched from providers.
+
+        Raises:
+            ValueError: If no proxies found after provider scraping.
+        """
+        try:
+            async with aiofiles.open(
+                self.cacheFolderPath.joinpath("swiftshadow.pickle"), "rb"
+            ) as cacheFile:
+                pickled_bytes = await cacheFile.read()
+                cache: CacheData = loads(pickled_bytes)
+
+                if self.configString != cache.configString:
+                    logger.info("Cache Invalid due to configuration changes.")
+                elif not checkExpiry(cache.expiryIn):
+                    self.proxies = cache.proxies
+                    logger.info("Loaded proxies from cache.")
+                    logger.debug(
+                        f"Cache with {len(cache.proxies)} proxies, expire in {
+                            cache.expiryIn
+                        }"
+                    )
+                    self.current = self.proxies[0]
+                    self.cacheExpiry = cache.expiryIn
+                    logger.debug(f"Cache set to expire at {cache.expiryIn}")
+                    return
+                else:
+                    logger.info("Cache Expired")
+        except FileNotFoundError:
+            logger.info("No cache found, will be created after update.")
 
-        self.update()
+        self.proxies = []
+
+        for provider in Providers:
+            if self.protocol not in provider.protocols:
+                continue
+            if (len(self.countries) != 0) and (not provider.countryFilter):
+                continue
+            providerProxies: list[Proxy] = await provider.providerFunction(
+                self.countries, self.protocol
+            )
+            logger.debug(
+                f"{len(providerProxies)} proxies from {
+                    provider.providerFunction.__name__
+                }"
+            )
+            self.proxies.extend(providerProxies)
+
+            if len(self.proxies) >= self.maxproxies:
+                break
+
+        if len(self.proxies) == 0:
+            if self.protocol == "https":
+                raise ValueError("No proxies were found for the current filter settings. Tip: https proxies can be rare; recommend setting protocol to http")
+            raise ValueError("No proxies were found for the current filter settings.")
+
+        async with aiofiles.open(
+            self.cacheFolderPath.joinpath("swiftshadow.pickle"), "wb+"
+        ) as cacheFile:
+            cacheExpiry = getExpiry(self.cachePeriod)
+            self.cacheExpiry = cacheExpiry
+            cache = CacheData(cacheExpiry, self.configString, self.proxies)
+            pickled_bytes = dumps(cache)
+            _ = await cacheFile.write(pickled_bytes)
+        self.current = self.proxies[0]
 
     def update(self):
         """
@@ -128,13 +211,18 @@ class ProxyInterface:
            ) as cacheFile:
                cache: CacheData = load(cacheFile)
 
-               if not checkExpiry(cache.expiryIn):
+               if self.configString != cache.configString:
+                   logger.info("Cache Invalid due to configuration changes.")
+               elif not checkExpiry(cache.expiryIn):
                    self.proxies = cache.proxies
                    logger.info("Loaded proxies from cache.")
                    logger.debug(
-                       f"Cache with {len(cache.proxies)} proxies, expire in {cache.expiryIn}"
+                       f"Cache with {len(cache.proxies)} proxies, expire in {
+                           cache.expiryIn
+                       }"
                    )
                    self.current = self.proxies[0]
+                   logger.debug(f"Cache set to expire at {cache.expiryIn}")
                    self.cacheExpiry = cache.expiryIn
                    return
                else:
@@ -142,6 +230,8 @@ class ProxyInterface:
        except FileNotFoundError:
            logger.info("No cache found, will be created after update.")
 
+       self.proxies = []
+
        for provider in Providers:
            if self.protocol not in provider.protocols:
                continue
@@ -151,7 +241,9 @@ class ProxyInterface:
                provider.providerFunction(self.countries, self.protocol)
            )
            logger.debug(
-               f"{len(providerProxies)} proxies from {provider.providerFunction.__name__}"
+               f"{len(providerProxies)} proxies from {
+                   provider.providerFunction.__name__
+               }"
            )
            self.proxies.extend(providerProxies)
 
@@ -159,14 +251,14 @@ class ProxyInterface:
                break
 
        if len(self.proxies) == 0:
-           raise ValueError("No proxies
+           raise ValueError("No proxies were found for the current filter settings.")
 
        with open(
            self.cacheFolderPath.joinpath("swiftshadow.pickle"), "wb+"
        ) as cacheFile:
            cacheExpiry = getExpiry(self.cachePeriod)
            self.cacheExpiry = cacheExpiry
-           cache = CacheData(cacheExpiry, self.proxies)
+           cache = CacheData(cacheExpiry, self.configString, self.proxies)
            dump(cache, cacheFile)
        self.current = self.proxies[0]
 
@@ -184,11 +276,13 @@ class ProxyInterface:
        Raises:
            ValueError: If validate_cache=True but no cache exists.
        """
-       if
-       if
-       self.
-
-
+       if validate_cache:
+           if self.cacheExpiry:
+               if checkExpiry(self.cacheExpiry):
+                   logger.debug("Cache Expired on rotate call, updating.")
+                   self.update()
+           else:
+               raise ValueError("No cache available but validate_cache is true.")
        self.current = choice(self.proxies)
 
    def get(self) -> Proxy:
@@ -206,7 +300,7 @@ class ProxyInterface:
        """
 
        if self.autorotate:
-           self.rotate()
+           self.rotate(validate_cache=self.autoUpdate)
        if self.current:
            return self.current
        else:
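
Taken together, the classes.py changes add an autoUpdate flag and an async_update() coroutine alongside the existing synchronous update(), plus a configString that invalidates caches written under different settings. Below is a minimal usage sketch based only on the signatures visible in the hunks above; the import path and argument values are assumptions, not taken from the package documentation.

```python
# Sketch only: constructor arguments and method names come from the diff above;
# the import path swiftshadow.classes and the chosen values are assumptions.
from asyncio import run

from swiftshadow.classes import ProxyInterface


async def main() -> None:
    # autoUpdate=False skips the synchronous update() call in __init__,
    # so the proxy list can instead be filled by the new async_update().
    swift = ProxyInterface(
        protocol="http",
        maxProxies=5,
        autoRotate=True,
        autoUpdate=False,
    )
    await swift.async_update()
    # get() rotates first because autoRotate=True, then returns a Proxy.
    print(swift.get().as_string())


run(main())
```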
swiftshadow/helpers.py
CHANGED
@@ -2,7 +2,9 @@ from datetime import datetime
 from typing import Literal
 
 from requests import get
+
 from swiftshadow.models import Proxy
+from swiftshadow.validator import validate_proxies
 
 
 def checkProxy(proxy):
@@ -35,3 +37,12 @@ def plaintextToProxies(text: str, protocol: Literal["http", "https"]) -> list[Proxy]:
         proxy = Proxy(ip=ip, port=int(port), protocol=protocol)
         proxies.append(proxy)
     return proxies
+
+
+async def GenericPlainTextProxyProvider(
+    url: str, protocol: Literal["http", "https"] = "http"
+) -> list[Proxy]:
+    raw: str = get(url).text
+    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
+    results = await validate_proxies(proxies)
+    return results
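
The new GenericPlainTextProxyProvider helper wraps the fetch, parse, and validate sequence that the individual providers previously repeated. The sketch below shows how a plain-text source could be wired through it, mirroring the Thespeedx and MuRongPIG calls in providers.py; the provider name and URL are placeholders, not part of the package.

```python
# Illustrative only: ExamplePlainTextProvider and its URL are hypothetical;
# the helper call mirrors how Thespeedx/MuRongPIG use it in providers.py.
from typing import Literal

from swiftshadow.helpers import GenericPlainTextProxyProvider
from swiftshadow.models import Proxy


async def ExamplePlainTextProvider(
    countries: list[str] = [], protocol: Literal["http", "https"] = "http"
) -> list[Proxy]:
    # Fetch an "ip:port"-per-line list, parse it with plaintextToProxies(),
    # and return only the proxies that pass validate_proxies().
    return await GenericPlainTextProxyProvider(
        url="https://example.com/http-proxies.txt",  # placeholder URL
        protocol="http",
    )
```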
swiftshadow/models.py
CHANGED
@@ -46,10 +46,12 @@ class CacheData:
 
     Attributes:
         expiryIn: Expiry date object.
+        configString: Configuration String for the ProxyInterface this cache was created.
         proxies: Proxies to head.
     """
 
     expiryIn: datetime
+    configString: str
     proxies: list[Proxy]
 
 
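
CacheData now carries the configString of the ProxyInterface that produced it, which is what lets update() and async_update() discard a cache written under different settings. A small sketch of building the extended record; the positional argument order follows the CacheData(...) calls in classes.py, and the concrete values are made up.

```python
# Field order (expiryIn, configString, proxies) follows the calls in classes.py;
# the values below are illustrative only.
from swiftshadow.cache import getExpiry
from swiftshadow.models import CacheData, Proxy

cache = CacheData(
    getExpiry(10),                          # expiry 10 minutes out (cachePeriod is in minutes)
    "5httpUS",                              # f"{maxProxies}{protocol}{''.join(countries)}"
    [Proxy("203.0.113.10", "http", 8080)],  # Proxy(ip, protocol, port), as in providers.py
)
```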
swiftshadow/providers.py
CHANGED
@@ -1,11 +1,14 @@
 from typing import Literal
+import aiohttp
 
 from requests import get
 
-from swiftshadow.helpers import
-from swiftshadow.models import
+from swiftshadow.helpers import GenericPlainTextProxyProvider
+from swiftshadow.models import Provider, Proxy
+from asyncio import create_task, gather
 from swiftshadow.types import MonosansProxyDict
 from swiftshadow.validator import validate_proxies
+from lxml import etree
 
 
 async def Monosans(
@@ -38,11 +41,10 @@ async def Monosans(
 async def Thespeedx(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-        "https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt"
-
-
-    results = await validate_proxies(proxies)
+    results = await GenericPlainTextProxyProvider(
+        url="https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt",
+        protocol="http",
+    )
     return results
 
 
@@ -83,63 +85,91 @@ async def GoodProxy(
 async def OpenProxyList(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-
+    results = await GenericPlainTextProxyProvider(
+        "https://api.openproxylist.xyz/http.txt", "http"
+    )
     return results
 
 
 async def MuRongPIG(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-        "https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/refs/heads/main/http_checked.txt"
-
-
-    results = await validate_proxies(proxies)
+    results = await GenericPlainTextProxyProvider(
+        "https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/refs/heads/main/http_checked.txt",
+        "http",
+    )
     return results
 
 
-async def
+async def Mmpx12(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-    ).text
-    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
-    results = await validate_proxies(proxies)
+    url = f"https://github.com/mmpx12/proxy-list/raw/refs/heads/master/{protocol}.txt"
+    results = await GenericPlainTextProxyProvider(url, protocol)
     return results
 
 
-async def
+async def Anonym0usWork1221(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-    ).text
-    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
-    results = await validate_proxies(proxies)
+    url = f"https://github.com/Anonym0usWork1221/Free-Proxies/raw/refs/heads/main/proxy_files/{protocol}_proxies.txt"
+    results = await GenericPlainTextProxyProvider(url, protocol)
     return results
 
 
-async def
+async def ProxySpace(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-    )
-    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
-    results = await validate_proxies(proxies)
+    results = await GenericPlainTextProxyProvider(
+        "https://proxyspace.pro/http.txt", "http"
+    )
     return results
 
 
-async def
+async def ProxyDB(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-    proxies: list[Proxy] =
-
-
+    base_url = f"https://www.proxydb.net/?protocol={protocol}&sort_column_id=uptime&sort_order_desc=true"
+    proxies: list[Proxy] = []
+    raw = get(base_url).text
+    total = int(
+        raw.split("Showing")[-1].split("total proxies")[0].split("of")[-1].strip()
+    )
+
+    async def parsePage(session: aiohttp.ClientSession, url: str):
+        proxies = []
+        async with session.get(url) as resp:
+            raw = await resp.text()
+            exml = etree.HTML(raw)
+            table = exml.find("body/div/div/table/tbody")
+            rows = iter(table)
+            for row in rows:
+                if len(proxies) > 500:
+                    break
+                data = []
+                for td in row[:4]:
+                    text = td.text.strip()
+                    if text == "":
+                        text = list(td)[-1].text
+                    data.append(text)
+                if countries != [] and data[-1] not in countries:
+                    continue
+                proxy = Proxy(data[0], protocol, data[1])
+                proxies.append(proxy)
+        return proxies
+
+    tasks = []
+    async with aiohttp.ClientSession() as session:
+        for offset in range(0, total, 30):
+            url = base_url + f"&offset={offset}"
+            task = create_task(coro=parsePage(session, url))
+            tasks.append(task)
+        results = await gather(*tasks, return_exceptions=True)
+        for result in results:
+            if isinstance(result, BaseException):
+                continue
+            proxies.extend(result)
+    return proxies
 
 
 Providers: list[Provider] = [
@@ -154,9 +184,7 @@ Providers: list[Provider] = [
     ),
     Provider(providerFunction=Mmpx12, countryFilter=False, protocols=["http", "https"]),
     Provider(providerFunction=GoodProxy, countryFilter=False, protocols=["http"]),
-    Provider(
-        providerFunction=KangProxy, countryFilter=False, protocols=["http", "https"]
-    ),
     Provider(providerFunction=ProxySpace, countryFilter=False, protocols=["http"]),
     Provider(providerFunction=OpenProxyList, countryFilter=False, protocols=["http"]),
+    Provider(providerFunction=ProxyDB, countryFilter=True, protocols=["http", "https"]),
 ]
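
The new ProxyDB provider is the first one that paginates: it reads the total count from the first page, then fans out one task per 30-row offset over a shared aiohttp session and merges whatever pages succeed. Below is a standalone sketch of that fan-out pattern; the URL, page size, and function names are placeholders, and only the aiohttp/asyncio calls mirror the code above.

```python
# Generic fan-out sketch of the pattern ProxyDB() uses: one ClientSession,
# one task per offset, gather() with return_exceptions=True, failed pages dropped.
from asyncio import create_task, gather, run

import aiohttp


async def fetch_page(session: aiohttp.ClientSession, url: str) -> str:
    async with session.get(url) as resp:
        return await resp.text()


async def fetch_all(base_url: str, total: int, page_size: int = 30) -> list[str]:
    async with aiohttp.ClientSession() as session:
        tasks = [
            create_task(fetch_page(session, f"{base_url}&offset={offset}"))
            for offset in range(0, total, page_size)
        ]
        pages = await gather(*tasks, return_exceptions=True)
    # Keep successful pages, silently drop the ones that raised.
    return [page for page in pages if not isinstance(page, BaseException)]


if __name__ == "__main__":
    # Placeholder URL; real use would point at the paginated listing being scraped.
    pages = run(fetch_all("https://example.com/list?protocol=http", total=90))
    print(f"fetched {len(pages)} pages")
```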
swiftshadow/validator.py
CHANGED
@@ -42,21 +42,25 @@ async def get_host_ip(async_session: aiohttp.ClientSession) -> str | None:
     return ip
 
 
-async def check_proxy(
+async def check_proxy(
+    async_session: aiohttp.ClientSession, proxy: Proxy, checker: str
+) -> str:
     """
-    Check one proxy
+    Check one proxy object.
 
     Args:
         async_session: aiohttp client session object
         proxy: Proxy Object
+        checker: Proxy checker API.
 
     Returns:
         text: API response text
     """
     async with async_session.get(
-        url=f"{proxy.protocol}://
+        url=f"{proxy.protocol}://{checker}",
         proxy=proxy.as_string(),
-        timeout=
+        timeout=5,
+        ssl=False,
     ) as response:
         text = await response.text()
         return text
@@ -73,16 +77,33 @@ async def validate_proxies(proxies: list[Proxy]) -> list[Proxy]:
         working_proxies: List of working Proxies
     """
     working_proxies: list[Proxy] = []
-
+    checkers = [
+        "checkip.amazonaws.com",
+        "ipinfo.io/ip",
+        "api.ipify.org/",
+        "whatsmyip.dev/api/ip",
+        "ip4.anysrc.net/banner",
+        "api4.my-ip.io/v2/ip.txt",
+        "api.myip.la",
+        "api.seeip.org",
+        "ips.im/api",
+        "ifconfig.me/ip",
+        "myip.expert/api/",
+        "checkip.info/ip",
+        "api.myip.com",
+    ]
+    total_checkers = len(checkers)
+    tcp_connection = aiohttp.TCPConnector(limit=100)
+    async with aiohttp.ClientSession(connector=tcp_connection) as async_session:
         tasks = []
 
         host_task = asyncio.create_task(coro=get_host_ip(async_session))
         tasks.append(host_task)
 
-        for proxy in proxies:
-
+        for idx, proxy in enumerate(proxies):
+            checker = checkers[idx % total_checkers]
+            task = asyncio.create_task(coro=check_proxy(async_session, proxy, checker))
             tasks.append(task)
-
         results = await asyncio.gather(*tasks, return_exceptions=True)
         host_ip = results[0]
         results = results[1:]
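
validate_proxies() now spreads its checks across a pool of IP-echo endpoints, assigning checkers[idx % total_checkers] to each proxy and capping concurrency with a 100-connection TCPConnector, instead of sending every probe to a single service. A hedged usage sketch follows; the proxy addresses are documentation-range placeholders, and the Proxy field order follows the positional call in providers.py.

```python
# Usage sketch: validate a hand-built candidate list with the updated validator.
# The addresses below are TEST-NET placeholders and will not actually respond.
from asyncio import run

from swiftshadow.models import Proxy
from swiftshadow.validator import validate_proxies

candidates = [
    Proxy("203.0.113.10", "http", 8080),
    Proxy("198.51.100.7", "http", 3128),
]

working = run(validate_proxies(candidates))
print(f"{len(working)} of {len(candidates)} candidates responded through a checker")
```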
{swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/METADATA
CHANGED
@@ -1,12 +1,14 @@
 Metadata-Version: 2.4
 Name: swiftshadow
-Version: 2.0.1
+Version: 2.2.0
 Summary: Free IP Proxy rotator for python
 Author-email: sachin-sankar <mail.sachinsankar@gmail.com>
 License-File: LICENSE
 Requires-Python: >=3.12
+Requires-Dist: aiofiles>=24.1.0
 Requires-Dist: aiohttp>=3.11.11
 Requires-Dist: appdirs>=1.4.4
+Requires-Dist: lxml>=6.0.0
 Requires-Dist: requests>=2.32.3
 Description-Content-Type: text/markdown
 
swiftshadow-2.2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
+swiftshadow/__init__.py,sha256=oxJDmG0YO0XyYGQpa9V_o38lZnoie7U_d4Ik_h46UXs,922
+swiftshadow/cache.py,sha256=Mg8xsD6K3K012sILBwD2EZH6CE5kWCQNKCfZ5yadalI,800
+swiftshadow/classes.py,sha256=hXAyvRuGXDvj_TsZxJ-bapK0pL82lhZUBGQZbQiGWN0,12280
+swiftshadow/exceptions.py,sha256=qu4eXyrkWD9qd4HCIR-8vRfVcqLlTupo4sD72alCdug,129
+swiftshadow/helpers.py,sha256=b8my66sBw_lTVLaKu4SaGjYQqEfIaStsuZlmK9Pmy5Y,1294
+swiftshadow/models.py,sha256=rHOuOFc6UYCI8L2pwxAbvS3Fj0Ag89cHod0rt7kQ2Vc,1790
+swiftshadow/providers.py,sha256=3IjITgKyywynssYKmIUP5jbcmbezIlaJVR7ODcBiDiE,6628
+swiftshadow/types.py,sha256=Alyw3n54OESX1vSR-0kTvpYTlJ8LKfy5J9WZbtglHpE,894
+swiftshadow/validator.py,sha256=3qE-99uljtbin1125uOgqAjnLRjuIgNYO5iiUAmaBC8,3102
+swiftshadow-2.2.0.dist-info/METADATA,sha256=a5SOKU3R1eBHRA6SovdAYDD5m4qQ3VH3qxmtx8FVSi8,3320
+swiftshadow-2.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+swiftshadow-2.2.0.dist-info/entry_points.txt,sha256=yMj0uEagcmXK2dmMmNXWebTpTT9j5K03oaRrd2wkyLA,49
+swiftshadow-2.2.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+swiftshadow-2.2.0.dist-info/RECORD,,
swiftshadow-2.0.1.dist-info/RECORD
DELETED
@@ -1,14 +0,0 @@
-swiftshadow/__init__.py,sha256=DCxCxaMrluQDVJLyK5K61NxSaadD1d-nyTrFKsjfNDI,921
-swiftshadow/cache.py,sha256=eA_AWa8EsPdB6xD__ButvQdqETC4i89qEgxmHQV2XWU,800
-swiftshadow/classes.py,sha256=4hr0h2aOb-CXYPrf_ESf05RSEdRVLmYtvx09npnUVhQ,8242
-swiftshadow/exceptions.py,sha256=qu4eXyrkWD9qd4HCIR-8vRfVcqLlTupo4sD72alCdug,129
-swiftshadow/helpers.py,sha256=hHJ_JjRx2UFC5Ircl75LeYKBNDYTY_xMy2iWCk-UPqo,959
-swiftshadow/models.py,sha256=YyfZV98tPdLnF1O3WmTNUNoK4t0GuchfEftzjiM03ck,1678
-swiftshadow/providers.py,sha256=myJ6t-WD20wrjc7qDhxCpX1-oi7ipQutp34XKM2tjeI,5660
-swiftshadow/types.py,sha256=Alyw3n54OESX1vSR-0kTvpYTlJ8LKfy5J9WZbtglHpE,894
-swiftshadow/validator.py,sha256=z0dmRKxhvPETSXfC2hTImL0t8pw0zjSYIhUaDMJcJhU,2469
-swiftshadow-2.0.1.dist-info/METADATA,sha256=Zj9Nw0u1cscQTrgtc6nBZvHRieCkH3-0r85YeWVp7b0,3261
-swiftshadow-2.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-swiftshadow-2.0.1.dist-info/entry_points.txt,sha256=yMj0uEagcmXK2dmMmNXWebTpTT9j5K03oaRrd2wkyLA,49
-swiftshadow-2.0.1.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-swiftshadow-2.0.1.dist-info/RECORD,,
{swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/WHEEL
File without changes

{swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/entry_points.txt
File without changes

{swiftshadow-2.0.1.dist-info → swiftshadow-2.2.0.dist-info}/licenses/LICENSE
File without changes