swiftshadow 0.1.2__tar.gz → 0.2.1__tar.gz
- {swiftshadow-0.1.2/swiftshadow.egg-info → swiftshadow-0.2.1}/PKG-INFO +1 -1
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/setup.py +1 -1
- swiftshadow-0.2.1/swiftshadow/cache.py +16 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow/swiftshadow.py +16 -25
- {swiftshadow-0.1.2 → swiftshadow-0.2.1/swiftshadow.egg-info}/PKG-INFO +1 -1
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow.egg-info/SOURCES.txt +1 -1
- swiftshadow-0.1.2/swiftshadow/classes.py +0 -22
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/LICENSE +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/README.md +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/setup.cfg +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow/__init__.py +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow/constants.py +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow/helpers.py +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow/providers.py +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow.egg-info/dependency_links.txt +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow.egg-info/requires.txt +0 -0
- {swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow.egg-info/top_level.txt +0 -0
{swiftshadow-0.1.2 → swiftshadow-0.2.1}/setup.py
@@ -25,7 +25,7 @@ setup(
     description="Free IP Proxy rotator for python",
     long_description=long_description,
     long_description_content_type="text/markdown",
-    version=VERSION.get("__version__", "0.1
+    version=VERSION.get("__version__", "0.2.1"),
     packages=find_packages(where=".", exclude=["tests"]),
     install_requires=["requests"],
     classifiers=[
swiftshadow-0.2.1/swiftshadow/cache.py (new file)
@@ -0,0 +1,16 @@
+from datetime import datetime, timezone, timedelta
+from time import sleep
+
+
+def getExpiry(expiryIn):
+    now = datetime.now(timezone.utc)
+    expiry = now + timedelta(seconds=expiryIn)
+    return expiry
+
+
+def checkExpiry(expiryDateObject):
+    now = datetime.now(timezone.utc)
+    if (now - expiryDateObject).days < 0:
+        return False
+    else:
+        return True
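The two helpers above are what the reworked `Proxy.update()` relies on: `getExpiry(expiryIn)` returns a timezone-aware UTC datetime `expiryIn` seconds in the future, and `checkExpiry(expiryDateObject)` reports whether that moment has passed (it returns `False` as long as `timedelta.days` is negative, i.e. while now is still before the stored expiry). A minimal sketch of the round trip through an ISO-8601 string, mirroring how `swiftshadow.py` persists it; the `demo_cache.json` filename is illustrative, not part of the package:

```python
from datetime import datetime
from json import dump, load

import swiftshadow.cache as cache

# Compute an expiry 10 minutes from now and persist it as an ISO-8601 string.
expiry = cache.getExpiry(600)
with open("demo_cache.json", "w") as file:  # illustrative file, not the package's cache
    dump([expiry.isoformat(), ["203.0.113.10:8080"]], file)

# Later: reload the timestamp and decide whether the cached data is still usable.
with open("demo_cache.json", "r") as file:
    saved = load(file)
stored_expiry = datetime.fromisoformat(saved[0])
if cache.checkExpiry(stored_expiry):
    print("cache expired, refresh needed")
else:
    print("cache still valid:", saved[1])
```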
{swiftshadow-0.1.2 → swiftshadow-0.2.1}/swiftshadow/swiftshadow.py
@@ -1,9 +1,10 @@
 from requests import get
 from random import choice
 from datetime import datetime, timezone, timedelta
-from
+from json import dump, load
 from swiftshadow.helpers import log
 from swiftshadow.providers import Proxyscrape, Scrapingant
+import swiftshadow.cache as cache
 
 
 class Proxy:
@@ -25,7 +26,7 @@ class Proxy:
         protocol: HTTP/HTTPS protocol to filter proxies.
         maxProxies: Maximum number of proxies to store and rotate from.
         autoRotate: Rotates proxy when `Proxy.proxy()` function is called.
-        cachePeriod: Time to cache proxies in
+        cachePeriod: Time to cache proxies in seconds.
 
     Returns:
         proxyClass (swiftshadow.Proxy): `swiftshadow.Proxy` class instance
@@ -44,7 +45,8 @@ class Proxy:
         self.maxProxies = maxProxies
         self.autoRotate = autoRotate
         self.cachePeriod = cachePeriod
-
+
+        self.update()
 
     def checkIp(self, ip, cc, protocol):
         if (ip[1] == cc or cc == None) and ip[2] == protocol:
@@ -60,27 +62,16 @@ class Proxy:
         else:
             return False
 
-    def
-        expiry = latest + timedelta(minutes=self.cachePeriod)
-        live = cache - latest
-        dead = expiry - cache
-        limit = float(self.cachePeriod)
-        if live.seconds / 60 < limit and dead.seconds / 60 < limit:
-            cacheValid = True
-        else:
-            cacheValid = False
-        return cacheValid
-
-    def updateProxyList(self):
+    def update(self):
         try:
-            with open(".swiftshadow.
+            with open(".swiftshadow.json", "r") as file:
                 data = load(file)
-                self.
-
-                if
+                self.expiry = datetime.fromisoformat(data[0])
+                expired = cache.checkExpiry(self.expiry)
+                if not expired:
                     log(
                         "info",
-                        f"
+                        f"Loaded proxies from cache",
                     )
                     self.proxies = data[1]
                     self.current = self.proxies[0]
@@ -88,7 +79,7 @@ class Proxy:
                 else:
                     log(
                         "info",
-                        f"
+                        f"Cache expired. Updating cache...",
                     )
         except FileNotFoundError:
             log("error", "No cache found. Cache will be created after update")
@@ -105,8 +96,8 @@ class Proxy:
                 "No proxies found for current settings. To prevent runtime error updating the proxy list again.",
             )
             self.update()
-        with open(".swiftshadow.
-            dump([
+        with open(".swiftshadow.json", "w") as file:
+            dump([cache.getExpiry(self.cachePeriod).isoformat(), self.proxies], file)
         self.current = self.proxies[0]
 
     def rotate(self):
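After this change the on-disk cache `.swiftshadow.json` is a two-element JSON array: the ISO-8601 expiry produced by `cache.getExpiry(self.cachePeriod)`, followed by the proxy list. Assuming each proxy is stored as a `{protocol: address}` dict (as the `proxy()` docstring below indicates), reading it back looks roughly like this sketch; the sample values in the comment are made up:

```python
from datetime import datetime
from json import load

# Expected shape of .swiftshadow.json after an update (values invented):
# ["2024-05-01T10:15:30.123456+00:00", [{"http": "203.0.113.10:8080"}]]
with open(".swiftshadow.json", "r") as file:
    expiry_iso, proxies = load(file)

expiry = datetime.fromisoformat(expiry_iso)  # same parsing step update() performs
print(expiry, proxies[0])
```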
@@ -118,7 +109,7 @@ class Proxy:
         Note:
             Function only for manual rotation. If `autoRotate` is set to `True` then no need to call this function.
         """
-        if
+        if cache.checkExpiry(self.expiry):
             self.update()
         self.current = choice(self.proxies)
@@ -129,7 +120,7 @@ class Proxy:
         Returns:
             proxyDict (dict):A proxy dict of format `{protocol:address}`
         """
-        if
+        if cache.checkExpiry(self.expiry):
             self.update()
         if self.autoRotate == True:
             return choice(self.proxies)
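Taken together, the new flow is: `__init__` now calls `update()` straight away, `update()` reuses `.swiftshadow.json` while `cache.checkExpiry` says it is still valid, and both `rotate()` and `proxy()` trigger a refresh once the cached expiry has passed. A hedged usage sketch under those assumptions; the keyword arguments mirror the documented attributes, but the exact constructor signature and defaults are not fully shown in this diff:

```python
from requests import get
from swiftshadow.swiftshadow import Proxy  # the class lives in swiftshadow/swiftshadow.py in 0.2.1

# Keyword names follow the docstring (protocol, maxProxies, autoRotate, cachePeriod);
# treat them as assumptions, since __init__'s full signature is not part of this diff.
swift = Proxy(autoRotate=True, cachePeriod=600)

# proxy() returns a dict of the form {protocol: address}, which requests accepts directly.
response = get("https://checkip.amazonaws.com", proxies=swift.proxy(), timeout=10)
print(response.text.strip())
```

With `autoRotate=True`, each `proxy()` call already picks a random entry, so manual `rotate()` calls are only needed when auto-rotation is off.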
swiftshadow-0.1.2/swiftshadow/classes.py (deleted)
@@ -1,22 +0,0 @@
-from requests import get
-
-
-class ScrapingAnt:
-    def __init__(self):
-        self.update()
-
-    def update(self):
-        raw = get("https://scrapingant.com/proxies").text
-        rows = [i.split("<td>") for i in raw.split("<tr>")]
-        data = []
-
-        def clean(text):
-            return text[: text.find("<")].strip()
-
-        for row in rows[2:]:
-            cleaned = [clean(row[1]) + ":" + clean(row[2]), clean(row[3]).lower()]
-            data.append(cleaned)
-        print(data)
-
-
-a = ScrapingAnt()
The remaining files listed above are unchanged.