swiftshadow 2.1.0__py3-none-any.whl → 2.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- swiftshadow/classes.py +38 -20
- swiftshadow/helpers.py +10 -0
- swiftshadow/models.py +2 -0
- swiftshadow/providers.py +68 -40
- swiftshadow/validator.py +29 -8
- {swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/METADATA +2 -1
- swiftshadow-2.2.0.dist-info/RECORD +14 -0
- swiftshadow-2.1.0.dist-info/RECORD +0 -14
- {swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/WHEEL +0 -0
- {swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/entry_points.txt +0 -0
- {swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/licenses/LICENSE +0 -0
swiftshadow/classes.py
CHANGED
```diff
@@ -1,19 +1,20 @@
+from asyncio import run
 from datetime import datetime
+from logging import DEBUG, INFO, FileHandler, Formatter, StreamHandler, getLogger
+from pathlib import Path
+from pickle import dump, dumps, load, loads
 from random import choice
+from sys import stdout
 from typing import Literal
-
+
+import aiofiles
 from appdirs import user_cache_dir
-from logging import FileHandler, getLogger, Formatter, StreamHandler, INFO, DEBUG
-from sys import stdout
-from pickle import load, dump, loads, dumps
-from swiftshadow.cache import checkExpiry, getExpiry
-from swiftshadow.models import CacheData, Proxy as Proxy

+from swiftshadow.cache import checkExpiry, getExpiry
 from swiftshadow.exceptions import UnsupportedProxyProtocol
+from swiftshadow.models import CacheData
+from swiftshadow.models import Proxy as Proxy
 from swiftshadow.providers import Providers
-from asyncio import run
-import aiofiles
-

 logger = getLogger("swiftshadow")
 logger.setLevel(INFO)
@@ -87,13 +88,16 @@ class ProxyInterface:

         if protocol not in ["https", "http"]:
             raise UnsupportedProxyProtocol(
-                f"Protocol {
+                f"Protocol {
+                    protocol
+                } is not supported by swiftshadow, please choose between HTTP or HTTPS"
             )
         self.protocol: Literal["https", "http"] = protocol

         self.maxproxies: int = maxProxies
         self.autorotate: bool = autoRotate
         self.cachePeriod: int = cachePeriod
+        self.configString: str = f"{maxProxies}{''.join(protocol)}{''.join(countries)}"

         if debug:
             logger.setLevel(DEBUG)
@@ -136,11 +140,15 @@ class ProxyInterface:
             pickled_bytes = await cacheFile.read()
             cache: CacheData = loads(pickled_bytes)

-            if
+            if self.configString != cache.configString:
+                logger.info("Cache Invalid due to configuration changes.")
+            elif not checkExpiry(cache.expiryIn):
                 self.proxies = cache.proxies
                 logger.info("Loaded proxies from cache.")
                 logger.debug(
-                    f"Cache with {len(cache.proxies)} proxies, expire in {
+                    f"Cache with {len(cache.proxies)} proxies, expire in {
+                        cache.expiryIn
+                    }"
                 )
                 self.current = self.proxies[0]
                 self.cacheExpiry = cache.expiryIn
@@ -162,7 +170,9 @@ class ProxyInterface:
                     self.countries, self.protocol
                 )
                 logger.debug(
-                    f"{len(providerProxies)} proxies from {
+                    f"{len(providerProxies)} proxies from {
+                        provider.providerFunction.__name__
+                    }"
                 )
                 self.proxies.extend(providerProxies)

@@ -170,14 +180,16 @@ class ProxyInterface:
                 break

        if len(self.proxies) == 0:
-
+            if self.protocol == "https":
+                raise ValueError("No proxies were found for the current filter settings. Tip: https proxies can be rare; recommend setting protocol to http")
+            raise ValueError("No proxies were found for the current filter settings.")

         async with aiofiles.open(
             self.cacheFolderPath.joinpath("swiftshadow.pickle"), "wb+"
         ) as cacheFile:
             cacheExpiry = getExpiry(self.cachePeriod)
             self.cacheExpiry = cacheExpiry
-            cache = CacheData(cacheExpiry, self.proxies)
+            cache = CacheData(cacheExpiry, self.configString, self.proxies)
             pickled_bytes = dumps(cache)
             _ = await cacheFile.write(pickled_bytes)
         self.current = self.proxies[0]
@@ -199,11 +211,15 @@ class ProxyInterface:
         ) as cacheFile:
             cache: CacheData = load(cacheFile)

-            if
+            if self.configString != cache.configString:
+                logger.info("Cache Invalid due to configuration changes.")
+            elif not checkExpiry(cache.expiryIn):
                 self.proxies = cache.proxies
                 logger.info("Loaded proxies from cache.")
                 logger.debug(
-                    f"Cache with {len(cache.proxies)} proxies, expire in {
+                    f"Cache with {len(cache.proxies)} proxies, expire in {
+                        cache.expiryIn
+                    }"
                 )
                 self.current = self.proxies[0]
                 logger.debug(f"Cache set to expire at {cache.expiryIn}")
@@ -225,7 +241,9 @@ class ProxyInterface:
                     provider.providerFunction(self.countries, self.protocol)
                 )
                 logger.debug(
-                    f"{len(providerProxies)} proxies from {
+                    f"{len(providerProxies)} proxies from {
+                        provider.providerFunction.__name__
+                    }"
                 )
                 self.proxies.extend(providerProxies)

@@ -233,14 +251,14 @@ class ProxyInterface:
                 break

        if len(self.proxies) == 0:
-            raise ValueError("No proxies
+            raise ValueError("No proxies were found for the current filter settings.")

        with open(
            self.cacheFolderPath.joinpath("swiftshadow.pickle"), "wb+"
        ) as cacheFile:
            cacheExpiry = getExpiry(self.cachePeriod)
            self.cacheExpiry = cacheExpiry
-            cache = CacheData(cacheExpiry, self.proxies)
+            cache = CacheData(cacheExpiry, self.configString, self.proxies)
            dump(cache, cacheFile)
        self.current = self.proxies[0]

```
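
The new `configString` ties the on-disk cache to the filter settings it was built under. A minimal sketch of the idea, using only the expression added above; the concrete values are illustrative:

```python
# Illustrative values; the key expression is the one added in classes.py.
maxProxies, protocol, countries = 20, "http", ["US", "DE"]

configString = f"{maxProxies}{''.join(protocol)}{''.join(countries)}"
print(configString)  # -> 20httpUSDE

# A cache pickled under different settings stores a different configString, so on
# load ProxyInterface logs "Cache Invalid due to configuration changes." and
# refetches from the providers instead of reusing the stale proxy list.
```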
swiftshadow/helpers.py
CHANGED
```diff
@@ -4,6 +4,7 @@ from typing import Literal
 from requests import get

 from swiftshadow.models import Proxy
+from swiftshadow.validator import validate_proxies


 def checkProxy(proxy):
@@ -36,3 +37,12 @@ def plaintextToProxies(text: str, protocol: Literal["http", "https"]) -> list[Pr
         proxy = Proxy(ip=ip, port=int(port), protocol=protocol)
         proxies.append(proxy)
     return proxies
+
+
+async def GenericPlainTextProxyProvider(
+    url: str, protocol: Literal["http", "https"] = "http"
+) -> list[Proxy]:
+    raw: str = get(url).text
+    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
+    results = await validate_proxies(proxies)
+    return results
```
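
`GenericPlainTextProxyProvider` folds the fetch, parse, and validate steps that the plain-text providers used to repeat into one awaitable. A hedged usage sketch; calling it directly is only illustrative, and the URL is the TheSpeedX list that providers.py already passes to it:

```python
# Sketch only: in the package this helper is driven by the provider functions
# in swiftshadow/providers.py rather than called by users.
from asyncio import run

from swiftshadow.helpers import GenericPlainTextProxyProvider

working = run(
    GenericPlainTextProxyProvider(
        url="https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt",
        protocol="http",
    )
)
print(f"{len(working)} proxies passed validation")
```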
swiftshadow/models.py
CHANGED
```diff
@@ -46,10 +46,12 @@ class CacheData:

     Attributes:
         expiryIn: Expiry date object.
+        configString: Configuration String for the ProxyInterface this cache was created.
        proxies: Proxies to head.
     """

     expiryIn: datetime
+    configString: str
     proxies: list[Proxy]


```
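
A short construction sketch for the extended `CacheData`, mirroring the positional call sites updated in classes.py (`CacheData(cacheExpiry, self.configString, self.proxies)`); the Proxy values and the cache period passed to `getExpiry` are placeholders:

```python
# Sketch only: field order follows the dataclass-style definition above.
from swiftshadow.cache import getExpiry
from swiftshadow.models import CacheData, Proxy

proxy = Proxy(ip="203.0.113.10", port=8080, protocol="http")  # placeholder proxy
# getExpiry(10) mirrors getExpiry(self.cachePeriod) in classes.py.
cache = CacheData(getExpiry(10), "10http", [proxy])
```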
swiftshadow/providers.py
CHANGED
```diff
@@ -1,11 +1,14 @@
 from typing import Literal
+import aiohttp

 from requests import get

-from swiftshadow.helpers import
+from swiftshadow.helpers import GenericPlainTextProxyProvider
 from swiftshadow.models import Provider, Proxy
+from asyncio import create_task, gather
 from swiftshadow.types import MonosansProxyDict
 from swiftshadow.validator import validate_proxies
+from lxml import etree


 async def Monosans(
@@ -38,11 +41,10 @@ async def Monosans(
 async def Thespeedx(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-        "https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt"
-
-
-    results = await validate_proxies(proxies)
+    results = await GenericPlainTextProxyProvider(
+        url="https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt",
+        protocol="http",
+    )
     return results


@@ -83,63 +85,91 @@ async def GoodProxy(
 async def OpenProxyList(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-
+    results = await GenericPlainTextProxyProvider(
+        "https://api.openproxylist.xyz/http.txt", "http"
+    )
     return results


 async def MuRongPIG(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-        "https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/refs/heads/main/http_checked.txt"
-
-
-    results = await validate_proxies(proxies)
+    results = await GenericPlainTextProxyProvider(
+        "https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/refs/heads/main/http_checked.txt",
+        "http",
+    )
     return results


-async def
+async def Mmpx12(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-    ).text
-    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
-    results = await validate_proxies(proxies)
+    url = f"https://github.com/mmpx12/proxy-list/raw/refs/heads/master/{protocol}.txt"
+    results = await GenericPlainTextProxyProvider(url, protocol)
     return results


-async def
+async def Anonym0usWork1221(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-    ).text
-    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
-    results = await validate_proxies(proxies)
+    url = f"https://github.com/Anonym0usWork1221/Free-Proxies/raw/refs/heads/main/proxy_files/{protocol}_proxies.txt"
+    results = await GenericPlainTextProxyProvider(url, protocol)
     return results


-async def
+async def ProxySpace(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-
-    )
-    proxies: list[Proxy] = plaintextToProxies(raw, protocol=protocol)
-    results = await validate_proxies(proxies)
+    results = await GenericPlainTextProxyProvider(
+        "https://proxyspace.pro/http.txt", "http"
+    )
     return results


-async def
+async def ProxyDB(
     countries: list[str] = [], protocol: Literal["http", "https"] = "http"
 ):
-
-    proxies: list[Proxy] =
-
-
+    base_url = f"https://www.proxydb.net/?protocol={protocol}&sort_column_id=uptime&sort_order_desc=true"
+    proxies: list[Proxy] = []
+    raw = get(base_url).text
+    total = int(
+        raw.split("Showing")[-1].split("total proxies")[0].split("of")[-1].strip()
+    )
+
+    async def parsePage(session: aiohttp.ClientSession, url: str):
+        proxies = []
+        async with session.get(url) as resp:
+            raw = await resp.text()
+            exml = etree.HTML(raw)
+            table = exml.find("body/div/div/table/tbody")
+            rows = iter(table)
+            for row in rows:
+                if len(proxies) > 500:
+                    break
+                data = []
+                for td in row[:4]:
+                    text = td.text.strip()
+                    if text == "":
+                        text = list(td)[-1].text
+                    data.append(text)
+                if countries != [] and data[-1] not in countries:
+                    continue
+                proxy = Proxy(data[0], protocol, data[1])
+                proxies.append(proxy)
+        return proxies
+
+    tasks = []
+    async with aiohttp.ClientSession() as session:
+        for offset in range(0, total, 30):
+            url = base_url + f"&offset={offset}"
+            task = create_task(coro=parsePage(session, url))
+            tasks.append(task)
+        results = await gather(*tasks, return_exceptions=True)
+        for result in results:
+            if isinstance(result, BaseException):
+                continue
+            proxies.extend(result)
+    return proxies


 Providers: list[Provider] = [
@@ -154,9 +184,7 @@ Providers: list[Provider] = [
     ),
     Provider(providerFunction=Mmpx12, countryFilter=False, protocols=["http", "https"]),
     Provider(providerFunction=GoodProxy, countryFilter=False, protocols=["http"]),
-    Provider(
-        providerFunction=KangProxy, countryFilter=False, protocols=["http", "https"]
-    ),
     Provider(providerFunction=ProxySpace, countryFilter=False, protocols=["http"]),
     Provider(providerFunction=OpenProxyList, countryFilter=False, protocols=["http"]),
+    Provider(providerFunction=ProxyDB, countryFilter=True, protocols=["http", "https"]),
 ]
```
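
The new `ProxyDB` provider is registered with `countryFilter=True`: it reads the proxydb.net total from the first page, then fetches 30-row pages concurrently and parses each HTML table with lxml. Because provider functions are plain async callables taking `(countries, protocol)`, it can be exercised on its own. A hedged sketch; the `"US"` value is an assumption about the site's country column format:

```python
# Sketch only: ProxyInterface normally drives this via
# provider.providerFunction(self.countries, self.protocol).
from asyncio import run

from swiftshadow.providers import ProxyDB

proxies = run(ProxyDB(countries=["US"], protocol="http"))
print(f"{len(proxies)} proxies from ProxyDB")
```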
swiftshadow/validator.py
CHANGED
```diff
@@ -42,21 +42,25 @@ async def get_host_ip(async_session: aiohttp.ClientSession) -> str | None:
         return ip


-async def check_proxy(
+async def check_proxy(
+    async_session: aiohttp.ClientSession, proxy: Proxy, checker: str
+) -> str:
     """
-    Check one proxy
+    Check one proxy object.

     Args:
         async_session: aiohttp client session object
        proxy: Proxy Object
+        checker: Proxy checker API.

     Returns:
         text: API response text
     """
     async with async_session.get(
-        url=f"{proxy.protocol}://
+        url=f"{proxy.protocol}://{checker}",
         proxy=proxy.as_string(),
-        timeout=
+        timeout=5,
+        ssl=False,
     ) as response:
         text = await response.text()
         return text
@@ -73,16 +77,33 @@ async def validate_proxies(proxies: list[Proxy]) -> list[Proxy]:
         working_proxies: List of working Proxies
     """
     working_proxies: list[Proxy] = []
-
+    checkers = [
+        "checkip.amazonaws.com",
+        "ipinfo.io/ip",
+        "api.ipify.org/",
+        "whatsmyip.dev/api/ip",
+        "ip4.anysrc.net/banner",
+        "api4.my-ip.io/v2/ip.txt",
+        "api.myip.la",
+        "api.seeip.org",
+        "ips.im/api",
+        "ifconfig.me/ip",
+        "myip.expert/api/",
+        "checkip.info/ip",
+        "api.myip.com",
+    ]
+    total_checkers = len(checkers)
+    tcp_connection = aiohttp.TCPConnector(limit=100)
+    async with aiohttp.ClientSession(connector=tcp_connection) as async_session:
         tasks = []

         host_task = asyncio.create_task(coro=get_host_ip(async_session))
         tasks.append(host_task)

-        for proxy in proxies:
-
+        for idx, proxy in enumerate(proxies):
+            checker = checkers[idx % total_checkers]
+            task = asyncio.create_task(coro=check_proxy(async_session, proxy, checker))
             tasks.append(task)
-
         results = await asyncio.gather(*tasks, return_exceptions=True)
         host_ip = results[0]
         results = results[1:]
```
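
Validation now spreads proxies across thirteen public IP-echo endpoints in round-robin order and shares a single `TCPConnector` capped at 100 connections, instead of pointing every check at one service. The assignment rule reduces to a modulo index; a trivial sketch with a shortened list:

```python
# The rotation added above: proxy idx is checked against checkers[idx % len(checkers)].
# Checker list shortened here; the full 13-entry list lives in validate_proxies.
checkers = ["checkip.amazonaws.com", "ipinfo.io/ip", "api.ipify.org/"]

for idx in range(7):
    print(idx, "->", checkers[idx % len(checkers)])
```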
{swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: swiftshadow
-Version: 2.1.0
+Version: 2.2.0
 Summary: Free IP Proxy rotator for python
 Author-email: sachin-sankar <mail.sachinsankar@gmail.com>
 License-File: LICENSE
@@ -8,6 +8,7 @@ Requires-Python: >=3.12
 Requires-Dist: aiofiles>=24.1.0
 Requires-Dist: aiohttp>=3.11.11
 Requires-Dist: appdirs>=1.4.4
+Requires-Dist: lxml>=6.0.0
 Requires-Dist: requests>=2.32.3
 Description-Content-Type: text/markdown

```
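
The metadata changes are the version bump and a new hard runtime dependency on lxml, pulled in for the ProxyDB provider. A quick post-upgrade sanity check, as a sketch:

```python
# Sketch: confirm the new lxml dependency resolved after upgrading swiftshadow.
from lxml import etree  # an ImportError here means lxml>=6.0.0 did not install

print(etree.LXML_VERSION)  # lxml's version tuple
```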
swiftshadow-2.2.0.dist-info/RECORD
ADDED
```diff
@@ -0,0 +1,14 @@
+swiftshadow/__init__.py,sha256=oxJDmG0YO0XyYGQpa9V_o38lZnoie7U_d4Ik_h46UXs,922
+swiftshadow/cache.py,sha256=Mg8xsD6K3K012sILBwD2EZH6CE5kWCQNKCfZ5yadalI,800
+swiftshadow/classes.py,sha256=hXAyvRuGXDvj_TsZxJ-bapK0pL82lhZUBGQZbQiGWN0,12280
+swiftshadow/exceptions.py,sha256=qu4eXyrkWD9qd4HCIR-8vRfVcqLlTupo4sD72alCdug,129
+swiftshadow/helpers.py,sha256=b8my66sBw_lTVLaKu4SaGjYQqEfIaStsuZlmK9Pmy5Y,1294
+swiftshadow/models.py,sha256=rHOuOFc6UYCI8L2pwxAbvS3Fj0Ag89cHod0rt7kQ2Vc,1790
+swiftshadow/providers.py,sha256=3IjITgKyywynssYKmIUP5jbcmbezIlaJVR7ODcBiDiE,6628
+swiftshadow/types.py,sha256=Alyw3n54OESX1vSR-0kTvpYTlJ8LKfy5J9WZbtglHpE,894
+swiftshadow/validator.py,sha256=3qE-99uljtbin1125uOgqAjnLRjuIgNYO5iiUAmaBC8,3102
+swiftshadow-2.2.0.dist-info/METADATA,sha256=a5SOKU3R1eBHRA6SovdAYDD5m4qQ3VH3qxmtx8FVSi8,3320
+swiftshadow-2.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+swiftshadow-2.2.0.dist-info/entry_points.txt,sha256=yMj0uEagcmXK2dmMmNXWebTpTT9j5K03oaRrd2wkyLA,49
+swiftshadow-2.2.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+swiftshadow-2.2.0.dist-info/RECORD,,
```
swiftshadow-2.1.0.dist-info/RECORD
REMOVED
```diff
@@ -1,14 +0,0 @@
-swiftshadow/__init__.py,sha256=oxJDmG0YO0XyYGQpa9V_o38lZnoie7U_d4Ik_h46UXs,922
-swiftshadow/cache.py,sha256=Mg8xsD6K3K012sILBwD2EZH6CE5kWCQNKCfZ5yadalI,800
-swiftshadow/classes.py,sha256=nTR_zykns2oxJ4Bj2UXxoXmLqNOMtkLqy17p_LH8odc,11423
-swiftshadow/exceptions.py,sha256=qu4eXyrkWD9qd4HCIR-8vRfVcqLlTupo4sD72alCdug,129
-swiftshadow/helpers.py,sha256=kC5PvfvDCQwigAmfkxlhb4PhcmgdCNpJksMiqyx9lU4,960
-swiftshadow/models.py,sha256=YyfZV98tPdLnF1O3WmTNUNoK4t0GuchfEftzjiM03ck,1678
-swiftshadow/providers.py,sha256=Z_NEOlMx0rr1Z5rfz-rA1kQ4YHKGRuI65jBxMGOyJkE,5660
-swiftshadow/types.py,sha256=Alyw3n54OESX1vSR-0kTvpYTlJ8LKfy5J9WZbtglHpE,894
-swiftshadow/validator.py,sha256=z0dmRKxhvPETSXfC2hTImL0t8pw0zjSYIhUaDMJcJhU,2469
-swiftshadow-2.1.0.dist-info/METADATA,sha256=NTPUMBc5xlCEkcryOEC81Au_MCD8JPO8QxrhkEDo7AM,3293
-swiftshadow-2.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-swiftshadow-2.1.0.dist-info/entry_points.txt,sha256=yMj0uEagcmXK2dmMmNXWebTpTT9j5K03oaRrd2wkyLA,49
-swiftshadow-2.1.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-swiftshadow-2.1.0.dist-info/RECORD,,
```
{swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/WHEEL
File without changes

{swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/entry_points.txt
File without changes

{swiftshadow-2.1.0.dist-info → swiftshadow-2.2.0.dist-info}/licenses/LICENSE
File without changes