cloudcheck 8.6.1__cp312-cp312-manylinux_2_24_i686.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cloudcheck might be problematic. Click here for more details.
- cloudcheck/__init__.py +3 -0
- cloudcheck/cloudcheck.cpython-312-i386-linux-gnu.so +0 -0
- cloudcheck/helpers.py +249 -0
- cloudcheck/providers/__init__.py +51 -0
- cloudcheck/providers/akamai.py +35 -0
- cloudcheck/providers/alibaba.py +13 -0
- cloudcheck/providers/amazon.py +39 -0
- cloudcheck/providers/arvancloud.py +22 -0
- cloudcheck/providers/backblaze.py +13 -0
- cloudcheck/providers/base.py +289 -0
- cloudcheck/providers/cachefly.py +26 -0
- cloudcheck/providers/cisco.py +41 -0
- cloudcheck/providers/cloudflare.py +42 -0
- cloudcheck/providers/cloudfront.py +20 -0
- cloudcheck/providers/ddosguard.py +14 -0
- cloudcheck/providers/dell.py +13 -0
- cloudcheck/providers/digitalocean.py +32 -0
- cloudcheck/providers/dod.py +31 -0
- cloudcheck/providers/fastly.py +24 -0
- cloudcheck/providers/fbi.py +18 -0
- cloudcheck/providers/github.py +30 -0
- cloudcheck/providers/google.py +63 -0
- cloudcheck/providers/heroku.py +9 -0
- cloudcheck/providers/hetzner.py +20 -0
- cloudcheck/providers/hpe.py +14 -0
- cloudcheck/providers/huawei.py +19 -0
- cloudcheck/providers/ibm.py +59 -0
- cloudcheck/providers/imperva.py +27 -0
- cloudcheck/providers/kamatera.py +19 -0
- cloudcheck/providers/leaseweb.py +32 -0
- cloudcheck/providers/microsoft.py +39 -0
- cloudcheck/providers/oracle.py +33 -0
- cloudcheck/providers/ovh.py +17 -0
- cloudcheck/providers/qrator.py +16 -0
- cloudcheck/providers/quiccloud.py +38 -0
- cloudcheck/providers/rackspace.py +23 -0
- cloudcheck/providers/ru_fso.py +13 -0
- cloudcheck/providers/salesforce.py +17 -0
- cloudcheck/providers/scaleway.py +17 -0
- cloudcheck/providers/stormwall.py +14 -0
- cloudcheck/providers/sucuri.py +14 -0
- cloudcheck/providers/tencent.py +17 -0
- cloudcheck/providers/uk_mod.py +16 -0
- cloudcheck/providers/wasabi.py +17 -0
- cloudcheck/providers/x4b.py +14 -0
- cloudcheck/providers/zoho.py +27 -0
- cloudcheck-8.6.1.dist-info/METADATA +175 -0
- cloudcheck-8.6.1.dist-info/RECORD +49 -0
- cloudcheck-8.6.1.dist-info/WHEEL +4 -0
cloudcheck/__init__.py
ADDED
|
Binary file
|
cloudcheck/helpers.py
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
import ipaddress
|
|
2
|
+
import os
|
|
3
|
+
import requests
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import List, Set, Union
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def defrag_cidrs(
    cidrs: List[Union[ipaddress.IPv4Network, ipaddress.IPv6Network]],
) -> List[Union[ipaddress.IPv4Network, ipaddress.IPv6Network]]:
    """
    Defragment a list of CIDR blocks by merging adjacent sibling networks.

    Two networks of equal prefix length that form the two halves of a common
    parent network are replaced by that parent, repeatedly, until no more
    merges are possible.  Duplicates are removed.  IPv4 and IPv6 networks may
    be mixed in the input; they are never merged with each other.

    Args:
        cidrs: List of IPv4 or IPv6 network objects

    Returns:
        List of defragmented network objects, sorted by (prefix length,
        network address) — the same order the previous implementation
        produced after its final pass.

    Note: uses a worklist instead of the previous repeated sort-and-rescan,
    reducing worst-case cost from roughly O(n^2 log n) to about O(n log n).
    Sibling merging is confluent (each merge is local to one parent), so the
    resulting set of networks is unchanged.
    """
    if not cidrs:
        return []

    # Deduplicate up front; membership tests below are O(1).
    nets = set(cidrs)

    # Every network gets examined at least once; merged parents are re-queued
    # because they may themselves have a mergeable sibling.
    pending = list(nets)
    while pending:
        net = pending.pop()
        # Skip entries already merged away, and /0 which has no parent.
        if net not in nets or net.prefixlen == 0:
            continue
        parent = net.supernet(prefixlen_diff=1)
        low, high = parent.subnets(prefixlen_diff=1)
        sibling = high if net == low else low
        if sibling in nets:
            # Replace the sibling pair with their common parent.
            nets.discard(net)
            nets.discard(sibling)
            nets.add(parent)
            pending.append(parent)

    # Match the original output order: prefix length, then packed address.
    return sorted(nets, key=lambda n: (n.prefixlen, n.network_address.packed))
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _can_merge_networks(
    net1: Union[ipaddress.IPv4Network, ipaddress.IPv6Network],
    net2: Union[ipaddress.IPv4Network, ipaddress.IPv6Network],
) -> bool:
    """Return True when net1 and net2 can be combined into one larger CIDR.

    Mergeable means: same IP version, distinct networks, equal prefix
    length, and adjacent (the two halves of a common parent network).
    """
    # Different address families or identical networks never merge.
    if net1.version != net2.version or net1 == net2:
        return False
    # Only equal-sized networks can be the two halves of one parent.
    if net1.prefixlen != net2.prefixlen:
        return False
    # Delegate the sibling/adjacency test.
    return _are_adjacent_networks(net1, net2)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def _are_adjacent_networks(
|
|
93
|
+
net1: Union[ipaddress.IPv4Network, ipaddress.IPv6Network],
|
|
94
|
+
net2: Union[ipaddress.IPv4Network, ipaddress.IPv6Network],
|
|
95
|
+
) -> bool:
|
|
96
|
+
"""
|
|
97
|
+
Check if two networks are adjacent by creating two networks with sub-1 CIDR
|
|
98
|
+
and checking if they are equal.
|
|
99
|
+
"""
|
|
100
|
+
# Must have same prefix length
|
|
101
|
+
if net1.prefixlen != net2.prefixlen:
|
|
102
|
+
return False
|
|
103
|
+
|
|
104
|
+
# Create two networks with sub-1 CIDR
|
|
105
|
+
new_prefixlen = net1.prefixlen - 1
|
|
106
|
+
if new_prefixlen < 0:
|
|
107
|
+
return False
|
|
108
|
+
|
|
109
|
+
# Create the two networks with the reduced prefix length using supernet
|
|
110
|
+
net1_parent = net1.supernet(prefixlen_diff=1)
|
|
111
|
+
net2_parent = net2.supernet(prefixlen_diff=1)
|
|
112
|
+
|
|
113
|
+
# If they are equal, the networks are adjacent
|
|
114
|
+
return net1_parent == net2_parent
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def _merge_networks(
|
|
118
|
+
net1: Union[ipaddress.IPv4Network, ipaddress.IPv6Network],
|
|
119
|
+
net2: Union[ipaddress.IPv4Network, ipaddress.IPv6Network],
|
|
120
|
+
) -> Union[ipaddress.IPv4Network, ipaddress.IPv6Network]:
|
|
121
|
+
"""
|
|
122
|
+
Merge two adjacent networks into a larger network.
|
|
123
|
+
"""
|
|
124
|
+
if net1 == net2:
|
|
125
|
+
raise ValueError("Networks must be different")
|
|
126
|
+
|
|
127
|
+
if not net1.version == net2.version:
|
|
128
|
+
raise ValueError("Networks must be the same version")
|
|
129
|
+
|
|
130
|
+
snet1 = net1.supernet(prefixlen_diff=1)
|
|
131
|
+
snet2 = net2.supernet(prefixlen_diff=1)
|
|
132
|
+
if not snet1 == snet2:
|
|
133
|
+
raise ValueError("Networks must be adjacent")
|
|
134
|
+
|
|
135
|
+
# Find the smaller network address
|
|
136
|
+
min_addr = min(net1.network_address, net2.network_address)
|
|
137
|
+
|
|
138
|
+
# Create the merged network with prefix length - 1
|
|
139
|
+
new_prefixlen = net1.prefixlen - 1
|
|
140
|
+
try:
|
|
141
|
+
return ipaddress.ip_network(f"{min_addr}/{new_prefixlen}")
|
|
142
|
+
except ValueError:
|
|
143
|
+
raise ValueError(
|
|
144
|
+
f"Failed to merge networks: {net1} (type: {type(net1)}) and {net2} (type: {type(net2)})"
|
|
145
|
+
)
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def cidrs_to_strings(
    cidrs: List[Union[ipaddress.IPv4Network, ipaddress.IPv6Network]],
) -> List[str]:
    """Render each network object as its canonical CIDR string.

    Args:
        cidrs: List of network objects

    Returns:
        List of CIDR strings, in the same order as the input
    """
    return list(map(str, cidrs))
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def strings_to_cidrs(
    cidr_strings: List[str],
) -> List[Union[ipaddress.IPv4Network, ipaddress.IPv6Network]]:
    """Parse CIDR strings into network objects, dropping malformed entries.

    Args:
        cidr_strings: List of CIDR strings

    Returns:
        List of network objects, in input order, with unparseable strings
        silently skipped
    """
    parsed = []
    for entry in cidr_strings:
        try:
            # strict=False tolerates host bits set below the prefix.
            network = ipaddress.ip_network(entry, strict=False)
        except ValueError:
            # Not a valid CIDR — drop it rather than fail the whole batch.
            continue
        parsed.append(network)
    return parsed
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
# Request headers mimicking a desktop Chrome 127 browser on Linux, used by
# request() when browser_headers=True — presumably so provider endpoints that
# reject obvious non-browser clients still respond (verify against callers).
browser_base_headers = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-language": "en-US,en;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "priority": "u=0, i",
    "referer": "https://www.google.com/",
    "sec-ch-ua": '"Chromium";v="127", "Not)A;Brand";v="99"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"Linux"',
    "sec-fetch-dest": "document",
    "sec-fetch-mode": "navigate",
    "sec-fetch-site": "cross-site",
    "sec-fetch-user": "?1",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
}
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def request(url, include_api_key=False, browser_headers=False, **kwargs):
    """Perform an HTTP GET via requests with optional extra headers.

    Args:
        url: URL to fetch.
        include_api_key: When True and the BBOT_IO_API_KEY environment
            variable is set, send it as a Bearer token.
        browser_headers: When True, overlay browser_base_headers (these take
            precedence over any caller-supplied headers of the same name).
        **kwargs: Passed through to requests.get (may include "headers").

    Returns:
        The requests Response object.
    """
    # BUGFIX: copy the caller's headers dict instead of mutating it in place;
    # also tolerates an explicit headers=None.
    headers = dict(kwargs.get("headers") or {})
    if browser_headers:
        headers.update(browser_base_headers)
    bbot_io_api_key = os.getenv("BBOT_IO_API_KEY")
    if include_api_key and bbot_io_api_key:
        headers["Authorization"] = f"Bearer {bbot_io_api_key}"
    kwargs["headers"] = headers
    return requests.get(url, **kwargs)
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def parse_v2fly_domain_file(file_path: Path, _seen: Union[Set[Path], None] = None) -> Set[str]:
    """Parse a v2fly domain-list file and extract domains (lowercased).

    Supported line forms: "domain:<d>", "full:<d>", bare "<d>", and
    "include:<file>" (parsed recursively from the same directory).
    "keyword:" and "regexp:" lines are skipped, "#" starts a comment, and
    "@attr" suffixes are stripped.

    Args:
        file_path: Path to the domain list file.
        _seen: Internal recursion guard; leave as None when calling.

    Returns:
        Set of lowercased domain strings (empty if the file is missing).
    """
    print(f"Parsing {file_path}")
    domains = set()
    # BUGFIX: guard against circular include: chains, which previously
    # recursed without bound.
    if _seen is None:
        _seen = set()
    if file_path in _seen:
        return domains
    _seen.add(file_path)
    if not file_path.exists():
        print(f"File {file_path} does not exist")
        return domains

    with open(file_path, "r", encoding="utf-8") as f:
        for raw_line in f:
            # Drop inline comments, then surrounding whitespace.
            line = raw_line.split("#")[0].strip()
            if not line:
                continue

            if line.startswith("include:"):
                include_path = file_path.parent / line[8:]
                domains.update(parse_v2fly_domain_file(include_path, _seen))
                continue

            if line.startswith("domain:"):
                domain = line[7:]
            elif line.startswith("full:"):
                domain = line[5:]
            elif line.startswith("keyword:") or line.startswith("regexp:"):
                # Pattern entries are not concrete domains.
                continue
            else:
                domain = line

            # Strip "@attribute" annotations.
            domain = domain.split("@")[0].strip()
            if domain:
                domains.add(domain.lower())
    return domains
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import importlib
|
|
2
|
+
from sys import stderr
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Dict, Type
|
|
5
|
+
|
|
6
|
+
from cloudcheck.providers.base import BaseProvider
|
|
7
|
+
|
|
8
|
+
# Dictionary to store loaded provider classes
|
|
9
|
+
_provider_classes: Dict[str, Type[BaseProvider]] = {}
|
|
10
|
+
_provider_instances: Dict[str, BaseProvider] = {}
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def load_provider_classes() -> Dict[str, Type[BaseProvider]]:
    """Discover every BaseProvider subclass in this package and cache it.

    Scans sibling modules (skipping base.py and __init__.py), imports each
    one, and records every class that subclasses BaseProvider keyed by its
    class name.  Results are cached in the module-level _provider_classes
    dict, so discovery only runs once.
    """
    global _provider_classes

    # Discovery already ran — hand back the cached mapping.
    if _provider_classes:
        return _provider_classes

    package_dir = Path(__file__).parent
    for module_file in package_dir.glob("*.py"):
        # Neither the abstract base nor this package initializer defines a provider.
        if module_file.stem in ("base", "__init__"):
            continue

        try:
            module = importlib.import_module(f"cloudcheck.providers.{module_file.stem}")

            # Register each BaseProvider subclass exported by the module.
            for candidate_name in dir(module):
                candidate = getattr(module, candidate_name)
                is_provider_class = (
                    isinstance(candidate, type)
                    and issubclass(candidate, BaseProvider)
                    and candidate != BaseProvider
                )
                if is_provider_class:
                    _provider_classes[candidate.__name__] = candidate

        except Exception as e:
            # Surface the failure, then propagate — a broken provider module
            # should abort loading rather than be silently skipped.
            print(f"Failed to load provider from {module_file}: {e}", file=stderr)
            raise

    return _provider_classes
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# Instantiate each discovered provider class once at import time and expose
# the instance both as a module-level attribute (so e.g.
# cloudcheck.providers.<ClassName> resolves to the instance) and in the
# _provider_instances registry.  Note the loop variables themselves also
# remain as module globals after the loop.
for provider_name, provider_class in load_provider_classes().items():
    provider_instance = provider_class()
    globals()[provider_name] = provider_instance
    _provider_instances[provider_name] = provider_instance
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import io
|
|
2
|
+
import zipfile
|
|
3
|
+
from cloudcheck.providers.base import BaseProvider
|
|
4
|
+
from typing import List
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Akamai(BaseProvider):
    """Provider definition for Akamai's CDN and cloud infrastructure."""

    v2fly_company: str = "akamai"
    tags: List[str] = ["cloud"]
    short_description: str = "Akamai"
    long_description: str = "A content delivery network and cloud services provider that delivers web and internet security services."
    # Registry org handles covering Akamai's ARIN, RIPE, and APNIC allocations.
    org_ids: List[str] = [
        "AKAMAI-ARIN",
        "ORG-AT1-RIPE",
        "ORG-ATI1-AP-APNIC",
    ]

    # Zip archive published by Akamai containing one CIDR text file per
    # address family.
    _ips_url = "https://techdocs.akamai.com/property-manager/pdfs/akamai_ipv4_ipv6_CIDRs-txt.zip"

    def fetch_cidrs(self):
        """Download Akamai's CIDR zip and return the unique CIDR strings."""
        response = self.request(self._ips_url)
        payload = getattr(response, "content", b"")
        cidrs = set()
        # Unpack the archive entirely in memory; no temp files needed.
        with zipfile.ZipFile(io.BytesIO(payload)) as archive:
            for member in ("akamai_ipv4_CIDRs.txt", "akamai_ipv6_CIDRs.txt"):
                with archive.open(member) as handle:
                    for raw in handle.read().splitlines():
                        entry = raw.decode(errors="ignore").strip()
                        if entry:
                            cidrs.add(entry)
        return list(cidrs)
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from cloudcheck.providers.base import BaseProvider
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Alibaba(BaseProvider):
    """Provider definition for Alibaba Cloud, identified via registry org IDs
    and the v2fly "alibaba" company list (no direct CIDR feed here)."""

    v2fly_company: str = "alibaba"
    tags: List[str] = ["cloud"]
    short_description: str = "Alibaba Cloud"
    long_description: str = "A Chinese cloud computing company and subsidiary of Alibaba Group, providing cloud services and infrastructure."
    # Alibaba Cloud (Singapore) Private Limited — APNIC handle, AS134963.
    org_ids: List[str] = ["ORG-ASEP1-AP-APNIC"]
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
from cloudcheck.providers.base import BaseProvider
|
|
2
|
+
from typing import List, Dict
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Amazon(BaseProvider):
    """Provider definition for Amazon Web Services (AWS)."""

    v2fly_company: str = "amazon"
    short_description: str = "Amazon Web Services"
    long_description: str = "A comprehensive cloud computing platform provided by Amazon, offering infrastructure services, storage, and computing power."
    # Registry org handles for Amazon's ARIN and APNIC allocations.
    org_ids: List[str] = [
        "AMAZO-139-ARIN",  # Amazon.com, Inc., US
        "AMAZO-141-ARIN",  # Amazon Technologies, Inc., US
        "AMAZO-22-ARIN",  # Amazon Web Services, Inc., US
        "AMAZO-4-ARIN",  # Amazon.com, Inc., US
        "AMAZON-4-ARIN",  # Amazon.com, Inc., US
        "ARL-76-ARIN",  # Amazon Robotics LLC, US
        "ASL-830-ARIN",  # Amazon.com Services, LLC, US
        "AT-9049-ARIN",  # Amazon Technologies Inc., US
        "AT-9066-ARIN",  # Amazon Technologies Inc., US
        "ORG-AARP1-AP-APNIC",  # Amazon Asia-Pacific Resources Private Limited, SG
        "ORG-ACSP2-AP-APNIC",  # Amazon Corporate Services Pty Ltd, AU
        "ORG-ACTS1-AP-APNIC",  # Amazon Connection Technology Services (Beijing) Co., LTD, CN
        "ORG-ADSI1-RIPE",  # Amazon Data Services Ireland Ltd, IE
        "ORG-ADSJ1-AP-APNIC",  # Amazon Data Services Japan KK, JP
        "ORG-AI2-AP-APNIC",  # Amazon.com, Inc., US
    ]
    tags: List[str] = ["cloud"]
    # Pattern appears to follow S3 bucket naming limits (3-63 chars,
    # lowercase/digits/dots/hyphens) — confirm against AWS bucket rules.
    _bucket_name_regex = r"[a-z0-9_][a-z0-9-\.]{1,61}[a-z0-9]"
    regexes: Dict[str, List[str]] = {
        "STORAGE_BUCKET_NAME": [_bucket_name_regex],
        "STORAGE_BUCKET_HOSTNAME": [
            r"(" + _bucket_name_regex + r")\.(s3-?(?:[a-z0-9-]*\.){1,2}amazonaws\.com)"
        ],
    }

    # Machine-readable feed of all published AWS IP ranges.
    _ips_url = "https://ip-ranges.amazonaws.com/ip-ranges.json"

    def fetch_cidrs(self):
        """Return the unique CIDR strings published in AWS's ip-ranges.json.

        BUGFIX: the feed lists IPv4 ranges under "prefixes" (key
        "ip_prefix") and IPv6 ranges under "ipv6_prefixes" (key
        "ipv6_prefix"); previously only the IPv4 set was collected, unlike
        the other providers which gather both address families.
        """
        data = self.request(self._ips_url).json()
        cidrs = set(p["ip_prefix"] for p in data["prefixes"])
        cidrs.update(p["ipv6_prefix"] for p in data.get("ipv6_prefixes", []))
        return list(cidrs)
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from cloudcheck.providers.base import BaseProvider
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Arvancloud(BaseProvider):
    """Provider definition for Arvancloud's CDN and cloud services."""

    domains: List[str] = ["arvancloud.ir"]
    tags: List[str] = ["cdn"]
    short_description: str = "Arvancloud"
    long_description: str = "An Iranian cloud computing and content delivery network provider offering cloud infrastructure and CDN services."
    # ARVANCLOUD GLOBAL TECHNOLOGIES L.L.C — RIPE handle (AS57568, AS208006,
    # AS210296).
    org_ids: List[str] = [
        "ORG-AGTL2-RIPE",
    ]

    # Plain-text feed, one CIDR per line.
    _ips_url = "https://www.arvancloud.ir/en/ips.txt"

    def fetch_cidrs(self):
        """Fetch Arvancloud's published IP list and return the CIDR strings.

        Returns an empty list when the endpoint does not answer 200.
        BUGFIX: previously raw splitlines() output was added verbatim, so
        blank or whitespace-only lines became bogus entries in the set.
        """
        response = self.request(self._ips_url)
        ranges = set()
        if getattr(response, "status_code", 0) == 200:
            for line in response.text.splitlines():
                line = line.strip()
                if line:
                    ranges.add(line)
        return list(ranges)
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from cloudcheck.providers.base import BaseProvider
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Backblaze(BaseProvider):
    """Provider definition for Backblaze cloud storage and backup, identified
    by registry org ID and its public domains (no direct CIDR feed here)."""

    tags: List[str] = ["cloud"]
    short_description: str = "Backblaze"
    long_description: str = "A cloud storage and backup service provider offering data backup and cloud storage solutions."
    # Backblaze Inc — ARIN handle (AS40401, AS396865).
    org_ids: List[str] = ["BACKB-7-ARIN"]
    domains: List[str] = ["backblaze.com", "backblazeb2.com"]
|