autowebx-1.0.0.tar.gz

This diff shows the content of publicly available package versions as released to their public registries. It is provided for informational purposes only.
@@ -0,0 +1,12 @@
+ Metadata-Version: 2.4
+ Name: autowebx
+ Version: 1.0.0
+ Requires-Dist: requests>=2.32.3
+ Requires-Dist: beautifulsoup4>=4.13.4
+ Requires-Dist: names>=0.3.0
+ Requires-Dist: phonenumbers>=9.0.6
+ Requires-Dist: colorama>=0.4.6
+ Requires-Dist: art>=6.5
+ Requires-Dist: multipledispatch>=1.0.0
+ Requires-Dist: ntplib>=0.4.0
+ Dynamic: requires-dist
@@ -0,0 +1,217 @@
+ import inspect
+ import json
+ import threading
+ from calendar import monthrange
+ from datetime import datetime
+ from sys import exc_info, argv
+ from time import sleep, time
+ from urllib.parse import unquote, parse_qs, urlparse
+ from uuid import getnode
+
+ from art import text2art
+ from colorama import Fore
+ from ntplib import NTPClient
+ from phonenumbers import region_code_for_number, parse
+
+ from autoweb.files import add
+
+ __lock = threading.Lock()
+
+
+ class AccountError(Exception):
+     """Raised when an account fails to create"""
+
+
+ def int_input(prompt: str, default: int = 1) -> int:
+     try:
+         return int(input(prompt))
+     except ValueError:
+         return default
+
+
+ def intro(name: str):
+     user_id = getnode()
+     sync_print('=' * 85)
+     sync_print(f'{Fore.LIGHTYELLOW_EX}{text2art("Hazem3010", "banner3")}')
+     indent = " " * int((85 - len(f'{name} :: Your ID: {user_id}')) / 2)
+     sync_print(f'{indent}{Fore.GREEN}{name}{Fore.LIGHTMAGENTA_EX} :: {Fore.CYAN}Your ID: {user_id}{Fore.RESET}')
+     sync_print('=' * 85)
+     return str(user_id)
+
+
+ class PhoneNumber:
+     def __init__(self, phone_number):
+         self.number = phone_number
+         parsed_number = parse(f'+{phone_number}')
+         self.prefix = parsed_number.country_code
+         self.country = region_code_for_number(parsed_number)
+
+
+ def sync_print(*content, end: str = '\n'):
+     __lock.acquire()
+     print(*content, end=end, flush=True)
+     add(f"{' '.join(map(str, content))}{end}", 'output.txt')
+     __lock.release()
+
+
+ class URL:
+     def __init__(self, url: str):
+         self.__parameters__ = parse_qs(urlparse(url).query)
+
+     def get(self, parameter: str) -> str:
+         return self.__parameters__.get(parameter)[0]
+
+
+ def __get_function__() -> None:
+     http = argv[1]
+     http_content = open(http, 'r').read()
+     lines = http_content.split('\n')
+     method, endpoint, _ = lines[0].split(' ')
+     endpoint = unquote(endpoint).replace("\"", "\\\"")
+
+     # Extract headers properly
+     headers = {}
+     host = ""
+     for line in lines[1:]:
+         if line == '':
+             break
+         line_data = line.split(':', 1)
+         if len(line_data) == 2:
+             key, value = line_data[0].strip(), unquote(line_data[1].strip())
+             headers[key] = value
+             if key.lower() == "host":
+                 host = value
+
+     if not host:
+         raise ValueError("Host header is missing in the request")
+
+     result = f'def function(self): # {http}\n url = "https://{host}{endpoint}"\n\n'
+
+     result += " headers = {\n"
+     for key, value in headers.items():
+         result += f" '{key}': '{value}',\n"
+     result = result[:-2] + "\n }\n\n"
+
+     content = http_content.split('\n\n')
+     with_payload = False
+     json_payload = False
+     if len(content) > 1 and content[1] != '':
+         with_payload = True
+         result += ' payload = '
+         variables_content = content[1].strip()
+         if variables_content.startswith('{'):
+             payload = json.loads(variables_content)
+             json_payload = True
+         else:
+             variables = content[1].strip().split('&')
+             payload = {}
+             for item in variables:
+                 try:
+                     pair = item.split('=')
+                     payload[pair[0]] = unquote(pair[1])
+                 except IndexError:
+                     pass
+
+         payload = json.dumps(payload, indent=4).replace('\n', '\n ').replace(': true', ': True')
+         payload = payload.replace(': false', ': False')
+         result += unquote(payload)
+         result = result + '\n\n'
+
+     args = f'(url, headers=headers{", json=payload)" if json_payload else ", data=payload)" if with_payload else ")"}'
+     result += f' response = self.session.{method.lower()}{args}\n'
+     result = result.replace('\t', ' ')
+
+     open('function.py', 'w', encoding='UTF-8').write(result)
+
+
+ def ranges(numbers: list[str]) -> list[str]:
+     return list({number[:-3] for number in numbers})
+
+
+ def days_in_month(year: int, month: int) -> int:
+     return monthrange(year, month)[1]
+
+
+ def handle_threads(threads: int, total: int, target) -> None:
+     for i in range(1, total + 1):
+         target(count=i).start()
+         while True:
+             active = 0
+             for thread in threading.enumerate():
+                 if thread.name.startswith('Task_'):
+                     active += 1
+             if active < threads:
+                 break
+             sleep(1)
+
+     done = False
+     while not done:
+         done = True
+         for thread in threading.enumerate():
+             if thread.name.startswith('Task_'):
+                 done = False
+                 break
+
+
+
+ def var_name(var):
+     for name, value in inspect.stack()[1].frame.f_locals.items():
+         if value is var:
+             return name
+     return None
+
+
+ useragents = [
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0",
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 "
+     "Safari/537.36 Edg/123.0.2420.81",
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 "
+     "Safari/537.36 OPR/109.0.0.0",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 "
+     "Safari/537.36",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 14.4; rv:124.0) Gecko/20100101 Firefox/124.0",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_4_1) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.4.1 "
+     "Safari/605.1.15",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_4_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 "
+     "Safari/537.36 OPR/109.0.0.0",
+     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
+     "Mozilla/5.0 (X11; Linux i686; rv:124.0) Gecko/20100101 Firefox/124.0"
+ ]
+
+ __locales_str = (
+     "af_ZAam_ETar_AEar_BHar_DZar_EGar_IQar_JOar_KWar_LBar_LYar_MAar_OMar_QAar_SAar_SDar_SYar_TNar_YEaz_AZbe_BYbg_BGbn_B"
+     "Dbn_INbs_BAca_EScs_CZcy_GBda_DKde_ATde_BEde_CHde_DEde_LIde_LUdv_MVel_CYel_GRen_AUen_BWen_CAen_GBen_HKen_IEen_INen_"
+     "JMen_MHen_MTen_NAen_NZen_PHen_PKen_SGen_TTen_USen_ZAen_ZWes_ARes_BOes_CLes_COes_CRes_DOes_ECes_ESes_GTes_HNes_MXes"
+     "_NIes_PAes_PEes_PRes_PYes_SVes_USes_UYes_VEet_EEeu_ESfa_IRfi_FIfo_FOfr_BEfr_BFfr_BIfr_BJfr_BLfr_CAfr_CDfr_CFfr_CGf"
+     "r_CHfr_CIfr_CMfr_DJfr_FRfr_GAfr_GFfr_GNfr_GPfr_GQfr_HTfr_KMfr_LUfr_MAfr_MCfr_MFfr_MGfr_MLfr_MQfr_NEfr_PFfr_PMfr_RE"
+     "fr_RWfr_SCfr_SNfr_SYfr_TDfr_TGfr_TNfr_VUfr_WFfr_YTga_IEgl_ESgu_INha_NGhe_ILhi_INhr_BAhr_HRhu_HUhy_AMid_IDig_NGis_I"
+     "Sit_CHit_ITja_JPka_GEkk_KZkm_KHkn_INko_KRky_KGlo_LAlt_LTlv_LVmg_MGmk_MKml_INmn_MNmr_INms_BNms_MYmt_MTnb_NOne_NPnl_"
+     "AWnl_BEnl_NLnn_NOom_ETor_INpa_INpl_PLps_AFpt_AOpt_BRpt_CHpt_CVpt_FRpt_GQpt_GWpt_LUpt_MOpt_MZpt_PTpt_STpt_TLro_MDro"
+     "_ROru_BYru_KGru_KZru_MDru_RUru_UArw_RWsd_INsi_LKsk_SKsl_SIso_DJso_ETso_KEso_SOsq_ALsr_BAsr_MEsr_RSsv_AXsv_FIsv_SEs"
+     "w_CDsw_KEsw_TZsw_UGta_INta_LKte_INth_THti_ERti_ETtl_PHtn_BWtn_ZAtr_CYtr_TRug_CNuk_UAur_INur_PKuz_AFuz_UZvi_VNyo_NG"
+     "zh_CNzh_HKzh_MOzh_SGzh_TWzu_ZA"
+ )
+
+
+ def locales():
+     return [__locales_str[i:i + 5] for i in range(0, len(__locales_str), 5)]
+
+
+ def exception_line():
+     return exc_info()[-1].tb_lineno
+
+
+ def internet_time():
+     return datetime.fromtimestamp(NTPClient().request('pool.ntp.org').tx_time)
+
+
+ class Timer:
+     def __init__(self, timeout, message):
+         self.timeout = timeout
+         self.start = time()
+         self.message = message
+
+     def __call__(self):
+         if time() - self.start > self.timeout:
+             raise TimeoutError(self.message)
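
The module above combines console helpers (intro, sync_print), thread throttling (handle_threads, which keeps at most the given number of threads whose names start with 'Task_' alive at once), and small utilities such as Timer and internet_time. A minimal usage sketch, not part of the package, assuming the distribution installs this module under the autoweb import path used throughout the diff and that autoweb.files (not shown here) provides the add() helper it relies on:

    import threading
    from autoweb import Timer, handle_threads, intro, sync_print

    class Task(threading.Thread):
        def __init__(self, count):
            # handle_threads throttles on threads whose name starts with 'Task_'
            super().__init__(name=f'Task_{count}')
            self.count = count

        def run(self):
            timer = Timer(10, f'task {self.count} timed out')
            sync_print(f'task {self.count} running')   # also appended to output.txt via files.add
            timer()   # raises TimeoutError only once more than 10 seconds have elapsed

    intro('demo')                                      # prints the banner and returns this machine's ID
    handle_threads(threads=5, total=20, target=Task)   # starts 20 tasks, at most ~5 alive at a time
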
@@ -0,0 +1,82 @@
+ import random
+ from string import digits, ascii_lowercase, ascii_uppercase
+
+ from names import get_first_name
+
+
+ class Account:
+     def __init__(self, **kwargs):
+         self.first_name = kwargs.get("first_name", get_first_name())
+         self.last_name = kwargs.get("last_name", get_first_name())
+         self.full_name = self.first_name + " " + self.last_name
+         domain = kwargs.get("domain", 'gmail.com')
+         if 'username' in kwargs.keys():
+             self.username = kwargs["username"]
+         elif 'email_length' in kwargs.keys():
+             self.username = generate_username(self.first_name, kwargs["email_length"] - len(domain))
+         elif 'username_length' in kwargs.keys():
+             self.username = generate_username(self.first_name, kwargs["username_length"])
+         else:
+             self.username = generate_username(self.first_name)
+         self.email = kwargs.get("email", f'{self.username}@{domain}')
+         password_length = kwargs.get("password_length", None)
+         additional_characters = kwargs.get('additional_characters', '')
+         self.password = kwargs.get("password", generate_password(password_length, additional_characters))
+         self.phone_number = generate_us_number()
+         self.address_line1 = generate_address_line_1()
+         self.city = get_random_city()
+
+
+ def generate_username(name: str = get_first_name(), length: int = random.randint(10, 15)):
+     return f'{name}{"".join(random.choice(digits) for _ in range(length - len(name)))}'.lower()
+
+
+ def generate_password(length: int | None = None, additional_characters: str = ''):
+     if length is None:
+         length = random.randint(10, 20)
+     while True:
+         try:
+             character_set = ascii_lowercase + ascii_uppercase + digits + additional_characters
+             password = ''.join(random.choice(character_set) for _ in range(length))
+             contains_lower = any(character in ascii_lowercase for character in password)
+             contains_upper = any(character in ascii_uppercase for character in password)
+             contains_digit = any(character.isdigit() for character in password)
+             contains_others = any(character in additional_characters for character in password)
+             contains_both_cases = contains_lower and contains_upper
+             if contains_digit and (contains_others if additional_characters else True) and contains_both_cases:
+                 return password
+             else:
+                 return generate_password(length, additional_characters)
+         except RecursionError:
+             pass
+
+
+ def generate_us_number():
+     area_code = str(random.randint(200, 999))  # Avoids starting with 0 or 1
+     exchange_code = str(random.randint(200, 999))  # Same here
+     subscriber_number = str(random.randint(0, 9999)).zfill(4)
+
+     return f"{area_code}{exchange_code}{subscriber_number}"
+
+
+ def generate_address_line_1():
+     street_numbers = range(100, 9999)
+     street_names = [
+         "Main", "Oak", "Pine", "Maple", "Cedar", "Elm", "Washington", "Lake", "Hill", "Sunset"
+     ]
+     street_types = ["St", "Ave", "Blvd", "Rd", "Ln", "Dr", "Ct", "Pl", "Way", "Terrace"]
+
+     number = str(random.choice(street_numbers))
+     name = random.choice(street_names)
+     st_type = random.choice(street_types)
+
+     return f"{number} {name} {st_type}"
+
+
+ def get_random_city():
+     cities = [
+         "New York", "Tokyo", "London", "Paris", "Berlin",
+         "Cairo", "Dubai", "Istanbul", "Sydney", "Toronto",
+         "Rio de Janeiro", "Moscow", "Seoul", "Bangkok", "Mumbai"
+     ]
+     return random.choice(cities)
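
Account ties the generators above together: random first and last names from the names package, a digit-padded username, a mixed-case password, plus fake US phone, street address and city values. A brief sketch, not part of the package, assuming these helpers are importable from a module such as autoweb.account (file names are not visible in this diff):

    from autoweb.account import Account, generate_password   # hypothetical module path

    account = Account(domain='example.com', username_length=12,
                      password_length=16, additional_characters='!@#')
    print(account.email)                          # e.g. john1234567@example.com
    print(account.full_name, account.phone_number, account.address_line1, account.city)

    # The generators can also be used on their own:
    print(generate_password(12))                  # guaranteed to mix digits with both letter cases
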
@@ -0,0 +1,68 @@
+ import json
+ from json import JSONDecodeError
+ from typing import Union, Mapping, Any, Iterable, Tuple
+
+ from autoweb import var_name
+
+
+ class AutoSaveDict(dict):
+     def __init__(self, file_path: str = 'dict.json'):
+         self.__file_path = file_path
+         try:
+             super().__init__(json.load(open(self.__file_path, 'r', encoding='utf-8')))
+         except (FileNotFoundError, JSONDecodeError):
+             super().__init__()
+
+         self.__save()
+
+     def __save(self):
+         json.dump(self, open(self.__file_path, 'w', encoding='utf-8', errors='ignore'), indent=2, ensure_ascii=False)
+
+     def __setitem__(self, key, value):
+         super().__setitem__(key, value)
+         self.__save()
+
+     def __delitem__(self, key):
+         super().__delitem__(key)
+         self.__save()
+
+     def update(
+             self,
+             __m: Union[Mapping[Any, Any], Iterable[Tuple[Any, Any]]] = (),
+             **kwargs: Any
+     ) -> None:
+         """
+         Covers the two basic call forms:
+         1. update(dict_or_mapping)
+         2. update([(k1, v1), (k2, v2), ...], key=value, ...)
+         """
+         # First run the original dict behaviour (merge the new values)
+         super().update(__m, **kwargs)
+         # Then persist the dictionary to the file
+         self.__save()
+
+     def clear(self):
+         super().clear()
+         self.__save()
+
+     def __call__(self, key, from_list: list = None, loop: bool = False):
+         answer = self.setdefault(key, 0)
+         if isinstance(answer, int):
+             if from_list is not None:
+                 if answer < len(from_list):
+                     self[key] = answer + 1
+                     return from_list[answer]
+                 else:
+                     if loop:
+                         self[key] = 0
+                         if len(from_list) > 0:
+                             return self(key, from_list, loop)
+                         else:
+                             raise IndexError(f"The list {var_name(from_list)} is empty")
+                     else:
+                         raise IndexError('IndexError: list index out of range')
+             else:
+                 self[key] = answer + 1
+                 return answer
+         else:
+             return answer
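
AutoSaveDict rewrites its JSON file after every mutation, and its __call__ doubles as a per-key counter or as a cursor over a list, advancing one element per call and wrapping around when loop=True. A minimal sketch, not part of the package; the import path and the 'state.json' file name are placeholders:

    from autoweb.auto_save_dict import AutoSaveDict   # hypothetical module path

    state = AutoSaveDict('state.json')        # reloads state.json if it already exists
    state['retries'] = 3                      # every write immediately rewrites the file

    state('attempts')                         # returns 0 on the first call, then 1, 2, ...

    proxies = ['10.0.0.1:8080', '10.0.0.2:8080']
    state('proxy', from_list=proxies, loop=True)   # walks the list, wrapping around because loop=True
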
@@ -0,0 +1,65 @@
+ import queue
+ import threading
+ import time
+ import json
+ from abc import ABC
+ from collections import deque
+ from collections.abc import Iterable
+ from threading import Event
+
+
+ class AutoSaveQueue(queue.Queue, Iterable, ABC):
+     def __init__(self, file_path, auto_save_interval=5, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.file_path = file_path
+         self.auto_save_interval = auto_save_interval
+         self._stop_event = Event()
+         self._start_auto_save()
+
+     def _start_auto_save(self):
+         """Start the auto-save thread that saves the queue to the file at regular intervals."""
+         self.auto_save_thread = threading.Thread(target=self._auto_save)
+         self.auto_save_thread.daemon = True
+         self.auto_save_thread.start()
+
+     def _auto_save(self):
+         """Automatically save the queue to a file at regular intervals."""
+         while not self._stop_event.is_set():
+             self.save()
+             time.sleep(self.auto_save_interval)
+
+     def save(self):
+         """Save the current state of the queue to a file."""
+         with self.mutex:
+             open(self.file_path, 'w').write('\n'.join(str(item) for item in self))
+
+     def stop(self):
+         """Stop the auto-save thread."""
+         self._stop_event.set()
+         self.auto_save_thread.join()
+
+     def put(self, item, *args, **kwargs):
+         """Override the put method to save after adding an item."""
+         super().put(item, *args, **kwargs)
+         self.save()
+
+     def get(self, *args, **kwargs):
+         """Override the get method to save after removing an item."""
+         item = super().get(*args, **kwargs)
+         self.save()
+         return item
+
+     def load(self):
+         """Load the queue from a file."""
+         try:
+             with open(self.file_path, 'r') as f:
+                 data = json.load(f)
+             with self.mutex:
+                 self.queue = deque(data)
+         except (FileNotFoundError, json.JSONDecodeError):
+             pass  # Ignore errors when the file doesn't exist or is empty
+
+ # Usage
+ auto_queue = AutoSaveQueue("queue_data.json", auto_save_interval=10)
+ auto_queue.put("task 1")
+ auto_queue.put("task 2")
@@ -0,0 +1,53 @@
+ from autoweb.files import load
+
+
+ class AutoSaveSet(set):
+     def __init__(self, filename):
+         super().__init__(load(filename))
+         self.filename = filename
+         self._save_to_file()
+
+     def _save_to_file(self):
+         open(self.filename, 'w').write('\n'.join(self))
+
+     def add(self, element):
+         super().add(element)
+         self._save_to_file()
+
+     def remove(self, element):
+         super().remove(element)
+         self._save_to_file()
+
+     def update(self, *args):
+         super().update(*args)
+         self._save_to_file()
+
+     def discard(self, element):
+         super().discard(element)
+         self._save_to_file()
+
+     def clear(self):
+         super().clear()
+         self._save_to_file()
+
+     def pop(self):
+         element = super().pop()
+         self._save_to_file()
+         return element
+
+     def difference_update(self, *args):
+         super().difference_update(*args)
+         self._save_to_file()
+
+     def intersection_update(self, *args):
+         super().intersection_update(*args)
+         self._save_to_file()
+
+     def symmetric_difference_update(self, *args):
+         super().symmetric_difference_update(*args)
+         self._save_to_file()
+
+     def __iadd__(self, elements):
+         super().update(elements)
+         self._save_to_file()
+         return self
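
AutoSaveSet applies the same pattern to a set: every mutating method rewrites the backing file, one element per line, and the constructor seeds the set from autoweb.files.load() (not shown in this diff), so it is effectively limited to string elements. A minimal sketch, not part of the package; the import path and file name are placeholders:

    from autoweb.auto_save_set import AutoSaveSet   # hypothetical module path

    seen = AutoSaveSet('seen_emails.txt')           # starts from whatever load() finds in the file
    seen.add('user@example.com')                    # each mutation rewrites seen_emails.txt
    seen.update({'a@example.com', 'b@example.com'})
    seen.discard('b@example.com')

    if 'user@example.com' in seen:                  # read-only set operations are inherited unchanged
        print('already processed')
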
@@ -0,0 +1,41 @@
+ from random import randint
+ from time import time, sleep
+
+ from requests import post
+
+
+ class RecaptchaV2:
+     def __init__(self, apikey: str, website_url: str, website_key: str, is_invisible: bool = False):
+         self.client_key = apikey
+
+         data = {
+             "clientKey": apikey,
+             "task": {
+                 "type": "RecaptchaV2TaskProxyless",
+                 "websiteURL": website_url,
+                 "websiteKey": website_key,
+                 "isInvisible": is_invisible
+             }
+         }
+
+         response = post('https://api.capsolver.com/createTask', json=data).json()
+         self.task_id = response['taskId']
+         self.submit_time = time()
+
+     def solution(self, timeout=randint(15, 20)):
+         while time() < self.submit_time + timeout:
+             pass
+
+         data = {
+             "clientKey": self.client_key,
+             "taskId": self.task_id
+         }
+
+         while True:
+             response = post('https://api.capsolver.com/getTaskResult', json=data).json()
+             if response['status'] == 'processing':
+                 sleep(5)
+             elif (error_id := response['errorId']) != 0:
+                 raise Exception(f'{error_id}:{response["errorCode"]}:{response["errorDescription"]}')
+             else:
+                 return response['solution']['gRecaptchaResponse']
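
RecaptchaV2 wraps CapSolver's createTask / getTaskResult endpoints: the constructor submits a proxyless reCAPTCHA v2 task, and solution() waits out a randomized delay before polling until the token is ready or an error is reported. A usage sketch, not part of the package; the API key, page URL and site key below are placeholders:

    captcha = RecaptchaV2(
        apikey='CAP-XXXXXXXX',                       # placeholder CapSolver client key
        website_url='https://example.com/signup',    # placeholder target page
        website_key='6Le...site-key...',             # placeholder reCAPTCHA site key
    )
    token = captcha.solution(timeout=20)             # g-recaptcha-response value to submit with the form
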
@@ -0,0 +1,20 @@
+ from multiprocessing.connection import Listener
+ from multiprocessing.connection import Client
+ from threading import Lock
+
+ __lock = Lock()
+
+
+ def send(msg):
+     with Listener(('localhost', 3011)) as listener:
+         with listener.accept() as conn:
+             conn.send(msg)
+
+
+ def receive():
+     with __lock:
+         try:
+             with Client(('localhost', 3011)) as conn:
+                 return conn.recv()
+         except ConnectionRefusedError:
+             raise ConnectionRefusedError("Run the sender first")
@@ -0,0 +1,78 @@
+ import re
+ from time import time
+
+ from bs4 import BeautifulSoup, Tag, ResultSet
+ from bs4.element import PageElement, NavigableString
+ from requests import get, post, ConnectTimeout, ConnectionError
+
+
+ def get_suggestions():
+     html = get("https://email-fake.com/").text
+     soup = BeautifulSoup(html, "html.parser")
+     return [child.text for child in soup.find('div', class_="tt-suggestions").children]
+
+
+ def get_message(
+         email: str | Tag, timeout: int | float = 30
+ ) -> None | PageElement | BeautifulSoup | NavigableString | ResultSet[Tag]:
+     if isinstance(email, str):
+         url = "https://email-fake.com/"
+         username, domain = email.split("@")
+         surl = f'{domain}/{username}'
+         start = time()
+         exception = None
+         while time() - start < timeout:
+             try:
+                 response = get(url, cookies={'surl': surl}).text
+                 soup = BeautifulSoup(response, 'html.parser')
+                 tag = soup.find('div', class_='mess_bodiyy')
+                 if tag:
+                     delll = re.search(r'delll: "([^"]+)"', response).group(1)
+                     cookies = {'surl': surl, 'embx': f'["{email}"]'}
+                     post('https://email-fake.com/del_mail.php', {'delll': delll}, cookies=cookies)
+                     return tag
+                 else:
+                     exception = TimeoutError("No message received")
+                     tags = soup.select('#email-table a')
+                     if len(tags) > 0:
+                         return tags
+             except (ConnectionError, ConnectTimeout):
+                 exception = ConnectionError("There is no internet connection")
+         raise exception
+     elif isinstance(email, Tag):
+         start = time()
+         exception = None
+         while time() - start < timeout:
+             try:
+                 print(url := f'https://www.email-fake.com{email.get("href")}')
+                 response = get(url).text
+                 soup = BeautifulSoup(response, 'html.parser')
+                 tag = soup.find('div', class_='mess_bodiyy')
+                 if tag:
+                     return tag
+             except ConnectionError:
+                 exception = ConnectionError("There is no internet connection")
+         raise exception
+     return None
+
+
+ def delete_all_messages(email: str):
+     username, domain = email.split("@")
+     surl = f'{domain}/{username}'
+     response = get("https://email-fake.com/", cookies={'surl': surl}).text
+
+     if match := re.search(r'delll: "([^"]+)"', response):
+         delll = match.group(1)
+         cookies = {'surl': surl, 'embx': f'["{email}"]'}
+         post('https://email-fake.com/del_mail.php', {'delll': delll}, cookies=cookies)
+
+     if match := re.search(r'dellall: "([^"]+)"', response):
+         dellall = match.group(1)
+         url = "https://email-fake.com/del_mail.php"
+         headers = {
+             'Cookie': f'surl={surl}; embx=["{email}"]'
+         }
+         payload = {
+             "dellall": dellall
+         }
+         post(url, headers=headers, data=payload)
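
These helpers scrape email-fake.com: get_suggestions() returns the address suggestions from the landing page, get_message() polls an inbox (given either an address string or a message-link Tag returned from a previous call) until a message body shows up, deleting the message once read, and delete_all_messages() clears the inbox. A sketch of the polling flow, not part of the package, assuming a module path such as autoweb.email_fake and a live network connection:

    from autoweb.email_fake import delete_all_messages, get_message, get_suggestions   # hypothetical path

    print(get_suggestions())                     # address suggestions scraped from the landing page

    address = 'demo-user@example-fake-domain.com'    # placeholder inbox address
    message = get_message(address, timeout=60)   # message body tag, or a list of message links, or raises
    print(message)

    delete_all_messages(address)                 # clear whatever is left in that inbox
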