python-base-toolkit 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
File without changes
@@ -0,0 +1,7 @@
1
# Re-export the unit-constant modules. The previous
# `import python_base_toolkit.consts.units.binary_units` form binds the
# top-level package name, not `binary_units`, so the names listed in
# `__all__` were not actually bound in this module's namespace.
from python_base_toolkit.consts.units import binary_units, time_units

__all__ = [
    'binary_units',
    'time_units',
]
@@ -0,0 +1,13 @@
1
"""Storage-size constants: IEC binary units and SI decimal units, in bytes (except ``B``)."""

B = 8  # bits per byte

# IEC binary units (powers of 1024), expressed in bytes.
KiB = 1024
MiB = KiB * 1024
GiB = MiB * 1024
TiB = GiB * 1024
PiB = TiB * 1024
EiB = PiB * 1024

# SI decimal units (powers of 1000), expressed in bytes.
KB = 1000  # 1 kilobyte (KB) = 1000 bytes in decimal (SI)
MB = 1000 ** 2  # 1 megabyte (MB) = 1000^2 bytes in decimal (SI); was missing from the series
GB = 1000 ** 3  # 1 gigabyte (GB) = 1000^3 bytes in decimal (SI)
TB = 1000 ** 4  # 1 terabyte (TB) = 1000^4 bytes in decimal (SI)
PB = 1000 ** 5  # 1 petabyte (PB) = 1000^5 bytes in decimal (SI)
EB = 1000 ** 6  # 1 exabyte (EB) = 1000^6 bytes in decimal (SI)
@@ -0,0 +1,11 @@
1
"""Time-unit constants, all expressed in seconds."""

# Sub-second units (scientific notation parses to the same float values
# as the original long decimal literals).
Pico_Second = 1e-12
Nano_Second = 1e-9
Micro_Second = 1e-6
Milli_Second = 1e-3
Second = 1
# Multiples of a second.
Minute = 60 * Second
Hour = 60 * Minute
Day = 24 * Hour
Week = 7 * Day
Month = 30 * Day  # approximation: 30-day month
Year = 365 * Day  # approximation: non-leap year
File without changes
@@ -0,0 +1,43 @@
1
+ import json
2
+ import time
3
+ from functools import wraps
4
+ import datetime
5
+ from typing import Callable, Any
6
+
7
+ from custom_python_logger import get_logger
8
+
9
+ logger = get_logger(__name__)
10
+
11
+
12
def report_telemetry(
    func: Callable[..., Any],
    start_time: datetime.datetime,
    end_time: datetime.datetime,
    *args: Any,
    **kwargs: Any
) -> None:
    """Log a telemetry record describing a single invocation of *func*.

    :param func: the function whose call is being reported
    :param start_time: wall-clock time just before the call
    :param end_time: wall-clock time just after the call
    :param args: positional arguments the function was called with
    :param kwargs: keyword arguments the function was called with
    """
    telemetry_record = {
        "function_name": func.__name__,
        "args": args,
        "kwargs": kwargs,
        "start_time": start_time.isoformat(),
        "end_time": end_time.isoformat(),
        "timestamp": time.time(),
    }
    # default=str lets json.dumps handle any non-JSON-native argument values.
    serialized = json.dumps(telemetry_record, indent=4, sort_keys=False, default=str)
    logger.info(f"Sending telemetry data with the following data: {serialized}")
29
+
30
+
31
def report_func_telemetry(func: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator that times *func* and reports a telemetry record for each call.

    Fix: the telemetry call previously mixed ``*args`` with the keyword
    arguments ``func=``/``start_time=``/``end_time=``; the positional
    ``*args`` were bound to those same parameters, raising
    ``TypeError: got multiple values for argument 'func'`` whenever the
    wrapped function was called with positional arguments.
    """
    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        logger.info(f"calling {report_telemetry.__name__} to report the telemetry of {func.__name__}")
        start_time = datetime.datetime.now(datetime.UTC)
        result = func(*args, **kwargs)
        end_time = datetime.datetime.now(datetime.UTC)
        # Pass the metadata positionally so *args cannot collide with the
        # named parameters of report_telemetry.
        report_telemetry(func, start_time, end_time, *args, **kwargs)
        return result

    return wrapper
@@ -0,0 +1,28 @@
1
+ import time
2
+ from functools import wraps
3
+ from typing import Callable, Any
4
+
5
+ from custom_python_logger import get_logger
6
+
7
+ logger = get_logger(__name__)
8
+
9
+
10
class Timer:
    """Context manager measuring elapsed wall-clock time via ``time.perf_counter``.

    After the block exits, the measurement is available on the public
    attributes ``start_time``, ``end_time`` and ``elapsed_time``.
    """

    def __enter__(self):
        self.start_time = time.perf_counter()
        logger.info("Timer started.")
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        stop = time.perf_counter()
        self.end_time = stop
        self.elapsed_time = stop - self.start_time
        logger.info(f"Timer stopped. Elapsed time: {self.elapsed_time:.2f} seconds.")
21
+
22
+
23
def timer(func: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator that logs the wall-clock duration of each call to *func*."""
    @wraps(func)
    def timed_call(*args: Any, **kwargs: Any) -> Any:
        # Timer logs on enter/exit; the wrapped result is returned unchanged.
        with Timer():
            return func(*args, **kwargs)

    return timed_call
File without changes
@@ -0,0 +1,69 @@
1
+ from custom_python_logger.logger import get_logger
2
+
3
class InstanceManager:
    """Collects resources and closes them all in reverse order of registration.

    Usable as a plain object (call ``close_all()`` explicitly) or as a
    context manager (everything is closed on ``__exit__``).
    """

    def __init__(self):
        self.logger = get_logger(__class__.__name__)
        self._instances = []  # closed in reverse (LIFO) order

    def add(self, instance):
        """Register *instance* to be closed later by close_all()."""
        self._instances.append(instance)

    def close_all(self):
        """Close every registered instance, newest first.

        Prefers an explicit ``close()`` method and falls back to the context
        manager protocol. Failures are logged at error level (previously
        info, which hid real problems) and do not stop the remaining
        instances from being closed.
        """
        for instance in reversed(self._instances):
            _instance_name = instance.__class__.__name__
            try:
                if hasattr(instance, 'close'):
                    instance.close()
                elif hasattr(instance, '__exit__'):
                    instance.__exit__(None, None, None)
                else:
                    # Previously this called __exit__ unconditionally and the
                    # AttributeError was swallowed; make the case explicit.
                    self.logger.error(f"Instance: {_instance_name} has no close() or __exit__; skipping.")
                    continue
                self.logger.info(f"Instance: {_instance_name} closed successfully.")
            except Exception as e:
                self.logger.error(f"Failed to close instance {instance}: {e}")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close_all()
28
+
29
+
30
def main():
    """Demo of InstanceManager: registers a self-registering context-manager
    object plus two open file handles, then closes everything at once.

    NOTE(review): relies on the module-level global ``instance_manager``
    created under the ``if __name__ == "__main__":`` guard below; calling
    main() without that guard raises NameError.
    """
    class SomeInstance:
        def __init__(self, add_to_instance_manager: bool = False):
            self.logger = get_logger(__class__.__name__)

            # Optionally self-register with the global manager so that
            # close_all() will later invoke our __exit__.
            if add_to_instance_manager:
                instance_manager.add(self)

        @property
        def __class_name__(self):
            # Convenience accessor used in the log messages below.
            return self.__class__.__name__

        def __enter__(self):
            self.logger.info(f"Entering {self.__class_name__}")
            return self

        def __exit__(self, exc_type, exc_value, exc_traceback):
            self.logger.info(f"Exiting {self.__class_name__}")
            # Handle any cleanup here
            if exc_type:
                self.logger.info(f"Exception: {exc_value}")
            # Returning True suppresses any exception raised inside the block.
            return True

    SomeInstance(add_to_instance_manager=True)

    # Plain file handles work too: close_all() uses their close() method.
    resource1 = open('file1.txt', 'w')
    resource2 = open('file2.txt', 'w')

    instance_manager.add(resource1)
    instance_manager.add(resource2)

    # do stuff...

    instance_manager.close_all()  # or, if inside `with manager:`, it will happen automatically
64
+
65
+
66
if __name__ == "__main__":
    # NOTE(review): instance_manager is intentionally a module-level global;
    # main() and its nested SomeInstance reference it by name, so it must be
    # created before main() runs.
    instance_manager = InstanceManager()

    main()
File without changes
@@ -0,0 +1,41 @@
1
+ import json
2
+ from dataclasses import is_dataclass
3
+ from datetime import datetime, date, time
4
+ from decimal import Decimal
5
+ from enum import Enum
6
+ from pathlib import Path
7
+ from typing import Any
8
+
9
+ import pandas as pd
10
+ from custom_python_logger import get_logger
11
+
12
+ logger = get_logger(__name__)
13
+
14
+
15
def default_serialize(obj: object) -> object:
    """``json.dumps(default=...)`` hook converting common Python types to JSON-safe values.

    Handles classes (by name), dataclass instances, enums, sets/frozensets,
    tuples, date/time objects, Decimal, Path and ``pandas.NA``.

    :raises TypeError: if *obj* is of an unsupported type (also logged).
    """
    if isinstance(obj, type):
        return obj.__name__
    # is_dataclass is True for the dataclass *class* too; only instances
    # have a meaningful __dict__ payload here.
    if is_dataclass(obj) and not isinstance(obj, type):
        return obj.__dict__
    if isinstance(obj, Enum):
        return obj.value
    # frozenset was previously unsupported; treat it exactly like set.
    if isinstance(obj, (set, frozenset)):
        return list(obj)
    if isinstance(obj, tuple):
        return list(obj)
    if isinstance(obj, (datetime, date, time)):
        return obj.isoformat()
    if isinstance(obj, Decimal):
        return float(obj)
    if isinstance(obj, Path):
        return str(obj)
    if obj is pd.NA:
        return None
    logger.error(f'Object is not serializable: {obj}')
    raise TypeError(f"Type {type(obj)} not serializable")
36
+
37
+
38
def to_json_serializable(obj: Any) -> Any:
    """Convert an object to a JSON-serializable structure."""
    # Round-trip through a JSON string so every nested value is coerced
    # by default_serialize in a single pass.
    return json.loads(json.dumps(obj, default=default_serialize))
@@ -0,0 +1,14 @@
1
+ from datetime import datetime
2
+ from zoneinfo import ZoneInfo
3
+
4
utc_timezone = ZoneInfo('UTC')


def datetime_now_with_timezone(timezone: ZoneInfo = utc_timezone) -> datetime:
    """Return the current time as a timezone-aware datetime in *timezone*.

    :param timezone: target zone (defaults to UTC)
    :raises TypeError: if *timezone* is not a ``zoneinfo.ZoneInfo`` instance
    """
    if not isinstance(timezone, ZoneInfo):
        # The old message said "pytz timezone" although this module uses zoneinfo.
        raise TypeError(f"Expected zoneinfo.ZoneInfo timezone, got {type(timezone).__name__}")

    # datetime.now(tz) already returns an aware datetime in the requested
    # zone, so the former astimezone() round-trip was a no-op.
    return datetime.now(timezone)
@@ -0,0 +1,62 @@
1
+ from collections.abc import Callable
2
+ from time import sleep, time
3
+ from typing import Any
4
+
5
+ from tqdm import tqdm
6
+
7
+
8
def _timed_execution(
    func: Callable[[], Any],
    timeout: int = 60,
    interval: int = 1,
    expect_true: bool = True,
    pb_description: str = "Executing",
    return_result: bool = False,
) -> Any | bool | None:
    """Poll *func* every *interval* seconds until its truthiness matches
    *expect_true* or *timeout* seconds elapse, showing a tqdm progress bar.

    :return: on success, the last result when *return_result* else True;
             on timeout, None when *return_result* else False.
    """
    progress = tqdm(total=timeout, desc=pb_description, unit="s", ncols=100)
    deadline = time() + timeout

    while time() < deadline:
        outcome = func()
        # Truthiness must match the expectation flag.
        if bool(outcome) == expect_true:
            progress.close()
            return outcome if return_result else True
        progress.update(interval)
        sleep(interval)

    progress.close()
    return None if return_result else False
29
+
30
+
31
def timed_execution_bool(
    func: Callable[[], Any],
    timeout: int = 60,
    interval: int = 1,
    expect_true: bool = True,
    pb_description: str = "Executing",
) -> bool:
    """Poll *func* until success or timeout; return True on success, False on timeout."""
    return _timed_execution(
        return_result=False,
        func=func,
        timeout=timeout,
        interval=interval,
        expect_true=expect_true,
        pb_description=pb_description,
    )
46
+
47
+
48
def timed_execution_result(
    func: Callable[[], Any],
    timeout: int = 60,
    interval: int = 1,
    expect_true: bool = True,
    pb_description: str = "Executing",
) -> Any:
    """Poll *func* until success or timeout; return its last result, or None on timeout."""
    return _timed_execution(
        return_result=True,
        func=func,
        timeout=timeout,
        interval=interval,
        expect_true=expect_true,
        pb_description=pb_description,
    )
@@ -0,0 +1,256 @@
1
+ import gzip
2
+ import os
3
+ import hashlib
4
+ import shutil
5
+ import json
6
+ import csv
7
+ import yaml
8
+ import re
9
+ from typing import Optional, Union, BinaryIO, TextIO, cast, Any
10
+ import tarfile
11
+ import zipfile
12
+
13
+ from custom_python_logger import get_logger
14
+
15
+ logger = get_logger(__name__)
16
+
17
+
18
class FilePath:
    """Path/filename helpers built on ``os.path``."""

    @staticmethod
    def ensure_dir(directory: str) -> str:
        """Create *directory* (and parents) if missing; return it.

        Uses ``exist_ok=True`` instead of the former check-then-create,
        which was racy if the directory appeared between the two calls.
        """
        os.makedirs(directory, exist_ok=True)
        return directory

    @staticmethod
    def file_exists(path: str) -> bool:
        """Return True if *path* exists and is a regular file."""
        return os.path.isfile(path)

    @staticmethod
    def get_file_extension(path: str) -> str:
        """Return the extension of *path* without the leading dot ('' if none)."""
        return os.path.splitext(path)[1][1:]

    @staticmethod
    def get_filename(path: str, with_extension: bool = True) -> str:
        """Return the final path component, optionally stripped of its extension."""
        if with_extension:
            return os.path.basename(path)
        return os.path.splitext(os.path.basename(path))[0]

    @staticmethod
    def get_relative_path(path: str, base_path: str) -> str:
        """Return *path* expressed relative to *base_path*."""
        return os.path.relpath(path, base_path)

    @staticmethod
    def list_files(directory: str, extension: Optional[str] = None, recursive: bool = False) -> list[str]:
        """List files under *directory*, optionally filtered by suffix.

        Fix: in non-recursive mode ``os.listdir`` also yields directory
        names, which were previously returned as if they were files.
        """
        result = []
        if recursive:
            for root, _, files in os.walk(directory):
                for file in files:
                    if not extension or file.endswith(extension):
                        result.append(os.path.join(root, file))
        else:
            for entry in os.listdir(directory):
                full = os.path.join(directory, entry)
                # os.listdir also returns subdirectories; keep only files.
                if os.path.isfile(full) and (not extension or entry.endswith(extension)):
                    result.append(full)
        return result
51
+
52
+
53
class FileIO:
    """Read/write helpers for text, JSON, YAML and CSV files."""

    @staticmethod
    def safe_open(
        filename: str,
        mode: str = 'r',
        encoding: Optional[str] = None,
        **kwargs
    ) -> Union[TextIO, BinaryIO]:
        """Open *filename*; text modes default to UTF-8, binary modes ignore *encoding*."""
        if 'b' in mode:
            return cast(BinaryIO, open(filename, mode, **kwargs))
        return cast(TextIO, open(filename, mode, encoding=encoding or 'utf-8', **kwargs))

    @staticmethod
    def read_text(filename: str, encoding: str = 'utf-8') -> str:
        """Return the full text content of *filename*."""
        with open(filename, 'r', encoding=encoding) as f:
            return f.read()

    @staticmethod
    def write_text(text: str, filename: str, encoding: str = 'utf-8') -> None:
        """Write *text* to *filename*, replacing any existing content."""
        with open(filename, 'w', encoding=encoding) as f:
            f.write(text)

    @staticmethod
    def read_json(filename: str) -> dict[str, Any]:
        """Parse *filename* as JSON.

        Now a @staticmethod for consistency with the other readers/writers;
        existing instance calls (``FileIO().read_json(...)``) keep working.
        """
        with FileIO.safe_open(filename, 'r') as f:
            return json.load(f)

    @staticmethod
    def write_json(data: dict[str, Any], filename: str, indent: int = 2) -> None:
        """Serialize *data* as pretty-printed UTF-8 JSON into *filename*."""
        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=indent, ensure_ascii=False)

    @staticmethod
    def read_yaml(filename: str) -> Any:
        """Parse *filename* as YAML using the safe loader."""
        with open(filename, 'r', encoding='utf-8') as f:
            return yaml.safe_load(f)

    @staticmethod
    def write_yaml(data: Any, filename: str, **kwargs) -> None:
        """Dump *data* as YAML (safe dumper, unicode allowed) into *filename*."""
        with open(filename, 'w', encoding='utf-8') as f:
            yaml.safe_dump(data, f, allow_unicode=True, **kwargs)

    @staticmethod
    def read_csv(filename: str, **kwargs) -> list[dict[str, Any]]:
        """Read *filename* as CSV; one dict per row keyed by the header row.

        Also converted to @staticmethod (see read_json).
        """
        with FileIO.safe_open(filename, 'r', newline='') as f:
            reader = csv.DictReader(f, **kwargs)
            return list(reader)

    @staticmethod
    def write_csv(data: list[dict[str, Any]], filename: str, fieldnames: Optional[list[str]] = None, **kwargs) -> None:
        """Write dict rows to CSV; no-op for empty *data*; header from first row if *fieldnames* omitted."""
        if not data:
            return

        if fieldnames is None:
            fieldnames = list(data[0].keys())

        with open(filename, 'w', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames, **kwargs)
            writer.writeheader()
            writer.writerows(data)

    @staticmethod
    def is_binary_file(path: str, chunk_size: int = 1024) -> bool:
        """Heuristic: a NUL byte within the first *chunk_size* bytes marks a binary file."""
        with open(path, 'rb') as f:
            chunk = f.read(chunk_size)
            return b'\0' in chunk

    @staticmethod
    def is_file_empty(path: str) -> bool:
        """Return True if *path* has size zero."""
        return os.path.getsize(path) == 0
121
+
122
+
123
class FfileManipulation:
    """Text clean-up helpers. (NOTE(review): class name typo kept — renaming would break importers.)"""

    @staticmethod
    def remove_emojis(text: str) -> str:
        """Strip supplementary-plane characters (emoji etc.) from *text* and trim whitespace."""
        # Any code point above the Basic Multilingual Plane is removed.
        cleaned = re.sub(r'[\U00010000-\U0010ffff]', '', text)
        return cleaned.strip()
127
+
128
+
129
class FileSystem:
    """Thin wrappers around shutil/os file-system operations."""

    @staticmethod
    def copy_file(src: str, dst: str) -> None:
        """Copy *src* to *dst*, preserving metadata (shutil.copy2)."""
        shutil.copy2(src, dst)

    @staticmethod
    def move_file(src: str, dst: str) -> None:
        """Move (or rename) *src* to *dst*."""
        shutil.move(src, dst)

    @staticmethod
    def delete_file(path: str) -> None:
        """Remove *path* if it is a regular file; silently ignore anything else."""
        if os.path.isfile(path):
            os.remove(path)

    @staticmethod
    def file_size(path: str) -> int:
        """Return the size of *path* in bytes."""
        return os.path.getsize(path)
146
+
147
+
148
class FileHash:
    """Chunked file-hashing helpers.

    The three public methods previously duplicated the same read loop;
    they now share ``_hash_file``.
    """

    @staticmethod
    def _hash_file(algorithm: str, filename: str, chunk_size: int) -> str:
        """Stream *filename* through a fresh *algorithm* hash; return the hex digest."""
        digest = hashlib.new(algorithm)
        with open(filename, "rb") as f:
            # Read in chunks so arbitrarily large files use constant memory.
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()

    @staticmethod
    def file_md5(filename: str, chunk_size: int = 8192) -> str:
        """Calculate MD5 hash of a file."""
        return FileHash._hash_file('md5', filename, chunk_size)

    @staticmethod
    def file_sha1(filename: str, chunk_size: int = 8192) -> str:
        """Calculate SHA1 hash of a file."""
        return FileHash._hash_file('sha1', filename, chunk_size)

    @staticmethod
    def file_sha256(filename: str, chunk_size: int = 8192) -> str:
        """Calculate SHA256 hash of a file."""
        return FileHash._hash_file('sha256', filename, chunk_size)
175
+
176
+
177
class FileCompression:
    """Gzip / tar / zip compression helpers."""

    @staticmethod
    def gzip_file(input_path: str, output_path: Optional[str] = None) -> str:
        """Gzip *input_path* into *output_path* (default: ``<input>.gz``); return the output path.

        :raises FileNotFoundError: if *input_path* does not exist
        :raises IOError: if compression fails (original error chained)
        """
        if not os.path.isfile(input_path):
            raise FileNotFoundError(f"Input file does not exist: {input_path}")

        if output_path is None:
            output_path = input_path + '.gz'

        try:
            with open(input_path, 'rb') as f_in:
                with gzip.open(output_path, 'wb') as f_out:
                    # Stream in chunks: the previous f_in.read() loaded the
                    # whole file into memory.
                    shutil.copyfileobj(f_in, f_out)
            return output_path
        except Exception as e:
            # Chain the original error so the root cause is preserved.
            raise IOError(f"Error compressing file: {e}") from e

    @staticmethod
    def ungzip_file(gz_path: str, output_path: Optional[str] = None) -> str:
        """Decompress *gz_path* into *output_path*; return the output path.

        Default output strips the ``.gz`` suffix, or appends
        ``_decompressed`` when the suffix is absent.

        :raises FileNotFoundError: if *gz_path* does not exist
        :raises IOError: if decompression fails (original error chained)
        """
        if not os.path.isfile(gz_path):
            raise FileNotFoundError(f"Gzipped file does not exist: {gz_path}")

        if output_path is None:
            if gz_path.endswith('.gz'):
                output_path = gz_path[:-3]  # Remove .gz extension
            else:
                output_path = gz_path + '_decompressed'

        try:
            with gzip.open(gz_path, 'rb') as f_in:
                with open(output_path, 'wb') as f_out:
                    # Stream in chunks instead of reading everything at once.
                    shutil.copyfileobj(f_in, f_out)
            return output_path
        except Exception as e:
            raise IOError(f"Error decompressing file: {e}") from e

    @staticmethod
    def compress_directory(directory: str, output_path: Optional[str] = None) -> str:
        """Compress directory to tarball (``<dir>.tar.gz`` by default); return the tarball path.

        :raises FileNotFoundError: if *directory* does not exist
        """
        if not os.path.isdir(directory):
            raise FileNotFoundError(f"Directory does not exist: {directory}")

        if output_path is None:
            output_path = directory.rstrip(os.sep) + '.tar.gz'

        with tarfile.open(output_path, 'w:gz') as tar:
            tar.add(directory, arcname=os.path.basename(directory))

        return output_path

    @staticmethod
    def extract_archive(archive_path: str, output_dir: Optional[str] = None) -> str:
        """Extract a .tar.gz/.tgz/.tar.bz2/.tar/.zip archive into *output_dir*; return that directory.

        WARNING(review): tarfile.extractall/ZipFile.extractall are used
        without a member filter, so a malicious archive could write outside
        *output_dir* (path traversal). Only extract trusted archives, or add
        ``filter='data'`` once the minimum Python version allows it.

        :raises FileNotFoundError: if *archive_path* does not exist
        :raises ValueError: for unsupported archive extensions
        """
        if not os.path.isfile(archive_path):
            raise FileNotFoundError(f"Archive does not exist: {archive_path}")

        if output_dir is None:
            output_dir = os.path.splitext(archive_path)[0]
            # Handle double extensions like .tar.gz
            if output_dir.endswith('.tar'):
                output_dir = output_dir[:-4]

        FilePath.ensure_dir(output_dir)

        if archive_path.endswith(('.tar.gz', '.tgz')):
            with tarfile.open(archive_path, 'r:gz') as tar:
                tar.extractall(path=output_dir)
        elif archive_path.endswith('.tar.bz2'):
            with tarfile.open(archive_path, 'r:bz2') as tar:
                tar.extractall(path=output_dir)
        elif archive_path.endswith('.tar'):
            with tarfile.open(archive_path, 'r') as tar:
                tar.extractall(path=output_dir)
        elif archive_path.endswith('.zip'):
            with zipfile.ZipFile(archive_path, 'r') as zip_ref:
                zip_ref.extractall(output_dir)
        else:
            raise ValueError(f"Unsupported archive format: {archive_path}")

        return output_dir
@@ -0,0 +1,6 @@
1
+ from pyfiglet import Figlet
2
+
3
+
4
def create_logo(text: str, font: str = 'big') -> str:
    """Render *text* as ASCII-art using the given pyfiglet font."""
    return Figlet(font=font).renderText(text)
@@ -0,0 +1,102 @@
1
+ import ipaddress
2
+ import re
3
+ from typing import Optional
4
+
5
+
6
def get_ip_version(ip: str) -> Optional[int]:
    """
    Returns the type of the IP address as integer (IPv4, IPv6 or None in case of an invalid IP address)
    :param ip: IP address (String)
    """
    try:
        # ip_address raises ValueError for anything that is not a valid IPv4/IPv6 literal.
        return ipaddress.ip_address(ip).version
    except ValueError:
        return None
16
+
17
+
18
def normalize_connection_string(ip: str, port: Optional[int] = None) -> str:
    """
    Returns a normalized connection string (IPv4:port or [IPv6]:port)
    :param ip: IP address (String)
    :param port: Port (Integer or None) - Optional for cases where the port is not needed (E.G. for SCP)

    Output:
        w/ Port - IPv4:port or [IPv6]:port
        w/o Port - IPv4: or [IPv6]:
    """
    _operation = 'Normalize connection string'

    # IPv6 literals must be bracketed so the port separator is unambiguous.
    templates = {4: '{ip}:{port}', 6: '[{ip}]:{port}'}
    version = get_ip_version(ip)
    if version in templates:
        return templates[version].format(ip=ip, port=port or "")
    raise ValueError(f'Invalid IP address: {ip} - {_operation}')
36
+
37
+
38
def normalize_http_url(ip: str, port: Optional[int] = None, https: bool = True) -> str:
    """
    Returns a normalized HTTP URL (http://IPv4:port or http://[IPv6]:port)
    :param ip: IP address (String)
    :param port: Port (Integer)
    :param https: Use HTTPS (Boolean)
    """
    scheme = 'https' if https else 'http'
    endpoint = normalize_connection_string(ip, port)
    return f'{scheme}://{endpoint}'
47
+
48
+
49
def parse_ifconfig_to_json(ifconfig_output: str) -> dict:
    """
    Parse raw `ifconfig` output into a dict keyed by interface name.
    Keys below are present only when the matching line was found for that
    interface.

    Output Example:
    {
        'interface_name': {
            'mac_address': <String>,
            'ipv4_address': <String>,
            'ipv4_netmask': <Integer>,   # prefix length (count of 1-bits), despite the name
            'ipv6_address': <String>,
            'ipv6_prefixlen': <String>
        }
    }
    """
    interfaces = {}
    # The interface whose detail lines are currently being accumulated;
    # detail matches before the first header line are ignored.
    current_interface = None

    for line in ifconfig_output.splitlines():
        # Interface header line, e.g. "eth0: flags=..." — starts a new entry.
        match_interface = re.match(r'^(\S+):\s', line)
        if match_interface:
            current_interface = match_interface.group(1)
            interfaces[current_interface] = {}

        match_mac = re.search(r'ether\s([0-9a-f:]+)', line)
        if match_mac and current_interface:
            interfaces[current_interface]['mac_address'] = match_mac.group(1)

        match_ipv4 = re.search(r'inet\s(\d+\.\d+\.\d+\.\d+)', line)
        if match_ipv4 and current_interface:
            interfaces[current_interface]['ipv4_address'] = match_ipv4.group(1)

        # Try different netmask patterns: BSD-style hex (0xffffff00) or
        # dotted decimal (255.255.255.0).
        match_netmask = re.search(r'netmask\s+(0x[0-9a-f]+|(?:\d+\.){3}\d+)', line, re.IGNORECASE)
        if match_netmask and current_interface:
            netmask = match_netmask.group(1)
            if netmask.startswith('0x'):
                # Handle hex format
                netmask_int = int(netmask, 16)
            else:
                # Handle decimal format (255.255.255.0): fold the four octets
                # into one 32-bit integer.
                octets = [int(x) for x in netmask.split('.')]
                netmask_int = sum(octet << (24 - 8 * i) for i, octet in enumerate(octets))

            # Store the CIDR prefix length (number of set bits), not the
            # original netmask string.
            binary_netmask = f'{netmask_int:032b}'
            interfaces[current_interface]['ipv4_netmask'] = binary_netmask.count('1')

        match_ipv6 = re.search(r'inet6\s([0-9a-fA-F:]+)', line)
        if match_ipv6 and current_interface:
            interfaces[current_interface]['ipv6_address'] = match_ipv6.group(1)

        # NOTE(review): prefixlen values are decimal digits, so the hex/colon
        # character class is wider than needed but still matches them.
        match_subnet_mask = re.search(r'prefixlen\s([0-9a-f:]+)', line)
        if match_subnet_mask and current_interface:
            interfaces[current_interface]['ipv6_prefixlen'] = match_subnet_mask.group(1)

    return interfaces
@@ -0,0 +1,23 @@
1
+ from pathlib import Path
2
+
3
+
4
def get_project_path_by_name(project_name: str) -> str:
    """Walk up from this file's directory; return the first ancestor directory named *project_name*.

    :raises FileNotFoundError: if no ancestor directory has that name
    """
    current_path = Path(__file__).parent
    while current_path != current_path.parent:
        # Compare the final path component directly; the previous
        # str().endswith(f'/{name}') check broke on Windows separators.
        if current_path.name == project_name:
            return str(current_path)
        current_path = current_path.parent
    raise FileNotFoundError(
        f'Project "{project_name}" not found in any parent directories.\n'
        f'Current path: {Path(__file__)}',
    )
14
+
15
+
16
+ def get_project_path_by_file(markers: set = None) -> Path:
17
+ markers = markers or {'.git', '.gitignore', 'setup.py', 'pyproject.toml', 'LICENSE', 'README.md'}
18
+ for marker in markers:
19
+ path = Path(__file__).resolve()
20
+ for parent in path.parents:
21
+ if (parent / marker).exists():
22
+ return parent
23
+ raise RuntimeError(f'Project root with markers "{markers}" not found.')
@@ -0,0 +1,22 @@
1
+ import json
2
+ import logging
3
+ from typing import Any, Callable
4
+
5
+ from custom_python_logger import get_logger
6
+
7
+ logger = get_logger(__name__)
8
+
9
+
10
def log_in_format(
        data: Any,
        log_level: int = logging.INFO,
        indent: int = 4,
        sort_keys: bool = True,
        default: Callable | None = None
) -> None:
    """Pretty-print *data* as indented JSON through the module logger.

    :param data: any json.dumps-serializable object (supply *default* otherwise)
    :param log_level: stdlib numeric logging level; unknown levels fall back to debug
    :param indent: JSON indentation width
    :param sort_keys: sort dictionary keys in the output
    :param default: serializer hook passed to json.dumps for unsupported types
    """
    formatted_data = json.dumps(data, indent=indent, sort_keys=sort_keys, default=default)

    # Map the numeric level to the matching logger method name
    # (e.g. INFO -> "info"); unregistered levels yield "Level N", which
    # getattr resolves to the logger.debug fallback.
    level_name = logging.getLevelName(log_level).lower()
    log_method = getattr(logger, level_name, logger.debug)

    # Leading newline keeps the JSON block visually separated in the log.
    log_method("\n%s", formatted_data)
@@ -0,0 +1,27 @@
1
+ import logging
2
+ import shutil
3
+ from pathlib import Path
4
+
5
+ from custom_python_logger.logger import get_logger
6
+ from python_base_toolkit.utils.path_utils import get_project_path_by_file
7
+
8
+ logger = get_logger(__name__)
9
+
10
+
11
def delete_pycache_folder(root_dir: Path = None, ignored_dirs: set = None) -> None:
    """Recursively delete every ``__pycache__`` directory under *root_dir*.

    :param root_dir: starting directory; defaults to the detected project root
    :param ignored_dirs: directory names whose subtrees are skipped (default: {'.venv'})
    """
    base = Path(root_dir) if root_dir else get_project_path_by_file()
    skip_names = ignored_dirs or {'.venv'}

    for path in base.rglob('__pycache__'):
        # Skip caches that live under any ignored directory (e.g. .venv).
        if any(ignored in path.parts for ignored in skip_names):
            continue

        logger.info(f"Cleaning: {path}")
        try:
            shutil.rmtree(path)
        except FileNotFoundError:
            logger.exception(f"Already removed: {path}")
        except Exception as e:
            logger.exception(f"Failed to delete {path}: {e}")
        else:
            logger.info(f"Deleted: {path}")
    logger.info("Finished cleaning __pycache__ folders.")
@@ -0,0 +1,67 @@
1
+ import random
2
+ from random import choice
3
+ from string import ascii_letters, ascii_lowercase, ascii_uppercase, digits
4
+
5
+
6
def generate_random_string(length: int = 8, charset: str = ascii_letters + digits) -> str:
    """
    Generate a random string of a given length using a specified character set
    :param length: The length of the string to generate
    :param charset: The set of characters to use for generating the string
    Return - A random string based on the specified character set
    """
    picked = [choice(charset) for _ in range(length)]
    return ''.join(picked)
14
+
15
+
16
def generate_random_password(length: int = 8) -> str:
    """
    Generate a random password with a given length
    """
    special_characters = "!@#%^*"
    # Guarantee: first char is an uppercase letter, plus at least one
    # digit-or-special and one lowercase letter somewhere in the password.
    required = [
        choice(ascii_uppercase),
        choice(digits + special_characters),
        choice(ascii_lowercase),
    ]
    password = ''.join(required)

    # Fill the remaining characters with a mix of all types.
    pool = ascii_letters + digits + special_characters
    password += ''.join(choice(pool) for _ in range(length - len(password)))

    # Shuffle everything after the leading letter so the guaranteed
    # characters are not always in fixed positions.
    return password[0] + ''.join(random.sample(password[1:], len(password) - 1))
36
+
37
+
38
def generate_random_ipv4() -> str:
    """Return a random IPv4 address; the first octet stays in 1-223 to avoid multicast/reserved ranges."""
    octets = [random.randint(1, 223)]
    octets.extend(random.randint(0, 255) for _ in range(3))
    return '.'.join(str(o) for o in octets)
43
+
44
+
45
def generate_random_ipv6() -> str:
    """Return a random IPv6 address whose first group falls in 0x2000-0x3fff (global-unicast style)."""
    groups = [random.randint(0x2000, 0x3fff)]
    groups.extend(random.randint(0, 0xffff) for _ in range(7))
    return ':'.join(f'{g:04x}' for g in groups)
49
+
50
+
51
+ def generate_random_ip(ip_version: int) -> str:
52
+ """
53
+ Generate a random IP address based on the specified version
54
+ If the cluster IP type matters, please use this function as follows:
55
+ generate_random_ip(ip_version=client.cluster.get_ip_version()) to get the IP version based on the cluster
56
+ """
57
+ if ip_version == 4:
58
+ return generate_random_ipv4()
59
+ if ip_version == 6:
60
+ return generate_random_ipv6()
61
+ raise ValueError(f"Unsupported IP version: {ip_version}. Supported versions are 4 and 6.")
62
+
63
+
64
def create_random_text(min_length: int = 64, max_length: int = 4096) -> str:
    """Generate a random string with a length between min_length and max_length characters."""
    size = random.randint(min_length, max_length)
    return ''.join(random.choices(ascii_letters, k=size))
@@ -0,0 +1,13 @@
1
+ import urllib.parse
2
+
3
+ import requests
4
+
5
+ BASE_TINY_URL = "https://tinyurl.com/api-create.php"
6
+
7
+
8
+ def shorten_url(url: str) -> str:
9
+ encoded_url = urllib.parse.urlencode({"url": url})
10
+ full_url = f"{BASE_TINY_URL}?{encoded_url}"
11
+ res = requests.get(full_url, timeout=5)
12
+ res.raise_for_status() # Raises HTTPError for non-200 codes
13
+ return res.text
@@ -0,0 +1,10 @@
1
+ import sys
2
+ import os
3
+
4
+
5
def get_venv_details() -> dict:
    """Return interpreter path/version and the active virtualenv path.

    ``venv_path`` is None when no virtual environment is active
    (the VIRTUAL_ENV environment variable is unset).
    """
    details = {
        'python_executable_path': sys.executable,
        'python_version': sys.version,
        'venv_path': os.getenv('VIRTUAL_ENV'),
    }
    return details
@@ -0,0 +1,93 @@
1
+ Metadata-Version: 2.4
2
+ Name: python-base-toolkit
3
+ Version: 0.0.1
4
+ Summary: A production-ready toolkit of common Python utilities (files, networking, timing, serialization, and more).
5
+ Home-page: https://github.com/aviz92/python-base-toolkit
6
+ Author: Avi Zaguri
7
+ Author-email:
8
+ Project-URL: Repository, https://github.com/aviz92/python-base-toolkit
9
+ Classifier: Framework :: Pytest
10
+ Classifier: Programming Language :: Python
11
+ Requires-Python: >=3.11
12
+ Description-Content-Type: text/markdown
13
+ License-File: LICENSE
14
+ Requires-Dist: setuptools
15
+ Requires-Dist: wheel
16
+ Requires-Dist: colorlog
17
+ Requires-Dist: pathlib
18
+ Requires-Dist: requests
19
+ Requires-Dist: PyYAML
20
+ Requires-Dist: tqdm
21
+ Requires-Dist: pyfiglet
22
+ Requires-Dist: pandas
23
+ Requires-Dist: custom-python-logger
24
+ Dynamic: author
25
+ Dynamic: classifier
26
+ Dynamic: description
27
+ Dynamic: description-content-type
28
+ Dynamic: home-page
29
+ Dynamic: license-file
30
+ Dynamic: project-url
31
+ Dynamic: requires-dist
32
+ Dynamic: requires-python
33
+ Dynamic: summary
34
+
35
+ # Python Toolkit
36
+ A powerful, production-ready Python toolkit designed to accelerate development by providing essential utilities and common functionality across multiple domains. <br>
37
+ Built with type safety, comprehensive error handling, and modern Python practices, this toolkit streamlines everyday development tasks including file operations, network utilities, data manipulation, performance monitoring, resource management, and more.
38
+
39
+ ---
40
+
41
+ ## 📦 Installation
42
+
43
+ ```bash
44
+ pip install python-base-toolkit
45
+ ```
46
+
47
+ ---
48
+
49
+ ## 🚀 Features
50
+ - Constants Pack
51
+ - binary
52
+ - time
53
+
54
+ - Decorators Pack
55
+ - telemetry
56
+ - timer
57
+
58
+ - Instances Pack
59
+ - instance_manager
60
+
61
+ - Utilities Pack
62
+ - data_serialization
63
+ - date_time
64
+ - execute
65
+ - file_utils
66
+ - logo
67
+ - network
68
+ - path_utils
69
+ - pretty_print
70
+ - pycache
71
+ - random_utils
72
+ - shorten_url
73
+ - venv_details
74
+
75
+ ---
76
+
77
+ ## 🤝 Contributing
78
+ If you have a helpful tool, pattern, or improvement to suggest:
79
+ Fork the repo <br>
80
+ Create a new branch <br>
81
+ Submit a pull request <br>
82
+ I welcome additions that promote clean, productive, and maintainable development. <br>
83
+
84
+ ---
85
+
86
+ ## 📄 License
87
+ MIT License — see [LICENSE](LICENSE) for details.
88
+
89
+ ---
90
+
91
+ ## 🙏 Thanks
92
+ Thanks for exploring this repository! <br>
93
+ Happy coding! <br>
@@ -0,0 +1,28 @@
1
+ python_base_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ python_base_toolkit/consts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
+ python_base_toolkit/consts/units/__init__.py,sha256=-1lY3rI3SU7gZKlAKl9E6FjputEdKMGxO0K1GJRXSN0,157
4
+ python_base_toolkit/consts/units/binary_units.py,sha256=8NssiyGO_cJ-FNAh4EYzVlCKRM5n4Q0GC8sL96kvNl4,420
5
+ python_base_toolkit/consts/units/time_units.py,sha256=aDwRnRR_mbOn2B7NUB9b2-qO98CLFzNE8-ZKfwUZ09Q,216
6
+ python_base_toolkit/decorators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
+ python_base_toolkit/decorators/telemetry.py,sha256=bxittYChUb0zRnDoX_miCZKFQukZ1rPjjPmYfXfcQzA,1400
8
+ python_base_toolkit/decorators/timer.py,sha256=DU0JoqY5cN5hmLVDrNxaMgkPbYNvN3DGU3-FpZXtJK0,813
9
+ python_base_toolkit/instances/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ python_base_toolkit/instances/instance_manager.py,sha256=NDNKVBaYSXvbASehM0YHBWCaYOSLWFJfuKZ7cf0iTho,2058
11
+ python_base_toolkit/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
+ python_base_toolkit/utils/data_serialization.py,sha256=MCF9pALQU2s5tufWXTzW1Dwj3NS1cjI1Le_BL4717iI,1197
13
+ python_base_toolkit/utils/date_time.py,sha256=ycWWTPnkLukENza4NzCX8vQ113UGu4ugPJHGyx4Dgx0,429
14
+ python_base_toolkit/utils/execute.py,sha256=eppuC5YywOHDK-AwFSmcT5YpuO119fFgpJjcSQY80ks,1535
15
+ python_base_toolkit/utils/file_utils.py,sha256=Na6Y1O8PAb21w0y-VnhBFWonOpXl40IrXnefBY8oKNk,8611
16
+ python_base_toolkit/utils/logo.py,sha256=X4u0CmBgTJWqn3b_V0MG2mKV1Gw4rLGOLSQmY7hatIs,140
17
+ python_base_toolkit/utils/network.py,sha256=i3RyWTJGHt77b45gYn0eO9zqXUkScYZvnq-hOFY_eAk,3585
18
+ python_base_toolkit/utils/path_utils.py,sha256=xY0pMHIO-HoWLgc4In-WK8HjsfIwqwozDYzV_nxC_Ps,878
19
+ python_base_toolkit/utils/pretty_print.py,sha256=66XQQKUDjTJcClqWIU_eNmKLKz0h7_1hxMItEAsxbyo,544
20
+ python_base_toolkit/utils/pycache.py,sha256=YwYGYiERSYQJWg1gYDQH1JmjpZpRYWSdacWd8n6zHD8,925
21
+ python_base_toolkit/utils/random_utils.py,sha256=g3yXqei1bGEGxPnAXsdnVtG2DXIF0t68clqul14cAN8,2730
22
+ python_base_toolkit/utils/shorten_url.py,sha256=zsyW3wloFJdVb9s3fhR-olL6ekT03FDlFm2xbFY4viw,359
23
+ python_base_toolkit/utils/venv_details.py,sha256=B67fyXSXdkLnU-fc7QJgSCSpJX5Sv6D13K7to_CDmf4,210
24
+ python_base_toolkit-0.0.1.dist-info/licenses/LICENSE,sha256=cSikHY6SZFsPZSBizCDAJ0-Bjjzxt-JtX6TVbKxwimo,1067
25
+ python_base_toolkit-0.0.1.dist-info/METADATA,sha256=8-kOqa9CkOUB3kopAtR8ZX0wqL0MN35Pi-Emuvx_tnk,2156
26
+ python_base_toolkit-0.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
27
+ python_base_toolkit-0.0.1.dist-info/top_level.txt,sha256=I9kK6FGKq_6z3aGySoWamY68GRgg0xsL6sS-uyaxJ2k,20
28
+ python_base_toolkit-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Avi Zaguri
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ python_base_toolkit