nano_dev_utils-1.4.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,38 @@
1
+ """nano-dev-utils - A collection of small Python utilities for developers.
2
+ Copyright (c) 2025 Yaron Dayan
3
+ """
4
+
5
+ from pathlib import Path
6
+ from importlib.metadata import version
7
+ from .dynamic_importer import Importer
8
+ from .timers import Timer
9
+ from .release_ports import PortsRelease, PROXY_SERVER, INSPECTOR_CLIENT
10
+ from .common import update, encode_dict, str2file, PredicateBuilder, FilterSet
11
+ from .file_tree_display import FileTreeDisplay, DEFAULT_SFX
12
+
13
+ timer = Timer()
14
+ ports_release = PortsRelease()
15
+ importer = Importer()
16
+ filetree_display = FileTreeDisplay(root_dir=str(Path.cwd()))
17
+ predicate_builder = PredicateBuilder
18
+
19
+ __version__ = version('nano-dev-utils')
20
+
21
+ __all__ = [
22
+ 'Importer',
23
+ 'Timer',
24
+ 'PortsRelease',
25
+ 'PROXY_SERVER',
26
+ 'INSPECTOR_CLIENT',
27
+ 'update',
28
+ 'encode_dict',
29
+ 'str2file',
30
+ 'PredicateBuilder',
31
+ 'predicate_builder',
32
+ 'FilterSet',
33
+ 'timer',
34
+ 'ports_release',
35
+ 'importer',
36
+ 'filetree_display',
37
+ 'DEFAULT_SFX',
38
+ ]
@@ -0,0 +1,177 @@
1
+ import fnmatch
2
+ import re
3
+
4
+ from pathlib import Path
5
+ from typing import AnyStr
6
+
7
+ from collections.abc import Callable
8
+ from functools import partial
9
+ from typing import TypeAlias
10
+
11
+
12
+ FilterSet: TypeAlias = list[str] | set[str] | None
13
+
14
+
15
+ def update(obj: object, attrs: dict) -> None:
16
+ """Updates an object's attributes from a dictionary.
17
+ Uses direct __dict__ modification if possible for performance,
18
+ otherwise falls back to setattr for objects without __dict__ (e.g., __slots__).
19
+
20
+ Args:
21
+ obj: The object whose attributes will be updated.
22
+ attrs: Dictionary of attribute names and values.
23
+
24
+ Raises:
25
+ AttributeError: If an attribute cannot be set on an object that lacks __dict__.
26
+ """
27
+ if hasattr(obj, '__dict__'):
28
+ obj.__dict__.update(attrs)
29
+ else:
30
+ for key, value in attrs.items():
31
+ try:
32
+ setattr(obj, key, value)
33
+ except AttributeError as e:
34
+ raise AttributeError(
35
+ f"Cannot set attribute '{key}' on object '{obj}': {e}"
36
+ )
37
+
38
+
39
+ def encode_dict(input_dict: dict) -> bytes:
40
+ """
41
+ Encodes the values of a dictionary into a single bytes object.
42
+
43
+ Each value in the dictionary is converted to its string representation, encoded as bytes,
44
+ and concatenated together with a single space (b' ') separator.
45
+
46
+ Parameters:
47
+ input_dict (dict): The dictionary whose values are to be encoded.
48
+
49
+ Returns:
50
+ bytes: A single bytes object containing all values, separated by spaces.
51
+
52
+ Example:
53
+ >>> encode_dict({"a": 1, "b": "test"})
54
+ b'1 test'
55
+
56
+ Raises:
57
+ TypeError: If input_dict is not a dictionary.
58
+ """
59
+ if not isinstance(input_dict, dict):
60
+ raise TypeError('input_dict must be a dictionary.')
61
+ return b' '.join(str(v).encode() for v in input_dict.values())
62
+
63
+
64
+ def str2file(
65
+ content: AnyStr, filepath: str, mode: str = 'w', enc: str = 'utf-8'
66
+ ) -> None:
67
+ """Simply save file directly from any string content.
68
+
69
+ Args:
70
+ content (AnyStr): String or bytes to write. Must match the mode type, e.g. bytes for binary modes.
71
+ filepath (str): Full file path to write to.
72
+ mode (str): File open mode; see the documentation for Path.open. Defaults to 'w'.
73
+ enc (str): Encoding used in text modes; ignored in binary modes. Defaults to 'utf-8'.
74
+ """
75
+ out_file_path = Path(filepath)
76
+ try:
77
+ if 'b' in mode:
78
+ with out_file_path.open(mode) as f:
79
+ f.write(content)
80
+ else:
81
+ with out_file_path.open(mode, encoding=enc) as f:
82
+ f.write(content)
83
+
84
+ except PermissionError as e:
85
+ raise PermissionError(f"Cannot write to '{out_file_path}': {e}")
86
+ except OSError as e:
87
+ raise OSError(f"Error writing file '{out_file_path}': {e}")
88
+
89
+
90
+ class PredicateBuilder:
91
+ def build_predicate(
92
+ self, allow: FilterSet, block: FilterSet
93
+ ) -> Callable[[str], bool]:
94
+ """Build a memory-efficient predicate function."""
95
+ compile_patts = self.compile_patts
96
+
97
+ allow_lits, allow_patts = compile_patts(allow)
98
+ block_lits, block_patts = compile_patts(block)
99
+
100
+ flag = (
101
+ 1 if allow_lits or allow_patts else 0,
102
+ 1 if block_lits or block_patts else 0,
103
+ )
104
+
105
+ match flag: # (allow, block)
106
+ case (0, 0):
107
+ return lambda name: True
108
+
109
+ case (0, 1):
110
+ return partial(
111
+ self._match_patt_with_lits,
112
+ name_patts=block_patts,
113
+ name_lits=block_lits,
114
+ negate=True,
115
+ )
116
+
117
+ case (1, 0):
118
+ return partial(
119
+ self._match_patt_with_lits,
120
+ name_patts=allow_patts,
121
+ name_lits=allow_lits,
122
+ negate=False,
123
+ )
124
+
125
+ case (1, 1):
126
+ return partial(
127
+ self._allow_block_predicate,
128
+ allow_lits=allow_lits,
129
+ allow_patts=allow_patts,
130
+ block_lits=block_lits,
131
+ block_patts=block_patts,
132
+ )
133
+
134
+ @staticmethod
135
+ def compile_patts(fs: FilterSet) -> tuple[set[str], list[re.Pattern]]:
136
+ if not fs:
137
+ return set(), []
138
+ literals, patterns = set(), []
139
+ for item in fs:
140
+ if '*' in item or '?' in item or '[' in item:
141
+ patterns.append(re.compile(fnmatch.translate(item)))
142
+ else:
143
+ literals.add(item)
144
+ return literals, patterns
145
+
146
+ @staticmethod
147
+ def _match_patts(name: str, patterns: list[re.Pattern]) -> bool:
148
+ """Return True if name matches any compiled regex pattern."""
149
+ return any(pat.fullmatch(name) for pat in patterns)
150
+
151
+ def _match_patt_with_lits(
152
+ self,
153
+ name: str,
154
+ *,
155
+ name_lits: set[str],
156
+ name_patts: list[re.Pattern],
157
+ negate: bool = False,
158
+ ) -> bool:
159
+ """Return True if name is in literals or matches any pattern."""
160
+ res = name in name_lits or self._match_patts(name, name_patts)
161
+ return not res if negate else res
162
+
163
+ def _allow_block_predicate(
164
+ self,
165
+ name: str,
166
+ *,
167
+ allow_lits: set[str],
168
+ allow_patts: list[re.Pattern],
169
+ block_lits: set[str],
170
+ block_patts: list[re.Pattern],
171
+ ) -> bool:
172
+ """Return True if name is allowed and not blocked (block takes precedence)."""
173
+ if name in block_lits or self._match_patts(name, block_patts):
174
+ return False
175
+ if name in allow_lits or self._match_patts(name, allow_patts):
176
+ return True
177
+ return False
@@ -0,0 +1,31 @@
1
+ from types import ModuleType
2
+ from typing import Any
3
+
4
+ import importlib
5
+ from nano_dev_utils.common import update
6
+
7
+
8
+ class Importer:
9
+ def __init__(self):
10
+ self.imported_modules = {}
11
+
12
+ def update(self, attrs: dict) -> None:
13
+ update(self, attrs)
14
+
15
+ def import_mod_from_lib(self, library: str, module_name: str) -> ModuleType | Any:
16
+ """Lazily imports and caches a specific submodule from a given library.
17
+ :param library: The name of the library.
18
+ :param module_name: The name of the module to import.
19
+ :return: The imported module.
20
+ """
21
+ if module_name in self.imported_modules:
22
+ return self.imported_modules[module_name]
23
+
24
+ lib_mod = f'{library}.{module_name}'
25
+
26
+ try:
27
+ module = importlib.import_module(lib_mod)
28
+ self.imported_modules[module_name] = module
29
+ return module
30
+ except ModuleNotFoundError as e:
31
+ raise ImportError(f'Could not import {lib_mod}') from e
@@ -0,0 +1,219 @@
1
+ import os
2
+ import re
3
+
4
+ from collections.abc import Generator
5
+ from pathlib import Path
6
+ from typing import Any, Callable
7
+
8
+ from .common import str2file, FilterSet, PredicateBuilder
9
+
10
+
11
+ DEFAULT_SFX = '_filetree.txt'
12
+
13
+ STYLES: list[str] = [' ', '-', '—', '_', '*', '>', '<', '+', '.']
14
+
15
+ _NUM_SPLIT = re.compile(r'(\d+)').split
16
+
17
+
18
+ class FileTreeDisplay:
19
+ """Generate and display a visual file tree of a directory.
20
+
21
+ This class builds a directory tree structure and yields formatted
22
+ visual representations of directories and files.
23
+ Supports exclusion lists, configurable indentation, and custom prefix styles.
24
+ """
25
+
26
+ def __init__(
27
+ self,
28
+ root_dir: str | None = None,
29
+ filepath: str | None = None,
30
+ ignore_dirs: FilterSet = None,
31
+ ignore_files: FilterSet = None,
32
+ include_dirs: FilterSet = None,
33
+ include_files: FilterSet = None,
34
+ style: str = ' ',
35
+ indent: int = 2,
36
+ files_first: bool = False,
37
+ sort_key_name: str = 'natural',
38
+ reverse: bool = False,
39
+ custom_sort: Callable[[str], Any] | None = None,
40
+ save2file: bool = True,
41
+ printout: bool = False,
42
+ ) -> None:
43
+ """Initialize the FileTreeDisplay instance.
44
+
45
+ Args:
46
+ root_dir (str): Root directory to traverse.
47
+ filepath (str | None): Full output file path.
48
+ ignore_dirs (list[str] | set[str] | None): Directory names or patterns to ignore.
49
+ ignore_files (list[str] | set[str] | None): File names or patterns to ignore.
50
+ include_dirs (list[str] | set[str] | None): Directory names or patterns to include.
51
+ include_files (list[str] | set[str] | None): File names or patterns to include.
52
+ style (str): Character(s) used to represent hierarchy levels. Defaults to " ".
53
+ indent (int): Number of style characters used per hierarchy level. Defaults to 2.
54
+ files_first (bool): Determines whether to list files first. Defaults to False.
55
+ sort_key_name (str): sorting key name, e.g. 'lex' for lexicographic or 'custom'. Defaults to 'natural'.
56
+ '' means no sorting.
57
+ reverse (bool): reversed sorting.
58
+ custom_sort (Callable[[str], Any] | None): Custom sort key function, used when sort_key_name='custom'.
59
+ save2file (bool): save file tree info to a file.
60
+ printout (bool): print file tree info.
61
+ """
62
+ self.root_path = Path(root_dir) if root_dir else Path.cwd()
63
+ self.filepath = filepath
64
+ self.ignore_dirs = set(ignore_dirs or [])
65
+ self.ignore_files = set(ignore_files or [])
66
+ self.include_dirs = set(include_dirs or [])
67
+ self.include_files = set(include_files or [])
68
+ self.style = style
69
+ self.indent = indent
70
+ self.files_first = files_first
71
+ self.sort_key_name = sort_key_name
72
+ self.reverse = reverse
73
+ self.custom_sort = custom_sort
74
+ self.save2file = save2file
75
+ self.printout = printout
76
+
77
+ self.sort_keys = {
78
+ 'natural': self._nat_key,
79
+ 'lex': self._lex_key,
80
+ 'custom': self.custom_sort,
81
+ '': None,
82
+ }
83
+
84
+ self.pb = PredicateBuilder()
85
+ self.dir_filter = self.pb.build_predicate(self.include_dirs, self.ignore_dirs)
86
+ self.file_filter = self.pb.build_predicate(
87
+ self.include_files, self.ignore_files
88
+ )
89
+
90
+ def init(self, *args, **kwargs) -> None:
91
+ self.__init__(*args, **kwargs)
92
+
93
+ def update(self, attrs: dict) -> None:
94
+ self.__dict__.update(attrs)
95
+ pattern = re.compile(r'^(ign|inc)')
96
+ if any(pattern.match(key) for key in attrs):
97
+ self.update_predicates()
98
+
99
+ def update_predicates(self):
100
+ self.dir_filter = self.pb.build_predicate(self.include_dirs, self.ignore_dirs)
101
+ self.file_filter = self.pb.build_predicate(
102
+ self.include_files, self.ignore_files
103
+ )
104
+
105
+ @staticmethod
106
+ def _nat_key(name: str) -> list[int | str | Any]:
107
+ """Natural sorting key"""
108
+ return [
109
+ int(part) if part.isdigit() else part.lower() for part in _NUM_SPLIT(name)
110
+ ]
111
+
112
+ @staticmethod
113
+ def _lex_key(name: str) -> str:
114
+ """Lexicographic sorting key"""
115
+ return name.lower()
116
+
117
+ def file_tree_display(self) -> str:
118
+ """Generate and save the directory tree to a text file.
119
+
120
+ Returns:
121
+ str: The path to the saved output file when saving is enabled and a filepath is set,
122
+ or the whole built tree, as a string of newline-separated lines.
123
+ """
124
+ root_path_str = str(self.root_path)
125
+ filepath = self.filepath
126
+ if not self.root_path.is_dir():
127
+ raise NotADirectoryError(f"The path '{root_path_str}' is not a directory.")
128
+
129
+ if self.style not in STYLES:
130
+ raise ValueError(f"'{self.style}' is invalid: must be one of {STYLES}\n")
131
+
132
+ iterator = self.build_tree(root_path_str)
133
+
134
+ tree_info = self.get_tree_info(iterator)
135
+
136
+ if self.save2file and filepath:
137
+ str2file(tree_info, filepath)
138
+ return filepath
139
+
140
+ if self.printout:
141
+ print(tree_info)
142
+
143
+ return tree_info
144
+
145
+ def get_tree_info(self, iterator: Generator[str, None, None]) -> str:
146
+ lines = [f'{self.root_path.name}/']
147
+ lines.extend(list(iterator))
148
+ return '\n'.join(lines)
149
+
150
+ def build_tree(self, dir_path: str, prefix: str = '') -> Generator[str, None, None]:
151
+ """Yields formatted directory tree lines, using a recursive DFS.
152
+ By default, subdirectories appear before files at each level (see files_first).
153
+
154
+ Args:
155
+ dir_path (str): The directory path currently being traversed.
156
+ prefix (str): Hierarchical prefix applied to each level.
157
+
158
+ Yields:
159
+ str: A formatted string representing either a directory or a file.
160
+ """
161
+ files_first = self.files_first
162
+ dir_filter, file_filter = self.dir_filter, self.file_filter
163
+ sort_key_name, reverse = self.sort_key_name, self.reverse
164
+ sort_key = self.sort_keys.get(self.sort_key_name)
165
+ curr_indent = self.style * self.indent
166
+
167
+ next_prefix = prefix + curr_indent
168
+
169
+ if sort_key is None and sort_key_name:  # '' disables sorting
170
+ if sort_key_name == 'custom':
171
+ raise ValueError(
172
+ "custom_sort function must be specified when sort_key_name='custom'"
173
+ )
174
+ raise ValueError(f'Invalid sort key name: {sort_key_name}')
175
+
176
+ try:
177
+ with os.scandir(dir_path) as entries:
178
+ dirs, files = [], []
179
+ append_dir, append_file = dirs.append, files.append
180
+ for entry in entries:
181
+ name = entry.name
182
+ if entry.is_dir():
183
+ if dir_filter(name):
184
+ append_dir((name, entry.path))
185
+ else:
186
+ if file_filter(name):
187
+ append_file(name)
188
+
189
+ except (PermissionError, OSError) as e:
190
+ msg = (
191
+ '[Permission Denied]'
192
+ if isinstance(e, PermissionError)
193
+ else '[Error reading directory]'
194
+ )
195
+ yield f'{next_prefix}{msg}'
196
+ return
197
+
198
+ if sort_key:
199
+ dirs.sort(key=lambda d: sort_key(d[0]), reverse=reverse)
200
+ files.sort(key=sort_key, reverse=reverse)
201
+
202
+ if files_first:
203
+ for name in files:
204
+ yield next_prefix + name
205
+
206
+ for name, path in dirs:
207
+ yield f'{next_prefix}{name}/'
208
+ yield from self.build_tree(path, next_prefix)
209
+
210
+ if not files_first:
211
+ for name in files:
212
+ yield next_prefix + name
213
+
214
+ def format_out_path(self) -> Path:
215
+ alt_file_name = f'{self.root_path.name}{DEFAULT_SFX}'
216
+ out_file = (
217
+ Path(self.filepath) if self.filepath else (self.root_path / alt_file_name)
218
+ )
219
+ return out_file
@@ -0,0 +1,164 @@
1
+ import platform
2
+ import subprocess
3
+ import logging
4
+
5
+ from .common import update
6
+
7
+
8
+ lgr = logging.getLogger(__name__)
9
+ """Module-level logger. Configure using logging.basicConfig() in your application."""
10
+
11
+ PROXY_SERVER = 6277
12
+ INSPECTOR_CLIENT = 6274
13
+
14
+
15
+ class PortsRelease:
16
+ def __init__(self, default_ports: list[int] | None = None):
17
+ self.default_ports: list[int] = (
18
+ default_ports
19
+ if default_ports is not None
20
+ else [PROXY_SERVER, INSPECTOR_CLIENT]
21
+ )
22
+
23
+ @staticmethod
24
+ def _log_process_found(port: int, pid: int) -> str:
25
+ return f'Process ID (PID) found for port {port}: {pid}.'
26
+
27
+ @staticmethod
28
+ def _log_process_terminated(pid: int, port: int) -> str:
29
+ return f'Process {pid} (on port {port}) terminated successfully.'
30
+
31
+ @staticmethod
32
+ def _log_no_process(port: int) -> str:
33
+ return f'No process found listening on port {port}.'
34
+
35
+ @staticmethod
36
+ def _log_invalid_port(port: int) -> str:
37
+ return f'Invalid port number: {port}. Skipping.'
38
+
39
+ @staticmethod
40
+ def _log_terminate_failed(
41
+ pid: int, port: int | None = None, error: str | None = None
42
+ ) -> str:
43
+ base_msg = f'Failed to terminate process {pid}'
44
+ if port:
45
+ base_msg += f' (on port {port})'
46
+ if error:
47
+ base_msg += f'. Error: {error}'
48
+ return base_msg
49
+
50
+ @staticmethod
51
+ def _log_line_parse_failed(line: str) -> str:
52
+ return f'Could not parse PID from line: {line}'
53
+
54
+ @staticmethod
55
+ def _log_unexpected_error(e: Exception) -> str:
56
+ return f'An unexpected error occurred: {e}'
57
+
58
+ @staticmethod
59
+ def _log_cmd_error(error: bytes) -> str:
60
+ return f'Error running command: {error.decode()}'
61
+
62
+ @staticmethod
63
+ def _log_unsupported_os() -> str:
64
+ return f'Unsupported OS: {platform.system()}'
65
+
66
+ def init(self, *args, **kwargs) -> None:
67
+ self.__init__(*args, **kwargs)
68
+
69
+ def update(self, attrs: dict) -> None:
70
+ update(self, attrs)
71
+
72
+ def get_pid_by_port(self, port: int) -> int | None:
73
+ """Gets the process ID (PID) listening on the specified port."""
74
+ system = platform.system()
75
+ try:
76
+ cmd: str = {
77
+ 'Windows': f'netstat -ano | findstr :{port}',
78
+ 'Linux': f'ss -lntp | grep :{port}',
79
+ 'Darwin': f'lsof -i :{port}',
80
+ }.get(system, '')
81
+ if not cmd:
82
+ lgr.error(self._log_unsupported_os())
83
+ return None
84
+
85
+ process = subprocess.Popen(
86
+ cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
87
+ )
88
+ output, error = process.communicate()
89
+ if error:
90
+ lgr.error(self._log_cmd_error(error))
91
+ return None
92
+
93
+ lines: list[str] = output.decode().splitlines()
94
+ for line in lines:
95
+ if str(port) in line:
96
+ parts: list[str] = line.split()
97
+ if system == 'Windows' and len(parts) > 4:
98
+ try:
99
+ return int(parts[4])
100
+ except ValueError:
101
+ lgr.error(self._log_line_parse_failed(line))
102
+ return None
103
+ elif system == 'Linux':
104
+ for part in parts:
105
+ if 'pid=' in part:
106
+ try:
107
+ return int(part.split('=')[1])
108
+ except ValueError:
109
+ lgr.error(self._log_line_parse_failed(line))
110
+ return None
111
+ elif system == 'Darwin' and len(parts) > 1:
112
+ try:
113
+ return int(parts[1])
114
+ except ValueError:
115
+ lgr.error(self._log_line_parse_failed(line))
116
+ return None
117
+ return None
118
+ except Exception as e:
119
+ lgr.error(self._log_unexpected_error(e))
120
+ return None
121
+
122
+ def kill_process(self, pid: int) -> bool:
123
+ """Kills the process with the specified PID."""
124
+ try:
125
+ cmd: str = {
126
+ 'Windows': f'taskkill /F /PID {pid}',
127
+ 'Linux': f'kill -9 {pid}',
128
+ 'Darwin': f'kill -9 {pid}',
129
+ }.get(platform.system(), '') # fallback to empty string
130
+ if not cmd:
131
+ lgr.error(self._log_unsupported_os())
132
+ return False
133
+ process = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
134
+ _, error = process.communicate()
135
+ if process.returncode:
136
+ error_msg = error.decode()
137
+ lgr.error(self._log_terminate_failed(pid=pid, error=error_msg))
138
+ return False
139
+ return True
140
+ except Exception as e:
141
+ lgr.error(self._log_unexpected_error(e))
142
+ return False
143
+
144
+ def release_all(self, ports: list[int] | None = None) -> None:
145
+ try:
146
+ ports_to_release: list[int] = self.default_ports if ports is None else ports
147
+
148
+ for port in ports_to_release:
149
+ if not isinstance(port, int):
150
+ lgr.error(self._log_invalid_port(port))
151
+ continue
152
+
153
+ pid: int | None = self.get_pid_by_port(port)
154
+ if pid is None:
155
+ lgr.info(self._log_no_process(port))
156
+ continue
157
+
158
+ lgr.info(self._log_process_found(port, pid))
159
+ if self.kill_process(pid):
160
+ lgr.info(self._log_process_terminated(pid, port))
161
+ else:
162
+ lgr.error(self._log_terminate_failed(pid=pid, port=port))
163
+ except Exception as e:
164
+ lgr.error(self._log_unexpected_error(e))
@@ -0,0 +1,214 @@
1
+ from functools import wraps
2
+ import time
3
+ import logging
4
+ import inspect
5
+
6
+ from typing import (
7
+ TypeVar,
8
+ ParamSpec,
9
+ Callable,
10
+ Awaitable,
11
+ Any,
12
+ cast,
13
+ )
14
+
15
+ from nano_dev_utils.common import update
16
+
17
+
18
+ lgr = logging.getLogger(__name__)
19
+ """Module-level logger. Configure using logging.basicConfig() in your application."""
20
+
21
+ P = ParamSpec('P')
22
+ R = TypeVar('R')
23
+
24
+
25
+ class Timer:
26
+ def __init__(
27
+ self, precision: int = 4, verbose: bool = False, printout: bool = False
28
+ ):
29
+ self.precision = precision
30
+ self.verbose = verbose
31
+ self.printout = printout
32
+
33
+ def init(self, *args, **kwargs) -> None:
34
+ self.__init__(*args, **kwargs)
35
+
36
+ def update(self, attrs: dict[str, Any]) -> None:
37
+ update(self, attrs)
38
+
39
+ def res_formatter(self, elapsed_ns: float, *, precision: int = 4) -> str:
40
+ return self._duration_formatter(elapsed_ns, precision=precision)
41
+
42
+ def timeit(
43
+ self,
44
+ iterations: int = 1,
45
+ timeout: float | None = None,
46
+ per_iteration: bool = False,
47
+ ) -> Callable[[Callable[P, Any]], Callable[P, Any]]:
48
+ """Decorator that measures execution time for sync / async functions.
49
+
50
+ Args:
51
+ iterations: Number of times to run the function (averaged for reporting).
52
+ timeout: Optional max allowed time (in seconds); raises TimeoutError if exceeded.
53
+ per_iteration: If True, enforces timeout per iteration, else cumulatively.
54
+
55
+ Returns:
56
+ A decorated function that behaves identically to the original, with timing logged.
57
+ """
58
+
59
+ RP = ParamSpec('RP')
60
+ RR = TypeVar('RR')
61
+
62
+ precision, verbose, printout = self.precision, self.verbose, self.printout
63
+ check_timeout = self._check_timeout
64
+ duration_formatter = self._duration_formatter
65
+ formatted_msg = self._formatted_msg
66
+
67
+ def decorator(
68
+ func: Callable[RP, RR] | Callable[RP, Awaitable[RR]],
69
+ ) -> Callable[RP, Any]:
70
+ if inspect.iscoroutinefunction(func):
71
+ async_func = cast(Callable[RP, Awaitable[RR]], func)
72
+
73
+ @wraps(func)
74
+ async def async_wrapper(*args: RP.args, **kwargs: RP.kwargs) -> RR:
75
+ func_name = func.__name__
76
+ total_elapsed_ns = 0
77
+ result: RR | None = None
78
+ for i in range(1, iterations + 1):
79
+ start_ns = time.perf_counter_ns()
80
+ result = await async_func(*args, **kwargs)
81
+ duration_ns = time.perf_counter_ns() - start_ns
82
+ total_elapsed_ns += duration_ns
83
+
84
+ check_timeout(
85
+ func_name,
86
+ i,
87
+ duration_ns,
88
+ total_elapsed_ns,
89
+ timeout,
90
+ per_iteration,
91
+ )
92
+ avg_elapsed_ns = total_elapsed_ns / iterations
93
+ duration_str = duration_formatter(avg_elapsed_ns, precision)
94
+
95
+ msg = formatted_msg(
96
+ func_name, args, kwargs, duration_str, iterations, verbose
97
+ )
98
+ lgr.info(msg)
99
+ if printout:
100
+ print(msg)
101
+ return cast(RR, result)
102
+
103
+ return cast(Callable[RP, Awaitable[RR]], async_wrapper)
104
+ else:
105
+ sync_func = cast(Callable[RP, RR], func)
106
+
107
+ @wraps(func)
108
+ def sync_wrapper(*args: RP.args, **kwargs: RP.kwargs) -> RR:
109
+ func_name = func.__name__
110
+ total_elapsed_ns = 0
111
+ result: RR | None = None
112
+ for i in range(1, iterations + 1):
113
+ start_ns = time.perf_counter_ns()
114
+ result = sync_func(*args, **kwargs)
115
+ duration_ns = time.perf_counter_ns() - start_ns
116
+ total_elapsed_ns += duration_ns
117
+ check_timeout(
118
+ func_name,
119
+ i,
120
+ duration_ns,
121
+ total_elapsed_ns,
122
+ timeout,
123
+ per_iteration,
124
+ )
125
+ avg_elapsed_ns = total_elapsed_ns / iterations
126
+ duration_str = duration_formatter(avg_elapsed_ns, precision)
127
+ msg = formatted_msg(
128
+ func_name, args, kwargs, duration_str, iterations, verbose
129
+ )
130
+ lgr.info(msg)
131
+ if printout:
132
+ print(msg)
133
+ return cast(RR, result)
134
+
135
+ return cast(Callable[RP, RR], sync_wrapper)
136
+
137
+ return decorator
138
+
139
+ def _check_timeout(
140
+ self,
141
+ func_name: str,
142
+ i: int,
143
+ duration_ns: float,
144
+ total_elapsed_ns: float,
145
+ timeout: float | None,
146
+ per_iteration: bool,
147
+ ) -> None:
148
+ """Raise TimeoutError if timeout is exceeded."""
149
+ if timeout is None:
150
+ return
151
+ precision = self.precision
152
+ timeout_exceeded = f'{func_name} exceeded {timeout:.{precision}f}s'
153
+ if per_iteration:
154
+ duration_s = duration_ns / 1e9
155
+ if duration_s > timeout:
156
+ raise TimeoutError(
157
+ f'{timeout_exceeded} on iteration {i} '
158
+ f'(took {duration_s:.{precision}f}s)'
159
+ )
160
+ else:
161
+ total_duration_s = total_elapsed_ns / 1e9
162
+ if total_duration_s > timeout:
163
+ raise TimeoutError(
164
+ f'{timeout_exceeded} after {i} iterations '
165
+ f'(took {total_duration_s:.{precision}f}s)'
166
+ )
167
+
168
+ @staticmethod
169
+ def _duration_formatter(elapsed_ns: float, precision: int = 4) -> str:
170
+ """Convert nanoseconds to the appropriate time unit, supporting multi-unit results."""
171
+ ns_sec, ns_min, ns_hour = 1e9, 6e10, 3.6e12
172
+ ns_ms, ns_us = 1e6, 1e3
173
+
174
+ if elapsed_ns < ns_sec:
175
+ if elapsed_ns >= ns_ms:
176
+ return f'{elapsed_ns / ns_ms:.{precision}f} ms'
177
+ elif elapsed_ns >= ns_us:
178
+ return f'{elapsed_ns / ns_us:.{precision}f} μs'
179
+ return f'{elapsed_ns:.2f} ns'
180
+
181
+ if elapsed_ns < ns_min:
182
+ seconds = elapsed_ns / ns_sec
183
+ return f'{seconds:.1f} s' if seconds < 10 else f'{seconds:.0f} s'
184
+
185
+ if elapsed_ns >= ns_hour:
186
+ hours = int(elapsed_ns / ns_hour)
187
+ rem = elapsed_ns % ns_hour
188
+ mins = int(rem / ns_min)
189
+ secs = int((rem % ns_min) / ns_sec)
190
+
191
+ parts = [f'{hours} h']
192
+ if mins:
193
+ parts.append(f'{mins} m')
194
+ if secs:
195
+ parts.append(f'{secs} s')
196
+ return ' '.join(parts)
197
+
198
+ else:
199
+ minutes = int(elapsed_ns / ns_min)
200
+ seconds = int((elapsed_ns % ns_min) / ns_sec)
201
+ return f'{minutes} m {seconds} s' if seconds else f'{minutes} m'
202
+
203
+ @staticmethod
204
+ def _formatted_msg(
205
+ func_name: str,
206
+ args: tuple,
207
+ kwargs: dict,
208
+ duration_str: str,
209
+ iterations: int,
210
+ verbose: bool,
211
+ ) -> str:
212
+ extra_info = f'{args} {kwargs} ' if verbose else ''
213
+ iter_info = f' (avg. over {iterations} runs)' if iterations > 1 else ''
214
+ return f'{func_name} {extra_info}took {duration_str}{iter_info}'
@@ -0,0 +1,280 @@
1
+ Metadata-Version: 2.4
2
+ Name: nano_dev_utils
3
+ Version: 1.4.2
4
+ Summary: A collection of small Python utilities for developers.
5
+ Project-URL: Homepage, https://github.com/yaronday/nano_utils
6
+ Project-URL: Issues, https://github.com/yaronday/nano_utils/issues
7
+ Project-URL: license, https://github.com/yaronday/nano_dev_utils/blob/master/LICENSE
8
+ Author-email: Yaron Dayan <yaronday77@gmail.com>
9
+ License: MIT
10
+ License-File: LICENSE
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python :: 3
13
+ Requires-Python: >=3.10
14
+ Requires-Dist: build>=1.3.0
15
+ Description-Content-Type: text/markdown
16
+
17
+ # nano_dev_utils
18
+
19
+ A collection of small Python utilities for developers.
20
+
21
+ ## Modules
22
+
23
+ ### `timers.py`
24
+
25
+ This module provides a `Timer` class for measuring the execution time of code blocks and functions with additional features like timeout control and multi-iteration averaging.
26
+
27
+ #### `Timer` Class
28
+
29
+ * **`__init__(self, precision: int = 4, verbose: bool = False, printout: bool = False)`**: Initializes a `Timer` instance.
30
+ * `precision`: The number of decimal places to record and display time durations. Defaults to 4.
31
+ * `verbose`: Optionally displays the function's positional arguments (args) and keyword arguments (kwargs). Defaults to `False`.
32
+ * `printout`: Allows printing timing results to the console. Defaults to `False`.
33
+
34
+ * **`timeit(self, iterations: int = 1, timeout: float | None = None, per_iteration: bool = False) -> Callable[[Callable[P, Any]], Callable[P, Any]]`**:
40
+ Decorator that times either **sync** or **async** function execution with advanced features:
41
+ * `iterations`: Number of times to run the function (for averaging). Defaults to 1.
42
+ * `timeout`: Maximum allowed execution time in seconds. When exceeded:
43
+ * Raises `TimeoutError` immediately
44
+ * **Warning:** The function execution will be aborted mid-operation
45
+ * No return value will be available if timeout occurs
46
+ * `per_iteration`: If True, applies timeout check to each iteration; otherwise checks total time across all iterations.
47
+ * Features:
48
+ * Records execution times
49
+ * Handles timeout conditions
50
+ * Calculates average execution time across iterations
51
+ * Logs the function name and execution time (with optional arguments)
52
+ * Returns the result of the original function (unless timeout occurs)
53
+
54
+ #### Example Usage:
55
+
56
+ ```python
57
+ import time
58
+ import logging
59
+ from nano_dev_utils import timer
60
+
61
+ # The Timer logs via the logging module (and can optionally print to console), so configure logging in your app, for instance:
62
+ logging.basicConfig(filename='timer example.log',
63
+ level=logging.INFO, # DEBUG, WARNING, ERROR, CRITICAL
64
+ format='%(asctime)s - %(levelname)s: %(message)s',
65
+ datefmt='%d-%m-%Y %H:%M:%S')
66
+
67
+ # Basic timing
68
+ @timer.timeit()
69
+ def my_function(a, b=10):
70
+ """A sample function."""
71
+ time.sleep(0.1)
72
+ return a + b
73
+
74
+ timer.init(precision=6, verbose=True)
75
+ '''Alternative options:
76
+ timer.update({'precision': 6, 'verbose': True}) # 1. Using update method
77
+
78
+ from nano_dev_utils.timers import Timer # 2. explicit instantiation
79
+ timer = Timer(precision=6, verbose=True)
80
+ '''
81
+
82
+ timer.update({'printout': True}) # allow printing to console
83
+
84
+ # Advanced usage with timeout and iterations
85
+ @timer.timeit(iterations=5, timeout=0.5, per_iteration=True)
86
+ def critical_function(x):
87
+ """Function with timeout check per iteration."""
88
+ time.sleep(0.08)
89
+ return x * 2
90
+
91
+ result1 = my_function(5, b=20) # Shows args/kwargs and timing
92
+ result2 = critical_function(10) # Runs 5 times with per-iteration timeout
93
+ ```
94
+
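+ Since `timeit` also wraps coroutine functions, async code can be timed the same way; a minimal sketch (the coroutine name and delay are illustrative):
+
+ ```python
+ import asyncio
+ from nano_dev_utils import timer
+
+ @timer.timeit(iterations=3)
+ async def fetch_data(delay: float = 0.05):
+     """A sample coroutine; sleeps to simulate I/O."""
+     await asyncio.sleep(delay)
+     return delay
+
+ result = asyncio.run(fetch_data())  # average over 3 awaited runs is logged
+ ```
+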
95
+ ### `dynamic_importer.py`
96
+
97
+ This module provides an `Importer` class for lazy loading and caching module imports.
98
+
99
+ #### `Importer` Class
100
+
101
+ * **`__init__(self)`**: Initializes an `Importer` instance with an empty dictionary `imported_modules` to cache imported modules.
102
+
103
+ * **`import_mod_from_lib(self, library: str, module_name: str) -> ModuleType | Any`**: Lazily imports a module from a specified library and caches it.
104
+ * `library` (str): The name of the library (e.g., "os", "requests").
105
+ * `module_name` (str): The name of the submodule to import from the library (e.g., "path" for `os.path`, "decoder" for `json.decoder`).
106
+ * Returns the imported module. If the module has already been imported, it returns a cached instance.
107
+ * Raises `ImportError` if the module cannot be found.
108
+
109
+ #### Example Usage:
110
+
111
+ ```python
112
+ from nano_dev_utils import importer
113
+
114
+ os_path = importer.import_mod_from_lib("os", "path")
115
+ print(f"Imported os.path: {os_path}")
116
+
117
+ json_decoder = importer.import_mod_from_lib("json", "decoder")
118
+ print(f"Imported requests.get: {requests_get}")
119
+
120
+ # Subsequent calls will return the cached module
121
+ os_path_again = importer.import_mod_from_lib("os", "path")
122
+ print(f"Imported os.path again (cached): {os_path_again}")
123
+ ```
124
+
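+ A missing submodule raises `ImportError`, so lookups can be guarded; a minimal sketch (the submodule name below is deliberately bogus, for illustration only):
+
+ ```python
+ from nano_dev_utils import importer
+
+ try:
+     importer.import_mod_from_lib("json", "no_such_submodule")  # bogus submodule
+ except ImportError as err:
+     print(f"Import failed, falling back: {err}")
+ ```
+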
125
+ ### `release_ports.py`
126
+
127
+ This module provides a `PortsRelease` class to identify and release processes
128
+ listening on specified TCP ports.
129
+ It supports Windows, Linux, and macOS.
130
+
131
+ #### `PortsRelease` Class
132
+
133
+ * **`__init__(self, default_ports: list[int] | None = None)`**:
134
+ * Initializes a `PortsRelease` instance.
135
+ * `default_ports`: A list of default ports to manage. If not provided, it defaults to `[6277, 6274]`.
136
+
137
+ * **`get_pid_by_port(self, port: int) -> int | None`**: Attempts to find
138
+ a process ID (PID) listening on a given `port`.
139
+ * It uses platform-specific commands (`netstat`, `ss`, `lsof`).
140
+ * Returns the PID if found, otherwise `None`.
141
+
142
+ * **`kill_process(self, pid: int) -> bool`**: Attempts to kill the process
143
+ with the given `pid`.
144
+ * It uses platform-specific commands (`taskkill`, `kill -9`).
145
+ * Returns `True` if the process was successfully killed, `False` otherwise.
146
+
147
+ * **`release_all(self, ports: list[int] | None = None) -> None`**: Releases all processes listening on the specified `ports`.
148
+ * `ports`: A list of ports to release.
149
+ * If `None`, it uses the `default_ports` defined during initialization.
150
+ * For each port, it first tries to get the PID and then attempts to kill the process.
151
+ * It logs the actions and any errors encountered. Invalid port numbers in the provided list are skipped.
152
+
153
+ #### Example Usage:
154
+
155
+ ```python
156
+ import logging
157
+ from nano_dev_utils import ports_release, PortsRelease
158
+
159
+
160
+ # configure the logger
161
+ logging.basicConfig(filename='port release.log',
162
+ level=logging.INFO, # DEBUG, WARNING, ERROR, CRITICAL
163
+ format='%(asctime)s - %(levelname)s: %(message)s',
164
+ datefmt='%d-%m-%Y %H:%M:%S')
165
+
166
+
167
+ ports_release.release_all()
168
+
169
+ # Create an instance with custom ports
170
+ custom_ports_releaser = PortsRelease(default_ports=[8080, 9000, 6274])
171
+ custom_ports_releaser.release_all(ports=[8080, 9000])
172
+
173
+ # Release only the default ports
174
+ ports_release.release_all()
175
+ ```
176
+
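+ For finer-grained control, the lower-level helpers can also be called directly; a minimal sketch (the port number is illustrative):
+
+ ```python
+ from nano_dev_utils import ports_release
+
+ port = 8080  # illustrative port
+ pid = ports_release.get_pid_by_port(port)
+ if pid is not None and ports_release.kill_process(pid):
+     print(f'Released port {port} (PID {pid})')
+ ```
+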
177
+ ### `file_tree_display.py`
178
+
179
+ This module provides a utility for generating a visually structured directory tree.
180
+ It supports recursive traversal, customizable hierarchy styles, and inclusion / exclusion
181
+ patterns for directories and files.
182
+ Output can be displayed in the console or saved to a file.
183
+
184
+
185
+ #### Key Features
186
+
187
+ - Recursively displays and logs directory trees
188
+ - Efficient directory traversal
189
+ - Blazing fast (see Benchmarks below)
190
+ - Generates human-readable file tree structure
191
+ - Supports including / ignoring specific directories or files via pattern matching
192
+ - Customizable tree display output
193
+ - Optionally saves the resulting tree to a text file
194
+ - Lightweight, flexible and easily configurable
195
+
196
+
197
+ ## Benchmarks
198
+
199
+ As measured on a dataset of 10,553 files and 1,235 folders (ca. 16 GB) using Python 3.10 on an SSD.
200
+ Avg. time was measured over 10 runs per configuration.
201
+
202
+ | Tool | Time (s) |
203
+ |-----------------|----------|
204
+ | FileTreeDisplay | 0.198 |
205
+ | Seedir | 4.378 |
206
+
207
+
208
+
209
+ #### Class Overview
210
+
211
+ **`FileTreeDisplay`**
212
+ Constructs and manages the visual representation of a directory structure.
213
+
214
+ **Initialization Parameters**
215
+
216
+ | Parameter | Type | Description |
217
+ |:---------------------------|:--------------------------------|:----------------------------------------------------------------------------|
218
+ | `root_dir` | `str` | Path to the directory to scan. |
219
+ | `filepath` | `str / None` | Optional output destination for the saved file tree. |
220
+ | `ignore_dirs` | `list[str] or set[str] or None` | Directory names or patterns to skip. |
221
+ | `ignore_files` | `list[str] or set[str] or None` | File names or patterns to skip. |
222
+ | `include_dirs` | `list[str] or set[str] or None` | Only include specified folder names or patterns. |
223
+ | `include_files`             | `list[str] or set[str] or None` | Only include specified file names or patterns (e.g. '*.pdf' to include only PDF files). |
224
+ | `style` | `str` | Character(s) used to mark hierarchy levels. Defaults to `' '`. |
225
+ | `indent`                    | `int`                           | Number of style characters per level. Defaults to `2`.                       |
226
+ | `files_first` | `bool` | Determines whether to list files first. Defaults to False. |
227
+ | `sort_key_name` | `str` | Sort key. 'lex' (lexicographic) or 'custom'. Defaults to 'natural'. |
228
+ | `reverse` | `bool` | Reversed sorting order. |
229
+ | `custom_sort` | `Callable[[str], Any] / None` | Custom sort key function. |
230
+ | `title` | `str` | Custom title shown in the output. |
231
+ | `save2file` | `bool` | Save file tree (folder structure) info into a file. |
232
+ | `printout` | `bool` | Print file tree info. |
233
+
234
+ #### Core Methods
235
+
236
+ - `file_tree_display() -> str`
237
+ Generates the directory tree. If `save2file` is enabled and a `filepath` is set, saves the output and returns the file path; otherwise returns the tree as a string (printing it when `printout` is enabled).
238
+ - `build_tree(dir_path: str, prefix: str = '') -> Generator[str, None, None]`
239
+ Recursively yields formatted lines representing directories and files (see the sketch below).
240
+
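+ Because `build_tree` is a generator, large trees can be streamed line by line instead of being built in memory; a minimal sketch using the package-level `filetree_display` instance (rooted at the current working directory):
+
+ ```python
+ from nano_dev_utils import filetree_display
+
+ # Stream the tree of the instance's root directory, one formatted line at a time.
+ for line in filetree_display.build_tree(str(filetree_display.root_path)):
+     print(line)
+ ```
+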
241
+
242
+ #### Example Usage
243
+
244
+ ```python
245
+ from pathlib import Path
246
+ from nano_dev_utils.file_tree_display import FileTreeDisplay
247
+
248
+ root = r'c:/your_root_dir'
249
+ target_path = r'c:/your_target_path'
250
+ filename = 'filetree.md'
251
+ filepath = str(Path(target_path, filename))
252
+
253
+ ftd = FileTreeDisplay(root_dir=root,
254
+ ignore_dirs=['.git', 'node_modules', '.idea'],
255
+ ignore_files=['.gitignore', '*.toml'],
256
+ style='—',
257
+ include_dirs=['src', 'tests', 'snapshots'],
258
+ filepath=filepath,
259
+ sort_key_name='custom',
260
+ custom_sort=(lambda x: any(ext in x.lower() for ext in ('jpg', 'png'))),
261
+ files_first=True,
262
+ reverse=True
263
+ )
264
+ ftd.file_tree_display()
265
+ ```
266
+
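+ The ready-made `filetree_display` instance exported by the package can also be reconfigured on the fly; a minimal sketch, with illustrative filter values:
+
+ ```python
+ from nano_dev_utils import filetree_display
+
+ # Updating ignore_*/include_* keys rebuilds the filter predicates automatically.
+ filetree_display.update({
+     'ignore_dirs': {'.git', '__pycache__'},
+     'save2file': False,
+     'printout': True,
+ })
+ tree_text = filetree_display.file_tree_display()  # printed and returned as a string
+ ```
+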
267
+
268
+ #### Error Handling
269
+
270
+ The module raises well-defined exceptions for common issues:
271
+
272
+ - `NotADirectoryError` when the path is not a directory
273
+ - `PermissionError` for unreadable directories or write-protected files
274
+ - `OSError` for general I/O or write failures
275
+
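+ A defensive call might therefore look like this minimal sketch:
+
+ ```python
+ from nano_dev_utils import filetree_display
+
+ try:
+     filetree_display.file_tree_display()
+ except (NotADirectoryError, PermissionError, OSError) as err:
+     print(f'File tree generation failed: {err}')
+ ```
+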
276
+ ***
277
+
278
+ ## License
279
+ This project is licensed under the MIT License.
280
+ See [LICENSE](LICENSE) for details.
@@ -0,0 +1,10 @@
1
+ nano_dev_utils/__init__.py,sha256=bJNCUyssMVyNmOey-god8A2kElC4nCR9B5DsdvUrKWw,1014
2
+ nano_dev_utils/common.py,sha256=MsY5n9lSOjvEu0wGvmd2zQamFLLbtbjodZku5W9tuWE,5873
3
+ nano_dev_utils/dynamic_importer.py,sha256=-Mh76366lI_mP2QA_jxiVfcKCHOHeukS_j4v7fTh0xw,1028
4
+ nano_dev_utils/file_tree_display.py,sha256=RMd2l1FZgO__9EmCSKRkZ6s7tYpCx6Fe7e83fAxNxg0,8234
5
+ nano_dev_utils/release_ports.py,sha256=yLWMMbN6j6kWtGTg-Nynn37-Q4b2rxkls9hs2sqeZjA,6081
6
+ nano_dev_utils/timers.py,sha256=H6pZBaCasy79ketkGYPwgcLYdNnLSrpIbH9ob0a7iCI,7851
7
+ nano_dev_utils-1.4.2.dist-info/METADATA,sha256=8_9FvdpfJnkDVtHx38tiY9__dp8zMuIl-m0bupKNAXI,12133
8
+ nano_dev_utils-1.4.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
9
+ nano_dev_utils-1.4.2.dist-info/licenses/LICENSE,sha256=Muenl7Bw_LdtHZtlOMAP7Kt97gDCq8WWp2605eDWhHU,1089
10
+ nano_dev_utils-1.4.2.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.27.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Yaron Dayan
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.