xulbux 1.6.4__py3-none-any.whl → 1.6.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release of xulbux has been flagged as potentially problematic.
- xulbux/__init__.py +2 -2
- xulbux/_cli_.py +9 -9
- xulbux/_consts_.py +71 -58
- xulbux/xx_code.py +37 -42
- xulbux/xx_color.py +49 -79
- xulbux/xx_console.py +197 -109
- xulbux/xx_data.py +177 -126
- xulbux/xx_env_path.py +1 -5
- xulbux/xx_file.py +1 -1
- xulbux/xx_format_codes.py +40 -38
- xulbux/xx_json.py +2 -5
- xulbux/xx_path.py +5 -5
- xulbux/xx_regex.py +18 -20
- xulbux/xx_string.py +28 -82
- xulbux/xx_system.py +5 -13
- {xulbux-1.6.4.dist-info → xulbux-1.6.6.dist-info}/METADATA +19 -17
- xulbux-1.6.6.dist-info/RECORD +21 -0
- xulbux-1.6.4.dist-info/RECORD +0 -21
- {xulbux-1.6.4.dist-info → xulbux-1.6.6.dist-info}/LICENSE +0 -0
- {xulbux-1.6.4.dist-info → xulbux-1.6.6.dist-info}/WHEEL +0 -0
- {xulbux-1.6.4.dist-info → xulbux-1.6.6.dist-info}/entry_points.txt +0 -0
- {xulbux-1.6.4.dist-info → xulbux-1.6.6.dist-info}/top_level.txt +0 -0
xulbux/xx_data.py
CHANGED

@@ -1,6 +1,9 @@
+from ._consts_ import COLOR
+from .xx_format_codes import FormatCodes
 from .xx_string import String
 
-from typing import TypeAlias, Union
+from typing import TypeAlias, Optional, Union
+import base64 as _base64
 import math as _math
 import re as _re
 
@@ -10,6 +13,32 @@ DataStructure: TypeAlias = Union[list, tuple, set, frozenset, dict]
 
 class Data:
 
+    @staticmethod
+    def serialize_bytes(data: bytes | bytearray) -> dict[str, str]:
+        """Converts bytes or bytearray to a JSON-compatible format (dictionary) with explicit keys."""
+        if isinstance(data, (bytes, bytearray)):
+            key = "bytearray" if isinstance(data, bytearray) else "bytes"
+            try:
+                return {key: data.decode("utf-8"), "encoding": "utf-8"}
+            except UnicodeDecodeError:
+                pass
+            return {key: _base64.b64encode(data).decode("utf-8"), "encoding": "base64"}
+        raise TypeError("Unsupported data type")
+
+    @staticmethod
+    def deserialize_bytes(obj: dict[str, str]) -> bytes | bytearray:
+        """Converts a JSON-compatible bytes/bytearray format (dictionary) back to its original type."""
+        for key in ("bytes", "bytearray"):
+            if key in obj and "encoding" in obj:
+                if obj["encoding"] == "utf-8":
+                    data = obj[key].encode("utf-8")
+                elif obj["encoding"] == "base64":
+                    data = _base64.b64decode(obj[key].encode("utf-8"))
+                else:
+                    raise ValueError("Unknown encoding method")
+                return bytearray(data) if key == "bytearray" else data
+        raise ValueError("Invalid serialized data")
+
     @staticmethod
     def chars_count(data: DataStructure) -> int:
         """The sum of all the characters amount including the keys in dictionaries."""
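Usage sketch (not taken from the package docs): the hunk above adds `Data.serialize_bytes()` and `Data.deserialize_bytes()`, which round-trip binary data through a JSON-compatible dict, storing it as UTF-8 text when it decodes cleanly and as base64 otherwise. Based only on the added code, a round trip could look like this; the sample values are purely illustrative:

from xulbux.xx_data import Data

raw = bytes([0x00, 0xFF, 0x10])              # not valid UTF-8, so it falls back to base64
obj = Data.serialize_bytes(raw)              # -> {"bytes": "AP8Q", "encoding": "base64"}
assert Data.deserialize_bytes(obj) == raw    # decodes back to the original bytes

text = bytearray(b"hello")                   # valid UTF-8, stored as plain text
obj2 = Data.serialize_bytes(text)            # -> {"bytearray": "hello", "encoding": "utf-8"}
assert Data.deserialize_bytes(obj2) == text  # returns a bytearray again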
@@ -31,26 +60,18 @@ class Data:
         if isinstance(data, dict):
             return {
                 k: (
-                    v
-                    if not isinstance(v, (list, tuple, set, frozenset, dict))
-                    else Data.remove_empty_items(v, spaces_are_empty)
+                    v if not isinstance(v,
+                                        (list, tuple, set, frozenset, dict)) else Data.remove_empty_items(v, spaces_are_empty)
                 )
                 for k, v in data.items()
                 if not String.is_empty(v, spaces_are_empty)
             }
         if isinstance(data, (list, tuple, set, frozenset)):
             return type(data)(
-                item
-                for item in (
-                    (
-                        item
-                        if not isinstance(item, (list, tuple, set, frozenset, dict))
-                        else Data.remove_empty_items(item, spaces_are_empty)
-                    )
-                    for item in data
-                    if not String.is_empty(item, spaces_are_empty)
-                )
-                if item not in ((), {}, set(), frozenset())
+                item for item in ((
+                    item if not isinstance(item, (list, tuple, set, frozenset,
+                                                  dict)) else Data.remove_empty_items(item, spaces_are_empty)
+                ) for item in data if not String.is_empty(item, spaces_are_empty)) if item not in ((), {}, set(), frozenset())
             )
         return data
 
@@ -131,7 +152,7 @@ class Data:
             rf"^((?:(?!{_re.escape(comment_start)}).)*){_re.escape(comment_start)}(?:(?:(?!{_re.escape(comment_end)}).)*)(?:{_re.escape(comment_end)})?(.*?)$"
         )
 
-        def process_string(s: str) -> str
+        def process_string(s: str) -> Optional[str]:
             if comment_end:
                 match = pattern.match(s)
                 if match:
@@ -144,7 +165,9 @@ class Data:
         def process_item(item: any) -> any:
             if isinstance(item, dict):
                 return {
-                    k: v
+                    k: v
+                    for k, v in ((process_item(key), process_item(value)) for key, value in item.items())
+                    if k is not None
                 }
             if isinstance(item, (list, tuple, set, frozenset)):
                 processed = (v for v in map(process_item, item) if v is not None)
@@ -174,9 +197,7 @@ class Data:
         The paths from `ignore_paths` and the `path_sep` parameter work exactly the same way as for
         the function `Data.get_path_id()`. See its documentation for more details."""
 
-        def process_ignore_paths(
-            ignore_paths: str | list[str],
-        ) -> list[list[str]]:
+        def process_ignore_paths(ignore_paths: str | list[str], ) -> list[list[str]]:
             if isinstance(ignore_paths, str):
                 ignore_paths = [ignore_paths]
             return [path.split(path_sep) for path in ignore_paths if path]
@@ -187,9 +208,9 @@ class Data:
            ignore_paths: list[list[str]],
            current_path: list[str] = [],
        ) -> bool:
-            if any(current_path == path[:
+            if any(current_path == path[:len(current_path)] for path in ignore_paths):
                return True
-            if type(d1)
+            if type(d1) is not type(d2):
                return False
            if isinstance(d1, dict):
                if set(d1.keys()) != set(d2.keys()):
@@ -247,7 +268,7 @@ class Data:
         If `ignore_not_found` is `True`, the function will return `None` if the value is not found
         instead of raising an error."""
 
-        def process_path(path: str, data_obj:
+        def process_path(path: str, data_obj: DataStructure) -> Optional[str]:
             keys = path.split(path_sep)
             path_ids = []
             max_id_length = 0
@@ -300,7 +321,7 @@ class Data:
         The function will return the value (or key) from the path ID location, as long as the structure
         of `data` hasn't changed since creating the path ID to that value."""
 
-        def get_nested(data:
+        def get_nested(data: DataStructure, path: list[int], get_key: bool) -> any:
             parent = None
             for i, idx in enumerate(path):
                 if isinstance(data, dict):
@@ -323,11 +344,7 @@ class Data:
         return get_nested(data, Data.__sep_path_id(path_id), get_key)
 
     @staticmethod
-    def set_value_by_path_id(
-        data: DataStructure,
-        update_values: str | list[str],
-        sep: str = "::",
-    ) -> list | tuple | dict:
+    def set_value_by_path_id(data: DataStructure, update_values: str | list[str], sep: str = "::") -> list | tuple | dict:
         """Updates the value/s from `update_values` in the `data`.\n
         --------------------------------------------------------------------------------
         Input a list, tuple or dict as `data`, along with `update_values`, which is a
@@ -338,9 +355,7 @@ class Data:
         The value from path ID will be changed to the new value, as long as the
         structure of `data` hasn't changed since creating the path ID to that value."""
 
-        def update_nested(
-            data: list | tuple | set | frozenset | dict, path: list[int], value: any
-        ) -> list | tuple | set | frozenset | dict:
+        def update_nested(data: DataStructure, path: list[int], value: any) -> DataStructure:
            if len(path) == 1:
                if isinstance(data, dict):
                    keys = list(data.keys())
@@ -364,11 +379,9 @@ class Data:
 
         if isinstance(update_values, str):
             update_values = [update_values]
-        valid_entries = [
-            (parts[0].strip(), parts[1])
-            for update_value in update_values
-            if len(parts := update_value.split(str(sep).strip())) == 2
-        ]
+        valid_entries = [(parts[0].strip(), parts[1])
+                         for update_value in update_values
+                         if len(parts := update_value.split(str(sep).strip())) == 2]
         if not valid_entries:
             raise ValueError(f"No valid update_values found: {update_values}")
         for path_id, new_val in valid_entries:
@@ -376,40 +389,15 @@ class Data:
             data = update_nested(data, path, new_val)
         return data
 
-    @staticmethod
-    def print(
-        data: DataStructure,
-        indent: int = 4,
-        compactness: int = 1,
-        sep: str = ", ",
-        max_width: int = 127,
-        as_json: bool = False,
-        end: str = "\n",
-    ) -> None:
-        """Print nicely formatted data structures.\n
-        ------------------------------------------------------------------------------
-        The indentation spaces-amount can be set with with `indent`.
-        There are three different levels of `compactness`:
-        - `0` expands everything possible
-        - `1` only expands if there's other lists, tuples or dicts inside of data or,
-          if the data's content is longer than `max_width`
-        - `2` keeps everything collapsed (all on one line)\n
-        ------------------------------------------------------------------------------
-        If `as_json` is set to `True`, the output will be in valid JSON format."""
-        print(
-            Data.to_str(data, indent, compactness, sep, max_width, as_json),
-            end=end,
-            flush=True,
-        )
-
     @staticmethod
     def to_str(
         data: DataStructure,
         indent: int = 4,
         compactness: int = 1,
-        sep: str = ", ",
         max_width: int = 127,
+        sep: str = ", ",
         as_json: bool = False,
+        _syntax_highlighting: dict[str, str] | bool = False,
     ) -> str:
         """Get nicely formatted data structure-strings.\n
         ------------------------------------------------------------------------------
@@ -421,106 +409,169 @@ class Data:
         - `2` keeps everything collapsed (all on one line)\n
         ------------------------------------------------------------------------------
         If `as_json` is set to `True`, the output will be in valid JSON format."""
+        if syntax_hl := _syntax_highlighting not in (None, False):
+            if _syntax_highlighting is True:
+                _syntax_highlighting = {}
+            elif not isinstance(_syntax_highlighting, dict):
+                raise TypeError(f"Expected 'syntax_highlighting' to be a dict or bool. Got: {type(_syntax_highlighting)}")
+            _syntax_hl = {
+                "str": (f"[{COLOR.blue}]", "[_c]"),
+                "number": (f"[{COLOR.magenta}]", "[_c]"),
+                "literal": (f"[{COLOR.cyan}]", "[_c]"),
+                "type": (f"[i|{COLOR.lightblue}]", "[_i|_c]"),
+                "punctuation": (f"[{COLOR.darkgray}]", "[_c]"),
+            }
+            _syntax_hl.update({
+                k: [f"[{v}]", "[_]"] if k in _syntax_hl and v not in ("", None) else ["", ""]
+                for k, v in _syntax_highlighting.items()
+            })
+            sep = f"{_syntax_hl['punctuation'][0]}{sep}{_syntax_hl['punctuation'][1]}"
+        punct_map = {"(": ("/(", "("), **{char: char for char in "'\":)[]{}"}}
+        punct = {
+            k: ((f"{_syntax_hl['punctuation'][0]}{v[0]}{_syntax_hl['punctuation'][1]}" if syntax_hl else v[1])
+                if isinstance(v, (list, tuple)) else
+                (f"{_syntax_hl['punctuation'][0]}{v}{_syntax_hl['punctuation'][1]}" if syntax_hl else v))
+            for k, v in punct_map.items()
+        }
 
-        def format_value(value: any, current_indent: int) -> str:
-            if isinstance(value, dict):
+        def format_value(value: any, current_indent: int = None) -> str:
+            if current_indent is not None and isinstance(value, dict):
                 return format_dict(value, current_indent + indent)
-            elif hasattr(value, "__dict__"):
+            elif current_indent is not None and hasattr(value, "__dict__"):
                 return format_dict(value.__dict__, current_indent + indent)
-            elif isinstance(value, (list, tuple, set, frozenset)):
+            elif current_indent is not None and isinstance(value, (list, tuple, set, frozenset)):
                 return format_sequence(value, current_indent + indent)
+            elif isinstance(value, (bytes, bytearray)):
+                obj_dict = Data.serialize_bytes(value)
+                return (
+                    format_dict(obj_dict, current_indent + indent) if as_json else (
+                        f"{_syntax_hl['type'][0]}{(k := next(iter(obj_dict)))}{_syntax_hl['type'][1]}"
+                        + format_sequence((obj_dict[k], obj_dict["encoding"]), current_indent + indent) if syntax_hl else
+                        (k := next(iter(obj_dict)))
+                        + format_sequence((obj_dict[k], obj_dict["encoding"]), current_indent + indent)
+                    )
+                )
             elif isinstance(value, bool):
-
+                val = str(value).lower() if as_json else str(value)
+                return f"{_syntax_hl['literal'][0]}{val}{_syntax_hl['literal'][1]}" if syntax_hl else val
             elif isinstance(value, (int, float)):
-
+                val = "null" if as_json and (_math.isinf(value) or _math.isnan(value)) else str(value)
+                return f"{_syntax_hl['number'][0]}{val}{_syntax_hl['number'][1]}" if syntax_hl else val
             elif isinstance(value, complex):
-                return
+                return (
+                    format_value(str(value).strip("()")) if as_json else (
+                        f"{_syntax_hl['type'][0]}complex{_syntax_hl['type'][1]}"
+                        + format_sequence((value.real, value.imag), current_indent + indent)
+                        if syntax_hl else f"complex{format_sequence((value.real, value.imag), current_indent + indent)}"
+                    )
+                )
             elif value is None:
-
+                val = "null" if as_json else "None"
+                return f"{_syntax_hl['literal'][0]}{val}{_syntax_hl['literal'][1]}" if syntax_hl else val
             else:
-                return
+                return ((
+                    punct['"'] + _syntax_hl["str"][0] + String.escape(str(value), '"') + _syntax_hl["str"][1]
+                    + punct['"'] if syntax_hl else punct['"'] + String.escape(str(value), '"') + punct['"']
+                ) if as_json else (
+                    punct["'"] + _syntax_hl["str"][0] + String.escape(str(value), "'") + _syntax_hl["str"][1]
+                    + punct["'"] if syntax_hl else punct["'"] + String.escape(str(value), "'") + punct["'"]
+                ))
 
         def should_expand(seq: list | tuple | dict) -> bool:
             if compactness == 0:
                 return True
             if compactness == 2:
                 return False
-
+            complex_types = (list, tuple, dict, set, frozenset) + ((bytes, bytearray) if as_json else ())
+            complex_items = sum(1 for item in seq if isinstance(item, complex_types))
             return (
-                complex_items > 1
-
-                or Data.chars_count(seq) + (len(seq) * len(sep)) > max_width
-            )
-
-        def format_key(k: any) -> str:
-            return (
-                '"' + String.escape(str(k), '"') + '"'
-                if as_json
-                else ("'" + String.escape(str(k), "'") + "'" if isinstance(k, str) else str(k))
+                complex_items > 1 or (complex_items == 1 and len(seq) > 1) or Data.chars_count(seq) +
+                (len(seq) * len(sep)) > max_width
             )
 
         def format_dict(d: dict, current_indent: int) -> str:
             if not d or compactness == 2:
-                return
+                return (
+                    punct["{"]
+                    + sep.join(f"{format_value(k)}{punct[':']} {format_value(v, current_indent)}" for k, v in d.items())
+                    + punct["}"]
+                )
             if not should_expand(d.values()):
-                return
+                return (
+                    punct["{"]
+                    + sep.join(f"{format_value(k)}{punct[':']} {format_value(v, current_indent)}" for k, v in d.items())
+                    + punct["}"]
+                )
             items = []
-            for
-            formatted_value = format_value(
-            items.append(f'
-            return "{\n" + "
+            for k, val in d.items():
+                formatted_value = format_value(val, current_indent)
+                items.append(f"{' ' * (current_indent + indent)}{format_value(k)}{punct[':']} {formatted_value}")
+            return punct["{"] + "\n" + f"{sep}\n".join(items) + f"\n{' ' * current_indent}" + punct["}"]
 
         def format_sequence(seq, current_indent: int) -> str:
             if as_json:
                 seq = list(seq)
             if not seq or compactness == 2:
                 return (
-                    "[" + sep.join(format_value(item, current_indent)
-
-
+                    punct["["] + sep.join(format_value(item, current_indent)
+                                          for item in seq) + punct["]"] if isinstance(seq, list) else punct["("]
+                    + sep.join(format_value(item, current_indent) for item in seq) + punct[")"]
                 )
             if not should_expand(seq):
                 return (
-                    "[" + sep.join(format_value(item, current_indent)
-
-
+                    punct["["] + sep.join(format_value(item, current_indent)
+                                          for item in seq) + punct["]"] if isinstance(seq, list) else punct["("]
+                    + sep.join(format_value(item, current_indent) for item in seq) + punct[")"]
                )
             items = [format_value(item, current_indent) for item in seq]
-            formatted_items = "
+            formatted_items = f"{sep}\n".join(f'{" " * (current_indent + indent)}{item}' for item in items)
             if isinstance(seq, list):
-                return "[\n
+                return f"{punct['[']}\n{formatted_items}\n{' ' * current_indent}{punct[']']}"
             else:
-                return "(\n
+                return f"{punct['(']}\n{formatted_items}\n{' ' * current_indent}{punct[')']}"
 
         return format_dict(data, 0) if isinstance(data, dict) else format_sequence(data, 0)
 
     @staticmethod
-    def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def print(
+        data: DataStructure,
+        indent: int = 4,
+        compactness: int = 1,
+        max_width: int = 127,
+        sep: str = ", ",
+        end: str = "\n",
+        as_json: bool = False,
+        syntax_highlighting: dict[str, str] | bool = {},
+    ) -> None:
+        """Print nicely formatted data structures.\n
+        ------------------------------------------------------------------------------
+        The indentation spaces-amount can be set with with `indent`.
+        There are three different levels of `compactness`:
+        - `0` expands everything possible
+        - `1` only expands if there's other lists, tuples or dicts inside of data or,
+          if the data's content is longer than `max_width`
+        - `2` keeps everything collapsed (all on one line)\n
+        ------------------------------------------------------------------------------
+        If `as_json` is set to `True`, the output will be in valid JSON format.\n
+        ------------------------------------------------------------------------------
+        The `syntax_highlighting` parameter is a dictionary with 5 keys for each part
+        of the data. The key's values are the formatting codes to apply to this data
+        part. The formatting can be changed by simply adding the key with the new
+        value inside the `syntax_highlighting` dictionary.\n
+        The keys with their default values are:
+        - `str: COLOR.["blue"]`
+        - `number: COLOR.["magenta"]`
+        - `literal: COLOR.["cyan"]`
+        - `type: "i|" + COLOR.["lightblue"]`
+        - `punctuation: COLOR.["darkgray"]`\n
+        For no syntax highlighting, set `syntax_highlighting` to `False` or `None`.\n
+        ------------------------------------------------------------------------------
+        For more detailed information about formatting codes, see `xx_format_codes`
+        module documentation."""
+        FormatCodes.print(
+            Data.to_str(data, indent, compactness, max_width, sep, as_json, syntax_highlighting),
+            end=end,
+        )
 
     @staticmethod
     def __sep_path_id(path_id: str) -> list[int]:
@@ -528,4 +579,4 @@ class Data:
             raise ValueError(f"Invalid path ID: {path_id}")
         id_part_len = int(path_id.split(">")[0])
         path_ids_str = path_id.split(">")[1]
-        return [int(path_ids_str[i
+        return [int(path_ids_str[i:i + id_part_len]) for i in range(0, len(path_ids_str), id_part_len)]
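Usage sketch: `Data.print()` now sits after `Data.to_str()`, routes its output through `FormatCodes.print()`, reorders its parameters (`max_width` before `sep`, `end` before `as_json`), and gains a `syntax_highlighting` parameter (`{}` by default, i.e. the built-in color scheme; `False`/`None` disables it), while `to_str()` gains the private `_syntax_highlighting` counterpart. A minimal sketch based only on the signatures shown in the hunks above; the sample data is illustrative:

from xulbux.xx_data import Data

cfg = {"name": "xulbux", "version": (1, 6, 6), "payload": b"\x00\xff"}

Data.print(cfg, indent=2)                            # colored output with the default highlighting scheme
Data.print(cfg, syntax_highlighting=None)            # same layout, no syntax highlighting

as_json = Data.to_str(cfg, indent=2, as_json=True)   # plain JSON string (bytes become a serialized dict)
print(as_json)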
xulbux/xx_env_path.py
CHANGED

@@ -1,9 +1,5 @@
 """
-Functions for modifying and checking the systems environment-variables
-- `EnvPath.paths()`
-- `EnvPath.has_path()`
-- `EnvPath.add_path()`
-- `EnvPath.remove_path()`
+Functions for modifying and checking the systems environment-variables (especially the PATH object).
 """
 
 from .xx_path import Path
xulbux/xx_file.py
CHANGED

@@ -53,7 +53,7 @@ class File:
         """Tries to find the file and extend the path to be absolute and if the file was not found:\n
         Generate the absolute path to the file in the CWD or the running program's base-directory.\n
         ----------------------------------------------------------------------------------------------
-        If the `file` is not found in
+        If the `file` is not found in predefined directories, it will be searched in the `search_in`
         directory/directories. If the file is still not found, it will return the path to the file in
         the base-dir per default or to the file in the CWD if `prefer_base_dir` is set to `False`.\n
         ----------------------------------------------------------------------------------------------
xulbux/xx_format_codes.py
CHANGED

@@ -153,30 +153,33 @@ Per default, you can also use `+` and `-` to get lighter and darker `default_col
 from ._consts_ import ANSI
 from .xx_string import String
 from .xx_regex import Regex
-from .xx_color import
+from .xx_color import Color, rgba, hexa
 
+from typing import Optional, Pattern
 import ctypes as _ctypes
 import regex as _rx
 import sys as _sys
 import re as _re
 
-_CONSOLE_ANSI_CONFIGURED = False
 
-_PREFIX = {
+_CONSOLE_ANSI_CONFIGURED: bool = False
+
+_PREFIX: dict[str, set[str]] = {
     "BG": {"background", "bg"},
     "BR": {"bright", "br"},
 }
-_PREFIX_RX = {
+_PREFIX_RX: dict[str, str] = {
     "BG": rf"(?:{'|'.join(_PREFIX['BG'])})\s*:",
     "BR": rf"(?:{'|'.join(_PREFIX['BR'])})\s*:",
 }
-_COMPILED = { # PRECOMPILE REGULAR EXPRESSIONS
+_COMPILED: dict[str, Pattern] = { # PRECOMPILE REGULAR EXPRESSIONS
     "*": _re.compile(r"\[\s*([^]_]*?)\s*\*\s*([^]_]*?)\]"),
     "*color": _re.compile(r"\[\s*([^]_]*?)\s*\*color\s*([^]_]*?)\]"),
+    "ansi_seq": _re.compile(ANSI.char + r"(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])"),
     "formatting": _rx.compile(
-        Regex.brackets("[", "]", is_group=True)
+        Regex.brackets("[", "]", is_group=True, ignore_in_strings=False)
         + r"(?:\s*([/\\]?)\s*"
-        + Regex.brackets("(", ")", is_group=True, ignore_in_strings=False)
+        + Regex.brackets("(", ")", is_group=True, strip_spaces=False, ignore_in_strings=False)
         + r")?"
     ),
     "bg?_default": _re.compile(r"(?i)((?:" + _PREFIX_RX["BG"] + r")?)\s*default"),
@@ -246,6 +249,8 @@ class FormatCodes:
         -------------------------------------------------------------------------
         For exact information about how to use special formatting codes, see the
         `xx_format_codes` module documentation."""
+        if not isinstance(string, str):
+            string = str(string)
         if Color.is_valid_rgba(default_color, False):
             use_default = True
         elif Color.is_valid_hexa(default_color, False):
@@ -291,33 +296,25 @@ class FormatCodes:
                         reset_keys.append("_bg")
                         break
                     elif is_valid_color(k) or any(
-
-
-                    ):
+                        k_lower.startswith(pref_colon := f"{prefix}:") and is_valid_color(k[len(pref_colon):])
+                        for prefix in _PREFIX["BR"]):
                         reset_keys.append("_color")
                     else:
                         reset_keys.append(f"_{k}")
                 ansi_resets = [
-                    r
-
-                    if (r := FormatCodes.__get_replacement(k, default_color, brightness_steps)).startswith(
-                        f"{ANSI.char}{ANSI.start}"
-                    )
+                    r for k in reset_keys if (r := FormatCodes.__get_replacement(k, default_color, brightness_steps)
+                                              ).startswith(f"{ANSI.char}{ANSI.start}")
                 ]
             else:
                 ansi_resets = []
             if not (len(ansi_formats) == 1 and ansi_formats[0].count(f"{ANSI.char}{ANSI.start}") >= 1) and not all(
-
-            ):
+                    f.startswith(f"{ANSI.char}{ANSI.start}") for f in ansi_formats):
                 return match.group(0)
             return (
-                "".join(ansi_formats)
-                + (
+                "".join(ansi_formats) + (
                     f"({FormatCodes.to_ansi(auto_reset_txt, default_color, brightness_steps, False)})"
-                    if escaped and auto_reset_txt
-
-                )
-                + ("" if escaped else "".join(ansi_resets))
+                    if escaped and auto_reset_txt else auto_reset_txt if auto_reset_txt else ""
+                ) + ("" if escaped else "".join(ansi_resets))
             )
 
         string = "\n".join(_COMPILED["formatting"].sub(replace_keys, line) for line in string.split("\n"))
@@ -325,9 +322,19 @@ class FormatCodes:
 
     @staticmethod
     def escape_ansi(ansi_string: str) -> str:
-        """Escapes all ANSI codes in
+        """Escapes all ANSI codes in the string, so they are visible when output to the console."""
         return ansi_string.replace(ANSI.char, ANSI.escaped_char)
 
+    @staticmethod
+    def remove_ansi(ansi_string: str) -> str:
+        """Removes all ANSI codes from the string."""
+        return _COMPILED["ansi_seq"].sub("", ansi_string)
+
+    @staticmethod
+    def remove_formatting(string: str) -> str:
+        """Removes all formatting codes from the string."""
+        return _COMPILED["ansi_seq"].sub("", FormatCodes.to_ansi(string))
+
     @staticmethod
     def __config_console() -> None:
         """Configure the console to be able to interpret ANSI formatting."""
@@ -347,7 +354,7 @@ class FormatCodes:
         format_key: str = None,
         brightness_steps: int = None,
         _modifiers: tuple[str, str] = (ANSI.default_color_modifiers["lighten"], ANSI.default_color_modifiers["darken"]),
-    ) -> str
+    ) -> Optional[str]:
         """Get the `default_color` and lighter/darker versions of it as ANSI code."""
         if not brightness_steps or (format_key and _COMPILED["bg?_default"].search(format_key)):
             return (ANSI.seq_bg_color if format_key and _COMPILED["bg_default"].search(format_key) else ANSI.seq_color).format(
@@ -386,14 +393,9 @@ class FormatCodes:
         for map_key in ANSI.codes_map:
             if (isinstance(map_key, tuple) and format_key in map_key) or format_key == map_key:
                 return ANSI.seq().format(
-                    next(
-                        (
-                            v
-                            for k, v in ANSI.codes_map.items()
-                            if format_key == k or (isinstance(k, tuple) and format_key in k)
-                        ),
-                        None,
-                    )
+                    next((
+                        v for k, v in ANSI.codes_map.items() if format_key == k or (isinstance(k, tuple) and format_key in k)
+                    ), None)
                 )
         rgb_match = _re.match(_COMPILED["rgb"], format_key)
         hex_match = _re.match(_COMPILED["hex"], format_key)
@@ -408,8 +410,7 @@ class FormatCodes:
             rgb = Color.to_rgba(hex_match.group(2))
             return (
                 ANSI.seq_bg_color.format(rgb[0], rgb[1], rgb[2])
-                if is_bg
-                else ANSI.seq_color.format(rgb[0], rgb[1], rgb[2])
+                if is_bg else ANSI.seq_color.format(rgb[0], rgb[1], rgb[2])
             )
         except Exception:
             pass
@@ -420,10 +421,11 @@ class FormatCodes:
         """Normalizes the given format key."""
         k_parts = format_key.replace(" ", "").lower().split(":")
         prefix_str = "".join(
-            f"{prefix_key.lower()}:"
-            for prefix_key, prefix_values in _PREFIX.items()
+            f"{prefix_key.lower()}:" for prefix_key, prefix_values in _PREFIX.items()
            if any(k_part in prefix_values for k_part in k_parts)
        )
        return prefix_str + ":".join(
-            part for part in k_parts if part not in {val
+            part for part in k_parts if part not in {val
+                                                     for values in _PREFIX.values()
+                                                     for val in values}
        )