xulbux-1.6.8-py3-none-any.whl → xulbux-1.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of xulbux might be problematic.
- xulbux/__init__.py +3 -35
- xulbux/_cli_.py +21 -28
- xulbux/_consts_.py +1 -0
- xulbux/xx_code.py +62 -46
- xulbux/xx_color.py +223 -159
- xulbux/xx_console.py +152 -78
- xulbux/xx_data.py +79 -71
- xulbux/xx_env_path.py +6 -9
- xulbux/xx_file.py +22 -26
- xulbux/xx_format_codes.py +55 -33
- xulbux/xx_json.py +107 -51
- xulbux/xx_path.py +74 -24
- xulbux/xx_regex.py +11 -10
- xulbux/xx_string.py +4 -1
- xulbux/xx_system.py +6 -10
- {xulbux-1.6.8.dist-info → xulbux-1.7.0.dist-info}/METADATA +18 -39
- xulbux-1.7.0.dist-info/RECORD +21 -0
- {xulbux-1.6.8.dist-info → xulbux-1.7.0.dist-info}/WHEEL +1 -1
- xulbux-1.6.8.dist-info/RECORD +0 -21
- {xulbux-1.6.8.dist-info → xulbux-1.7.0.dist-info}/entry_points.txt +0 -0
- {xulbux-1.6.8.dist-info → xulbux-1.7.0.dist-info/licenses}/LICENSE +0 -0
- {xulbux-1.6.8.dist-info → xulbux-1.7.0.dist-info}/top_level.txt +0 -0
xulbux/xx_data.py
CHANGED
@@ -2,13 +2,14 @@ from ._consts_ import COLOR
 from .xx_format_codes import FormatCodes
 from .xx_string import String
 
-from typing import TypeAlias, Optional, Union
+from typing import TypeAlias, Optional, Union, Any
 import base64 as _base64
 import math as _math
 import re as _re
 
 
 DataStructure: TypeAlias = Union[list, tuple, set, frozenset, dict]
+IndexIterable: TypeAlias = Union[list, tuple, set, frozenset]
 
 
 class Data:
@@ -42,16 +43,23 @@ class Data:
     @staticmethod
     def chars_count(data: DataStructure) -> int:
         """The sum of all the characters amount including the keys in dictionaries."""
+        chars_count = 0
         if isinstance(data, dict):
-
-
+            for k, v in data.items():
+                chars_count += len(str(k)) + (Data.chars_count(v) if isinstance(v, DataStructure) else len(str(v)))
+        elif isinstance(data, IndexIterable):
+            for item in data:
+                chars_count += Data.chars_count(item) if isinstance(item, DataStructure) else len(str(item))
+        return chars_count
 
     @staticmethod
     def strip(data: DataStructure) -> DataStructure:
         """Removes leading and trailing whitespaces from the data structure's items."""
         if isinstance(data, dict):
-            return {k: Data.strip(v) for k, v in data.items()}
-
+            return {k.strip(): Data.strip(v) if isinstance(v, DataStructure) else v.strip() for k, v in data.items()}
+        if isinstance(data, IndexIterable):
+            return type(data)(Data.strip(item) if isinstance(item, DataStructure) else item.strip() for item in data)
+        return data
 
     @staticmethod
     def remove_empty_items(data: DataStructure, spaces_are_empty: bool = False) -> DataStructure:
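The new `chars_count` and `strip` recurse through nested structures via the `DataStructure` and `IndexIterable` aliases. A minimal usage sketch; the import path is an assumption and not confirmed by this diff:

```python
# Sketch of the reworked Data.chars_count() / Data.strip() (import path assumed).
from xulbux import Data

data = {" name ": [" Ada ", " Lovelace ", {" born ": " 1815 "}]}

# chars_count() sums the string length of every key and value, recursing into
# nested lists/tuples/sets/frozensets/dicts.
total = Data.chars_count(data)

# strip() now strips dict keys as well as values and rebuilds nested
# containers with their original types.
clean = Data.strip(data)   # {'name': ['Ada', 'Lovelace', {'born': '1815'}]}
```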
@@ -59,18 +67,15 @@ class Data:
         If `spaces_are_empty` is true, it will count items with only spaces as empty."""
         if isinstance(data, dict):
             return {
-                k: (
-                    v if not isinstance(v,
-                                        (list, tuple, set, frozenset, dict)) else Data.remove_empty_items(v, spaces_are_empty)
-                )
+                k: (v if not isinstance(v, DataStructure) else Data.remove_empty_items(v, spaces_are_empty))
                 for k, v in data.items() if not String.is_empty(v, spaces_are_empty)
             }
-        if isinstance(data,
+        if isinstance(data, IndexIterable):
             return type(data)(
-                item for item in
-
-
-
+                item for item in
+                ((item if not isinstance(item, DataStructure) else Data.remove_empty_items(item, spaces_are_empty))
+                 for item in data if not String.is_empty(item, spaces_are_empty))
+                if item not in ([], (), {}, set(), frozenset())
             )
         return data
 
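A short sketch of how the rewritten `remove_empty_items` is called on nested input, based on the added lines above. The import path is an assumption, and the exact emptiness rules are delegated to `String.is_empty()`, which this diff does not show:

```python
# Sketch only (import path assumed; emptiness semantics come from String.is_empty()).
from xulbux import Data

data = {"a": "", "b": ["x", "   ", []], "c": {"d": "y"}}

# Nested structures are recursed into via the DataStructure/IndexIterable checks,
# and items that come back as empty containers are filtered out again by the
# trailing `if item not in ([], (), {}, set(), frozenset())` clause.
result = Data.remove_empty_items(data, spaces_are_empty=True)
```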
@@ -78,17 +83,25 @@ class Data:
     def remove_duplicates(data: DataStructure) -> DataStructure:
         """Removes all duplicates from the data structure."""
         if isinstance(data, dict):
-            return {k: Data.remove_duplicates(v) for k, v in data.items()}
+            return {k: Data.remove_duplicates(v) if isinstance(v, DataStructure) else v for k, v in data.items()}
         if isinstance(data, (list, tuple)):
-
-
-
-
+            result = []
+            for item in data:
+                processed_item = Data.remove_duplicates(item) if isinstance(item, DataStructure) else item
+                is_duplicate = False
+                for existing_item in result:
+                    if processed_item == existing_item:
+                        is_duplicate = True
+                        break
+                if not is_duplicate:
+                    result.append(processed_item)
+            return type(data)(result)
         if isinstance(data, (set, frozenset)):
-
-
-
-
+            processed_elements = set()
+            for item in data:
+                processed_item = Data.remove_duplicates(item) if isinstance(item, DataStructure) else item
+                processed_elements.add(processed_item)
+            return type(data)(processed_elements)
         return data
 
     @staticmethod
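The new list/tuple branch deduplicates by equality instead of hashing, so unhashable items such as dicts and lists are handled and the original order is preserved; sets and frozensets are rebuilt from a plain set. A small sketch (import path assumed):

```python
# Sketch only; the import path is an assumption.
from xulbux import Data

items = [1, {"a": 1}, 1, {"a": 1}, [2, 2]]

# Equality-based deduplication keeps the first occurrence of each item and also
# deduplicates nested structures, so [2, 2] collapses to [2].
print(Data.remove_duplicates(items))   # [1, {'a': 1}, [2]]
```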
@@ -153,7 +166,7 @@ class Data:
 
         def process_string(s: str) -> Optional[str]:
             if comment_end:
-                match = pattern.match(s)
+                match = pattern.match(s)  # type: ignore[unbound]
                 if match:
                     start, end = match.group(1).strip(), match.group(2).strip()
                     return f"{start}{comment_sep if start and end else ''}{end}" or None
@@ -161,13 +174,13 @@ class Data:
             else:
                 return None if s.lstrip().startswith(comment_start) else s.strip() or None
 
-        def process_item(item:
+        def process_item(item: Any) -> Any:
             if isinstance(item, dict):
                 return {
                     k: v
                     for k, v in ((process_item(key), process_item(value)) for key, value in item.items()) if k is not None
                 }
-            if isinstance(item,
+            if isinstance(item, IndexIterable):
                 processed = (v for v in map(process_item, item) if v is not None)
                 return type(item)(processed)
             if isinstance(item, str):
@@ -210,7 +223,7 @@ class Data:
                 return True
             if type(d1) is not type(d2):
                 return False
-            if isinstance(d1, dict):
+            if isinstance(d1, dict) and isinstance(d2, dict):
                 if set(d1.keys()) != set(d2.keys()):
                     return False
                 return all(compare(d1[key], d2[key], ignore_paths, current_path + [key]) for key in d1)
@@ -238,7 +251,7 @@ class Data:
         comment_start: str = ">>",
         comment_end: str = "<<",
         ignore_not_found: bool = False,
-    ) -> str | list[str]:
+    ) -> Optional[str | list[Optional[str]]]:
         """Generates a unique ID based on the path to a specific value within a nested data structure.\n
         -------------------------------------------------------------------------------------------------
         The `data` parameter is the list, tuple, or dictionary, which the id should be generated for.\n
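Judging from the hunks that follow, the widened return annotation reflects behaviour already present in the body: with `ignore_not_found=True` a missing key makes the lookup return `None` instead of raising, and multiple resolved paths come back as a list (`results if len(results) > 1 else results[0] if results else None`), so both the overall result and individual list entries can be `None`, which is what `Optional[str | list[Optional[str]]]` expresses.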
@@ -283,7 +296,7 @@ class Data:
                     if ignore_not_found:
                         return None
                     raise KeyError(f"Key '{key}' not found in dict.")
-                elif isinstance(data_obj,
+                elif isinstance(data_obj, IndexIterable):
                     try:
                         idx = int(key)
                         data_obj = list(data_obj)[idx]  # CONVERT TO LIST FOR INDEXING
@@ -310,7 +323,7 @@ class Data:
         return results if len(results) > 1 else results[0] if results else None
 
     @staticmethod
-    def get_value_by_path_id(data: DataStructure, path_id: str, get_key: bool = False) ->
+    def get_value_by_path_id(data: DataStructure, path_id: str, get_key: bool = False) -> Any:
         """Retrieves the value from `data` using the provided `path_id`.\n
         -------------------------------------------------------------------------------------------------
         Input your `data` along with a `path_id` that was created before using `Data.get_path_id()`.
@@ -319,7 +332,7 @@ class Data:
         The function will return the value (or key) from the path ID location, as long as the structure
         of `data` hasn't changed since creating the path ID to that value."""
 
-        def get_nested(data: DataStructure, path: list[int], get_key: bool) ->
+        def get_nested(data: DataStructure, path: list[int], get_key: bool) -> Any:
             parent = None
             for i, idx in enumerate(path):
                 if isinstance(data, dict):
@@ -328,7 +341,7 @@ class Data:
                         return keys[idx]
                     parent = data
                     data = data[keys[idx]]
-                elif isinstance(data,
+                elif isinstance(data, IndexIterable):
                     if i == len(path) - 1 and get_key:
                         if parent is None or not isinstance(parent, dict):
                             raise ValueError("Cannot get key from a non-dict parent")
@@ -342,45 +355,39 @@ class Data:
         return get_nested(data, Data.__sep_path_id(path_id), get_key)
 
     @staticmethod
-    def set_value_by_path_id(data: DataStructure, update_values:
+    def set_value_by_path_id(data: DataStructure, update_values: dict[str, Any]) -> DataStructure:
         """Updates the value/s from `update_values` in the `data`.\n
         --------------------------------------------------------------------------------
         Input a list, tuple or dict as `data`, along with `update_values`, which is a
-        path
-
-
+        dictionary where keys are path IDs and values are the new values to insert:
+        { "1>012": "new value", "1>31": ["new value 1", "new value 2"], ... }
+        The path IDs should have been created using `Data.get_path_id()`.\n
         --------------------------------------------------------------------------------
         The value from path ID will be changed to the new value, as long as the
         structure of `data` hasn't changed since creating the path ID to that value."""
 
-        def update_nested(data: DataStructure, path: list[int], value:
+        def update_nested(data: DataStructure, path: list[int], value: Any) -> DataStructure:
             if len(path) == 1:
                 if isinstance(data, dict):
-                    keys = list(data.keys())
-                    data = dict(data)
+                    keys, data = list(data.keys()), dict(data)
                     data[keys[path[0]]] = value
-                elif isinstance(data,
-                    data = list(data)
+                elif isinstance(data, IndexIterable):
+                    was_t, data = type(data), list(data)
                     data[path[0]] = value
-                    data =
+                    data = was_t(data)
             else:
                 if isinstance(data, dict):
-                    keys = list(data.keys())
-
-
-                    data
-                elif isinstance(data, (list, tuple, set, frozenset)):
-                    data = list(data)
+                    keys, data = list(data.keys()), dict(data)
+                    data[keys[path[0]]] = update_nested(data[keys[path[0]]], path[1:], value)
+                elif isinstance(data, IndexIterable):
+                    was_t, data = type(data), list(data)
                     data[path[0]] = update_nested(data[path[0]], path[1:], value)
-                    data =
+                    data = was_t(data)
             return data
 
-
-        update_values = [update_values]
-        valid_entries = [(parts[0].strip(), parts[1]) for update_value in update_values
-                         if len(parts := update_value.split(str(sep).strip())) == 2]
+        valid_entries = [(path_id, new_val) for path_id, new_val in update_values.items()]
         if not valid_entries:
-            raise ValueError(f"No valid update_values found: {update_values}")
+            raise ValueError(f"No valid update_values found in dictionary: {update_values}")
         for path_id, new_val in valid_entries:
             path = Data.__sep_path_id(path_id)
             data = update_nested(data, path, new_val)
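In 1.7.0 `update_values` is a plain dictionary that maps path IDs to their new values, replacing the old list of separator-joined strings. A hedged sketch of the new call shape; the import path is an assumption, and the path IDs shown are placeholders copied from the docstring, so a real ID must first be produced by `Data.get_path_id()` for the same `data` object:

```python
# Shape of the new dict-based API only; "1>012" / "1>31" are placeholder path IDs
# taken from the docstring above and will not resolve against arbitrary data.
from xulbux import Data  # import path is an assumption

update_values = {
    "1>012": "new value",                    # one path ID -> one new value
    "1>31": ["new value 1", "new value 2"],  # new values may themselves be structures
}
# data = Data.set_value_by_path_id(data, update_values)
```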
@@ -406,7 +413,8 @@ class Data:
         - `2` keeps everything collapsed (all on one line)\n
         ------------------------------------------------------------------------------
         If `as_json` is set to `True`, the output will be in valid JSON format."""
-
+        _syntax_hl = {}
+        if do_syntax_hl := _syntax_highlighting not in (None, False):
             if _syntax_highlighting is True:
                 _syntax_highlighting = {}
             elif not isinstance(_syntax_highlighting, dict):
@@ -419,62 +427,62 @@ class Data:
                 "punctuation": (f"[{COLOR.darkgray}]", "[_c]"),
             }
             _syntax_hl.update({
-                k:
+                k: (f"[{v}]", "[_]") if k in _syntax_hl and v not in ("", None) else ("", "")
                 for k, v in _syntax_highlighting.items()
             })
         sep = f"{_syntax_hl['punctuation'][0]}{sep}{_syntax_hl['punctuation'][1]}"
         punct_map = {"(": ("/(", "("), **{char: char for char in "'\":)[]{}"}}
         punct = {
-            k: ((f"{_syntax_hl['punctuation'][0]}{v[0]}{_syntax_hl['punctuation'][1]}" if
+            k: ((f"{_syntax_hl['punctuation'][0]}{v[0]}{_syntax_hl['punctuation'][1]}" if do_syntax_hl else v[1])
                 if isinstance(v, (list, tuple)) else
-                (f"{_syntax_hl['punctuation'][0]}{v}{_syntax_hl['punctuation'][1]}" if
+                (f"{_syntax_hl['punctuation'][0]}{v}{_syntax_hl['punctuation'][1]}" if do_syntax_hl else v))
             for k, v in punct_map.items()
         }
 
-        def format_value(value:
+        def format_value(value: Any, current_indent: Optional[int] = None) -> str:
             if current_indent is not None and isinstance(value, dict):
                 return format_dict(value, current_indent + indent)
             elif current_indent is not None and hasattr(value, "__dict__"):
                 return format_dict(value.__dict__, current_indent + indent)
-            elif current_indent is not None and isinstance(value,
+            elif current_indent is not None and isinstance(value, IndexIterable):
                 return format_sequence(value, current_indent + indent)
-            elif isinstance(value, (bytes, bytearray)):
+            elif current_indent is not None and isinstance(value, (bytes, bytearray)):
                 obj_dict = Data.serialize_bytes(value)
                 return (
                     format_dict(obj_dict, current_indent + indent) if as_json else (
                         f"{_syntax_hl['type'][0]}{(k := next(iter(obj_dict)))}{_syntax_hl['type'][1]}"
-                        + format_sequence((obj_dict[k], obj_dict["encoding"]), current_indent + indent) if
+                        + format_sequence((obj_dict[k], obj_dict["encoding"]), current_indent + indent) if do_syntax_hl else
                         (k := next(iter(obj_dict)))
                         + format_sequence((obj_dict[k], obj_dict["encoding"]), current_indent + indent)
                     )
                 )
             elif isinstance(value, bool):
                 val = str(value).lower() if as_json else str(value)
-                return f"{_syntax_hl['literal'][0]}{val}{_syntax_hl['literal'][1]}" if
+                return f"{_syntax_hl['literal'][0]}{val}{_syntax_hl['literal'][1]}" if do_syntax_hl else val
             elif isinstance(value, (int, float)):
                 val = "null" if as_json and (_math.isinf(value) or _math.isnan(value)) else str(value)
-                return f"{_syntax_hl['number'][0]}{val}{_syntax_hl['number'][1]}" if
-            elif isinstance(value, complex):
+                return f"{_syntax_hl['number'][0]}{val}{_syntax_hl['number'][1]}" if do_syntax_hl else val
+            elif current_indent is not None and isinstance(value, complex):
                 return (
                     format_value(str(value).strip("()")) if as_json else (
                         f"{_syntax_hl['type'][0]}complex{_syntax_hl['type'][1]}"
                         + format_sequence((value.real, value.imag), current_indent + indent)
-                        if
+                        if do_syntax_hl else f"complex{format_sequence((value.real, value.imag), current_indent + indent)}"
                     )
                 )
             elif value is None:
                 val = "null" if as_json else "None"
-                return f"{_syntax_hl['literal'][0]}{val}{_syntax_hl['literal'][1]}" if
+                return f"{_syntax_hl['literal'][0]}{val}{_syntax_hl['literal'][1]}" if do_syntax_hl else val
             else:
                 return ((
                     punct['"'] + _syntax_hl["str"][0] + String.escape(str(value), '"') + _syntax_hl["str"][1]
-                    + punct['"'] if
+                    + punct['"'] if do_syntax_hl else punct['"'] + String.escape(str(value), '"') + punct['"']
                 ) if as_json else (
                     punct["'"] + _syntax_hl["str"][0] + String.escape(str(value), "'") + _syntax_hl["str"][1]
-                    + punct["'"] if
+                    + punct["'"] if do_syntax_hl else punct["'"] + String.escape(str(value), "'") + punct["'"]
                 ))
 
-        def should_expand(seq:
+        def should_expand(seq: IndexIterable) -> bool:
             if compactness == 0:
                 return True
             if compactness == 2:
@@ -493,7 +501,7 @@ class Data:
                     + sep.join(f"{format_value(k)}{punct[':']} {format_value(v, current_indent)}"
                                for k, v in d.items()) + punct["}"]
                 )
-            if not should_expand(d.values()):
+            if not should_expand(list(d.values())):
                 return (
                     punct["{"]
                     + sep.join(f"{format_value(k)}{punct[':']} {format_value(v, current_indent)}"
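The `list(d.values())` change appears to be about the argument type: `dict.values()` returns a view object, which is not an instance of the list/tuple/set/frozenset types that `should_expand(seq: IndexIterable)` now declares, so the values are materialised into a list first. A plain-Python illustration (the reasoning is inferred from the diff, not stated by the package):

```python
# dict.values() is a view, not one of the IndexIterable container types.
d = {"a": 1, "b": 2}

isinstance(d.values(), (list, tuple, set, frozenset))        # False (dict view)
isinstance(list(d.values()), (list, tuple, set, frozenset))  # True
```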
xulbux/xx_env_path.py
CHANGED
@@ -1,9 +1,6 @@
-"""
-Functions for modifying and checking the systems environment-variables (especially the PATH object).
-"""
-
 from .xx_path import Path
 
+from typing import Optional
 import sys as _sys
 import os as _os
 
@@ -17,7 +14,7 @@ class EnvPath:
         return paths.split(_os.pathsep) if as_list else paths
 
     @staticmethod
-    def has_path(path: str = None, cwd: bool = False, base_dir: bool = False) -> bool:
+    def has_path(path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> bool:
         """Check if a path is present in the PATH environment variable."""
         if cwd:
             path = _os.getcwd()
@@ -29,21 +26,21 @@ class EnvPath:
         return _os.path.normpath(path) in [_os.path.normpath(p) for p in paths]
 
     @staticmethod
-    def add_path(path: str = None, cwd: bool = False, base_dir: bool = False) -> None:
+    def add_path(path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> None:
         """Add a path to the PATH environment variable."""
         path = EnvPath.__get(path, cwd, base_dir)
         if not EnvPath.has_path(path):
             EnvPath.__persistent(path, add=True)
 
     @staticmethod
-    def remove_path(path: str = None, cwd: bool = False, base_dir: bool = False) -> None:
+    def remove_path(path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> None:
         """Remove a path from the PATH environment variable."""
         path = EnvPath.__get(path, cwd, base_dir)
         if EnvPath.has_path(path):
             EnvPath.__persistent(path, remove=True)
 
     @staticmethod
-    def __get(path: str = None, cwd: bool = False, base_dir: bool = False) ->
+    def __get(path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> str:
         """Get and/or normalize the paths.\n
         ------------------------------------------------------------------------------------
         Raise an error if no path is provided and neither `cwd` or `base_dir` is `True`."""
@@ -60,7 +57,7 @@ class EnvPath:
         """Add or remove a path from PATH persistently across sessions as well as the current session."""
         if add == remove:
             raise ValueError("Either add or remove must be True, but not both.")
-        current_paths = EnvPath.paths(as_list=True)
+        current_paths = list(EnvPath.paths(as_list=True))
         path = _os.path.normpath(path)
         if remove:
             current_paths = [p for p in current_paths if _os.path.normpath(p) != _os.path.normpath(path)]
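The EnvPath changes are typing fixes (`Optional[str]` for arguments defaulting to `None`, a concrete `-> str` on `__get`, and materialising `paths(as_list=True)` into a list), so runtime behaviour should be unchanged. A short usage sketch based on the methods visible above; the import path is an assumption:

```python
# Sketch only; the import path is an assumption.
from xulbux import EnvPath

print(EnvPath.paths(as_list=True))   # PATH entries as a list of strings

if not EnvPath.has_path(cwd=True):   # is the current working directory on PATH?
    EnvPath.add_path(cwd=True)       # add it for this session and persistently
```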
xulbux/xx_file.py
CHANGED
@@ -1,25 +1,42 @@
 from .xx_string import String
-from .xx_path import Path
 
 import os as _os
 
 
 class SameContentFileExistsError(FileExistsError):
-
+    ...
 
 
 class File:
 
     @staticmethod
-    def rename_extension(
+    def rename_extension(
+        file: str,
+        new_extension: str,
+        full_extension: bool = False,
+        camel_case_filename: bool = False,
+    ) -> str:
         """Rename the extension of a file.\n
         --------------------------------------------------------------------------
+        If `full_extension` is true, everything after the first dot in the
+        filename will be treated as the extension to replace (e.g. `.tar.gz`).
+        Otherwise, only the part after the last dot is replaced (e.g. `.gz`).\n
         If the `camel_case_filename` parameter is true, the filename will be made
         CamelCase in addition to changing the files extension."""
-
-
+        normalized_file = _os.path.normpath(file)
+        directory, filename_with_ext = _os.path.split(normalized_file)
+        if full_extension:
+            try:
+                first_dot_index = filename_with_ext.index('.')
+                filename = filename_with_ext[:first_dot_index]
+            except ValueError:
+                filename = filename_with_ext
+        else:
+            filename, _ = _os.path.splitext(filename_with_ext)
         if camel_case_filename:
             filename = String.to_camel_case(filename)
+        if new_extension and not new_extension.startswith('.'):
+            new_extension = '.' + new_extension
         return _os.path.join(directory, f"{filename}{new_extension}")
 
     @staticmethod
@@ -40,24 +57,3 @@ class File:
             f.write(content)
         full_path = _os.path.abspath(file)
         return full_path
-
-    @staticmethod
-    def extend_or_make_path(
-        file: str,
-        search_in: str | list[str] = None,
-        prefer_base_dir: bool = True,
-        correct_paths: bool = False,
-    ) -> str:
-        """Tries to find the file and extend the path to be absolute and if the file was not found:\n
-        Generate the absolute path to the file in the CWD or the running program's base-directory.\n
-        ----------------------------------------------------------------------------------------------
-        If the `file` is not found in predefined directories, it will be searched in the `search_in`
-        directory/directories. If the file is still not found, it will return the path to the file in
-        the base-dir per default or to the file in the CWD if `prefer_base_dir` is set to `False`.\n
-        ----------------------------------------------------------------------------------------------
-        If `correct_paths` is true, it is possible to have typos in the `search_in` path/s and it
-        will still find the file if it is under one of those paths."""
-        try:
-            return Path.extend(file, search_in, raise_error=True, correct_path=correct_paths)
-        except FileNotFoundError:
-            return _os.path.join(Path.script_dir, file) if prefer_base_dir else _os.path.join(_os.getcwd(), file)