abstract-utilities 0.2.2.627__py3-none-any.whl → 0.2.2.700__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstract_utilities/__init__.py +7 -3
- abstract_utilities/class_utils/abstract_classes.py +104 -34
- abstract_utilities/class_utils/caller_utils.py +38 -0
- abstract_utilities/class_utils/imports/imports.py +1 -1
- abstract_utilities/file_utils/imports/classes.py +59 -55
- abstract_utilities/file_utils/imports/module_imports.py +1 -1
- abstract_utilities/file_utils/src/file_filters/__init__.py +0 -3
- abstract_utilities/file_utils/src/file_filters/ensure_utils.py +382 -10
- abstract_utilities/file_utils/src/file_filters/filter_params.py +64 -0
- abstract_utilities/file_utils/src/file_filters/predicate_utils.py +2 -74
- abstract_utilities/file_utils/src/find_collect.py +10 -0
- abstract_utilities/import_utils/imports/__init__.py +1 -1
- abstract_utilities/import_utils/imports/init_imports.py +3 -0
- abstract_utilities/import_utils/imports/module_imports.py +1 -0
- abstract_utilities/import_utils/imports/utils.py +1 -1
- abstract_utilities/import_utils/src/__init__.py +1 -0
- abstract_utilities/import_utils/src/import_utils.py +39 -0
- abstract_utilities/import_utils/src/layze_import_utils/__init__.py +2 -0
- abstract_utilities/import_utils/src/layze_import_utils/lazy_utils.py +41 -0
- abstract_utilities/import_utils/src/layze_import_utils/nullProxy.py +37 -0
- abstract_utilities/import_utils/src/nullProxy.py +30 -0
- abstract_utilities/imports.py +5 -2
- abstract_utilities/json_utils/imports/imports.py +1 -1
- abstract_utilities/json_utils/json_utils.py +37 -3
- abstract_utilities/list_utils/list_utils.py +3 -0
- abstract_utilities/log_utils/log_file.py +82 -27
- abstract_utilities/path_utils/imports/module_imports.py +1 -1
- abstract_utilities/path_utils/path_utils.py +7 -12
- abstract_utilities/read_write_utils/read_write_utils.py +63 -30
- abstract_utilities/type_utils/__init__.py +5 -1
- abstract_utilities/type_utils/get_type.py +120 -0
- abstract_utilities/type_utils/imports/__init__.py +1 -0
- abstract_utilities/type_utils/imports/constants.py +134 -0
- abstract_utilities/type_utils/imports/module_imports.py +25 -1
- abstract_utilities/type_utils/is_type.py +455 -0
- abstract_utilities/type_utils/make_type.py +126 -0
- abstract_utilities/type_utils/mime_types.py +68 -0
- abstract_utilities/type_utils/type_utils.py +0 -877
- {abstract_utilities-0.2.2.627.dist-info → abstract_utilities-0.2.2.700.dist-info}/METADATA +1 -1
- {abstract_utilities-0.2.2.627.dist-info → abstract_utilities-0.2.2.700.dist-info}/RECORD +42 -32
- {abstract_utilities-0.2.2.627.dist-info → abstract_utilities-0.2.2.700.dist-info}/WHEEL +0 -0
- {abstract_utilities-0.2.2.627.dist-info → abstract_utilities-0.2.2.700.dist-info}/top_level.txt +0 -0

abstract_utilities/import_utils/src/layze_import_utils/lazy_utils.py
ADDED

@@ -0,0 +1,41 @@
+from ...imports import *
+from .nullProxy import nullProxy,nullProxy_logger
+@lru_cache(maxsize=None)
+def lazy_import_single(name: str,fallback=None):
+    """
+    Import module safely. If unavailable, return NullProxy.
+    """
+
+    if name in sys.modules:
+        return sys.modules[name]
+
+    try:
+        module = importlib.import_module(name)
+        return module
+    except Exception as e:
+        nullProxy_logger.warning(
+            "[lazy_import] Failed to import '%s': %s",
+            name,
+            e,
+        )
+        return nullProxy(name,fallback=fallback)
+
+def get_lazy_attr(module_name: str, *attrs,fallback=None):
+    obj = lazy_import(module_name,fallback=fallback)
+
+    for attr in attrs:
+        try:
+            obj = getattr(obj, attr)
+        except Exception:
+            return nullProxy(module_name, attrs,fallback=fallback)
+
+    return obj
+def lazy_import(name: str, *attrs,fallback=None):
+    """
+    Import module safely. If unavailable, return NullProxy.
+    """
+    if attrs:
+        obj = get_lazy_attr(name, *attrs,fallback=fallback)
+    else:
+        obj = lazy_import_single(name,fallback=fallback)
+    return obj
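Illustrative usage (not part of the published diff); the import path below simply mirrors the new file's location and is an assumption about how the package exposes these helpers:

    # Hypothetical import path, based on the new file's location shown above.
    from abstract_utilities.import_utils.src.layze_import_utils.lazy_utils import lazy_import

    requests_mod = lazy_import("requests")        # the real module when it imports cleanly
    missing = lazy_import("not_a_real_module")    # a nullProxy placeholder instead of an ImportError
    missing.anything.goes()                       # logs a [lazy_import] warning and returns None
    if not missing:                               # nullProxy is falsy, so absence is easy to branch on
        print(repr(missing))                      # <nullProxy not_a_real_module>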

abstract_utilities/import_utils/src/layze_import_utils/nullProxy.py
ADDED

@@ -0,0 +1,37 @@
+from ...imports import *
+nullProxy_logger = logging.getLogger("abstract.lazy_import")
+
+
+class nullProxy:
+    """
+    Safe, chainable, callable placeholder for missing modules/attributes.
+    """
+
+    def __init__(self, name, path=(),fallback=None):
+        self._name = name
+        self._path = path
+        self.fallback=fallback
+    def __getattr__(self, attr):
+        return nullProxy(self._name, self._path + (attr,))
+
+    def __call__(self, *args, **kwargs):
+        if self.fallback is not None:
+            try:
+                return self.fallback(*args, **kwargs)
+            except Exception as e:
+                logger.info(f"{e}")
+        nullProxy_logger.warning(
+            "[lazy_import] Call to missing module/attr: %s.%s args=%s kwargs=%s",
+            self._name,
+            ".".join(self._path),
+            args,
+            kwargs,
+        )
+        return None
+
+    def __repr__(self):
+        full = ".".join((self._name, *self._path))
+        return f"<nullProxy {full}>"
+
+    def __bool__(self):
+        return False # safe in conditionals

abstract_utilities/import_utils/src/nullProxy.py
ADDED

@@ -0,0 +1,30 @@
+from ...imports import *
+lazy_import_logger = logging.getLogger("abstract.lazy_import")
+class nullProxy:
+    """
+    Safe, chainable, callable placeholder for missing modules/attributes.
+    """
+
+    def __init__(self, name, path=()):
+        self._name = name
+        self._path = path
+
+    def __getattr__(self, attr):
+        return nullProxy(self._name, self._path + (attr,))
+
+    def __call__(self, *args, **kwargs):
+        lazy_import_logger.warning(
+            "[lazy_import] Call to missing module/attr: %s.%s args=%s kwargs=%s",
+            self._name,
+            ".".join(self._path),
+            args,
+            kwargs,
+        )
+        return None
+
+    def __repr__(self):
+        full = ".".join((self._name, *self._path))
+        return f"<nullProxy {full}>"
+
+    def __bool__(self):
+        return False # safe in conditionals
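Both nullProxy variants behave the same from the caller's side: attribute access chains into new proxies, calls log a warning and return None (the variant with `fallback` first tries the fallback callable), and the proxy is falsy. A short behavioral sketch, assuming the class above:

    proxy = nullProxy("cv2")              # stands in for a module that failed to import
    capture = proxy.VideoCapture          # chaining returns another nullProxy
    frame = capture(0)                    # logs "[lazy_import] Call to missing module/attr ..." and returns None
    assert frame is None
    assert not proxy                      # __bool__ is False, so `if proxy:` guards work
    print(repr(capture))                  # <nullProxy cv2.VideoCapture>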
abstract_utilities/imports.py
CHANGED

@@ -1,14 +1,14 @@
from __future__ import annotations
import re,pexpect,shlex,ezodf,tiktoken,geopandas as gpd,os,PyPDF2,json,tempfile,requests
import textwrap,pdfplumber,math,hashlib,pandas as pd,platform,textwrap as tw,glob,asyncio
-import fnmatch,importlib,shutil,sys,time,threading,posixpath,importlib.util,types
+import fnmatch,importlib,shutil,sys,time,threading,posixpath,importlib.util,types, logging
import subprocess,pytesseract,queue,logging,functools,pathlib,pkgutil,inspect
from typing import *
from datetime import timedelta,datetime
from flask import jsonify
from logging.handlers import RotatingFileHandler
from pathlib import Path
-from functools import reduce
+from functools import reduce,lru_cache
from types import MethodType,ModuleType
from werkzeug.utils import secure_filename
from werkzeug.datastructures import FileStorage

@@ -16,3 +16,6 @@ from pdf2image import convert_from_path # only used for OCR fallback
from dataclasses import dataclass,field,asdict
from pprint import pprint
from dotenv import load_dotenv
+from types import MethodType
+from datetime import datetime, date
+from decimal import Decimal

abstract_utilities/json_utils/imports/imports.py
CHANGED

@@ -1,2 +1,2 @@
-from ...imports import
+from ...imports import *
from typing import *

abstract_utilities/json_utils/json_utils.py
CHANGED

@@ -26,6 +26,13 @@ from .imports import *

logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
+def get_keys(mapping,typ=None):
+    typ = typ or set
+    if isinstance(mapping,dict):
+        mapping = mapping.keys()
+    return typ(mapping)
+def make_key_map(dict_obj):
+    return {k:get_keys(v) for k,v in dict_obj.items()}
def convert_and_normalize_values(values):
    for value in values:
        if isinstance(value, str):
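A quick sketch of the two new helpers (illustrative, not taken from the package):

    config = {"db": {"host": "x", "port": 5432}, "cache": {"ttl": 60}}
    get_keys(config)              # {'db', 'cache'}   -- set() by default
    get_keys(config, typ=list)    # ['db', 'cache']
    make_key_map(config)          # {'db': {'host', 'port'}, 'cache': {'ttl'}}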

@@ -116,10 +123,11 @@ def safe_dump_to_file(data, file_path=None, ensure_ascii=False, indent=4, *args,
    else:
        logger.error("file_path and data must be provided to safe_dump_to_file")

-def safe_read_from_json(
+def safe_read_from_json(file_path,*args,**kwargs):
    is_read=True
-
-
+
+
+    valid_file_path = get_file_path(file_path,*args,is_read=is_read,**kwargs)
    if valid_file_path:
        file_path = valid_file_path
    try:

@@ -741,3 +749,29 @@ def flatten_json(data, parent_key='', sep='_'):
        items.append((parent_key, data))

    return dict(items)
+
+
+def to_json_safe(obj):
+    if obj is None:
+        return None
+
+    if isinstance(obj, (str, int, float, bool)):
+        return obj
+
+    if isinstance(obj, (datetime, date)):
+        return obj.isoformat()
+
+    if isinstance(obj, Decimal):
+        return float(obj)
+
+    if isinstance(obj, Path):
+        return str(obj)
+
+    if isinstance(obj, dict):
+        return {k: to_json_safe(v) for k, v in obj.items()}
+
+    if isinstance(obj, (list, tuple, set)):
+        return [to_json_safe(v) for v in obj]
+
+    # Fallback (yt-dlp objects, enums, etc.)
+    return str(obj)
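to_json_safe recursively coerces values that json.dumps cannot handle (datetimes, Decimals, Paths, sets, arbitrary objects). An illustrative sketch using the types the diff adds to imports.py:

    payload = {
        "when": datetime(2024, 1, 2, 3, 4, 5),   # -> "2024-01-02T03:04:05"
        "price": Decimal("9.99"),                # -> 9.99 (float)
        "out": Path("/tmp/out.json"),            # -> "/tmp/out.json"
        "tags": {"a", "b"},                      # sets and tuples become lists
    }
    json.dumps(to_json_safe(payload))            # no TypeError; unknown objects fall back to str()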

abstract_utilities/log_utils/log_file.py
CHANGED

@@ -1,7 +1,8 @@
from .imports import *
import os, sys, inspect, logging
from logging.handlers import RotatingFileHandler
-
+import logging
+from pathlib import Path
PACKAGE_NAME = "abstract_utilities" # ← update if needed


@@ -43,39 +44,93 @@ def _resolve_log_root():
LOG_ROOT = _resolve_log_root()


-def get_logFile(bpName=None, maxBytes=100_000, backupCount=3):
-    """
-    A logger that always writes to a safe OS-appropriate path.
-    Works even when installed through pip.
-    """
-    if bpName is None:
-        frame_idx = _find_caller_frame_index()
-        frame_info = inspect.stack()[frame_idx]
-        caller_path = frame_info.filename
-        bpName = os.path.splitext(os.path.basename(caller_path))[0]
-        del frame_info
-
-    logger = logging.getLogger(f"{PACKAGE_NAME}.{bpName}")
-    logger.setLevel(logging.INFO)
-
-    if not logger.handlers:
-        log_file = os.path.join(LOG_ROOT, f"{bpName}.log")
-        handler = RotatingFileHandler(log_file, maxBytes=maxBytes, backupCount=backupCount)
+##def get_logFile(bpName=None, maxBytes=100_000, backupCount=3):
+##    """
+##    A logger that always writes to a safe OS-appropriate path.
+##    Works even when installed through pip.
+##    """
+##    if bpName is None:
+##        frame_idx = _find_caller_frame_index()
+##        frame_info = inspect.stack()[frame_idx]
+##        caller_path = frame_info.filename
+##        bpName = os.path.splitext(os.path.basename(caller_path))[0]
+##        del frame_info
+##
+##    logger = logging.getLogger(f"{PACKAGE_NAME}.{bpName}")
+##    logger.setLevel(logging.INFO)
+##
+##    if not logger.handlers:
+##        log_file = os.path.join(LOG_ROOT, f"{bpName}.log")
+##        handler = RotatingFileHandler(log_file, maxBytes=maxBytes, backupCount=backupCount)
+##
+##        fmt = "%(asctime)s - %(levelname)s - %(pathname)s:%(lineno)d - %(message)s"
+##        formatter = logging.Formatter(fmt)
+##        handler.setFormatter(formatter)
+##
+##        logger.addHandler(handler)
+##
+##        # Console handler (optional; can disable for gunicorn)
+##        console = logging.StreamHandler(sys.stdout)
+##        console.setFormatter(formatter)
+##        logger.addHandler(console)
+##
+##        return logger
+LOG_FORMAT = (
+    "[%(asctime)s] "
+    "%(levelname)-8s "
+    "%(name)s:%(lineno)d | "
+    "%(message)s"
+)
+
+DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+
+
+
+def get_logFile(
+    name: str,
+    log_dir: str | Path = "logs",
+    level: int = logging.INFO,
+    console: bool = True,
+    max_bytes: int = 5 * 1024 * 1024,
+    backup_count: int = 5,
+):
+    logger = logging.getLogger(name)
+
+    if logger.handlers:
+        return logger
+
+    logger.setLevel(level)
+
+    formatter = logging.Formatter(LOG_FORMAT, DATE_FORMAT)

-
-
-
+    try:
+        log_dir = Path(log_dir)
+        log_dir.mkdir(parents=True, exist_ok=True)
+
+        file_handler = RotatingFileHandler(
+            log_dir / f"{name}.log",
+            maxBytes=max_bytes,
+            backupCount=backup_count,
+            encoding="utf-8",
+        )
+        file_handler.setFormatter(formatter)
+        logger.addHandler(file_handler)

-
+    except PermissionError:
+        # 🔒 Import-safe fallback
+        logger.addHandler(logging.NullHandler())

-
-
-
-        logger.addHandler(
+    if console:
+        console_handler = logging.StreamHandler()
+        console_handler.setFormatter(formatter)
+        logger.addHandler(console_handler)

+    logger.propagate = False
    return logger


+
def _find_caller_frame_index():
    """Find the correct caller module outside this logger."""
    for idx, frame_info in enumerate(inspect.stack()):
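The rewritten get_logFile is keyed by an explicit logger name instead of inspecting the caller's filename, and it degrades to a NullHandler when the log directory is not writable. A usage sketch with the defaults shown above:

    log = get_logFile("my_service")     # logs/my_service.log (rotating, 5 MB x 5) plus a console StreamHandler
    log.info("service started")
    again = get_logFile("my_service")   # same logging.Logger object; existing handlers are reused
    assert again is log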

abstract_utilities/path_utils/imports/module_imports.py
CHANGED

@@ -2,7 +2,7 @@ from ...string_utils import eatAll
from ...list_utils import make_list
from ...type_utils import get_media_exts, is_media_type,MIME_TYPES
from ...safe_utils import safe_join,get_slash
-from ...class_utils import get_caller_path,get_caller_dir
+from ...class_utils import get_caller_path,get_caller_dir,get_initial_caller,get_initial_caller_dir
from ...file_utils import is_file,is_dir,is_exists
from ...ssh_utils import is_file,is_dir,is_exists
from ...directory_utils import *

abstract_utilities/path_utils/path_utils.py
CHANGED

@@ -107,17 +107,6 @@ def path_join(*paths, isfile=False):
    os.makedirs(final_path, exist_ok=True)
    return final_path

-def is_file(*paths):
-    item_path = os.path.join(*paths)
-    return os.path.isfile(item_path)
-
-def is_dir(*paths):
-    item_path = os.path.join(*paths)
-    return os.path.isdir(item_path)
-
-def is_path(*paths):
-    item_path = os.path.join(*paths)
-    return item_path if os.path.exists(item_path) else None

def get_all_directories(directory):
    dir_list = os.listdir(directory)

@@ -132,7 +121,7 @@ def get_all_files(directory=None):

def get_all_items(directory):
    dir_list = os.listdir(directory)
-    file_list = [item for item in dir_list if
+    file_list = [item for item in dir_list if is_exists(directory,item)]
    return file_list


@@ -176,6 +165,7 @@ def get_safe_splitext(path=None,basename=None):
        basename_str = str(basename)
        filename,ext = os.path.splitext(basename_str)
        return filename,ext
+    return None,None
def get_safe_filename(path=None,basename=None):
    filename,_ = get_safe_splitext(path=path,basename=basename)
    return filename

@@ -223,6 +213,11 @@ def create_base_dir(directory=None, child=None):



+def get_abs_path(path,i=None):
+    abs_dir = get_initial_caller_dir()
+    return os.path.join(abs_dir,path)
+
+

def get_file_parts(path):
    if path:
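get_abs_path resolves a relative path against the directory of the initial caller via the newly imported get_initial_caller_dir (defined in class_utils, not shown in this diff). An illustrative call under that assumption:

    # if the initial caller is /srv/app/main.py (hypothetical path)
    get_abs_path("config/settings.json")   # -> "/srv/app/config/settings.json"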

abstract_utilities/read_write_utils/read_write_utils.py
CHANGED

@@ -15,6 +15,38 @@ Usage:
from .imports import *
_FILE_PATH_KEYS = ['file', 'filepath', 'file_path', 'path', 'directory', 'f', 'dst', 'dest']
_CONTENTS_KEYS = ['cont', 'content', 'contents', 'data', 'datas', 'dat', 'src', 'source']
+from pathlib import Path
+import uuid
+import shlex
+
+_STAGE_ROOT = Path("/var/tmp/abstract_stage")
+
+
+def _stage_file(contents: str, suffix=".tmp") -> Path:
+    """
+    Write contents to a local staging file.
+    """
+    _STAGE_ROOT.mkdir(parents=True, exist_ok=True)
+    path = _STAGE_ROOT / f"{uuid.uuid4().hex}{suffix}"
+    path.write_text(str(contents), encoding="utf-8")
+    return path
+
+
+def _install_file(staged: Path, dest: str, **kwargs) -> str:
+    """
+    Atomically install a staged file to destination using sudo install.
+    """
+    cmd = (
+        f"sudo install -D -m 0644 "
+        f"{shlex.quote(str(staged))} "
+        f"{shlex.quote(dest)}"
+    )
+    return run_local_cmd(
+        cmd=cmd,
+        password=kwargs.get("password"),
+        key=kwargs.get("key"),
+        env_path=kwargs.get("env_path"),
+    )


# --- Helper utilities --------------------------------------------------------

@@ -251,56 +283,57 @@ def _should_use_remote(**kwargs) -> bool:

def _write_to_file(contents: str, file_path: str, **kwargs) -> str:
    """
-
-    Works locally and remotely without breaking quotes.
+    Unified writer using stage → install model.
    """

-    # --- Decode mode selection ---
    remote = _should_use_remote(**kwargs)

-    #
-
-
-
-    # Base64 encoding ensures zero escaping issues
-    b64 = base64.b64encode(contents.encode("utf-8")).decode("utf-8")
+    # --- Remote path (unchanged conceptually) ---
+    if remote:
+        tmp_path = _stage_file(contents)

-
-
-
-        f"sudo tee {shlex.quote(file_path)} > /dev/null"
-    )
+        user_at_host = kwargs["user_at_host"]
+        password = kwargs.get("password")
+        key = kwargs.get("key")

-
-
+        # copy staged file
+        scp_cmd = (
+            f"scp {shlex.quote(str(tmp_path))} "
+            f"{shlex.quote(user_at_host)}:{shlex.quote(file_path)}"
+        )
        return run_pruned_func(
            run_local_cmd,
-            cmd=
+            cmd=scp_cmd,
+            password=password,
+            key=key,
            **kwargs
        )

-    # ---
-
-
-
-
-
-
+    # --- Local path ---
+    try:
+        # Attempt direct write for non-privileged paths
+        os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True)
+        with open(file_path, "w", encoding="utf-8") as f:
+            f.write(str(contents))
+        return file_path
+
+    except (PermissionError, FileNotFoundError):
+        # Privileged path → stage + install
+        staged = _stage_file(contents)
+        return _install_file(staged, file_path, **kwargs)
+


def write_to_file(*, contents: str, file_path: str, **kwargs):
    """
-    Error-handled
+    Error-handled public writer.
    """
-
    try:
-
-        return result
-
+        return _write_to_file(contents=contents, file_path=file_path, **kwargs)
    except Exception as e:
        print("WRITE ERROR:", e)
        raise RuntimeError(f"Failed writing: {file_path}")
-def read_from_file(file_path,**kwargs):
+def read_from_file(file_path=None,**kwargs):
    if get_user_pass_host_key(**kwargs):
        kwargs["cwd"] = kwargs.get('cwd') or os.path.dirname(file_path)
        basename = os.path.basename(file_path)
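The rewritten writer tries a plain local write first and only falls back to the stage-and-install path (sudo install) or the remote scp path. A hedged usage sketch; the keyword names follow the hunk above, and the exact rules inside _should_use_remote are not shown in this diff:

    # Ordinary local write: parent directories are created and the file is written directly.
    write_to_file(contents="hello", file_path="/tmp/demo/out.txt")

    # Privileged destination: a PermissionError triggers _stage_file() followed by
    # `sudo install -D -m 0644 <staged> <dest>` through run_local_cmd.
    write_to_file(contents="server { }", file_path="/etc/nginx/conf.d/demo.conf", password="...")

    # Remote destination (assuming these kwargs select the remote branch):
    # the staged file is copied with scp to user_at_host.
    write_to_file(contents="data", file_path="/opt/app/data.txt",
                  user_at_host="deploy@203.0.113.5", key="~/.ssh/id_ed25519")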

abstract_utilities/type_utils/get_type.py
ADDED

@@ -0,0 +1,120 @@
+from .imports import *
+from .alpha_utils import *
+from .num_utils import *
+from .is_type import *
+from .make_type import *
+def get_obj_obj(obj_type: str, obj: any) -> any:
+    """
+    Returns the object converted according to the given type string.
+
+    Args:
+        obj_type: The string representing the type to convert to.
+        obj: The object to convert.
+
+    Returns:
+        any: The object converted to the specified type.
+    """
+    if obj_type == 'str':
+        return make_str(obj)
+    elif obj_type == 'bool':
+        return make_bool(obj)
+    elif obj_type == 'float':
+        return make_float(obj)
+    elif obj_type == 'int':
+        try:
+            return int(obj)
+        except (TypeError, ValueError):
+            return obj
+    else:
+        return obj
+def get_len_or_num(obj: any) -> int:
+    """
+    Returns the length of the object if it can be converted to a string, else the integer representation of the object.
+
+    Args:
+        obj: The object to process.
+
+    Returns:
+        int: The length of the object as a string or the integer representation of the object.
+    """
+    if is_int(obj) or is_float(obj):
+        return int(obj)
+    else:
+        try:
+            return len(str(obj))
+        except (TypeError, ValueError):
+            return 0
+def get_types_list()->list:
+    return ['list', 'bool', 'str', 'int', 'float', 'set', 'dict', 'frozenset', 'bytearray', 'bytes', 'memoryview', 'range', 'enumerate', 'zip', 'filter', 'map', 'property', 'slice', 'super', 'type', 'Exception', 'NoneType']
+
+
+def str_lower(obj):
+    try:
+        obj=str(obj).lower()
+    except Exception as e:
+        print(f"{e}")
+    return obj
+
+def get_bool_response(bool_response,json_data):
+    if not is_instance(bool_response,bool):
+        try:
+            bool_response = json_data.get(bool_response) in [None,'',[],"",{}]
+        except:
+            pass
+    return bool_response
+def get_alphabet_str():
+    return 'abcdefghijklmnopqrstuvwxyz'
+def get_alphabet_upper_str():
+    alphabet_str = get_alphabet_str()
+    return alphabet_str.upper()
+def get_alphabet_comp_str():
+    return get_alphabet_str() + get_alphabet_upper_str()
+
+def get_alphabet():
+    alphabet_str = get_alphabet_str()
+    return break_string(alphabet_str)
+def get_alphabet_upper():
+    alphabet_upper_str = get_alphabet_upper_str()
+    return break_string(alphabet_upper_str)
+def get_alphabet_comp():
+    alphabet_comp_str = get_alphabet_comp_str()
+    return break_string(alphabet_comp_str)
+def get_numbers_str():
+    return '0123457890'
+def get_numbers_int():
+    numbers_str = get_numbers_str()
+    return [int(number) for number in numbers_str]
+def get_numbers():
+    numbers_str = get_numbers_str()
+    return break_string(numbers_str)
+def get_numbers_comp():
+    numbers_str = get_numbers()
+    numbers_int = get_numbers_int()
+    return numbers_str + numbers_int
+def break_string(string):
+    string_str = str(string)
+    return list(string_str)
+def get_alpha_ints(ints=True,alpha=True,lower=True,capitalize=True,string=True,listObj=True):
+    objs = [] if listObj else ""
+    if ints:
+        objs+=getInts(string=string,listObj=listObj)
+    if alpha:
+        objs+=getAlphas(lower=lower,capitalize=capitalize,listObj=listObj)
+    return objs
+def if_true_get_string(data, key):
+    return key if data.get(key) else None
+def find_for_string(string, parts):
+    return [part for part in parts if string.lower() in str(part).lower()]
+
+
+def is_strings_in_string(strings, parts):
+    strings = make_list(strings)
+    for string in strings:
+        parts = find_for_string(string, parts)
+        if not parts:
+            return []
+    return parts
+def if_not_bool_default(value,default=None):
+    if not isinstance(value,bool):
+        value = default
+    return value