abstract-utilities 0.2.2.442__py3-none-any.whl → 0.2.2.480__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of abstract-utilities might be problematic.
- abstract_utilities/__init__.py +2 -1
- abstract_utilities/class_utils.py +0 -1
- abstract_utilities/dynimport.py +1 -1
- abstract_utilities/file_utils/file_utils/__init__.py +2 -0
- abstract_utilities/file_utils/file_utils/file_utils.py +3 -3
- abstract_utilities/file_utils/file_utils/find_collect.py +154 -0
- abstract_utilities/file_utils/file_utils/imports/__init__.py +3 -0
- abstract_utilities/file_utils/file_utils/imports/constants.py +39 -0
- abstract_utilities/file_utils/file_utils/imports/file_functions.py +10 -0
- abstract_utilities/file_utils/file_utils/imports/imports.py +39 -0
- abstract_utilities/file_utils/file_utils/imports/module_imports.py +14 -0
- abstract_utilities/file_utils/file_utils/imports.py +9 -0
- abstract_utilities/file_utils/file_utils/type_checks.py +91 -0
- abstract_utilities/file_utils/imports/__init__.py +1 -2
- abstract_utilities/file_utils/imports/clean_imps.py +158 -0
- abstract_utilities/file_utils/imports/file_functions.py +1 -1
- abstract_utilities/file_utils/imports/imports.py +58 -7
- abstract_utilities/file_utils/imports/module_imports.py +6 -2
- abstract_utilities/read_write_utils.py +168 -11
- abstract_utilities/robust_reader/imports/imports.py +0 -9
- abstract_utilities/robust_readers/import_utils/__init__.py +1 -0
- abstract_utilities/robust_readers/import_utils/clean_imports.py +175 -0
- abstract_utilities/robust_readers/initFuncGen.py +10 -2
- abstract_utilities/ssh_utils/imports.py +1 -2
- abstract_utilities/string_clean.py +40 -1
- abstract_utilities/string_utils.py +51 -0
- abstract_utilities/type_utils.py +25 -1
- {abstract_utilities-0.2.2.442.dist-info → abstract_utilities-0.2.2.480.dist-info}/METADATA +1 -1
- {abstract_utilities-0.2.2.442.dist-info → abstract_utilities-0.2.2.480.dist-info}/RECORD +31 -21
- {abstract_utilities-0.2.2.442.dist-info → abstract_utilities-0.2.2.480.dist-info}/WHEEL +0 -0
- {abstract_utilities-0.2.2.442.dist-info → abstract_utilities-0.2.2.480.dist-info}/top_level.txt +0 -0
@@ -13,11 +13,14 @@ Usage:
 """
 
 import os
-
+import shlex
+from .ssh_utils.utils import run_cmd,get_print_sudo_cmd,run_local_cmd,run_remote_cmd
+from .file_utils.file_utils.type_checks import is_file,is_dir,get_user_pass_host_key,is_exists
+from .abstract_classes import run_pruned_func
 _FILE_PATH_KEYS = ['file', 'filepath', 'file_path', 'path', 'directory', 'f', 'dst', 'dest']
 _CONTENTS_KEYS = ['cont', 'content', 'contents', 'data', 'datas', 'dat', 'src', 'source']
 
-
+
 # --- Helper utilities --------------------------------------------------------
 def string_in_keys(strings, kwargs):
     """Find a matching keyword in kwargs that contains any of the given substrings."""
@@ -27,25 +30,60 @@ def string_in_keys(strings, kwargs):
             return key
     return None
 
-
-
+def make_dirs(path,exist_ok=True,**kwargs):
+    if exist_ok or (not exist_ok and not is_dir(path,**kwargs)):
+        if get_user_pass_host_key(**kwargs):
+            kwargs['cmd']=f"mkdir {path}"
+            run_cmd(**kwargs)
+        else:
+            os.makedirs(path,exist_ok=exist_ok)
+    return ext_dir
+def path_join(*args):
+    path = None
+    for i,arg in enumerate(args):
+        if arg:
+            if i == 0:
+                path = arg
+            else:
+                path = os.path.join(path,arg)
+    return path
+def make_path(path,home_dir=None,file=None,**kwargs):
+    if path:
+        basename = os.path.basename(path)
+        parts = path.split('/')
+        parts = [part for part in parts if part]
+
+        full_dir = home_dir or ''
+        if file == True or (file == None and ('.' in basename)):
+            pieces = parts[:-1] if len(parts) > 1 else []
+        else:
+            pieces=parts
+            basename=None
+        for piece in pieces:
+            full_dir = os.path.join(full_dir,piece)
+            make_dirs(full_dir,exist_ok=True,**kwargs)
+        if basename:
+            full_dir=path_join(full_dir,basename)
+        print(f"full_dir == {full_dir}")
+        return full_dir
+def get_path(paths,**kwargs):
     """Return the first valid path among given paths."""
     for path in paths:
         if isinstance(path, str):
-            if
+            if is_file(path,**kwargs):
                 return path
             dirname = os.path.dirname(path)
-            if
+            if is_exists(dirname):
                 return path
     return None
 
 
-def break_down_find_existing(path):
+def break_down_find_existing(path,**kwargs):
     """Return the first non-existent subpath within a path chain."""
     test_path = ''
     for part in path.split(os.sep):
         test_path = os.path.join(test_path, part)
-        if not
+        if not is_exists(test_path):
             return test_path if test_path else None
     return test_path
 
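
A minimal usage sketch (not part of the diff) of the new path helpers added in this hunk. The import path abstract_utilities.read_write_utils and the local-only call pattern (no user_at_host/password/key kwargs, so os.makedirs and plain path checks are used) are assumptions based on the code above:

    # Hypothetical local usage of the helpers introduced above.
    from abstract_utilities.read_write_utils import make_path, path_join, get_path

    # Build every directory in the chain under home_dir, then append the file name.
    target = make_path("demo/project/config.json", home_dir="/tmp")

    # Join only the non-empty parts, skipping None values.
    joined = path_join("/tmp", None, "demo", "project")

    # Return the first candidate that is an existing file, or whose parent directory exists.
    chosen = get_path(["/etc/hosts", "/tmp/missing.txt"])
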
@@ -72,7 +110,73 @@ def check_read_write_params(*args, **kwargs):
         raise ValueError("Missing file_path argument.")
     return file_path, contents
 
+def write_to_path(
+    file_path: str,
+    contents: str,
+    *,
+    user_at_host: str = None,
+    cwd: str | None = None,
+    password=None,
+    key=None,
+    env_path=None,
+    **kwargs
+) -> str:
+    """
+    Completely overwrite a file (locally or remotely).
+    Supports sudo and password-based remote execution.
+    """
+
+    # sanitize for shell safety
+    quoted_path = shlex.quote(file_path)
+    quoted_data = shlex.quote(str(contents))
 
+    # shell command that fully overwrites
+    # (no append, replaces contents entirely)
+    base_cmd = f'sudo sh -c "echo {quoted_data} > {quoted_path}"'
+    input(base_cmd)
+    # optional sudo password injection
+    full_cmd = get_print_sudo_cmd(
+        cmd=base_cmd,
+        password=password,
+        key=key,
+        env_path=env_path
+    )
+
+    # local or remote dispatch
+    if user_at_host:
+        return run_remote_cmd(
+            user_at_host=user_at_host,
+            cmd=full_cmd,
+            cwd=cwd,
+            password=password,
+            key=key,
+            env_path=env_path,
+            **kwargs
+        )
+    else:
+        return run_local_cmd(
+            cmd=full_cmd,
+            cwd=cwd,
+            password=password,
+            key=key,
+            env_path=env_path,
+            **kwargs
+        )
+### --- Core functionality -------------------------------------------------------
+##def write_to_file(*args, **kwargs):
+##    """
+##    Write contents to a file (create if missing).
+##
+##    Returns the file_path written.
+##    """
+##    file_path, contents = check_read_write_params(*args, **kwargs)
+##    if contents is None:
+##        raise ValueError("Missing contents to write.")
+##
+##    os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True)
+##    with open(file_path, "w", encoding="utf-8") as f:
+##        f.write(str(contents))
+##    return file_path
 # --- Core functionality -------------------------------------------------------
 def write_to_file(*args, **kwargs):
     """
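
A sketch (not part of the diff) of how the new write_to_path appears intended to be called. Note that, as released, it pauses on input(base_cmd) before dispatching and always wraps the echo in sudo; the host and key values below are placeholders, and the import path is an assumption:

    # Hypothetical calls; write_to_path echoes the shlex-quoted contents into the
    # target path via `sudo sh -c`, pausing on input() before running the command.
    from abstract_utilities.read_write_utils import write_to_path

    # Local overwrite (no user_at_host, so run_local_cmd handles the command).
    write_to_path("/tmp/demo.txt", "hello world")

    # Remote overwrite over SSH (placeholder host and key path).
    write_to_path(
        "/srv/app/config.env",
        "MODE=production",
        user_at_host="deploy@example.com",   # placeholder
        key="~/.ssh/id_rsa",                 # placeholder
    )
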
@@ -81,20 +185,73 @@ def write_to_file(*args, **kwargs):
     Returns the file_path written.
     """
     file_path, contents = check_read_write_params(*args, **kwargs)
+    dirname = os.path.dirname(file_path)
+
     if contents is None:
         raise ValueError("Missing contents to write.")
+    user_at_host = kwargs.get("user_at_host")
+    if get_user_pass_host_key(**kwargs):
+        make_dirs(dirname, exist_ok=True,**kwargs)
+        kwargs["cwd"] = kwargs.get('cwd') or os.path.dirname(file_path)
+        # sanitize for shell safety
+        quoted_path = shlex.quote(file_path)
+        quoted_data = shlex.quote(str(contents))
+        # shell command that fully overwrites
+        # (no append, replaces contents entirely)
+        kwargs["cmd"] = f'sh -c "echo {quoted_data} > {quoted_path}"'
+        if not kwargs.get('password') and not kwargs.get('key'):
+            kwargs["cmd"]=f'sudo {kwargs["cmd"]}'
+        result = run_pruned_func(run_cmd,**kwargs)
+        if not is_file(file_path,**kwargs) or str(contents) != read_from_file(file_path,**kwargs):
+            kwargs["cmd"]=f'sudo {kwargs["cmd"]}'
+            result = run_pruned_func(run_cmd,**kwargs)
+        return result
 
-
+    make_dirs(dirname or ".", exist_ok=True)
     with open(file_path, "w", encoding="utf-8") as f:
         f.write(str(contents))
     return file_path
 
 
-def read_from_file(file_path):
+def read_from_file(file_path,**kwargs):
+    if get_user_pass_host_key(**kwargs):
+        kwargs["cwd"] = kwargs.get('cwd') or os.path.dirname(file_path)
+        basename = os.path.basename(file_path)
+        kwargs["cmd"] = f'cat {basename}'
+        return run_pruned_func(run_cmd,**kwargs)
     """Read text content from a file."""
     with open(file_path, "r", encoding="utf-8") as f:
         return f.read()
-
+def get_rel_path(src,src_rel,directory):
+    if src.startswith(src_rel):
+        src = src[len(src_rel):]
+    rel_path = os.path.join(directory,src)
+    return rel_path
+def make_relative_path(src,src_rel,dst,**kwargs):
+    print(f"src == {src}\nsrc_rel == {src_rel}\ndirectory == {directory}")
+    if src.startswith(src_rel):
+        rel_path = get_rel_path(src,src_rel,directory)
+        path = make_path(src,home_dir=rel_path,**kwargs)
+        print(f"path == {path}")
+        return path
+def copy_dirs(dirs,dst,src_rel=None,**kwargs):
+    for src in dirs:
+        if rel_path:
+            dst = make_relative_path(src,src_rel,dst,**kwargs)
+        make_path(dst,**kwargs)
+
+def copy_file(src,dst,rel_path=None,**kwargs):
+    if rel_path:
+        dst = make_relative_path(src,rel_path,dst,**kwargs)
+    if get_user_pass_host_key(**kwargs):
+        contents=read_from_file(src,**kwargs)
+        write_to_file(contents=contents,file_path=dst,**kwargs)
+    else:
+        shutil.copy(src,dst)
+    return dst
+def copy_files(files,dst,rel_path=None,**kwargs):
+    for file in files:
+        copy_file(src=file,dst=dst,rel_path=rel_path,**kwargs)
 
 def create_and_read_file(*args, **kwargs):
     """
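
A sketch (not part of the diff) of the remote-aware round trip these changes enable. The remote kwargs (user_at_host, password, key) are assumptions inferred from the get_user_pass_host_key checks in the hunk, and the import path is assumed to be abstract_utilities.read_write_utils:

    # Hypothetical round trip; when user_at_host/password/key kwargs are present,
    # these functions shell out (mkdir/echo/cat via run_cmd) instead of using open().
    from abstract_utilities.read_write_utils import write_to_file, read_from_file, copy_file

    # Local path: plain open()-based write and read.
    write_to_file(file_path="/tmp/example.txt", contents="abc")
    text = read_from_file("/tmp/example.txt")

    # Remote-style call: contents are echoed/cat'ed through run_cmd on the target host.
    copy_file(
        src="/tmp/example.txt",
        dst="/srv/backups/example.txt",
        user_at_host="admin@example.com",    # placeholder
        key="~/.ssh/id_rsa",                 # placeholder
    )
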
@@ -1,12 +1,3 @@
-import os,tempfile,shutil,logging,ezodf,fnmatch,pytesseract,pdfplumber
-import pandas as pd
-import geopandas as gpd
-from datetime import datetime
-from pathlib import Path
-from typing import *
-from werkzeug.utils import secure_filename
-from werkzeug.datastructures import FileStorage
-from pdf2image import convert_from_path # only used for OCR fallback
 from ...abstract_classes import SingletonMeta
 from ..pdf_utils import *
 from ...read_write_utils import *
@@ -0,0 +1,175 @@
+from ...read_write_utils import read_from_file,write_to_file
+from ...string_clean import eatAll,eatElse,clean_line
+from ...class_utils import get_caller_path
+from ...list_utils import make_list
+import os
+import_tag = 'import '
+from_tag = 'from '
+def get_text_or_read(text=None,file_path=None):
+    text = text or ''
+    imports_js = {}
+    if not text and file_path and os.path.isfile(file_path):
+        text=read_from_file(file_path)
+    return text
+def is_line_import(line):
+    if line and (line.startswith(from_tag) or line.startswith(import_tag)):
+        return True
+    return False
+def is_line_group_import(line):
+    if line and (line.startswith(from_tag) and import_tag in line):
+        return True
+    return False
+def get_import_pkg(line):
+    if is_line_group_import(line):
+        return clean_line(line.split(from_tag)[1].split(import_tag)[0])
+def get_imports_from_import_pkg(line):
+    if is_line_group_import(line):
+        return get_cleaned_import_list(line,commaClean=True)
+
+def add_imports_to_import_pkg_js(import_pkg,imports,import_pkg_js=None):
+    import_pkg_js = import_pkg_js or {}
+    imports = clean_imports(imports)
+    if import_pkg not in import_pkg_js:
+        i = len(import_pkg_js["nulines"])
+        import_pkg_js[import_pkg]={"imports":imports,"line":i}
+        import_line = f"from {import_pkg} import "
+        if import_pkg == "import":
+            import_line = import_tag
+        import_pkg_js["nulines"].append(import_line)
+    else:
+        import_pkg_js[import_pkg]["imports"]+=imports
+    return import_pkg_js
+def update_import_pkg_js(line,import_pkg_js=None):
+    import_pkg_js = import_pkg_js or {}
+    if is_line_group_import(line):
+        import_pkg = get_import_pkg(line)
+        imports = get_imports_from_import_pkg(line)
+        import_pkg_js = add_imports_to_import_pkg_js(import_pkg,imports,import_pkg_js=import_pkg_js)
+    else:
+        if len(import_pkg_js["nulines"]) >0 and line == '' and is_line_import(import_pkg_js["nulines"][-1]):
+            pass
+        else:
+            import_pkg_js["nulines"].append(line)
+    return import_pkg_js
+def is_from_line_group(line):
+    if line and line.startswith(from_tag) and import_tag in line and '(' in line:
+        import_spl = line.split(import_tag)[-1]
+        import_spl_clean = clean_line(line)
+        if not import_spl_clean.endswith(')'):
+            return True
+    return False
+def clean_imports(imports,commaClean=True):
+    chars=["*"]
+    if not commaClean:
+        chars.append(',')
+    if isinstance(imports,str):
+        imports = imports.split(',')
+    return [eatElse(imp,chars=chars) for imp in imports if imp]
+def get_cleaned_import_list(line,commaClean=True):
+    cleaned_import_list=[]
+    if import_tag in line:
+        imports = line.split(import_tag)[1]
+        cleaned_import_list+=clean_imports(imports,commaClean=commaClean)
+    return cleaned_import_list
+def get_all_imports(text=None,file_path=None,import_pkg_js=None):
+    text = get_text_or_read(text=text,file_path=file_path)
+    lines = text.split('\n')
+    cleaned_import_list=[]
+    nu_lines = []
+    is_from_group = False
+    import_pkg_js = import_pkg_js or {}
+    if "nulines" not in import_pkg_js:
+        import_pkg_js["nulines"]=[]
+    if "file_path" not in import_pkg_js:
+        import_pkg_js["file_path"]=file_path
+    if "all_data" not in import_pkg_js:
+        import_pkg_js["all_data"]=[]
+    if file_path and file_path != import_pkg_js["file_path"]:
+        found=False
+        nu_data = {"file_path":import_pkg_js["file_path"],"nulines":import_pkg_js["nulines"]}
+        for i,data in enumerate(import_pkg_js["all_data"]):
+            if data.get('file_path') == import_pkg_js["file_path"]:
+                import_pkg_js["all_data"][i] = nu_data
+                found = True
+                break
+        if found == False:
+            import_pkg_js["all_data"].append(nu_data)
+        import_pkg_js["nulines"]=[]
+        import_pkg_js["file_path"]=file_path
+
+    for line in lines:
+        if line.startswith(import_tag) and ' from ' not in line:
+            cleaned_import_list = get_cleaned_import_list(line)
+            import_pkg_js = add_imports_to_import_pkg_js("import",cleaned_import_list,import_pkg_js=import_pkg_js)
+        else:
+            if is_from_group:
+                import_pkg=is_from_group
+                line = clean_line(line)
+                if line.endswith(')'):
+                    is_from_group=False
+                    line=line[:-1]
+                imports_from_import_pkg = clean_imports(line)
+                import_pkg_js = add_imports_to_import_pkg_js(import_pkg,imports_from_import_pkg,import_pkg_js=import_pkg_js)
+
+            else:
+                import_pkg_js=update_import_pkg_js(line,import_pkg_js=import_pkg_js)
+                if is_from_line_group(line) and is_from_group == False:
+                    is_from_group=get_import_pkg(line)
+    return import_pkg_js
+def clean_all_imports(text=None,file_path=None,import_pkg_js=None):
+    if not import_pkg_js:
+        import_pkg_js = get_all_imports(text=text,file_path=file_path)
+    nu_lines = import_pkg_js["nulines"]
+    for pkg,values in import_pkg_js.items():
+        comments = []
+        if pkg not in ["nulines","file_path","all_data"]:
+            line = values.get('line')
+            imports = values.get('imports')
+            for i,imp in enumerate(imports):
+                if '#' in imp:
+                    imp_spl = imp.split('#')
+                    comments.append(imp_spl[-1])
+                    imports[i] = clean_line(imp_spl[0])
+            imports = list(set(imports))
+            if '*' in imports:
+                imports="*"
+            else:
+                imports=','.join(imports)
+            if comments:
+                comments=','.join(comments)
+                imports+=f" #{comments}"
+            import_pkg_js[pkg]["imports"]=imports
+            nu_lines[line] += imports
+    import_pkg_js["nulines"]=nu_lines
+    return import_pkg_js
+
+def get_all_real_imps(file):
+    contents = read_from_file(file)
+    lines = contents.split('\n')
+    for line in lines:
+        if line.startswith('from '):
+            from_line = line.split('from ')[-1]
+            dot_fro = ""
+            dirname = file
+            for char in from_line:
+                if char != '.':
+                    line = f"from {dot_fro}{eatAll(from_line,'.')}"
+                    if line in all_imps:
+                        line = ""
+                    break
+                if dot_fro == "":
+                    dot_fro = ""
+                dirname = os.path.dirname(dirname)
+                dirbase = os.path.basename(dirname)
+                dot_fro = f"{dirbase}.{dot_fro}"
+            if line:
+                all_imps.append(line)
+
+    return '\n'.join(all_imps)
+def save_cleaned_imports(text=None,file_path=None,write=False,import_pkg_js=None):
+    import_pkg_js=get_all_imports(text=text,file_path=file_path,import_pkg_js=import_pkg_js)
+    import_pkg_js = clean_all_imports(text=text,file_path=file_path,import_pkg_js=import_pkg_js)
+    contents = '\n'.join(import_pkg_js["nulines"])
+    if file_path and write:
+        write_to_file(contents=contents,file_path=file_path)
+    return contents
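
A usage sketch (not part of the diff) for the new import-cleaning module. The import path abstract_utilities.robust_readers.import_utils.clean_imports is taken from the file list above and is otherwise an assumption:

    # Hypothetical use of the import cleaner: parse import lines, merge duplicates
    # per package, and return the rewritten source (or write it back with write=True).
    from abstract_utilities.robust_readers.import_utils.clean_imports import (
        get_all_imports,
        save_cleaned_imports,
    )

    source = "from os import path\nfrom os import sep\nimport sys\n\nprint(path.sep)\n"

    # Inspect the parsed structure: one entry per package plus the "nulines" skeleton.
    parsed = get_all_imports(text=source)

    # Produce the deduplicated text; pass file_path=... and write=True to overwrite a file.
    cleaned = save_cleaned_imports(text=source)
    print(cleaned)
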
@@ -6,9 +6,17 @@ def call_for_all_tabs(root = None,tab_control=True):
     root = root or get_caller_dir()
     get_for_all_tabs(root,tab_control=tab_control)
 
-def
+def get_clean_list(*args):
+    objs = []
+    for arg in args:
+        objs+= make_list(arg)
+    return list(set(objs))
+def clean_imports(*args,**kwargs):
+    for pkg,imps in kwargs.items():
+        f"from {pkg} import make_list(imps)"
+
     alls = str(list(set("""os,re,subprocess,sys,re,traceback,pydot, enum, inspect, sys, traceback, threading,json,traceback,logging,requests""".replace('\n','').replace(' ','').replace('\t','').split(','))))[1:-1].replace('"','').replace("'",'')
-
+    return
 def isTab(item):
     item_lower = item.lower()
     for key in ['console','tab']:
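
A small sketch (not part of the diff) of the new get_clean_list helper; note that the clean_imports added here only builds an f-string it never returns or stores, so only get_clean_list is exercised below, and the import path is an assumption:

    # Hypothetical call: flatten a mix of scalars and lists, then deduplicate via set().
    from abstract_utilities.robust_readers.initFuncGen import get_clean_list

    merged = get_clean_list("os", ["sys", "os"], ["json"])
    # -> a list containing "os", "sys", "json" in arbitrary (set) order
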
@@ -4,8 +4,7 @@ from typing import *
 import subprocess, shlex, os, fnmatch, glob, posixpath, re
 # exec_api.py
 # ---- import your existing pieces ----
-from ..type_utils import make_list
+from ..type_utils import make_list
 from ..time_utils import get_sleep
-from ..ssh_utils import *
 from ..env_utils import *
 from ..string_clean import eatOuter
@@ -22,6 +22,8 @@ Date: 05/31/2023
 Version: 0.1.2
 """
 import os
+from .list_utils import make_list
+from .type_utils import get_alpha_ints
 def quoteIt(st: str, ls: list) -> str:
     """
     Quotes specific elements in a string.
@@ -110,6 +112,42 @@ def eatAll(string: str, list_objects:(str or list)) -> any:
     if string and list_objects:
         string = eatOuter(string, list_objects)
     return string
+
+
+
+def eatElse(
+    stringObj,
+    chars=None,
+    ints=True,
+    alpha=True,
+    lower=True,
+    capitalize=True,
+    string=True,
+    listObj=True
+):
+    alpha_ints = get_alpha_ints(
+        ints=True,
+        alpha=True,
+        lower=True,
+        capitalize=True,
+        string=True,
+        listObj=True
+    )
+    chars = make_list(chars or [])+alpha_ints
+
+    while True:
+        if stringObj:
+            str_0 = stringObj[0] not in chars
+            str_1 = stringObj[-1] not in chars
+            str_eat = str_0 or str_1
+            if not str_eat:
+                return stringObj
+            if stringObj and str_0:
+                stringObj = stringObj[1:] if len(stringObj) !=1 else ""
+            if stringObj and str_1:
+                stringObj = stringObj[:-1] if len(stringObj) !=1 else ""
+        else:
+            return stringObj
 def safe_split(obj, ls):
     """
     Safely splits a string using multiple delimiters.
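
A sketch (not part of the diff) of what the new eatElse does: it trims leading and trailing characters that are not in the keep-set (alphanumerics from get_alpha_ints plus any extra chars), leaving the interior untouched. The import path is assumed to be abstract_utilities.string_clean:

    # Hypothetical trims using eatElse's default keep-set plus an extra character.
    from abstract_utilities.string_clean import eatElse

    eatElse("  name,  ")                 # -> "name"      (outer spaces and the trailing comma are eaten)
    eatElse("(*helper*)", chars=["*"])   # -> "*helper*"  ('*' joins the keep-set, parentheses are eaten)
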
@@ -185,6 +223,7 @@ def url_join(*paths):
         final_path = f"{final_path}/{path}"
     return final_path
 
-
+def clean_line(line):
+    return eatAll(line,[' ','','\t','\n'])
 def capitalize(string):
     return string[:1].upper() + string[1:].lower() if string else string
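
A one-line sketch (not part of the diff) of the new clean_line wrapper, which hands whitespace characters to eatAll; the import path is an assumption:

    # Hypothetical call: strip spaces, tabs, and newlines from both ends of a line.
    from abstract_utilities.string_clean import clean_line

    clean_line("\t  from os import path  \n")   # -> "from os import path"
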
@@ -0,0 +1,51 @@
+from .list_utils import make_list
+def get_from_kwargs(*args,**kwargs):
+    del_kwargs = kwargs.get('del_kwargs',False)
+    values = {}
+    for key in args:
+        if key:
+            key = str(key)
+            if key in kwargs:
+                values[key] = kwargs.get(key)
+                if del_kwarg:
+                    del kwargs[key]
+    return values,kwargs
+
+def replace_it(string,item,rep):
+    if item in string:
+        string = string.replace(item,rep)
+    return string
+def while_replace(string,item,rep):
+    while True:
+        string = replace_it(string,item,rep)
+        if item not in string or item in rep:
+            return string
+def for_replace(string,item,replace):
+    replace = make_list(replace)
+    for rep in replace:
+        string = while_replace(string,item,rep)
+    return string
+def replace_all(string,*args,**kwargs):
+    for items in args:
+        if items and isinstance(items,list):
+            item = items[0]
+            replace = items[1:] if len(items)>1 else items[-1]
+            string = for_replace(string,item,replace)
+    values,kwargs = get_from_kwargs('item','replace',**kwargs)
+    if values:
+        string = for_replace(string,**values)
+    for item,replace in kwargs.items():
+        string = for_replace(string,item,rep)
+    return string
+def get_lines(string,strip=True):
+    lines = string.split('\n')
+    if strip:
+        lines = [line for line in lines if line]
+    return lines
+def get_alpha():
+    return 'abcdefghijklmnopqrstuvwxyz'
+def is_alpha(char,case_sensative=False):
+    alphas = get_alpha()
+    if not case_sensative:
+        alphas+=alphas.upper()
+    return char in alphas
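
A usage sketch (not part of the diff) for the new string_utils module, shown with the positional form of replace_all only, since the keyword branch references an undefined name rep as released; the import path abstract_utilities.string_utils follows the file list above:

    # Hypothetical calls to the new helpers.
    from abstract_utilities.string_utils import replace_all, get_lines, is_alpha

    # Replace every 'foo' with 'bar', repeating until no 'foo' remains.
    replace_all("foo foo baz", ["foo", "bar"])   # -> "bar bar baz"

    # Split on newlines and drop empty lines.
    get_lines("a\n\nb\n")                        # -> ["a", "b"]

    # Case-insensitive membership test against the alphabet.
    is_alpha("Q")                                # -> True
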
abstract_utilities/type_utils.py
CHANGED
@@ -60,6 +60,7 @@ import os
 from pathlib import Path
 from typing import Union
 from .list_utils import make_list
+
 # A big, but by no means exhaustive, map of extensions to mime‐types by category:
 MIME_TYPES = {
     'image': {
@@ -936,7 +937,30 @@ def is_any_instance(value):
     for each in [dict, list, int, float]:
         if is_instance(value, each):
             return True
-
+def getAlphas(lower=True,capitalize=False,listObj=False):
+    obj = ''
+    alphas = 'abcdefghijklmoprstuvwxyz'
+    if lower:
+        obj+=alphas
+    if capitalize:
+        obj+=alphas.upper()
+    if listObj:
+        obj = list(obj)
+    return obj
+def getInts(string=False,listObj=False):
+    obj=12345678909
+    if string:
+        obj = str(obj)
+    if listObj:
+        obj = list(obj)
+    return obj
+def get_alpha_ints(ints=True,alpha=True,lower=True,capitalize=True,string=True,listObj=True):
+    objs = [] if listObj else ""
+    if ints:
+        objs+=getInts(string=string,listObj=listObj)
+    if alpha:
+        objs+=getAlphas(lower=lower,capitalize=capitalize,listObj=listObj)
+    return objs
 # Function: is_number
 # Function: is_str
 # Function: is_int
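
A sketch (not part of the diff) of the new character-set helpers in type_utils; as released, getAlphas' lowercase literal omits 'n' and 'q', and getInts always starts from the fixed literal 12345678909:

    # Hypothetical calls to the new helpers.
    from abstract_utilities.type_utils import getAlphas, getInts, get_alpha_ints

    getAlphas()                          # -> 'abcdefghijklmoprstuvwxyz' (note: no 'n' or 'q')
    getInts(string=True, listObj=True)   # -> ['1','2','3','4','5','6','7','8','9','0','9']

    # Combined digit + letter list; string_clean.eatElse uses this as its default keep-set.
    keep = get_alpha_ints()
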
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_utilities
-Version: 0.2.2.442
+Version: 0.2.2.480
 Summary: abstract_utilities is a collection of utility modules providing a variety of functions to aid in tasks such as data comparison, list manipulation, JSON handling, string manipulation, mathematical computations, and time operations.
 Home-page: https://github.com/AbstractEndeavors/abstract_utilities
 Author: putkoff