abstract-utilities 0.2.2.540__py3-none-any.whl → 0.2.2.667__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of abstract-utilities might be problematic.
- abstract_utilities/__init__.py +13 -4
- abstract_utilities/class_utils/abstract_classes.py +104 -34
- abstract_utilities/class_utils/caller_utils.py +57 -0
- abstract_utilities/class_utils/global_utils.py +35 -20
- abstract_utilities/class_utils/imports/imports.py +1 -1
- abstract_utilities/directory_utils/src/directory_utils.py +19 -1
- abstract_utilities/file_utils/imports/classes.py +59 -55
- abstract_utilities/file_utils/imports/imports.py +0 -4
- abstract_utilities/file_utils/imports/module_imports.py +1 -1
- abstract_utilities/file_utils/src/__init__.py +2 -3
- abstract_utilities/file_utils/src/file_filters/__init__.py +1 -0
- abstract_utilities/file_utils/src/file_filters/ensure_utils.py +490 -0
- abstract_utilities/file_utils/src/file_filters/filter_params.py +150 -0
- abstract_utilities/file_utils/src/file_filters/filter_utils.py +78 -0
- abstract_utilities/file_utils/src/file_filters/predicate_utils.py +44 -0
- abstract_utilities/file_utils/src/file_reader.py +0 -1
- abstract_utilities/file_utils/src/find_collect.py +10 -86
- abstract_utilities/file_utils/src/find_content.py +210 -0
- abstract_utilities/file_utils/src/initFunctionsGen.py +36 -23
- abstract_utilities/file_utils/src/initFunctionsGens.py +280 -0
- abstract_utilities/file_utils/src/reader_utils/__init__.py +4 -0
- abstract_utilities/file_utils/src/reader_utils/directory_reader.py +53 -0
- abstract_utilities/file_utils/src/reader_utils/file_reader.py +543 -0
- abstract_utilities/file_utils/src/reader_utils/file_readers.py +376 -0
- abstract_utilities/file_utils/src/reader_utils/imports.py +18 -0
- abstract_utilities/file_utils/src/reader_utils/pdf_utils.py +300 -0
- abstract_utilities/import_utils/circular_import_finder.py +222 -0
- abstract_utilities/import_utils/circular_import_finder2.py +118 -0
- abstract_utilities/import_utils/imports/__init__.py +1 -1
- abstract_utilities/import_utils/imports/init_imports.py +3 -0
- abstract_utilities/import_utils/imports/module_imports.py +4 -1
- abstract_utilities/import_utils/imports/utils.py +1 -1
- abstract_utilities/import_utils/src/__init__.py +1 -0
- abstract_utilities/import_utils/src/clean_imports.py +156 -25
- abstract_utilities/import_utils/src/dot_utils.py +11 -0
- abstract_utilities/import_utils/src/extract_utils.py +4 -0
- abstract_utilities/import_utils/src/import_functions.py +66 -2
- abstract_utilities/import_utils/src/import_utils.py +39 -0
- abstract_utilities/import_utils/src/layze_import_utils/__init__.py +2 -0
- abstract_utilities/import_utils/src/layze_import_utils/lazy_utils.py +41 -0
- abstract_utilities/import_utils/src/layze_import_utils/nullProxy.py +32 -0
- abstract_utilities/import_utils/src/nullProxy.py +30 -0
- abstract_utilities/import_utils/src/pkg_utils.py +58 -4
- abstract_utilities/import_utils/src/sysroot_utils.py +56 -1
- abstract_utilities/imports.py +3 -2
- abstract_utilities/json_utils/json_utils.py +11 -3
- abstract_utilities/log_utils/log_file.py +73 -24
- abstract_utilities/parse_utils/parse_utils.py +23 -0
- abstract_utilities/path_utils/imports/module_imports.py +1 -1
- abstract_utilities/path_utils/path_utils.py +32 -35
- abstract_utilities/read_write_utils/imports/imports.py +1 -1
- abstract_utilities/read_write_utils/read_write_utils.py +102 -32
- abstract_utilities/safe_utils/safe_utils.py +30 -0
- abstract_utilities/type_utils/__init__.py +5 -1
- abstract_utilities/type_utils/get_type.py +116 -0
- abstract_utilities/type_utils/imports/__init__.py +1 -0
- abstract_utilities/type_utils/imports/constants.py +134 -0
- abstract_utilities/type_utils/imports/module_imports.py +25 -1
- abstract_utilities/type_utils/is_type.py +455 -0
- abstract_utilities/type_utils/make_type.py +126 -0
- abstract_utilities/type_utils/mime_types.py +68 -0
- abstract_utilities/type_utils/type_utils.py +0 -877
- {abstract_utilities-0.2.2.540.dist-info → abstract_utilities-0.2.2.667.dist-info}/METADATA +1 -1
- {abstract_utilities-0.2.2.540.dist-info → abstract_utilities-0.2.2.667.dist-info}/RECORD +66 -41
- {abstract_utilities-0.2.2.540.dist-info → abstract_utilities-0.2.2.667.dist-info}/WHEEL +0 -0
- {abstract_utilities-0.2.2.540.dist-info → abstract_utilities-0.2.2.667.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,78 @@
+from ...imports import *
+import re
+def combine_params(*values, typ=None):
+    nu_values = None
+    for value in values:
+        if value is not None:
+            typ = typ or type(value)
+            if nu_values is None:
+                nu_values = typ()
+
+            if typ is set:
+                nu_values = nu_values | typ(value)
+            if typ is list:
+                nu_values += typ(value)
+    return nu_values
+def get_safe_kwargs(canonical_map, **kwargs):
+    # Lowercase all keys for safety
+    canonical_map = canonical_map or CANONICAL_MAP
+    norm_kwargs = {k.lower(): v for k, v in kwargs.items() if v is not None}
+
+    # Inverse lookup: alias → canonical key
+    alias_lookup = {
+        alias: canon
+        for canon, aliases in canonical_map.items()
+        if aliases
+        for alias in aliases
+    }
+
+    # Preserve correctly named keys
+    safe_kwargs = {k: v for k, v in norm_kwargs.items() if k in canonical_map}
+
+    for k, v in norm_kwargs.items():
+        if k in alias_lookup:
+            canonical_key = alias_lookup[k]
+            prev = safe_kwargs.get(canonical_key)
+            if prev is None:
+                safe_kwargs[canonical_key] = v
+            else:
+                # merge intelligently if both exist
+                if isinstance(prev, (set, list)) and isinstance(v, (set, list)):
+                    safe_kwargs[canonical_key] = list(set(prev) | set(v))
+                else:
+                    safe_kwargs[canonical_key] = v  # overwrite for non-iterables
+
+    # fill defaults if missing
+    for canon in canonical_map:
+        safe_kwargs.setdefault(canon, None)
+
+    return safe_kwargs
+
+def create_canonical_map(*args, canonical_map=None):
+    keys = [arg for arg in args if arg]
+    if not keys:
+        return CANONICAL_MAP
+    canonical_map = canonical_map or CANONICAL_MAP
+
+    return {key: canonical_map.get(key) for key in keys}
+def get_safe_canonical_kwargs(*args, canonical_map=None, **kwargs):
+    canonical_map = canonical_map or create_canonical_map(*args)
+
+    return get_safe_kwargs(canonical_map=canonical_map, **kwargs)
+def get_dir_filter_kwargs(**kwargs):
+    canonical_map = create_canonical_map("directories")
+    return get_safe_kwargs(canonical_map=canonical_map, **kwargs)
+def get_file_filter_kwargs(**kwargs):
+    """
+    Normalize arbitrary keyword arguments for file scanning configuration.
+
+    Examples:
+    - 'excluded_ext' or 'unallowed_exts' → 'exclude_exts'
+    - 'include_dirs' or 'allow_dir' → 'allowed_dirs'
+    - 'excludePattern' or 'excluded_patterns' → 'exclude_patterns'
+    - 'allowed_type' or 'include_types' → 'allowed_types'
+    """
+    # Canonical keys and aliases
+    canonical_keys = ["allowed_exts", "exclude_exts", "allowed_types", "exclude_types", "allowed_dirs", "exclude_dirs", "allowed_patterns", "exclude_patterns"]
+
+    return get_safe_canonical_kwargs(*canonical_keys, **kwargs)
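For readers skimming the diff, the normalization above boils down to mapping alias keyword names onto canonical ones and merging list/set values when both spellings are supplied. A minimal, self-contained sketch of that behavior follows; the toy canonical map below is illustrative only, the real CANONICAL_MAP lives in the package's shared imports and is not shown in this diff.

# Illustrative only: a tiny canonical map, not the package's CANONICAL_MAP.
canonical_map = {
    "allowed_exts": ["allowed_ext", "include_exts"],
    "exclude_exts": ["excluded_ext", "unallowed_exts"],
}

def normalize_kwargs(canonical_map, **kwargs):
    # alias -> canonical key, mirroring the alias_lookup comprehension in the diff
    alias_lookup = {alias: canon
                    for canon, aliases in canonical_map.items()
                    for alias in (aliases or [])}
    out = {k: v for k, v in kwargs.items() if k in canonical_map}
    for k, v in kwargs.items():
        canon = alias_lookup.get(k.lower())
        if canon:
            prev = out.get(canon)
            if isinstance(prev, (set, list)) and isinstance(v, (set, list)):
                out[canon] = list(set(prev) | set(v))  # merge iterable values
            else:
                out[canon] = v
    for canon in canonical_map:
        out.setdefault(canon, None)  # fill defaults if missing
    return out

print(normalize_kwargs(canonical_map, unallowed_exts=[".pyc"], allowed_exts=[".py"]))
# -> {'allowed_exts': ['.py'], 'exclude_exts': ['.pyc']}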
@@ -0,0 +1,44 @@
+from .ensure_utils import *
+
+def get_globs(items, recursive: bool = True, allowed=None, cfg=None, **kwargs):
+    glob_paths = []
+    allowed = get_allowed_predicate(allowed=allowed, cfg=cfg, **kwargs)
+    items = [item for item in make_list(items) if item]
+    for item in items:
+        pattern = os.path.join(item, "**/*")  # include all files recursively
+        nuItems = glob.glob(pattern, recursive=recursive)
+        if allowed:
+            nuItems = [nuItem for nuItem in nuItems if nuItem and allowed(nuItem)]
+        glob_paths += nuItems
+    return glob_paths
+def get_allowed_files(items, allowed=True, cfg=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed, cfg=cfg, **kwargs)
+    return [item for item in items if item and os.path.isfile(item) and allowed(item)]
+def get_allowed_dirs(items, allowed=False, cfg=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed, cfg=cfg, **kwargs)
+    return [item for item in items if item and os.path.isdir(item) and allowed(item)]
+
+def get_filtered_files(items, allowed=None, files=[], cfg=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed, cfg=cfg, **kwargs)
+    glob_paths = get_globs(items, allowed=allowed, cfg=cfg, **kwargs)
+    return [glob_path for glob_path in glob_paths if glob_path and os.path.isfile(glob_path) and glob_path not in files and allowed(glob_path)]
+def get_filtered_dirs(items, allowed=None, dirs=[], cfg=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed, cfg=cfg, **kwargs)
+    glob_paths = get_globs(items, allowed=allowed, cfg=cfg, **kwargs)
+    return [glob_path for glob_path in glob_paths if glob_path and os.path.isdir(glob_path) and glob_path not in dirs and allowed(glob_path)]
+
+def get_all_allowed_files(items, allowed=None, cfg=None, **kwargs):
+    dirs = get_all_allowed_dirs(items, allowed=allowed, cfg=cfg, **kwargs)
+    files = get_allowed_files(items, allowed=allowed, cfg=cfg, **kwargs)
+    nu_files = []
+    for directory in dirs:
+        files += get_filtered_files(directory, allowed=allowed, files=files, cfg=cfg, **kwargs)
+    return files
+def get_all_allowed_dirs(items, allowed=None, cfg=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed, cfg=cfg, **kwargs)
+    dirs = get_allowed_dirs(items, allowed=allowed, cfg=cfg, **kwargs)
+    nu_dirs = []
+    for directory in dirs:
+        nu_dirs += get_filtered_dirs(directory, allowed=allowed, dirs=nu_dirs, cfg=cfg, **kwargs)
+    return nu_dirs
+
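The glob helpers above follow one pattern: expand each root with a recursive "**/*" glob, then keep only entries that pass an allowed predicate (get_allowed_predicate itself is built elsewhere in this release from the normalized filter kwargs). A minimal sketch of that pattern using only the standard library:

import glob, os

def collect(roots, allowed=lambda p: True, recursive=True):
    # expand each root recursively, then apply the predicate, as get_globs does
    hits = []
    for root in roots:
        pattern = os.path.join(root, "**/*")
        hits += [p for p in glob.glob(pattern, recursive=recursive) if allowed(p)]
    return hits

# e.g. keep only Python files under the current directory:
py_files = collect(["."], allowed=lambda p: os.path.isfile(p) and p.endswith(".py"))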
@@ -1,7 +1,6 @@
 # file_reader.py
 from ..imports import *
 # -------- Public API drop-ins that mirror your originals --------
-from .filter_params import *
 from .file_filters import *
 from .pdf_utils import *
 # ---------------------------------------------------------------------------
@@ -1,76 +1,8 @@
 from ..imports import *
 from .file_filters import *
-from .filter_params import *
 from pathlib import Path
 from typing import Optional, List, Set
 
-def get_proper_type_str(string):
-    if not string:
-        return None
-    string_lower = string.lower()
-    items = {
-        "d":["dir","dirs","directory","directories","d","dirname"],
-        "f":["file","filepath","file_path","files","filepaths","file_paths","f"]
-    }
-    for key,values in items.items():
-        if string_lower in values:
-            return key
-    init = string_lower[0] if len(string_lower)>0 else None
-    if init in items:
-        return init
-def check_path_type(
-    path: str,
-    user: Optional[str] = None,
-    host: Optional[str] = None,
-    user_as_host: Optional[str] = None,
-    use_shell: bool = False
-) -> Literal["file", "directory", "missing", "unknown"]:
-    """
-    Determine whether a given path is a file, directory, or missing.
-    Works locally or remotely (via SSH).
-
-    Args:
-        path: The path to check.
-        user, host, user_as_host: SSH parameters if remote.
-        use_shell: Force shell test instead of Python os.path.
-    Returns:
-        One of: 'file', 'directory', 'missing', or 'unknown'
-    """
-
-    # --- remote check if user/host is given ---
-    if user_as_host or (user and host):
-        remote_target = user_as_host or f"{user}@{host}"
-        cmd = f"if [ -f '{path}' ]; then echo file; elif [ -d '{path}' ]; then echo directory; else echo missing; fi"
-        try:
-            result = subprocess.check_output(
-                ["ssh", remote_target, cmd],
-                stderr=subprocess.DEVNULL,
-                text=True,
-                timeout=5
-            ).strip()
-            return result if result in ("file", "directory", "missing") else "unknown"
-        except Exception:
-            return "unknown"
-
-    # --- local check ---
-    if not use_shell:
-        if os.path.isfile(path):
-            return "file"
-        elif os.path.isdir(path):
-            return "directory"
-        elif not os.path.exists(path):
-            return "missing"
-        return "unknown"
-    else:
-        # fallback using shell tests (useful for sandboxed contexts)
-        cmd = f"if [ -f '{path}' ]; then echo file; elif [ -d '{path}' ]; then echo directory; else echo missing; fi"
-        try:
-            output = subprocess.check_output(
-                cmd, shell=True, stderr=subprocess.DEVNULL, text=True
-            ).strip()
-            return output if output in ("file", "directory", "missing") else "unknown"
-        except Exception:
-            return "unknown"
 
 
 
@@ -95,15 +27,15 @@ def get_find_cmd(
     """
    # Normalize inputs into canonical form
     kwargs = get_safe_canonical_kwargs(*args, **kwargs)
-    cfg = define_defaults(**kwargs)
+    cfg = kwargs.get('cfg') or define_defaults(**kwargs)
 
     # Get directory list (may come from args or kwargs)
-    directories =
-    if not directories:
+    kwargs["directories"] = ensure_directories(*args, **kwargs)
+    if not kwargs["directories"]:
         return []
 
     # Build base command for all directories
-    dir_expr = " ".join(shlex.quote(d) for d in directories)
+    dir_expr = " ".join(shlex.quote(d) for d in kwargs["directories"])
     cmd = [f"find {dir_expr}"]
 
     # --- depth filters ---
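The dir_expr line above quotes every directory with shlex.quote before splicing it into a single find command string, which keeps paths containing spaces or shell metacharacters intact. A small illustration using only the standard library (the depth and type flags here are just examples of the filters the surrounding code appends):

import shlex

directories = ["/srv/my data", "/tmp/odd'name"]
dir_expr = " ".join(shlex.quote(d) for d in directories)
cmd = f"find {dir_expr} -maxdepth 2 -type f"
print(cmd)
# find '/srv/my data' '/tmp/odd'"'"'name' -maxdepth 2 -type f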
@@ -183,18 +115,7 @@ def get_find_cmd(
 
     return " ".join(cmd)
 
-
-    directories = []
-    for arg in args:
-        arg_str = str(arg)
-        if is_dir(arg_str,**kwargs):
-            directories.append(arg_str)
-        elif is_file(arg_str,**kwargs):
-            dirname = os.path.dirname(arg_str)
-            directories.append(dirname)
-    safe_directories = get_dir_filter_kwargs(**kwargs)
-    directories += make_list(safe_directories.get('directories',[]))
-    return list(set([r for r in directories if r]))
+
 
 def collect_globs(
     *args,
@@ -212,9 +133,10 @@ def collect_globs(
     - If file_type is "f" or "d" → returns a list of that type
     - Supports SSH mode via `user_at_host`
     """
+    user_pass_host_key = get_user_pass_host_key(**kwargs)
     kwargs["directories"] = ensure_directories(*args, **kwargs)
     kwargs = get_safe_canonical_kwargs(**kwargs)
-    kwargs["cfg"] = define_defaults(**kwargs)
+    kwargs["cfg"] = kwargs.get('cfg') or define_defaults(**kwargs)
 
     type_strs = {"f":"files","d":"dirs"}
     file_type = get_proper_type_str(file_type)
@@ -227,11 +149,13 @@ def collect_globs(
     type_str = type_strs.get(file_type)
     # Remote path (SSH)
     find_cmd = get_find_cmd(
+        directories=kwargs.get("directories"),
+        cfg=kwargs.get('cfg'),
         mindepth=mindepth,
         maxdepth=maxdepth,
         depth=depth,
         file_type=file_type,
-        **
+        **user_pass_host_key,
     )
     result = run_pruned_func(run_cmd, find_cmd,
         **kwargs
@@ -0,0 +1,210 @@
+from .file_filters import *
+from .reader_utils import *
+from .find_collect import *
+STOP_SEARCH = False
+
+def request_find_console_stop():
+    global STOP_SEARCH
+    STOP_SEARCH = True
+
+def reset_find_console_stop():
+    global STOP_SEARCH
+    STOP_SEARCH = False
+
+def get_contents(
+    full_path=None,
+    parse_lines=False,
+    content=None
+):
+    if full_path:
+        content = content or read_any_file(full_path)
+    if content:
+        if parse_lines:
+            content = str(content).split('\n')
+        return make_list(content)
+    return []
+
+def _normalize(s: str, strip_comments=True, collapse_ws=True, lower=True):
+    if s is None:
+        return ""
+    if strip_comments:
+        s = s.split('//', 1)[0]
+    if collapse_ws:
+        s = re.sub(r'\s+', ' ', s)
+    if lower:
+        s = s.lower()
+    return s.strip()
+
+def stringInContent(content, strings, total_strings=False, normalize=False):
+    if not content:
+        return False
+    if normalize:
+        c = _normalize(str(content))
+
+        found = [s for s in strings if _normalize(s) and _normalize(s) in c]
+    else:
+        c = str(content)
+        found = [s for s in strings if s and s in c]
+    if not found:
+        return False
+    return len(found) == len(strings) if total_strings else True
+def find_file(content, spec_line, strings, total_strings=False):
+    lines = content.split('\n')
+    if 1 <= spec_line <= len(lines):
+        return stringInContent(lines[spec_line - 1], strings, total_strings=total_strings)
+    return False
+def find_lines(content, strings, total_strings=False, normalize=True, any_per_line=True):
+    lines = content.split('\n')
+    hits = []
+    for i, line in enumerate(lines):
+        # match one line either if ANY string matches or if ALL match (configurable)
+        if any_per_line:
+            match = stringInContent(line, strings, total_strings=False, normalize=normalize)
+        else:
+            match = stringInContent(line, strings, total_strings=True, normalize=normalize)
+        if match:
+            hits.append({"line": i+1, "content": line})
+    return hits
+def getPaths(files, strings):
+    tot_strings = strings
+    nu_files, found_paths = [], []
+    if isinstance(strings, list):
+        if len(strings) > 1:
+            tot_strings = '\n'.join(strings)
+        else:
+            if len(strings) == 0:
+                return nu_files, found_paths
+            tot_strings = strings[0]
+
+
+    for file_path in files:
+        try:
+            og_content = read_any_file(file_path)
+            if tot_strings not in og_content:
+                continue
+            if file_path not in nu_files:
+                nu_files.append(file_path)
+            ogLines = og_content.split('\n')
+            # find all occurrences of the block
+            for m in re.finditer(re.escape(tot_strings), og_content):
+                start_line = og_content[:m.start()].count('\n') + 1  # 1-based
+                curr = {'file_path': file_path, 'lines': []}
+                for j in range(len(strings)):
+                    ln = start_line + j
+                    curr['lines'].append({'line': ln, 'content': ogLines[ln - 1]})
+                found_paths.append(curr)
+        except Exception as e:
+            print(f"{e}")
+    return nu_files, found_paths
+
+def findContent(
+    *args,
+    strings: list = [],
+    total_strings=True,
+    parse_lines=False,
+    spec_line=False,
+    get_lines=True,
+    diffs=False,
+    **kwargs
+):
+    global STOP_SEARCH
+    kwargs["directories"] = ensure_directories(*args, **kwargs)
+
+    found_paths = []
+
+    dirs, files = get_files_and_dirs(
+        **kwargs
+    )
+    nu_files, found_paths = getPaths(files, strings)
+
+    if diffs and found_paths:
+        return found_paths
+
+    for file_path in nu_files:
+        if STOP_SEARCH:
+            return found_paths  # early exit
+
+        if file_path:
+            og_content = read_any_file(file_path)
+            contents = get_contents(
+                file_path,
+                parse_lines=parse_lines,
+                content=og_content
+            )
+            found = False
+            for content in contents:
+                if STOP_SEARCH:
+                    return found_paths  # bail out cleanly
+
+                if stringInContent(content, strings, total_strings=True, normalize=True):
+                    found = True
+                if spec_line:
+                    found = find_file(og_content, spec_line, strings, total_strings=True)
+                if found:
+                    if get_lines:
+                        lines = find_lines(
+                            og_content,
+                            strings=strings,
+                            total_strings=False,
+                            normalize=True,
+                            any_per_line=True
+                        )
+                        if lines:
+                            file_path = {"file_path": file_path, "lines": lines}
+                    found_paths.append(file_path)
+                    break
+    return found_paths
+def return_function(start_dir=None, preferred_dir=None, basenames=None, functionName=None):
+    if basenames:
+        basenames = make_list(basenames)
+    abstract_file_finder = AbstractFileFinderImporter(start_dir=start_dir, preferred_dir=preferred_dir)
+    paths = abstract_file_finder.find_paths(basenames)
+    func = abstract_file_finder.import_function_from_path(paths[0], functionName)
+    return func
+def getLineNums(file_path):
+    lines = []
+    if file_path and isinstance(file_path, dict):
+        lines = file_path.get('lines')
+        file_path = file_path.get('file_path')
+    return file_path, lines
+def get_line_content(obj):
+    line, content = None, None
+    if obj and isinstance(obj, dict):
+        line = obj.get('line')
+        content = obj.get('content')
+    #print(f"line: {line}\ncontent: {content}")
+    return line, content
+def get_edit(file_path):
+    if file_path and os.path.isfile(file_path):
+        os.system(f"code {file_path}")
+        input()
+def editLines(file_paths):
+    for file_path in file_paths:
+        file_path, lines = getLineNums(file_path)
+        for obj in lines:
+            line, content = get_line_content(obj)
+            get_edit(file_path)
+def findContentAndEdit(*args,
+    strings: list = [],
+    total_strings=True,
+    parse_lines=False,
+    spec_line=False,
+    get_lines=True,
+    edit_lines=False,
+    diffs=False,
+    **kwargs
+):
+    file_paths = findContent(
+        *args,
+        strings=strings,
+        total_strings=total_strings,
+        parse_lines=parse_lines,
+        spec_line=spec_line,
+        get_lines=get_lines,
+        diffs=diffs,
+        **kwargs
+    )
+    if edit_lines:
+        editLines(file_paths)
+    return file_paths
+
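The matching logic in the new module hinges on _normalize: text after // is dropped, whitespace runs are collapsed, and everything is lower-cased before substring checks, so stringInContent(..., normalize=True) matches loosely formatted code. A self-contained sketch of that normalization and the all-vs-any decision, simplified from the functions shown above:

import re

def _normalize(s):
    s = s.split('//', 1)[0]      # strip trailing // comments
    s = re.sub(r'\s+', ' ', s)   # collapse runs of whitespace
    return s.lower().strip()

def string_in_content(content, strings, total_strings=False):
    c = _normalize(str(content))
    found = [s for s in strings if _normalize(s) and _normalize(s) in c]
    return len(found) == len(strings) if total_strings else bool(found)

line = "const Foo =   await getFoo();   // TODO"
print(string_in_content(line, ["await getfoo()", "const foo"], total_strings=True))  # True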
@@ -1,8 +1,10 @@
 # attach_functions.py — single helper you can import anywhere
 # attach_dynamic.py
 from __future__ import annotations
-from .find_collect import
+from .find_collect import get_files_and_dirs
 from ..imports import *
+ABSPATH = os.path.abspath(__file__)
+ABSROOT = os.path.dirname(ABSPATH)
 
 def caller_path():
     frame = inspect.stack()[1]
@@ -209,28 +211,37 @@ def get_for_all_tabs(root = None):
     root = root or caller_path()
     if os.path.isfile(root):
         root = os.path.dirname(root)
-    all_tabs =
+    all_tabs = get_files_and_dirs(root, allowed_patterns='*Tab')[0]
+
     for ROOT in all_tabs:
-
-
-
-
-        else:
-            apply_inits(ROOT)
+        if os.path.isdir(ROOT):
+            [apply_inits(os.path.join(ROOT, func)) for func in os.listdir(ROOT) if 'functions' == os.path.splitext(func)[0]]
+
+
 
 
 
 def apply_inits(ROOT):
-
-
-
-
-
-
-
+    filepaths = [ROOT]
+    TAB_DIR = os.path.dirname(ROOT)
+    INIT_FUNCS_PAPTH = os.path.join(TAB_DIR, "initFuncs.py")
+    if_fun_dir = False
+##    if os.path.isfile(ROOT):
+##        dirname = os.path.dirname(ROOT)
+##        basename = os.path.basename(ROOT)
+##        functions_dir = os.path.join(dirname, 'functions')
+##        functions_path = os.path.join(functions_dir, basename)
+##        input(get_clean_imports(ROOT))
+##
+##        os.makedirs(functions_dir, exist_ok=True)
+##        shutil.move(ROOT, functions_path)
+##        ROOT = functions_dir
+    if os.path.isdir(ROOT):
         INIT_PATH = os.path.join(ROOT, "__init__.py")
-
-
+        filepaths = get_files_and_dirs(directory=ROOT, allowed_exts='py', add=True)[-1]
+        if_fun_dir = True
+
+
 
     # Parse top-level def names
     def extract_funcs(path: str):
@@ -240,7 +251,7 @@ def apply_inits(ROOT):
             if m:
                 funcs.append(m.group(1))
         return funcs
-
+
     # Build functions/__init__.py that re-exports all discovered functions
     import_lines = []
     all_funcs = []
@@ -252,7 +263,7 @@ def apply_inits(ROOT):
         all_funcs.extend(funcs)
     if if_fun_dir:
         functions_init = "\n".join(import_lines) + ("\n" if import_lines else "")
-        write_to_file(contents=functions_init, file_path=
+        write_to_file(contents=functions_init, file_path=INIT_PATH)
 
     # Prepare the tuple literal of function names for import + loop
     uniq_funcs = sorted(set(all_funcs))
@@ -261,9 +272,10 @@ def apply_inits(ROOT):
     # Generate apiConsole/initFuncs.py using the safer setattr-loop
     init_funcs_src = textwrap.dedent(f"""\
 
-
+
+    from abstract_utilities import get_logFile
     from .functions import ({func_tuple})
-
+    logger=get_logFile(__name__)
     def initFuncs(self):
         try:
             for f in ({func_tuple}):
@@ -273,8 +285,9 @@ def apply_inits(ROOT):
         return self
     """)
 
-    write_to_file(contents=init_funcs_src, file_path=
+    write_to_file(contents=init_funcs_src, file_path=INIT_FUNCS_PAPTH)
 
 def call_for_all_tabs():
     root = get_caller_dir(2)
+
     get_for_all_tabs(root)
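The comment in the generator refers to a "safer setattr-loop" inside the emitted initFuncs.py, but the loop body itself is truncated in this diff view. As an illustration only, under the assumption that the elided body binds each re-exported function onto the instance (the function names and class below are hypothetical, not the package's template):

# Hypothetical shape of a generated initFuncs.py; the real template body is
# truncated in this diff, so the names here are illustrative.
from types import MethodType

def func_a(self):
    return "a"

def func_b(self):
    return "b"

def initFuncs(self):
    # bind each discovered function onto the instance as a method
    try:
        for f in (func_a, func_b):
            setattr(self, f.__name__, MethodType(f, self))
    except Exception as e:
        print(e)
    return self

class Tab:
    pass

tab = initFuncs(Tab())
print(tab.func_a())  # "a"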