abstract-utilities 0.2.2.420__py3-none-any.whl → 0.2.2.422__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of abstract-utilities might be problematic.
- abstract_utilities/__init__.py +1 -2
- abstract_utilities/dynimport.py +1 -1
- abstract_utilities/file_utils/file_utils/file_utils.py +153 -139
- abstract_utilities/file_utils/imports/classes.py +300 -22
- abstract_utilities/file_utils/imports/module_imports.py +3 -3
- abstract_utilities/ssh_utils/utils.py +72 -6
- {abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/METADATA +1 -1
- {abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/RECORD +10 -10
- {abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/WHEEL +0 -0
- {abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/top_level.txt +0 -0
abstract_utilities/__init__.py
CHANGED
@@ -3,9 +3,8 @@ from datetime import timedelta
 from datetime import datetime
 from typing import *
 from .hash_utils import *
-from .dynimport import get_abstract_import,import_symbols_to_parent,call_for_all_tabs
+##from .dynimport import get_abstract_import,import_symbols_to_parent,call_for_all_tabs

-from .robust_readers import *
 from .json_utils import (unified_json_loader,
                          find_keys,
                          get_key_values_from_path,
abstract_utilities/dynimport.py
CHANGED
@@ -6,7 +6,7 @@ from typing import Any, Callable, Iterable, Optional
 import inspect, sys
 from importlib import import_module
 from .type_utils import make_list
-from .
+from .file_utils import get_for_all_tabs,call_for_all_tabs
 class _LazyAttr:
     """Lazy resolver proxy to avoid import-time cycles.
     First use triggers actual import & attribute lookup.
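The `_LazyAttr` docstring above describes a lazy resolver proxy that defers the real import until first use, which is how the module sidesteps the import cycle the new `from .file_utils import ...` line would otherwise create. A generic sketch of that pattern, as an illustration only (not the package's actual `_LazyAttr` code):

    from importlib import import_module

    class LazyAttr:
        """Defer resolving `module.attr` until the proxy is first called."""
        def __init__(self, module_name: str, attr_name: str):
            self._module_name = module_name
            self._attr_name = attr_name
            self._target = None

        def _resolve(self):
            # The real import happens here, long after module import time,
            # so circular imports have already finished loading.
            if self._target is None:
                self._target = getattr(import_module(self._module_name), self._attr_name)
            return self._target

        def __call__(self, *args, **kwargs):
            return self._resolve()(*args, **kwargs)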
abstract_utilities/file_utils/file_utils/file_utils.py
CHANGED
@@ -2,193 +2,207 @@ from .imports import *
 # -------- Public API drop-ins that mirror your originals --------
 from .filter_params import *
 from .file_filters import *
-
-
-
-def allowed(path: str) -> bool:
-    name = os.path.basename(path).lower()
-    path_str = path.lower()
+# -------------------------------------------------------------
+# Wrapper: respects your original API and naming conventions
+# -------------------------------------------------------------

-
-
-
-
-
-
+def get_allowed_predicate(allowed=None):
+    if allowed is not False:
+        if allowed is True:
+            allowed = None
+        allowed = allowed or make_allowed_predicate()
+    else:
+        def allowed(*args):
+            return True
+    return allowed

-    if cfg.exclude_patterns:
-        for pat in cfg.exclude_patterns:
-            if fnmatch.fnmatch(name, pat.lower()):
-                return False

-
-
-
-
-
-
+# -------------------------------------------------------------
+# Remote-aware globbing
+# -------------------------------------------------------------
+def get_globs(items, recursive: bool = True, allowed=None, **kwargs):
+    """
+    Behaves like your original get_globs(), but can traverse both
+    local and remote paths transparently via normalize_items().
+    """
+    glob_paths = []
+    roots = [p for p in make_list(items) if p]

-
-
-    res: List[str] = []
-    if recursive:
-        kwargs.setdefault("mindepth", 1)
-    else:
-        kwargs.setdefault("mindepth", 1)
+    kwargs.setdefault("mindepth", 0)
+    if not recursive:
         kwargs.setdefault("maxdepth", 1)
-
-
+
+    for fs, root, _ in normalize_items(roots, **kwargs):
+        # use the backend's recursive walker
+        nu_items = fs.glob_recursive(root, **kwargs)
         if allowed:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        if fs.isdir(root):
-            for p in fs.glob_recursive(root, **kwargs):
-                if allowed and not allowed(p): continue
-                if fs.isfile(p): out.append(p)
-        else:
-            if allowed and not allowed(root): continue
-            if fs.isfile(root): out.append(root)
+
+            nu_items = [n for n in nu_items if n and allowed(n)]
+
+        glob_paths += nu_items
+    return glob_paths
+
+
+# -------------------------------------------------------------
+# Allowed filters
+# -------------------------------------------------------------
+def get_allowed_files(items, allowed=True, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed)
+    out = []
+    for fs, item, _ in normalize_items(items, **kwargs):
+        if fs.isfile(item) and allowed(item):
+            out.append(item)
     return out

-
-
-
-
-
-
-
-
-    roots = [p for p in make_list(paths) if p]
-    out: List[str] = []
-    kwargs = {**kwargs, "include_files": False, "include_dirs": True}
-    for fs, root in normalize_items(roots):
-        if fs.isdir(root):
-            if (not allowed) or allowed(root):
-                out.append(root)
-            for p in fs.glob_recursive(root, **kwargs):
-                if (not allowed) or allowed(p):
-                    if fs.isdir(p): out.append(p)
+
+def get_allowed_dirs(items, allowed=False, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed)
+    out = []
+    for fs, item, _ in normalize_items(items, **kwargs):
+        if fs.isdir(item) and allowed(item):
+            out.append(item)
     return out

-def get_filtered_files(paths, allowed=None, files: List[str] | None = None, **kwargs) -> List[str]:
-    if allowed is not False:
-        if allowed is True:
-            allowed = None
-        allowed = allowed or make_allowed_predicate(ScanConfig())
-    else:
-        allowed = (lambda *_: True)

-
-
-
-
-
+# -------------------------------------------------------------
+# Filtered sets
+# -------------------------------------------------------------
+def get_filtered_files(items, allowed=None, files=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed)
+    files = set(files or [])
+    out = []
+    for fs, root, _ in normalize_items(items, **kwargs):
         for p in fs.glob_recursive(root, **kwargs):
-            if p in files:
-
-            if
+            if p in files:
+                continue
+            if allowed(p) and fs.isfile(p):
+                out.append(p)
     return out

-def get_filtered_dirs(paths, allowed=None, dirs: List[str] | None = None, **kwargs) -> List[str]:
-    if allowed is not False:
-        if allowed is True:
-            allowed = None
-        allowed = allowed or make_allowed_predicate(ScanConfig())
-    else:
-        allowed = (lambda *_: True)

-
-
-
-
-    for fs, root in normalize_items(
+def get_filtered_dirs(items, allowed=None, dirs=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed)
+    dirs = set(dirs or [])
+    out = []
+    for fs, root, _ in normalize_items(items, **kwargs):
         for p in fs.glob_recursive(root, **kwargs):
-            if p in dirs:
-
-            if
+            if p in dirs:
+                continue
+            if allowed(p) and fs.isdir(p):
+                out.append(p)
     return out

-
-
-
+
+# -------------------------------------------------------------
+# Recursive expansion
+# -------------------------------------------------------------
+def get_all_allowed_files(items, allowed=None, **kwargs):
+    dirs = get_all_allowed_dirs(items, allowed=allowed, **kwargs)
+    files = get_allowed_files(items, allowed=allowed, **kwargs)
     seen = set(files)
-    for fs, directory in normalize_items(dirs):
+    for fs, directory, _ in normalize_items(dirs, **kwargs):
         for p in fs.glob_recursive(directory, **kwargs):
-            if p in seen:
-
+            if p in seen:
+                continue
+            if allowed and not allowed(p):
+                continue
             if fs.isfile(p):
-                files.append(p)
+                files.append(p)
+                seen.add(p)
     return files

-def get_all_allowed_dirs(paths, allowed=None, **kwargs) -> List[str]:
-    if allowed is not False:
-        if allowed is True:
-            allowed = None
-        allowed = allowed or make_allowed_predicate(ScanConfig())
-    else:
-        allowed = (lambda *_: True)

-
-
+def get_all_allowed_dirs(items, allowed=None, **kwargs):
+    allowed = get_allowed_predicate(allowed=allowed)
+    out = []
     seen = set()
-
-
-
-
+    for fs, root, _ in normalize_items(items, **kwargs):
+        if fs.isdir(root) and allowed(root):
+            out.append(root)
+            seen.add(root)
         for p in fs.glob_recursive(root, **kwargs):
-            if p in seen:
-
-            if fs.isdir(p):
-                out.append(p)
+            if p in seen:
+                continue
+            if allowed(p) and fs.isdir(p):
+                out.append(p)
+                seen.add(p)
     return out
+
+
+# -------------------------------------------------------------
+# Unified directory scan
+# -------------------------------------------------------------
 def get_files_and_dirs(
-    directory: str
+    directory: str,
     cfg: Optional["ScanConfig"] = None,
     allowed_exts: Optional[Set[str]] = False,
     unallowed_exts: Optional[Set[str]] = False,
     exclude_types: Optional[Set[str]] = False,
     exclude_dirs: Optional[List[str]] = False,
     exclude_patterns: Optional[List[str]] = False,
-    add
+    add=False,
     recursive: bool = True,
     include_files: bool = True,
-    paths: str | list[str]=None,
     **kwargs
 ):
+    """
+    Same public signature as your original get_files_and_dirs(),
+    but powered by backend objects (LocalFS or SSHFS).
+    """
     cfg = cfg or define_defaults(
         allowed_exts=allowed_exts,
         unallowed_exts=unallowed_exts,
         exclude_types=exclude_types,
         exclude_dirs=exclude_dirs,
         exclude_patterns=exclude_patterns,
-        add=add
+        add=add
     )
-    # make predicate backend-agnostic here; the per-backend calls below pass strings only
     allowed = make_allowed_predicate(cfg)
-
+    items = []
+    files = []
+    input(allowed)
     if recursive:
-
+        items = get_globs(directory, recursive=recursive, allowed=allowed, **kwargs)
     else:
-
-
-
-
-
+        for fs, base, _ in normalize_items(make_list(directory), **kwargs):
+            try:
+                items += [fs.join(base, name) for name in fs.listdir(base)]
+            except Exception:
+                pass

     dirs = get_allowed_dirs(items, allowed=allowed, **kwargs)
-
+    if include_files:
+        files = get_allowed_files(items, allowed=allowed, **kwargs)
     return dirs, files
+
+
+# -------------------------------------------------------------
+# Unchanged predicate builder
+# -------------------------------------------------------------
+def make_allowed_predicate(cfg: ScanConfig) -> Callable[[str], bool]:
+    def allowed(path: str) -> bool:
+        p = Path(path)
+        name = p.name.lower()
+        path_str = str(p).lower()
+
+        # A) directory exclusions
+        if cfg.exclude_dirs:
+            for dpat in cfg.exclude_dirs:
+                if dpat in path_str or fnmatch.fnmatch(name, dpat.lower()):
+                    if p.is_dir() or dpat in path_str:
+                        return False
+
+        # B) filename pattern exclusions
+        if cfg.exclude_patterns:
+            for pat in cfg.exclude_patterns:
+                if fnmatch.fnmatch(name, pat.lower()):
+                    return False
+
+        # C) extension gates
+        if p.is_file():
+            ext = p.suffix.lower()
+            if (cfg.allowed_exts and ext not in cfg.allowed_exts) or \
+               (cfg.unallowed_exts and ext in cfg.unallowed_exts):
+                return False
+        return True
+    return allowed
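A hypothetical usage sketch of the reworked helpers above, assuming these names are re-exported from abstract_utilities.file_utils as before; the paths are made up. The new get_files_and_dirs() itself still carries a stray input(allowed) debugging call in this release, which prompts on stdin when it runs, so the lower-level helpers are shown instead:

    from abstract_utilities.file_utils import get_globs, get_allowed_files, get_allowed_dirs

    # Recursive walk of one or more roots (local here; remote items go through SSHFS).
    paths = get_globs(["/srv/project"], recursive=True)

    # Split the same items into files and directories; allowed=True builds the
    # default ScanConfig-based predicate via get_allowed_predicate().
    files = get_allowed_files(["/srv/project"], allowed=True)
    dirs = get_allowed_dirs(["/srv/project"], allowed=True)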
abstract_utilities/file_utils/imports/classes.py
CHANGED
@@ -1,6 +1,138 @@
 from .imports import *
 from .module_imports import *
 from .constants import *
+def get_item_check_cmd(path, file=True, directory=False, exists=False):
+    if (directory and file) or exists:
+        typ = "e"
+    elif file:
+        typ = "f"
+    elif directory:
+        typ = "d"
+    elif isinstance(file, str):
+        if "f" in file:
+            typ = "f"
+        elif "d" in file:
+            typ = "d"
+        else:
+            typ = "e"
+    else:
+        typ = "e"
+    return f"test -{typ} {shlex.quote(path)} && echo __OK__ || true"
+
+
+def get_all_item_check_cmd(path, file=True, directory=True, exists=True):
+    collects = []
+    out_js = {}
+
+    if file:
+        collects.append("file")
+    if directory:
+        collects.append("dir")
+    if exists:
+        collects.append("exists")
+
+    if not collects:
+        return out_js
+
+    path = shlex.quote(path)
+    for typ in collects:
+        t = typ[0]  # f, d, or e
+        out_js[typ] = f"test -{t} {path} && echo __OK__ || true"
+
+    return out_js
+
+
+def is_file(
+    path,
+    user_at_host=None,
+    password=None,
+    key=None,
+    env_path=None,
+    **kwargs
+):
+    contingencies = list(set([user_at_host,password,key,env_path]))
+    len_contingencies = len(contingencies)
+    is_potential = (len_contingencies >1 or (None not in contingencies))
+    if not is_potential:
+        return os.path.isfile(path)
+    cmd = get_item_check_cmd(path,file=True)
+    return run_cmd(cmd=cmd,
+                   user_at_host=user_at_host,
+                   password=password,
+                   key=key,
+                   env_path=env_path,
+                   **kwargs
+                   )
+def is_dir(
+    path,
+    user_at_host=None,
+    password=None,
+    key=None,
+    env_path=None,
+    **kwargs
+):
+    contingencies = list(set([user_at_host,password,key,env_path]))
+    len_contingencies = len(contingencies)
+    is_potential = (len_contingencies >1 or (None not in contingencies))
+    if not is_potential:
+        return os.path.isdir(path)
+    cmd = get_item_check_cmd(path,file=False,directory=True)
+    return run_cmd(cmd=cmd,
+                   user_at_host=user_at_host,
+                   password=password,
+                   key=key,
+                   env_path=env_path,
+                   **kwargs
+                   )
+def is_exists(
+    path,
+    user_at_host=None,
+    password=None,
+    key=None,
+    env_path=None,
+    **kwargs
+):
+    contingencies = list(set([user_at_host,password,key,env_path]))
+    len_contingencies = len(contingencies)
+    is_potential = (len_contingencies >1 or (None not in contingencies))
+    if not is_potential:
+        return os.path.exists(path)
+    if is_potential == True:
+        cmd = get_item_check_cmd(path,exists=True)
+        return run_cmd(cmd=cmd,
+                       user_at_host=user_at_host,
+                       password=password,
+                       key=key,
+                       env_path=env_path,
+                       **kwargs
+                       )
+def is_any(
+    path,
+    user_at_host=None,
+    password=None,
+    key=None,
+    env_path=None,
+    **kwargs
+):
+    contingencies = list(set([user_at_host,password,key,env_path]))
+    len_contingencies = len(contingencies)
+    is_potential = (len_contingencies >1 or (None not in contingencies))
+    if not is_potential:
+        return os.path.exists(path)
+    if is_potential == True:
+        out_js = get_all_item_check_cmd(path,file=True,directory=True,exists=True)
+        for typ,cmd in out_js.items():
+            response = run_cmd(cmd=cmd,
+                               user_at_host=user_at_host,
+                               password=password,
+                               key=key,
+                               env_path=env_path,
+                               **kwargs
+                               )
+            result = "__OK__" in (response or "")
+            if result:
+                return typ
+    return None
 class PathBackend(Protocol):
     def join(self, *parts: str) -> str: ...
     def isfile(self, path: str) -> bool: ...
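The probe helpers above choose between a plain os.path check and a remote shell test depending on whether any SSH-related arguments were supplied. A sketch of the probe string and the dispatch, using an illustrative path:

    import os, shlex

    path = "/etc/hosts"
    # What get_item_check_cmd(path, file=True) produces: "__OK__" is echoed
    # only when the test passes, so the caller greps the captured output.
    print(f"test -f {shlex.quote(path)} && echo __OK__ || true")
    # -> test -f /etc/hosts && echo __OK__ || true

    # With no user_at_host/password/key/env_path, is_file() just does this:
    print(os.path.isfile(path))
    # With credentials, it sends the probe through run_cmd() over SSH and the
    # SSHFS wrappers treat "__OK__" in the returned output as True.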
@@ -9,12 +141,42 @@ class PathBackend(Protocol):
     def listdir(self, base: str) -> List[str]: ...

 class LocalFS:
+    def __init__(self, get_type=False, get_is_dir=False, get_is_file=False, get_is_exists=False, **kwargs):
+        self.get_type = get_type
+        self.get_is_dir = get_is_dir
+        self.get_is_file = get_is_file
+        self.get_is_exists = get_is_exists
+
     def join(self, *parts: str) -> str:
         return os.path.join(*parts)
+
     def isfile(self, path: str) -> bool:
         return os.path.isfile(path)
+
     def isdir(self, path: str) -> bool:
         return os.path.isdir(path)
+
+    def isexists(self, path: str) -> bool:
+        return os.path.exists(path)
+
+    def istype(self, path: str) -> str | None:
+        funcs_js = {"file": os.path.isfile, "dir": os.path.isdir, "exists": os.path.exists}
+        for key, func in funcs_js.items():
+            if func(path):
+                return key
+        return None
+
+    def is_included(self, path, **kwargs):
+        include_js = {}
+        if self.get_type:
+            include_js["typ"] = self.istype(path)
+        if self.get_is_dir:
+            include_js["dir"] = self.isdir(path)
+        if self.get_is_file:
+            include_js["file"] = self.isfile(path)
+        if self.get_is_exists:
+            include_js["exists"] = self.isexists(path)
+        return include_js
     def glob_recursive(self, base: str, **opts) -> List[str]:
         """
         opts:
@@ -55,25 +217,73 @@ class LocalFS:
             return [os.path.join(base, name) for name in os.listdir(base)]
         except Exception:
             return []
-
+def get_spec_kwargs(
+    user_at_host=None,
+    password=None,
+    key=None,
+    env_path=None,
+    kwargs=None
+):
+    kwargs = kwargs or {}
+    kwargs["user_at_host"] = kwargs.get("user_at_host") or user_at_host
+    kwargs["password"] = kwargs.get("password") or password
+    kwargs["key"] = kwargs.get("key") or key
+    kwargs["env_path"] = kwargs.get("env_path") or env_path
+    return kwargs
 class SSHFS:
-    """Remote POSIX backend via
-    def __init__(self,
-
+    """Remote POSIX backend via run_remote_cmd."""
+    def __init__(self, password=None, key=None, env_path=None,
+                 get_type=False, get_is_dir=False, get_is_file=False, get_is_exists=False, **kwargs):
+        self.user_at_host = kwargs.get('user_at_host') or kwargs.get('user') or kwargs.get('host')
+        self.password = password
+        self.key = key
+        self.env_path = env_path
+        self.get_type = get_type
+        self.get_is_dir = get_is_dir
+        self.get_is_file = get_is_file
+        self.get_is_exists = get_is_exists
+
+    def cell_spec_kwargs(self, func, path, **kwargs):
+        kwargs = get_spec_kwargs(
+            user_at_host=self.user_at_host,
+            password=self.password,
+            key=self.key,
+            env_path=self.env_path,
+            kwargs=kwargs
+        )
+        return func(path, **kwargs)
+
+    def is_included(self, path, **kwargs):
+        include_js = {}
+        if self.get_type:
+            include_js["typ"] = self.istype(path, **kwargs)
+        if self.get_is_dir:
+            include_js["dir"] = self.isdir(path, **kwargs)
+        if self.get_is_file:
+            include_js["file"] = self.isfile(path, **kwargs)
+        if self.get_is_exists:
+            include_js["exists"] = self.isexists(path, **kwargs)
+        return include_js

     def join(self, *parts: str) -> str:
         return posixpath.join(*parts)

-    def isfile(self, path: str) -> bool:
-
-        out = run_remote_cmd(self.user_at_host, cmd)
+    def isfile(self, path: str, **kwargs) -> bool:
+        out = self.cell_spec_kwargs(is_file, path, **kwargs)
         return "__OK__" in (out or "")

-    def isdir(self, path: str) -> bool:
-
-
+    def isdir(self, path: str, **kwargs) -> bool:
+        out = self.cell_spec_kwargs(is_dir, path, **kwargs)
+        return "__OK__" in (out or "")
+
+    def isexists(self, path: str, **kwargs) -> bool:
+        out = self.cell_spec_kwargs(is_exists, path, **kwargs)
         return "__OK__" in (out or "")

+    def istype(self, path: str, **kwargs) -> str | None:
+        out = self.cell_spec_kwargs(is_any, path, **kwargs)
+        return out
+
     def glob_recursive(self, base: str, **opts) -> List[str]:
         maxdepth = opts.get("maxdepth")
         mindepth = opts.get("mindepth", 1)
@@ -111,18 +321,86 @@ class SSHFS:
         return [line.strip() for line in (out or "").splitlines() if line.strip()]


-def
-
-
+def make_allowed_predicate(cfg: ScanConfig,fs=None) -> Callable[[str], bool]:
+    fs = fs or LocalFS()
+    def allowed(path: str) -> bool:
+
+        name = p.name.lower()
+        path_str = str(p).lower()
+        # A) directories
+        if cfg.exclude_dirs:
+            for dpat in cfg.exclude_dirs:
+                if dpat in path_str or fnmatch.fnmatch(name, dpat.lower()):
+                    if p.is_dir() or dpat in path_str:
+                        return False
+
+        if cfg.exclude_patterns:
+            # B) filename patterns
+            for pat in cfg.exclude_patterns:
+                if fnmatch.fnmatch(name, pat.lower()):
+                    return False
+
+        # C) extension gates
+        if p.is_file():
+            ext = p.suffix.lower()
+            if (cfg.allowed_exts and ext not in cfg.allowed_exts) or (cfg.unallowed_exts and ext in cfg.unallowed_exts):
+                return False
+        return True
+    return allowed
+def try_group(pre,item,strings):
+
+    try:
+        m = pre.match(item)
+        for i,string in enumerate(strings):
+            strings[i] = m.group(string)
+
+    except:
+        return None
+    return strings
+def normalize_items(
+    paths: Iterable[str],
+    user_at_host=None,
+    get_type=True,
+    get_is_dir=False,
+    get_is_file=False,
+    get_is_exists=False,
+    **kwargs
+) -> List[tuple[PathBackend, str, dict]]:
+    pairs: List[tuple[PathBackend, str, dict]] = []
+    host = user_at_host or kwargs.get("host") or kwargs.get("user")
+
     for item in paths:
-        if not item:
-
-
-
-
-
-
+        if not item:
+            continue
+
+        strings = try_group(REMOTE_RE, item, ["host", "path"])
+        fs_host = None
+        nuhost = None
+
+        if (strings and None not in strings) or host:
+            if strings and None not in strings:
+                nuhost = strings[0]
+                item = strings[1] or item
+            nuhost = nuhost or host
+            fs_host = SSHFS(
+                nuhost,
+                user_at_host=user_at_host,
+                get_type=get_type,
+                get_is_dir=get_is_dir,
+                get_is_file=get_is_file,
+                get_is_exists=get_is_exists,
+                **kwargs
+            )
         else:
-
-
+            fs_host = LocalFS(
+                get_type=get_type,
+                get_is_dir=get_is_dir,
+                get_is_file=get_is_file,
+                get_is_exists=get_is_exists
+            )
+
+        includes = fs_host.is_included(item)
+        pairs.append((fs_host, item, includes))
     return pairs
+
+
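A hypothetical illustration of how normalize_items() dispatches between the two backends. The remote-path syntax is an assumption here (REMOTE_RE comes from constants.py and is not shown in this diff), and the paths and host are made up:

    for fs, item, includes in normalize_items(["/tmp/data", "admin@server:/srv/data"]):
        # fs is a LocalFS for plain paths and an SSHFS when the item (or a
        # user_at_host kwarg) identifies a remote host; includes holds the
        # optional type/dir/file/exists probes requested via the get_* flags.
        print(type(fs).__name__, item, includes)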
abstract_utilities/file_utils/imports/module_imports.py
CHANGED
@@ -1,8 +1,8 @@
 from ...string_clean import eatAll
 from ...list_utils import make_list
 from ...type_utils import get_media_exts, is_media_type,MIME_TYPES,is_str
-from ...ssh_utils import
+from ...ssh_utils import *
 from ...env_utils import *
-
+
 from ...abstract_classes import SingletonMeta
-
+
abstract_utilities/ssh_utils/utils.py
CHANGED
@@ -1,4 +1,30 @@
 from .imports import *
+def get_pass_from_key(key=None,env_path=None):
+    if key:
+        return get_env_value(key=key,path=env_path)
+def get_password(password=None,key=None,env_path=None):
+    password = password or get_pass_from_key(key=key,env_path=env_path)
+    return password
+
+def get_print_sudo_cmd(
+    cmd: str,
+    password=None,
+    key=None,
+    env_path=None
+):
+    password = get_password(password=password,key=key,env_path=env_path)
+    if password != None:
+
+        cmd = get_password_cmd(password=password,cmd=cmd)
+    return cmd
+def get_password_cmd(password:str,cmd:str):
+    sudo_cmd = get_sudo_cmd(cmd)
+    password_sudo_cmd = get_raw_password_sudo_cmd(password=password,sudo_cmd=sudo_cmd)
+    return password_sudo_cmd
+def get_sudo_cmd(cmd: str):
+    return f"sudo -S -k {cmd}"
+def get_raw_password_sudo_cmd(password:str,sudo_cmd:str):
+    return f"printf %s {shlex.quote(password)} | {sudo_cmd}"
 def get_remote_bash(
     cmd: str,
     cwd: str | None = None
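A worked example of the command string the helpers above assemble; the command and password are illustrative values, not anything from the diff:

    import shlex

    cmd = "systemctl restart nginx"
    password = "hunter2"
    sudo_cmd = f"sudo -S -k {cmd}"                            # get_sudo_cmd()
    full = f"printf %s {shlex.quote(password)} | {sudo_cmd}"  # get_raw_password_sudo_cmd()
    print(full)
    # printf %s hunter2 | sudo -S -k systemctl restart nginx

`sudo -S` reads the password from stdin and `-k` discards any cached credentials; when no password can be resolved, get_print_sudo_cmd() returns the command unchanged.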
@@ -9,13 +35,21 @@ def get_remote_ssh(
     remote:str=None
 ):
     return f"ssh {shlex.quote(user_at_host)} {shlex.quote(remote)}"
-
 def get_remote_cmd(
     cmd: str,
     user_at_host: str,
     cwd: str | None = None,
+    password=None,
+    key=None,
+    env_path=None

 ):
+    cmd = get_print_sudo_cmd(
+        cmd=cmd,
+        password=password,
+        key=key,
+        env_path=env_path
+    )
     remote = get_remote_bash(
         cmd=cmd,
         cwd=cwd
@@ -25,6 +59,8 @@ def get_remote_cmd(
         remote=remote
     )
     return full
+
+
 def execute_cmd(
     *args,
     outfile=None,
@@ -46,8 +82,18 @@ def run_local_cmd(
     outfile: Optional[str] = None,
     shell=True,
     text=True,
-    capture_output=True
+    capture_output=True,
+    user_at_host: str=None,
+    password=None,
+    key=None,
+    env_path=None
 ) -> str:
+    cmd = get_print_sudo_cmd(
+        cmd=cmd,
+        password=password,
+        key=key,
+        env_path=env_path
+    )
     return execute_cmd(
         cmd,
         outfile=outfile,
@@ -64,12 +110,21 @@ def run_remote_cmd(
     outfile: Optional[str] = None,
     shell=True,
     text=True,
-    capture_output=True
+    capture_output=True,
+    password=None,
+    key=None,
+    env_path=None
 ) -> str:
     """
     Run on remote via SSH; capture stdout+stderr locally; write to local outfile.
     NOTE: we do *not* try to write the file on the remote to avoid later scp.
     """
+    cmd = get_print_sudo_cmd(
+        cmd=cmd,
+        password=password,
+        key=key,
+        env_path=env_path
+    )
     # wrap in bash -lc for PATH/profile + allow 'cd && ...'
     cmd = get_remote_cmd(
         cmd=cmd,
@@ -83,6 +138,7 @@ def run_remote_cmd(
         text=text,
         capture_output=capture_output
     )
+
 def run_cmd(
     cmd: str=None,
     cwd: str | None = None,
@@ -90,8 +146,12 @@ def run_cmd(
     shell=True,
     text=True,
     capture_output=True,
-    user_at_host: str=None
+    user_at_host: str=None,
+    password=None,
+    key=None,
+    env_path=None
 ) -> str:
+
     if user_at_host:
         return run_ssh_cmd(
             user_at_host=user_at_host,
@@ -100,7 +160,10 @@ def run_cmd(
             outfile=outfile,
             shell=shell,
             text=text,
-            capture_output=capture_output
+            capture_output=capture_output,
+            password=password,
+            key=key,
+            env_path=env_path
         )
     return run_local_cmd(
         cmd=cmd,
@@ -108,7 +171,10 @@ def run_cmd(
         outfile=outfile,
         shell=shell,
         text=text,
-        capture_output=capture_output
+        capture_output=capture_output,
+        password=password,
+        key=key,
+        env_path=env_path
     )
 run_ssh_cmd = run_remote_cmd
 remote_cmd = run_remote_cmd
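A minimal sketch of the updated run_cmd() entry point. The host, command, and env key below are illustrative only, and the lookup assumes get_env_value() resolves the named key from the file at env_path:

    # Remote: user_at_host routes the call through run_remote_cmd (ssh + bash -lc),
    # and key/env_path let the sudo password be pulled from an env file.
    out = run_cmd(
        cmd="apt-get update",
        user_at_host="admin@server",
        key="SUDO_PASS",
        env_path="~/.env",
    )

    # Local: with no user_at_host the same call goes through run_local_cmd().
    out = run_cmd(cmd="ls -la")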
{abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_utilities
-Version: 0.2.2.
+Version: 0.2.2.422
 Summary: abstract_utilities is a collection of utility modules providing a variety of functions to aid in tasks such as data comparison, list manipulation, JSON handling, string manipulation, mathematical computations, and time operations.
 Home-page: https://github.com/AbstractEndeavors/abstract_utilities
 Author: putkoff
{abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/RECORD
RENAMED
@@ -1,9 +1,9 @@
-abstract_utilities/__init__.py,sha256=
+abstract_utilities/__init__.py,sha256=hnDgg4ErEvB4kQ9j1GyN-wex9ba_XqTmz1Etf_i-XC8,6380
 abstract_utilities/abstract_classes.py,sha256=A6-FNDQb2P_jcyt01Kc5SuY2QawLVKNjQ-rDGfsn4rA,2461
 abstract_utilities/class_utils.py,sha256=-YHkdbaChRIz9YLXJlFF4m7VkdwUCts_HOUbKXxkrYA,12735
 abstract_utilities/collator_utils.py,sha256=9exNoZAr9rABGYTwZOn7hdLbpnMtRd2AgfU7yjZrXGw,2348
 abstract_utilities/doit.py,sha256=a1zkyMJbSGPvE-OmCQcH_dQyLME392UfvQmGztOWyhE,1646
-abstract_utilities/dynimport.py,sha256=
+abstract_utilities/dynimport.py,sha256=vGNALk6vbbXSANk1akzj9bO3xLLItAWXjNrrO2VCCg8,6984
 abstract_utilities/error_utils.py,sha256=dSMIM3TKe4e9i_akObyjDwy3Zu4fnoWRK9hucg_ryZo,890
 abstract_utilities/global_utils.py,sha256=UkCS1nE561bVbxWsH-YQdFPSeZFMYXV7xg-DAtGUvrI,2204
 abstract_utilities/hash_utils.py,sha256=u7t209ERD9aGONZHqmkYtiQRRadD2qG5ICSTxlYlZMc,206
@@ -42,16 +42,16 @@ abstract_utilities/file_utils/req.py,sha256=DicCS80V4L8GPWr7lBKDR_gTC1WI9o8lMC2I
 abstract_utilities/file_utils/file_utils/__init__.py,sha256=fm_uNRnfKfZOIg7e1HXhWbHac5VoUgRD2iTO5cxLkA0,160
 abstract_utilities/file_utils/file_utils/file_filters.py,sha256=khfbonAPEAhW1wxfFo0I4dawYPCrIKEjNc7VKb1RvzA,3437
 abstract_utilities/file_utils/file_utils/file_reader.py,sha256=2MRj2PGKq4C-iKL8dmhHwWnhmA8GPVsNaWkTREOF9vo,24545
-abstract_utilities/file_utils/file_utils/file_utils.py,sha256=
+abstract_utilities/file_utils/file_utils/file_utils.py,sha256=jKMp2ch6xsxjZ8YXk2oYxTp22SU4YklCmddq3E3rmcY,7011
 abstract_utilities/file_utils/file_utils/filter_params.py,sha256=NF692W0cBhEsbtmaVzb8EKMAasasHDElSRaC9fnzYwE,3382
 abstract_utilities/file_utils/file_utils/imports.py,sha256=SXCMBuHUwqXbfRBk4LjKehsBKZa8-Po5UfEcNTwn4Es,24
 abstract_utilities/file_utils/file_utils/map_utils.py,sha256=B_MlkLP8s-o0yU0R3Y2LcTpBntBzysJO18qq181xz9c,1043
 abstract_utilities/file_utils/file_utils/pdf_utils.py,sha256=D_wg8h-SapCvqinxRIKxMri1jWZNpr5jGvKq9EJePfY,10335
 abstract_utilities/file_utils/imports/__init__.py,sha256=0nezbIo1RqmfZ7pZ0V3kxZDNchNWH8wVATnnvNeILMo,101
-abstract_utilities/file_utils/imports/classes.py,sha256=
+abstract_utilities/file_utils/imports/classes.py,sha256=hNOw48TPAkcDwOY530I7DiYjIfPjVXwEtvb_vN00vdg,13186
 abstract_utilities/file_utils/imports/constants.py,sha256=kSWXjZrbM7MmkytpiCFnsEJcfhPGI5ztUmrvTmT1xpU,1571
 abstract_utilities/file_utils/imports/imports.py,sha256=OjHHuys4ue01PjTIBJuOBAseWUN9pxmAvWsPEj0QcCw,532
-abstract_utilities/file_utils/imports/module_imports.py,sha256=
+abstract_utilities/file_utils/imports/module_imports.py,sha256=jhigHKn-tA98-nKvnp2WBeD9vQrC0xQ10AmKIUOV1r4,247
 abstract_utilities/robust_reader/__init__.py,sha256=7JVGEqZ2VFyFF06cqQ8TFz8EyreOB7Jhisnd69rxL-8,28
 abstract_utilities/robust_reader/file_reader2.py,sha256=U-5opkLu-bct091Eb-5CiNBTf0UFoSITYi8zR-Sz38w,25077
 abstract_utilities/robust_reader/file_readers.py,sha256=U-5opkLu-bct091Eb-5CiNBTf0UFoSITYi8zR-Sz38w,25077
@@ -73,8 +73,8 @@ abstract_utilities/ssh_utils/__init__.py,sha256=-DxUpOmfMi5zdG3lu93uT9MziZ-I_anw
 abstract_utilities/ssh_utils/classes.py,sha256=3Q9BfLpyagNFYyiF4bt-5UCezeUJv9NK9YAFdTsLCV0,4802
 abstract_utilities/ssh_utils/imports.py,sha256=7-pVJK1RfR0KiZsv0mNYGPuNXA4iYqmDvqbAR9h1llU,371
 abstract_utilities/ssh_utils/pexpect_utils.py,sha256=JBdOIXBTXAqE5TrsFjmPWJgwSaWyRJN8rbJ6y3_zKPY,10556
-abstract_utilities/ssh_utils/utils.py,sha256=
-abstract_utilities-0.2.2.
-abstract_utilities-0.2.2.
-abstract_utilities-0.2.2.
-abstract_utilities-0.2.2.
+abstract_utilities/ssh_utils/utils.py,sha256=smUWAx3nW1h0etTndJ_te9bkUX5YzQ8kYd9_gD1TXLk,4882
+abstract_utilities-0.2.2.422.dist-info/METADATA,sha256=yq0rgeCL3-cooP8x1yMcIAKAnYxfSCEV2WcOxIJmDEA,28108
+abstract_utilities-0.2.2.422.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+abstract_utilities-0.2.2.422.dist-info/top_level.txt,sha256=BF0GZ0xVFfN1K-hFIWPO3viNsOs1sSF86n1vHBg39FM,19
+abstract_utilities-0.2.2.422.dist-info/RECORD,,
{abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/WHEEL
RENAMED
File without changes

{abstract_utilities-0.2.2.420.dist-info → abstract_utilities-0.2.2.422.dist-info}/top_level.txt
RENAMED
File without changes