abstract-utilities 0.2.2.453__py3-none-any.whl → 0.2.2.486__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of abstract-utilities might be problematic.
- abstract_utilities/__init__.py +1 -1
- abstract_utilities/class_utils.py +0 -1
- abstract_utilities/file_utils/file_utils/__init__.py +1 -0
- abstract_utilities/file_utils/file_utils/file_utils.py +3 -3
- abstract_utilities/file_utils/file_utils/find_collect.py +1 -0
- abstract_utilities/file_utils/file_utils/imports/__init__.py +3 -0
- abstract_utilities/file_utils/file_utils/imports/constants.py +39 -0
- abstract_utilities/file_utils/file_utils/imports/file_functions.py +10 -0
- abstract_utilities/file_utils/file_utils/imports/imports.py +39 -0
- abstract_utilities/file_utils/file_utils/imports/module_imports.py +14 -0
- abstract_utilities/file_utils/file_utils/imports.py +9 -0
- abstract_utilities/file_utils/file_utils/type_checks.py +91 -0
- abstract_utilities/file_utils/imports/__init__.py +1 -2
- abstract_utilities/file_utils/imports/clean_imps.py +158 -0
- abstract_utilities/file_utils/imports/file_functions.py +1 -1
- abstract_utilities/file_utils/imports/imports.py +59 -7
- abstract_utilities/file_utils/imports/module_imports.py +6 -3
- abstract_utilities/read_write_utils.py +163 -12
- abstract_utilities/robust_reader/imports/imports.py +0 -9
- abstract_utilities/robust_readers/import_utils/__init__.py +1 -0
- abstract_utilities/robust_readers/import_utils/clean_imports.py +175 -0
- abstract_utilities/string_clean.py +40 -1
- abstract_utilities/string_utils.py +39 -0
- abstract_utilities/type_utils.py +25 -1
- {abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/METADATA +1 -1
- {abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/RECORD +28 -20
- {abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/WHEEL +0 -0
- {abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/top_level.txt +0 -0
abstract_utilities/__init__.py
CHANGED
@@ -116,10 +116,10 @@ from .parse_utils import (num_tokens_from_string,
 
 from .log_utils import get_caller_info,get_logFile,print_or_log,get_json_call_response,initialize_call_log
 from .error_utils import try_func
-from .class_utils import alias,get_class_inputs,get_set_attr
 from .ssh_utils import *
 from .env_utils import *
 from .path_utils import *
 from .file_utils import *
 from .file_utils import call_for_all_tabs
 from .string_utils import *
+from .class_utils import alias,get_class_inputs,get_set_attr,get_caller_path,get_caller_dir
abstract_utilities/file_utils/file_utils/file_utils.py
CHANGED
@@ -1,7 +1,7 @@
-
-from typing import *
-import fnmatch, os, glob
+
 from .filter_params import *
+from .imports import *
+
 ##from abstract_utilities import make_list,get_media_exts, is_media_type
 def get_allowed_predicate(allowed=None):
     if allowed != False:
abstract_utilities/file_utils/file_utils/imports/constants.py
ADDED
@@ -0,0 +1,39 @@
+from .imports import *
+from .module_imports import *
+@dataclass
+class ScanConfig:
+    allowed_exts: Set[str]
+    unallowed_exts: Set[str]
+    exclude_types: Set[str]
+    exclude_dirs: List[str] = field(default_factory=list)
+    exclude_patterns: List[str] = field(default_factory=list)
+DEFAULT_ALLOWED_EXTS: Set[str] = {
+    ".py", ".pyw",  # python
+    ".js", ".jsx", ".ts", ".tsx", ".mjs",  # JS/TS
+    ".html", ".htm", ".xml",  # markup
+    ".css", ".scss", ".sass", ".less",  # styles
+    ".json", ".yaml", ".yml", ".toml", ".ini",  # configs
+    ".cfg", ".md", ".markdown", ".rst",  # docs
+    ".sh", ".bash", ".env",  # scripts/env
+    ".txt"  # plain text
+}
+
+DEFAULT_EXCLUDE_TYPES: Set[str] = {
+    "image", "video", "audio", "presentation",
+    "spreadsheet", "archive", "executable"
+}
+
+# never want these—even if they sneak into ALLOWED
+_unallowed = set(get_media_exts(DEFAULT_EXCLUDE_TYPES)) | {'.bak', '.shp', '.cpg', '.dbf', '.shx','.geojson',".pyc",'.shx','.geojson','.prj','.sbn','.sbx'}
+DEFAULT_UNALLOWED_EXTS = {e for e in _unallowed if e not in DEFAULT_ALLOWED_EXTS}
+
+DEFAULT_EXCLUDE_DIRS: Set[str] = {
+    "node_modules", "old","__pycache__", "backups", "backup", "backs", "trash", "depriciated", "old", "__init__"
+}
+
+DEFAULT_EXCLUDE_PATTERNS: Set[str] = {
+    "__init__*", "*.tmp", "*.log", "*.lock", "*.zip","*~"
+}
+REMOTE_RE = re.compile(r"^(?P<host>[^:\s]+@[^:\s]+):(?P<path>/.*)$")
+AllowedPredicate = Optional[Callable[[str], bool]]
+DEFAULT_EXCLUDE_FILE_PATTERNS=DEFAULT_EXCLUDE_PATTERNS
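For orientation, a minimal usage sketch (illustrative, not part of the diff) showing how the new ScanConfig dataclass can be populated from the defaults defined above; it assumes the module and its heavyweight hub imports resolve in your environment:

    from abstract_utilities.file_utils.file_utils.imports.constants import (
        ScanConfig, DEFAULT_ALLOWED_EXTS, DEFAULT_UNALLOWED_EXTS,
        DEFAULT_EXCLUDE_TYPES, DEFAULT_EXCLUDE_DIRS, DEFAULT_EXCLUDE_PATTERNS,
    )

    config = ScanConfig(
        allowed_exts=DEFAULT_ALLOWED_EXTS,
        unallowed_exts=DEFAULT_UNALLOWED_EXTS,
        exclude_types=DEFAULT_EXCLUDE_TYPES,
        exclude_dirs=list(DEFAULT_EXCLUDE_DIRS),
        exclude_patterns=list(DEFAULT_EXCLUDE_PATTERNS),
    )
    # DEFAULT_UNALLOWED_EXTS is built to exclude anything already allowed,
    # so the two sets should not overlap.
    print(config.allowed_exts & config.unallowed_exts)   # set()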
abstract_utilities/file_utils/file_utils/imports/file_functions.py
ADDED
@@ -0,0 +1,10 @@
+from .imports import *
+def get_caller_path():
+    i = i or 1
+    frame = inspect.stack()[i]
+    return os.path.abspath(frame.filename)
+def get_caller_dir(i=None):
+    i = i or 1
+    frame = inspect.stack()[i]
+    abspath = os.path.abspath(frame.filename)
+    return os.path.dirname(abspath)
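A quick sketch of how the caller-introspection helpers above are used (illustrative; note that the released get_caller_path() body expects an index i that its signature does not accept, while get_caller_dir() does take it):

    from abstract_utilities.file_utils.file_utils.imports.file_functions import get_caller_dir

    # inspect.stack()[1] is the frame of the immediate caller, so calling this
    # from a module returns that module's directory.
    print(get_caller_dir())       # e.g. /path/to/the/calling/package
    print(get_caller_dir(i=2))    # one frame further up the call stack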
abstract_utilities/file_utils/file_utils/imports/imports.py
ADDED
@@ -0,0 +1,39 @@
+# ============================================================
+# abstract_utilities/imports/imports.py
+# Global imports hub — everything imported here will be
+# automatically available to any module that does:
+#     from ..imports import *
+# ============================================================
+# ---- Core standard library modules -------------------------
+import os, sys, re, shlex, glob, platform, textwrap, subprocess, inspect, json, time
+import tempfile, shutil, logging, pathlib, fnmatch, importlib, importlib.util, types
+from pathlib import Path
+from datetime import datetime
+from types import ModuleType
+
+# ---- Dataclasses and typing --------------------------------
+from dataclasses import dataclass, field
+from typing import (
+    Any, Optional, List, Dict, Set, Tuple,
+    Iterable, Callable, Literal, Union, TypeVar
+)
+
+# ---- Common 3rd-party dependencies --------------------------
+import pandas as pd
+import geopandas as gpd
+import pytesseract
+import pdfplumber
+import PyPDF2
+import ezodf
+from pdf2image import convert_from_path
+from werkzeug.utils import secure_filename
+from werkzeug.datastructures import FileStorage
+
+# ---- Helpers ------------------------------------------------
+import textwrap as tw
+from pprint import pprint
+
+# ============================================================
+# AUTO-EXPORT ALL NON-PRIVATE NAMES
+# ============================================================
+__all__ = [name for name in globals() if not name.startswith("_")]
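The banner above describes the intended pattern: sibling modules pull everything from the hub instead of repeating their own import blocks. A minimal sketch of a consumer module (illustrative, assuming it lives next to this imports package):

    # some_module.py, a sibling of the imports/ package
    from .imports import *   # brings in os, glob, Path, datetime, dataclass, pandas as pd, ...

    @dataclass
    class Snapshot:
        root: Path
        created: datetime = field(default_factory=datetime.now)

    def list_python_files(root: str) -> List[str]:
        # glob, os and List all come from the hub, so no per-module import boilerplate
        return glob.glob(os.path.join(root, "**", "*.py"), recursive=True)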
abstract_utilities/file_utils/file_utils/imports/module_imports.py
ADDED
@@ -0,0 +1,14 @@
+from .imports import *
+from ....string_clean import eatAll
+from ....list_utils import make_list
+from ....type_utils import get_media_exts, is_media_type, MIME_TYPES, is_str
+from ....ssh_utils import *
+from ....env_utils import *
+from ....read_write_utils import *
+from ....abstract_classes import SingletonMeta
+from ....string_utils import get_from_kwargs
+from ....abstract_classes import run_pruned_func
+from ....class_utils import get_caller, get_caller_path, get_caller_dir
+
+
+__all__ = [name for name in globals() if not name.startswith("_")]
abstract_utilities/file_utils/file_utils/imports.py
CHANGED
@@ -1 +1,10 @@
 from ..imports import *
+from typing import *
+from dataclasses import dataclass, field
+@dataclass
+class ScanConfig:
+    allowed_exts: Set[str]
+    unallowed_exts: Set[str]
+    exclude_types: Set[str]
+    exclude_dirs: List[str] = field(default_factory=list)
+    exclude_patterns: List[str] = field(default_factory=list)
abstract_utilities/file_utils/file_utils/type_checks.py
ADDED
@@ -0,0 +1,91 @@
+from .imports import *
+
+def get_user_pass_host_key(**kwargs):
+    args = ['password','user_at_host','host','key','user']
+    values,kwargs = get_from_kwargs(*args,**kwargs,del_kwarg=False)
+    return values
+
+# --- Base remote checker -----------------------------------------------------
+def _remote_test(path: str, test_flag: str, timeout: int = 5,*args, **kwargs) -> bool:
+    """
+    Run a remote shell test (e.g. -f, -d) via SSH.
+    Returns True if test succeeds, False otherwise.
+    """
+    try:
+        kwargs['cmd']=f"[ {test_flag} {shlex.quote(path)} ] && echo 1 || echo 0"
+        kwargs['text']=True
+        kwargs['timeout']=timeout
+        kwargs['stderr']=subprocess.DEVNULL
+        result = run_pruned_func(run_cmd,**kwargs)
+        return result.strip() == "1"
+    except Exception:
+        return False
+
+
+# --- Individual path checks --------------------------------------------------
+def is_remote_file(path: str,*args, **kwargs) -> bool:
+    """True if remote path is a file."""
+    return _remote_test(path, "-f", **kwargs)
+
+
+def is_remote_dir(path: str,*args, **kwargs) -> bool:
+    """True if remote path is a directory."""
+    return _remote_test(path, "-d", **kwargs)
+
+
+def is_local_file(path: str) -> bool:
+    """True if local path is a file."""
+    return os.path.isfile(path)
+
+
+def is_local_dir(path: str) -> bool:
+    """True if local path is a directory."""
+    return os.path.isdir(path)
+
+
+# --- Unified interface -------------------------------------------------------
+
+def is_file(path: str,*args,**kwargs) -> bool:
+    """Determine if path is a file (works local or remote)."""
+    if get_user_pass_host_key(**kwargs):
+        return is_remote_file(path, **kwargs)
+    return is_local_file(path)
+
+
+def is_dir(path: str, *args,**kwargs) -> bool:
+    """Determine if path is a directory (works local or remote)."""
+    if get_user_pass_host_key(**kwargs):
+        return is_remote_dir(path, **kwargs)
+    return is_local_dir(path)
+
+def is_exists(path: str, *args,**kwargs) -> bool:
+    if is_file(path,**kwargs):
+        return True
+    if is_dir(path,**kwargs):
+        return True
+    return False
+# --- Optional: keep your original all-in-one wrapper ------------------------
+def check_path_type(
+    path: str,
+    *args,
+    **kwargs
+) -> str:
+    """
+    Return 'file', 'directory', 'missing', or 'unknown'.
+    Uses isolated is_file/is_dir functions.
+    """
+    if get_user_pass_host_key(**kwargs):
+        if is_remote_file(path,**kwargs):
+            return "file"
+        elif is_remote_dir(path,**kwargs):
+            return "directory"
+        else:
+            return "missing"
+    else:
+        if os.path.isfile(path):
+            return "file"
+        elif os.path.isdir(path):
+            return "directory"
+        elif not os.path.exists(path):
+            return "missing"
+    return "unknown"
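A minimal usage sketch for the unified checks above (illustrative; the remote branch assumes working ssh_utils credentials passed through kwargs):

    from abstract_utilities.file_utils.file_utils.type_checks import is_file, is_dir, check_path_type

    # Local paths fall through to os.path.isfile / os.path.isdir.
    print(is_file("/etc/hostname"))    # True on most Linux systems
    print(check_path_type("/etc"))     # "directory"

    # Any of password / user_at_host / host / key / user in kwargs flips the call
    # onto _remote_test(), which runs `[ -d <path> ]` over SSH via run_cmd.
    print(is_dir("/var/www", user_at_host="deploy@example.com", key="~/.ssh/id_ed25519"))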
abstract_utilities/file_utils/imports/clean_imps.py
ADDED
@@ -0,0 +1,158 @@
+from abstract_utilities import read_from_file, eatAll
+import os, sys, re, inspect
+from typing import *
+
+# ============================================================
+# Constants
+# ============================================================
+import_tag = 'import '
+from_tag = 'from '
+
+# ============================================================
+# Helpers
+# ============================================================
+def get_caller_path(i=None):
+    i = i or 1
+    frame = inspect.stack()[i]
+    return os.path.abspath(frame.filename)
+
+def make_list(obj: any) -> list:
+    if isinstance(obj, str) and ',' in obj:
+        obj = obj.split(',')
+    if isinstance(obj, (set, tuple)):
+        return list(obj)
+    if isinstance(obj, list):
+        return obj
+    return [obj]
+
+def eatElse(stringObj, chars=None):
+    chars = make_list(chars or []) + list('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
+    while stringObj:
+        if stringObj and stringObj[0] not in chars:
+            stringObj = stringObj[1:]
+            continue
+        if stringObj and stringObj[-1] not in chars:
+            stringObj = stringObj[:-1]
+            continue
+        break
+    return stringObj
+
+def clean_line(line):
+    return eatAll(line, [' ', '', '\t', '\n'])
+
+def is_line_import(line):
+    return bool(line and line.startswith(import_tag) and 'from ' not in line)
+
+def is_line_from_import(line):
+    return bool(line and line.startswith(from_tag) and ' import ' in line)
+
+def is_from_group_start(line):
+    return bool(line and line.startswith(from_tag) and 'import' in line and '(' in line and not line.rstrip().endswith(')'))
+
+def is_from_group_end(line):
+    return bool(line and ')' in line)
+
+def clean_imports(imports):
+    if isinstance(imports, str):
+        imports = imports.split(',')
+    return [eatElse(imp.strip()) for imp in imports if imp.strip()]
+
+# ============================================================
+# Combine lone import statements
+# ============================================================
+def combine_lone_imports(text=None, file_path=None):
+    text = text or ''
+    if file_path and os.path.isfile(file_path):
+        text += read_from_file(file_path)
+    lines = text.split('\n')
+
+    cleaned_import_list = []
+    nu_lines = []
+    j = None
+
+    for i, line in enumerate(lines):
+        if is_line_import(line):
+            if j is None:
+                nu_lines.append(import_tag)
+                j = i
+            cleaned_import_list += clean_imports(line.split(import_tag)[1])
+        else:
+            nu_lines.append(line)
+
+    if j is None:
+        return '\n'.join(nu_lines)
+    cleaned_import_list = sorted(set(cleaned_import_list))
+    nu_lines[j] += ', '.join(cleaned_import_list)
+    return '\n'.join(nu_lines)
+
+# ============================================================
+# Merge repeated 'from pkg import ...' (1-line only)
+# Preserve multi-line grouped imports
+# ============================================================
+def merge_from_import_groups(text=None, file_path=None):
+    if file_path and os.path.isfile(file_path):
+        text = read_from_file(file_path)
+    text = text or ''
+    lines = text.split('\n')
+
+    pkg_to_imports: Dict[str, Set[str]] = {}
+    pkg_to_line_index: Dict[str, int] = {}
+    nu_lines: List[str] = []
+
+    in_group = False
+    for i, line in enumerate(lines):
+        stripped = line.strip()
+
+        # preserve multi-line grouped blocks intact
+        if in_group:
+            nu_lines.append(line)
+            if is_from_group_end(line):
+                in_group = False
+            continue
+
+        if is_from_group_start(line):
+            in_group = True
+            nu_lines.append(line)
+            continue
+
+        if is_line_from_import(line):
+            try:
+                pkg_part, imps_part = line.split(' import ', 1)
+                pkg_name = pkg_part.replace('from ', '').strip()
+                imps = clean_imports(imps_part)
+            except Exception:
+                nu_lines.append(line)
+                continue
+
+            if pkg_name not in pkg_to_imports:
+                pkg_to_imports[pkg_name] = set(imps)
+                pkg_to_line_index[pkg_name] = len(nu_lines)
+                nu_lines.append(line)
+            else:
+                pkg_to_imports[pkg_name].update(imps)
+        else:
+            nu_lines.append(line)
+
+    # Rewrite first occurrences
+    for pkg, idx in pkg_to_line_index.items():
+        all_imps = sorted(pkg_to_imports[pkg])
+        nu_lines[idx] = f"from {pkg} import {', '.join(all_imps)}"
+
+    return '\n'.join(nu_lines)
+
+# ============================================================
+# Pipeline
+# ============================================================
+def clean_imports_pipeline(path: str):
+    raw = read_from_file(path)
+    step1 = combine_lone_imports(text=raw)
+    step2 = merge_from_import_groups(text=step1)
+    return step2
+
+# ============================================================
+# Standalone Run
+# ============================================================
+if __name__ == "__main__":
+    abs_path = "/home/flerb/Documents/pythonTools/modules/src/modules/abstract_utilities/src/abstract_utilities/file_utils/imports/imports.py"
+    cleaned = clean_imports_pipeline(abs_path)
+    print(cleaned)
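A small worked example of the two passes above (illustrative): combine_lone_imports folds bare import statements into a single line, then merge_from_import_groups deduplicates repeated one-line from-imports while leaving parenthesised groups alone.

    source = "\n".join([
        "import os",
        "import sys",
        "from typing import List",
        "from typing import Dict",
        "print('hello')",
    ])
    cleaned = merge_from_import_groups(text=combine_lone_imports(text=source))
    print(cleaned)
    # import os, sys
    # from typing import Dict, List
    # print('hello')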
abstract_utilities/file_utils/imports/imports.py
CHANGED
@@ -1,13 +1,65 @@
-
+# ============================================================
+# abstract_utilities/imports/imports.py
+# Global imports hub — everything imported here will be
+# automatically available to any module that does:
+#     from ..imports import *
+# ============================================================
+
+
+import os
+import sys, importlib,os
+import sys, importlib, os, inspect
+from pathlib import Path
+import os,sys
+
+
+
 from typing import *
+import re
+
+from typing import *
+from types import MethodType
+import os,re, sys, importlib, inspect, os, importlib.util, hashlib
+import os,tempfile,shutil,logging,ezodf,fnmatch,pytesseract,pdfplumber
+import pandas as pd
 import geopandas as gpd
-from pathlib import Path
-from types import ModuleType
 from datetime import datetime
-
+
+from typing import *
+from werkzeug.utils import secure_filename
+from werkzeug.datastructures import FileStorage
+from pdf2image import convert_from_path  # only used for OCR fallback
+# ---- Core standard library modules -------------------------
+import os, sys, re, shlex, glob, platform, textwrap, subprocess, inspect, json, time
+import tempfile, shutil, logging, pathlib, fnmatch, importlib, importlib.util, types
+
+from datetime import datetime
+from types import ModuleType
+
+# ---- Dataclasses and typing --------------------------------
 from dataclasses import dataclass, field
+from typing import (
+    Any, Optional, List, Dict, Set, Tuple,
+    Iterable, Callable, Literal, Union, TypeVar
+)
+
+# ---- Common 3rd-party dependencies --------------------------
+import pandas as pd
+import geopandas as gpd
+import pytesseract
+import pdfplumber
+import PyPDF2
+import ezodf
+from pdf2image import convert_from_path
 from werkzeug.utils import secure_filename
 from werkzeug.datastructures import FileStorage
-
-
-import textwrap
+
+# ---- Helpers ------------------------------------------------
+import textwrap as tw
+from pprint import pprint
+
+# ============================================================
+# AUTO-EXPORT ALL NON-PRIVATE NAMES
+# ============================================================
+__all__ = [name for name in globals() if not name.startswith("_")]
+
abstract_utilities/file_utils/imports/module_imports.py
CHANGED
@@ -1,10 +1,13 @@
+from .imports import *
 from ...string_clean import eatAll
 from ...list_utils import make_list
-from ...type_utils import get_media_exts, is_media_type,MIME_TYPES,is_str
+from ...type_utils import get_media_exts, is_media_type, MIME_TYPES, is_str
 from ...ssh_utils import *
 from ...env_utils import *
 from ...read_write_utils import *
 from ...abstract_classes import SingletonMeta
 from ...log_utils import get_logFile
-from ...class_utils import get_caller,get_caller_path,get_caller_dir
-from ...ssh_utils
+from ...class_utils import get_caller, get_caller_path, get_caller_dir
+from ...ssh_utils import run_cmd
+
+__all__ = [name for name in globals() if not name.startswith("_")]
abstract_utilities/read_write_utils.py
CHANGED
@@ -13,12 +13,14 @@ Usage:
 """
 
 import os
-
+import shlex
+from .ssh_utils.utils import run_cmd,get_print_sudo_cmd,run_local_cmd,run_remote_cmd
+from .file_utils.file_utils.type_checks import is_file,is_dir,get_user_pass_host_key,is_exists
 from .abstract_classes import run_pruned_func
 _FILE_PATH_KEYS = ['file', 'filepath', 'file_path', 'path', 'directory', 'f', 'dst', 'dest']
 _CONTENTS_KEYS = ['cont', 'content', 'contents', 'data', 'datas', 'dat', 'src', 'source']
 
-
+
 # --- Helper utilities --------------------------------------------------------
 def string_in_keys(strings, kwargs):
     """Find a matching keyword in kwargs that contains any of the given substrings."""
@@ -28,25 +30,60 @@ def string_in_keys(strings, kwargs):
         return key
     return None
 
-
-
+def make_dirs(path,exist_ok=True,**kwargs):
+    if exist_ok or (not exist_ok and not is_dir(path,**kwargs)):
+        if get_user_pass_host_key(**kwargs):
+            kwargs['cmd']=f"mkdir {path}"
+            run_pruned_func(run_cmd,**kwargs)
+        else:
+            os.makedirs(path,exist_ok=exist_ok)
+    return path
+def path_join(*args):
+    path = None
+    for i,arg in enumerate(args):
+        if arg:
+            if i == 0:
+                path = arg
+            else:
+                path = os.path.join(path,arg)
+    return path
+def make_path(path,home_dir=None,file=None,**kwargs):
+    if path:
+        basename = os.path.basename(path)
+        parts = path.split('/')
+        parts = [part for part in parts if part]
+
+        full_dir = home_dir or ''
+        if file == True or (file == None and ('.' in basename)):
+            pieces = parts[:-1] if len(parts) > 1 else []
+        else:
+            pieces=parts
+            basename=None
+        for piece in pieces:
+            full_dir = os.path.join(full_dir,piece)
+            make_dirs(full_dir,exist_ok=True,**kwargs)
+        if basename:
+            full_dir=path_join(full_dir,basename)
+        print(f"full_dir == {full_dir}")
+        return full_dir
+def get_path(paths,**kwargs):
     """Return the first valid path among given paths."""
     for path in paths:
         if isinstance(path, str):
-            if
+            if is_file(path,**kwargs):
                 return path
             dirname = os.path.dirname(path)
-            if
+            if is_exists(dirname,**kwargs):
                 return path
     return None
 
 
-def break_down_find_existing(path):
+def break_down_find_existing(path,**kwargs):
    """Return the first non-existent subpath within a path chain."""
    test_path = ''
    for part in path.split(os.sep):
        test_path = os.path.join(test_path, part)
-        if not
+        if not is_exists(test_path,**kwargs):
            return test_path if test_path else None
    return test_path
 
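A local-only sketch of the new directory helpers (illustrative; when SSH credentials are present in kwargs, make_dirs instead shells out to mkdir through run_cmd):

    # path_join skips falsy segments instead of raising on None.
    print(path_join("a", None, "b"))               # a/b

    # make_path creates each directory level under home_dir, then re-attaches the
    # basename when the last segment looks like a file ('.' in it), printing the
    # resulting path as a side effect.
    print(make_path("src/pkg/mod.py", home_dir="/tmp/out"))
    # creates /tmp/out/src and /tmp/out/src/pkg, returns /tmp/out/src/pkg/mod.py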
@@ -73,7 +110,73 @@ def check_read_write_params(*args, **kwargs):
         raise ValueError("Missing file_path argument.")
     return file_path, contents
 
+def write_to_path(
+    file_path: str,
+    contents: str,
+    *,
+    user_at_host: str = None,
+    cwd: str | None = None,
+    password=None,
+    key=None,
+    env_path=None,
+    **kwargs
+) -> str:
+    """
+    Completely overwrite a file (locally or remotely).
+    Supports sudo and password-based remote execution.
+    """
+
+    # sanitize for shell safety
+    quoted_path = shlex.quote(file_path)
+    quoted_data = shlex.quote(str(contents))
+
+    # shell command that fully overwrites
+    # (no append, replaces contents entirely)
+    base_cmd = f'sudo sh -c "echo {quoted_data} > {quoted_path}"'
+    input(base_cmd)
+    # optional sudo password injection
+    full_cmd = get_print_sudo_cmd(
+        cmd=base_cmd,
+        password=password,
+        key=key,
+        env_path=env_path
+    )
 
+    # local or remote dispatch
+    if user_at_host:
+        return run_remote_cmd(
+            user_at_host=user_at_host,
+            cmd=full_cmd,
+            cwd=cwd,
+            password=password,
+            key=key,
+            env_path=env_path,
+            **kwargs
+        )
+    else:
+        return run_local_cmd(
+            cmd=full_cmd,
+            cwd=cwd,
+            password=password,
+            key=key,
+            env_path=env_path,
+            **kwargs
+        )
+### --- Core functionality -------------------------------------------------------
+##def write_to_file(*args, **kwargs):
+##    """
+##    Write contents to a file (create if missing).
+##
+##    Returns the file_path written.
+##    """
+##    file_path, contents = check_read_write_params(*args, **kwargs)
+##    if contents is None:
+##        raise ValueError("Missing contents to write.")
+##
+##    os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True)
+##    with open(file_path, "w", encoding="utf-8") as f:
+##        f.write(str(contents))
+##    return file_path
 # --- Core functionality -------------------------------------------------------
 def write_to_file(*args, **kwargs):
     """
@@ -82,18 +185,37 @@ def write_to_file(*args, **kwargs):
     Returns the file_path written.
     """
     file_path, contents = check_read_write_params(*args, **kwargs)
+    values,kwargs = get_from_kwargs(['file_path','contents'],del_kwarg=True,**kwargs)
+    dirname = os.path.dirname(file_path)
+
     if contents is None:
         raise ValueError("Missing contents to write.")
+    user_at_host = kwargs.get("user_at_host")
+    if get_user_pass_host_key(**kwargs):
+        make_dirs(dirname, exist_ok=True,**kwargs)
+        kwargs["cwd"] = kwargs.get('cwd') or os.path.dirname(file_path)
+        # sanitize for shell safety
+        quoted_path = shlex.quote(file_path)
+        quoted_data = shlex.quote(str(contents))
+        # shell command that fully overwrites
+        # (no append, replaces contents entirely)
+        kwargs["cmd"] = f'sh -c "echo {quoted_data} > {quoted_path}"'
+        if not kwargs.get('password') and not kwargs.get('key'):
+            kwargs["cmd"]=f'sudo {kwargs["cmd"]}'
+        result = run_pruned_func(run_cmd,**kwargs)
+        if not is_file(file_path,**kwargs) or str(contents) != read_from_file(file_path,**kwargs):
+            kwargs["cmd"]=f'sudo {kwargs["cmd"]}'
+            result = run_pruned_func(run_cmd,**kwargs)
+        return result
 
-
+    make_dirs(dirname or ".", exist_ok=True)
     with open(file_path, "w", encoding="utf-8") as f:
         f.write(str(contents))
     return file_path
 
 
 def read_from_file(file_path,**kwargs):
-
-    if user_at_host:
+    if get_user_pass_host_key(**kwargs):
         kwargs["cwd"] = kwargs.get('cwd') or os.path.dirname(file_path)
         basename = os.path.basename(file_path)
         kwargs["cmd"] = f'cat {basename}'
@@ -101,7 +223,36 @@ def read_from_file(file_path,**kwargs):
     """Read text content from a file."""
     with open(file_path, "r", encoding="utf-8") as f:
         return f.read()
-
+def get_rel_path(src,src_rel,directory):
+    if src.startswith(src_rel):
+        src = src[len(src_rel):]
+    rel_path = os.path.join(directory,src)
+    return rel_path
+def make_relative_path(src,src_rel,dst,**kwargs):
+    print(f"src == {src}\nsrc_rel == {src_rel}\dst == {dst}")
+    if src.startswith(src_rel):
+        rel_path = get_rel_path(src,src_rel,dst)
+    path = make_path(src,home_dir=rel_path,**kwargs)
+    print(f"path == {path}")
+    return path
+def copy_dirs(dirs,dst,src_rel=None,**kwargs):
+    for src in dirs:
+        if rel_path:
+            dst = make_relative_path(src,src_rel,dst,**kwargs)
+        make_path(dst,**kwargs)
+
+def copy_file(src,dst,rel_path=None,**kwargs):
+    if rel_path:
+        dst = make_relative_path(src,rel_path,dst,**kwargs)
+    if get_user_pass_host_key(**kwargs):
+        contents=read_from_file(src,**kwargs)
+        write_to_file(contents=contents,file_path=dst,**kwargs)
+    else:
+        shutil.copy(src,dst)
+    return dst
+def copy_files(files,dst,rel_path=None,**kwargs):
+    for file in files:
+        copy_file(src=file,dst=dst,rel_path=rel_path,**kwargs)
 
 def create_and_read_file(*args, **kwargs):
     """
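A sketch of the copy helpers added above (illustrative): without SSH credentials in kwargs, copy_file is a plain shutil.copy; with credentials it round-trips through read_from_file and write_to_file, so the bytes travel over the remote cat and echo commands.

    # Local copy
    copy_file("/tmp/a.txt", "/tmp/b.txt")

    # Remote copy (assumed credentials; reads via `cat`, writes via the quoted `echo`)
    copy_file(
        "/srv/app/config.ini",
        "/srv/app/config.bak",
        user_at_host="deploy@example.com",
        key="~/.ssh/id_ed25519",
    )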
abstract_utilities/robust_reader/imports/imports.py
CHANGED
@@ -1,12 +1,3 @@
-import os,tempfile,shutil,logging,ezodf,fnmatch,pytesseract,pdfplumber
-import pandas as pd
-import geopandas as gpd
-from datetime import datetime
-from pathlib import Path
-from typing import *
-from werkzeug.utils import secure_filename
-from werkzeug.datastructures import FileStorage
-from pdf2image import convert_from_path # only used for OCR fallback
 from ...abstract_classes import SingletonMeta
 from ..pdf_utils import *
 from ...read_write_utils import *
abstract_utilities/robust_readers/import_utils/clean_imports.py
ADDED
@@ -0,0 +1,175 @@
+from ...read_write_utils import read_from_file,write_to_file
+from ...string_clean import eatAll,eatElse,clean_line
+from ...class_utils import get_caller_path
+from ...list_utils import make_list
+import os
+import_tag = 'import '
+from_tag = 'from '
+def get_text_or_read(text=None,file_path=None):
+    text = text or ''
+    imports_js = {}
+    if not text and file_path and os.path.isfile(file_path):
+        text=read_from_file(file_path)
+    return text
+def is_line_import(line):
+    if line and (line.startswith(from_tag) or line.startswith(import_tag)):
+        return True
+    return False
+def is_line_group_import(line):
+    if line and (line.startswith(from_tag) and import_tag in line):
+        return True
+    return False
+def get_import_pkg(line):
+    if is_line_group_import(line):
+        return clean_line(line.split(from_tag)[1].split(import_tag)[0])
+def get_imports_from_import_pkg(line):
+    if is_line_group_import(line):
+        return get_cleaned_import_list(line,commaClean=True)
+
+def add_imports_to_import_pkg_js(import_pkg,imports,import_pkg_js=None):
+    import_pkg_js = import_pkg_js or {}
+    imports = clean_imports(imports)
+    if import_pkg not in import_pkg_js:
+        i = len(import_pkg_js["nulines"])
+        import_pkg_js[import_pkg]={"imports":imports,"line":i}
+        import_line = f"from {import_pkg} import "
+        if import_pkg == "import":
+            import_line = import_tag
+        import_pkg_js["nulines"].append(import_line)
+    else:
+        import_pkg_js[import_pkg]["imports"]+=imports
+    return import_pkg_js
+def update_import_pkg_js(line,import_pkg_js=None):
+    import_pkg_js = import_pkg_js or {}
+    if is_line_group_import(line):
+        import_pkg = get_import_pkg(line)
+        imports = get_imports_from_import_pkg(line)
+        import_pkg_js = add_imports_to_import_pkg_js(import_pkg,imports,import_pkg_js=import_pkg_js)
+    else:
+        if len(import_pkg_js["nulines"]) >0 and line == '' and is_line_import(import_pkg_js["nulines"][-1]):
+            pass
+        else:
+            import_pkg_js["nulines"].append(line)
+    return import_pkg_js
+def is_from_line_group(line):
+    if line and line.startswith(from_tag) and import_tag in line and '(' in line:
+        import_spl = line.split(import_tag)[-1]
+        import_spl_clean = clean_line(line)
+        if not import_spl_clean.endswith(')'):
+            return True
+    return False
+def clean_imports(imports,commaClean=True):
+    chars=["*"]
+    if not commaClean:
+        chars.append(',')
+    if isinstance(imports,str):
+        imports = imports.split(',')
+    return [eatElse(imp,chars=chars) for imp in imports if imp]
+def get_cleaned_import_list(line,commaClean=True):
+    cleaned_import_list=[]
+    if import_tag in line:
+        imports = line.split(import_tag)[1]
+        cleaned_import_list+=clean_imports(imports,commaClean=commaClean)
+    return cleaned_import_list
+def get_all_imports(text=None,file_path=None,import_pkg_js=None):
+    text = get_text_or_read(text=text,file_path=file_path)
+    lines = text.split('\n')
+    cleaned_import_list=[]
+    nu_lines = []
+    is_from_group = False
+    import_pkg_js = import_pkg_js or {}
+    if "nulines" not in import_pkg_js:
+        import_pkg_js["nulines"]=[]
+    if "file_path" not in import_pkg_js:
+        import_pkg_js["file_path"]=file_path
+    if "all_data" not in import_pkg_js:
+        import_pkg_js["all_data"]=[]
+    if file_path and file_path != import_pkg_js["file_path"]:
+        found=False
+        nu_data = {"file_path":import_pkg_js["file_path"],"nulines":import_pkg_js["nulines"]}
+        for i,data in enumerate(import_pkg_js["all_data"]):
+            if data.get('file_path') == import_pkg_js["file_path"]:
+                import_pkg_js["all_data"][i] = nu_data
+                found = True
+                break
+        if found == False:
+            import_pkg_js["all_data"].append(nu_data)
+        import_pkg_js["nulines"]=[]
+        import_pkg_js["file_path"]=file_path
+
+    for line in lines:
+        if line.startswith(import_tag) and ' from ' not in line:
+            cleaned_import_list = get_cleaned_import_list(line)
+            import_pkg_js = add_imports_to_import_pkg_js("import",cleaned_import_list,import_pkg_js=import_pkg_js)
+        else:
+            if is_from_group:
+                import_pkg=is_from_group
+                line = clean_line(line)
+                if line.endswith(')'):
+                    is_from_group=False
+                    line=line[:-1]
+                imports_from_import_pkg = clean_imports(line)
+                import_pkg_js = add_imports_to_import_pkg_js(import_pkg,imports_from_import_pkg,import_pkg_js=import_pkg_js)
+
+            else:
+                import_pkg_js=update_import_pkg_js(line,import_pkg_js=import_pkg_js)
+                if is_from_line_group(line) and is_from_group == False:
+                    is_from_group=get_import_pkg(line)
+    return import_pkg_js
+def clean_all_imports(text=None,file_path=None,import_pkg_js=None):
+    if not import_pkg_js:
+        import_pkg_js = get_all_imports(text=text,file_path=file_path)
+    nu_lines = import_pkg_js["nulines"]
+    for pkg,values in import_pkg_js.items():
+        comments = []
+        if pkg not in ["nulines","file_path","all_data"]:
+            line = values.get('line')
+            imports = values.get('imports')
+            for i,imp in enumerate(imports):
+                if '#' in imp:
+                    imp_spl = imp.split('#')
+                    comments.append(imp_spl[-1])
+                    imports[i] = clean_line(imp_spl[0])
+            imports = list(set(imports))
+            if '*' in imports:
+                imports="*"
+            else:
+                imports=','.join(imports)
+            if comments:
+                comments=','.join(comments)
+                imports+=f" #{comments}"
+            import_pkg_js[pkg]["imports"]=imports
+            nu_lines[line] += imports
+    import_pkg_js["nulines"]=nu_lines
+    return import_pkg_js
+
+def get_all_real_imps(file):
+    contents = read_from_file(file)
+    lines = contents.split('\n')
+    for line in lines:
+        if line.startswith('from '):
+            from_line = line.split('from ')[-1]
+            dot_fro = ""
+            dirname = file
+            for char in from_line:
+                if char != '.':
+                    line = f"from {dot_fro}{eatAll(from_line,'.')}"
+                    if line in all_imps:
+                        line = ""
+                    break
+                if dot_fro == "":
+                    dot_fro = ""
+                dirname = os.path.dirname(dirname)
+                dirbase = os.path.basename(dirname)
+                dot_fro = f"{dirbase}.{dot_fro}"
+            if line:
+                all_imps.append(line)
+
+    return '\n'.join(all_imps)
+def save_cleaned_imports(text=None,file_path=None,write=False,import_pkg_js=None):
+    import_pkg_js=get_all_imports(text=text,file_path=file_path,import_pkg_js=import_pkg_js)
+    import_pkg_js = clean_all_imports(text=text,file_path=file_path,import_pkg_js=import_pkg_js)
+    contents = '\n'.join(import_pkg_js["nulines"])
+    if file_path and write:
+        write_to_file(contents=contents,file_path=file_path)
+    return contents
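Unlike the line-oriented passes in clean_imps.py, this module accumulates imports into a dict keyed by package and rebuilds the text at the end. A minimal sketch (illustrative; the order of names inside each rebuilt line can vary because they pass through a set):

    text = "\n".join([
        "import os",
        "import sys",
        "from typing import List",
        "from typing import Dict",
        "print('x')",
    ])
    print(save_cleaned_imports(text=text))
    # import os,sys
    # from typing import List,Dict
    # print('x')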
abstract_utilities/string_clean.py
CHANGED
@@ -22,6 +22,8 @@ Date: 05/31/2023
 Version: 0.1.2
 """
 import os
+from .list_utils import make_list
+from .type_utils import get_alpha_ints
 def quoteIt(st: str, ls: list) -> str:
     """
     Quotes specific elements in a string.
@@ -110,6 +112,42 @@ def eatAll(string: str, list_objects:(str or list)) -> any:
     if string and list_objects:
         string = eatOuter(string, list_objects)
     return string
+
+
+
+def eatElse(
+    stringObj,
+    chars=None,
+    ints=True,
+    alpha=True,
+    lower=True,
+    capitalize=True,
+    string=True,
+    listObj=True
+    ):
+    alpha_ints = get_alpha_ints(
+        ints=True,
+        alpha=True,
+        lower=True,
+        capitalize=True,
+        string=True,
+        listObj=True
+        )
+    chars = make_list(chars or [])+alpha_ints
+
+    while True:
+        if stringObj:
+            str_0 = stringObj[0] not in chars
+            str_1 = stringObj[-1] not in chars
+            str_eat = str_0 or str_1
+            if not str_eat:
+                return stringObj
+            if stringObj and str_0:
+                stringObj = stringObj[1:] if len(stringObj) !=1 else ""
+            if stringObj and str_1:
+                stringObj = stringObj[:-1] if len(stringObj) !=1 else ""
+        else:
+            return stringObj
 def safe_split(obj, ls):
     """
     Safely splits a string using multiple delimiters.
@@ -185,6 +223,7 @@ def url_join(*paths):
         final_path = f"{final_path}/{path}"
     return final_path
 
-
+def clean_line(line):
+    return eatAll(line,[' ','','\t','\n'])
 def capitalize(string):
     return string[:1].upper() + string[1:].lower() if string else string
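A quick illustration of the two additions (illustrative): eatElse trims leading and trailing characters that are not in the released digit/letter alphabet, and clean_line strips outer whitespace.

    print(eatElse("**make_list**"))        # make_list
    print(clean_line("   import os \t"))   # import os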
abstract_utilities/string_utils.py
CHANGED
@@ -1,3 +1,42 @@
+from .list_utils import make_list
+def get_from_kwargs(*args,**kwargs):
+    del_kwarg = kwargs.get('del_kwargs',False)
+    values = {}
+    for key in args:
+        if key:
+            key = str(key)
+            if key in kwargs:
+                values[key] = kwargs.get(key)
+                if del_kwarg:
+                    del kwargs[key]
+    return values,kwargs
+
+def replace_it(string,item,rep):
+    if item in string:
+        string = string.replace(item,rep)
+    return string
+def while_replace(string,item,rep):
+    while True:
+        string = replace_it(string,item,rep)
+        if item not in string or item in rep:
+            return string
+def for_replace(string,item,replace):
+    replace = make_list(replace)
+    for rep in replace:
+        string = while_replace(string,item,rep)
+    return string
+def replace_all(string,*args,**kwargs):
+    for items in args:
+        if items and isinstance(items,list):
+            item = items[0]
+            replace = items[1:] if len(items)>1 else items[-1]
+            string = for_replace(string,item,replace)
+    values,kwargs = get_from_kwargs('item','replace',**kwargs)
+    if values:
+        string = for_replace(string,**values)
+    for item,replace in kwargs.items():
+        string = for_replace(string,item,rep)
+    return string
 def get_lines(string,strip=True):
     lines = string.split('\n')
     if strip:
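Two quick illustrations of the new helpers (illustrative): get_from_kwargs returns the selected keys plus the (optionally pruned) kwargs, and replace_all repeatedly applies item/replacement pairs.

    values, rest = get_from_kwargs('user', 'key', user="bob", host="example.com")
    print(values)   # {'user': 'bob'}
    print(rest)     # {'user': 'bob', 'host': 'example.com'}  (nothing deleted by default)

    print(replace_all("a--b--c", ["--", "-"]))   # a-b-c  (runs of dashes collapsed)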
abstract_utilities/type_utils.py
CHANGED
@@ -60,6 +60,7 @@ import os
 from pathlib import Path
 from typing import Union
 from .list_utils import make_list
+
 # A big, but by no means exhaustive, map of extensions to mime‐types by category:
 MIME_TYPES = {
     'image': {
@@ -936,7 +937,30 @@ def is_any_instance(value):
     for each in [dict, list, int, float]:
         if is_instance(value, each):
             return True
-
+def getAlphas(lower=True,capitalize=False,listObj=False):
+    obj = ''
+    alphas = 'abcdefghijklmoprstuvwxyz'
+    if lower:
+        obj+=alphas
+    if capitalize:
+        obj+=alphas.upper()
+    if listObj:
+        obj = list(obj)
+    return obj
+def getInts(string=False,listObj=False):
+    obj=12345678909
+    if string:
+        obj = str(obj)
+    if listObj:
+        obj = list(obj)
+    return obj
+def get_alpha_ints(ints=True,alpha=True,lower=True,capitalize=True,string=True,listObj=True):
+    objs = [] if listObj else ""
+    if ints:
+        objs+=getInts(string=string,listObj=listObj)
+    if alpha:
+        objs+=getAlphas(lower=lower,capitalize=capitalize,listObj=listObj)
+    return objs
 # Function: is_number
 # Function: is_str
 # Function: is_int
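A short illustration of the new character-set helpers (illustrative):

    chars = get_alpha_ints()   # list of digit and letter characters, as strings
    print(chars[:5])           # ['1', '2', '3', '4', '5']
    print('n' in chars)        # False, because the released alphas constant omits 'n' and 'q'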
{abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: abstract_utilities
-Version: 0.2.2.
+Version: 0.2.2.486
 Summary: abstract_utilities is a collection of utility modules providing a variety of functions to aid in tasks such as data comparison, list manipulation, JSON handling, string manipulation, mathematical computations, and time operations.
 Home-page: https://github.com/AbstractEndeavors/abstract_utilities
 Author: putkoff
{abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
-abstract_utilities/__init__.py,sha256=
+abstract_utilities/__init__.py,sha256=OWG_j_7hFAoQXmi_hwHQg8vSufDSAatj8iax78ZYAJc,5229
 abstract_utilities/abstract_classes.py,sha256=A6-FNDQb2P_jcyt01Kc5SuY2QawLVKNjQ-rDGfsn4rA,2461
-abstract_utilities/class_utils.py,sha256=
+abstract_utilities/class_utils.py,sha256=AJe6kIiAwmr_is458EkKsjH2c0pyIiZNkah2gBXzXAE,13663
 abstract_utilities/collator_utils.py,sha256=9exNoZAr9rABGYTwZOn7hdLbpnMtRd2AgfU7yjZrXGw,2348
 abstract_utilities/doit.py,sha256=a1zkyMJbSGPvE-OmCQcH_dQyLME392UfvQmGztOWyhE,1646
 abstract_utilities/dynimport.py,sha256=BTX33OXfUq4LAuT1RAzLhbtxqf7CTm5WHYdvVAH83nc,6584
@@ -14,14 +14,14 @@ abstract_utilities/log_utils.py,sha256=W74Y-CmdQP4Kj88HmAgejVxWgyWlvgCKMwLvOfyFf
 abstract_utilities/math_utils.py,sha256=0o1ls1En03UAkYmxTBildCCJDfHygmNuvVnrNrLYtK0,6578
 abstract_utilities/parse_utils.py,sha256=Z5OGRwHuzCzY91fz0JJojk1BPAo1XF2quNNLuBF4_Vk,18602
 abstract_utilities/path_utils.py,sha256=X_U9cPBbNu5Wi0F3hQE0gXQX1gfhzxhxALbairTEOZU,19252
-abstract_utilities/read_write_utils.py,sha256=
+abstract_utilities/read_write_utils.py,sha256=fqkSJye7Nme0t3oDBNahdwg2az2iaLL1C23G_ZYJhsw,9899
 abstract_utilities/safe_utils.py,sha256=_uoZny6dJjopVakOiaf0UIZcvRRXMh51FpfDUooe0xY,3733
-abstract_utilities/string_clean.py,sha256
-abstract_utilities/string_utils.py,sha256=
+abstract_utilities/string_clean.py,sha256=oQv85J-mA4sP2NJwbTI-1k0RXw7V0AmqZolYaAZvex4,6916
+abstract_utilities/string_utils.py,sha256=PnII0wFQBchVzFjhvEHP9ej1zxLehsRKodtc8Qol4-8,1645
 abstract_utilities/tetsts.py,sha256=PrejTUew5dAAqNb4erMJwfdSHxDyuuHGWY2fMlWk5hk,21
 abstract_utilities/thread_utils.py,sha256=LhE1ylSuOKkkMErBf6SjZprjO_vfh3IKfvNKJQiCxho,5460
 abstract_utilities/time_utils.py,sha256=yikMjn7i-OBKfmOujfNtDz4R0VTMgi3dfQNrCIZUbQU,13052
-abstract_utilities/type_utils.py,sha256=
+abstract_utilities/type_utils.py,sha256=XaaAel9hUKeOzBqSCqJsIC6UiPMXUlhtmU77jOHz8Ek,27676
 abstract_utilities/utils.py,sha256=SCa_-x_wsWrcokQXKwlhalxndxLn5Wg25-zqRdJUmag,185049
 abstract_utilities/cmd_utils/__init__.py,sha256=StTaaB9uzJexvr4TFGVqp_o0_s9T6rQlE3fOZtb_y_0,51
 abstract_utilities/cmd_utils/cmd_utils.py,sha256=n2DEo91J8LWuIJoSoDkWdApUY_8mHrUW3kjEjjF34Io,7876
@@ -41,31 +41,39 @@ abstract_utilities/env_utils/imports/imports.py,sha256=ZrGEf-J2lyVbb4MrNBX3DwHR0
 abstract_utilities/env_utils/imports/utils.py,sha256=oB7WhIm_-cHLrUHRXypZGCdWUtNRyePaVO5_kq5Cv84,4490
 abstract_utilities/file_utils/__init__.py,sha256=I4md5xU5nuBuKyxumvKmnrR0-UgBePX9QfY-QNS-Zso,101
 abstract_utilities/file_utils/req.py,sha256=CsdGHAWIHOLqjzyoOSZ7XYbNciVYnTgaUs5qOCHttE0,11837
-abstract_utilities/file_utils/file_utils/__init__.py,sha256=
+abstract_utilities/file_utils/file_utils/__init__.py,sha256=WOsWvRf7lomRoS80xT1n-R0jvbyHsxPXMsdoPAN62cc,215
 abstract_utilities/file_utils/file_utils/file_filters.py,sha256=khfbonAPEAhW1wxfFo0I4dawYPCrIKEjNc7VKb1RvzA,3437
 abstract_utilities/file_utils/file_utils/file_reader.py,sha256=2MRj2PGKq4C-iKL8dmhHwWnhmA8GPVsNaWkTREOF9vo,24545
-abstract_utilities/file_utils/file_utils/file_utils.py,sha256=
+abstract_utilities/file_utils/file_utils/file_utils.py,sha256=tVBhh1ZQYK-Iq-x5JL4ScS8rYQ7AOP8ezIDFgYs86Ds,7705
 abstract_utilities/file_utils/file_utils/filter_params.py,sha256=NF692W0cBhEsbtmaVzb8EKMAasasHDElSRaC9fnzYwE,3382
-abstract_utilities/file_utils/file_utils/find_collect.py,sha256=
-abstract_utilities/file_utils/file_utils/imports.py,sha256=
+abstract_utilities/file_utils/file_utils/find_collect.py,sha256=bPM7EDrNHlvwZx7CP3AWPNNybzE3AXCSfoMwh6uDHWo,4703
+abstract_utilities/file_utils/file_utils/imports.py,sha256=rF3zdWY98UKuSPwzEzhG0H4cfIVjLqCW3FwsGqFeakE,319
 abstract_utilities/file_utils/file_utils/map_utils.py,sha256=B_MlkLP8s-o0yU0R3Y2LcTpBntBzysJO18qq181xz9c,1043
 abstract_utilities/file_utils/file_utils/pdf_utils.py,sha256=D_wg8h-SapCvqinxRIKxMri1jWZNpr5jGvKq9EJePfY,10335
-abstract_utilities/file_utils/
+abstract_utilities/file_utils/file_utils/type_checks.py,sha256=JnmlcWIQO8d_sjoreVfWh6fCSM1Q0foyFptiH82jjYA,2852
+abstract_utilities/file_utils/file_utils/imports/__init__.py,sha256=Mip2n-nY1PLvaWtwTeVs0rdVd6J3_jfwKmIyGYxf9Vo,72
+abstract_utilities/file_utils/file_utils/imports/constants.py,sha256=eIeSj48vtfa8CTYKuuZXbgJQepBrMracfVguaSuN41U,1626
+abstract_utilities/file_utils/file_utils/imports/file_functions.py,sha256=25yta20DDsdgenXYjpm4Ma3Fd6WK9Q16EjyhcZubDFg,291
+abstract_utilities/file_utils/file_utils/imports/imports.py,sha256=nLtDCj-E9htQ1rbbISevHSqviUGCxgCoTZ7KTAQrCpU,1488
+abstract_utilities/file_utils/file_utils/imports/module_imports.py,sha256=BG_eTb_lnOOHCye_aXYc0CoESzFtXw_8qMMEH3CPLmU,546
+abstract_utilities/file_utils/imports/__init__.py,sha256=PRJBiiPT7oElD3RvHTW80Xd5rIIMdzGN23FD5IkszDI,101
 abstract_utilities/file_utils/imports/classes.py,sha256=zw16D_h5AxJiks4ydbqkWkXVfvgmE-BpiC4eKInY_KI,12259
+abstract_utilities/file_utils/imports/clean_imps.py,sha256=DB_NEKR8YLla5qCkTMuNscMoTnipEm3nCWnaH8wqQDc,5287
 abstract_utilities/file_utils/imports/constants.py,sha256=eIeSj48vtfa8CTYKuuZXbgJQepBrMracfVguaSuN41U,1626
-abstract_utilities/file_utils/imports/file_functions.py,sha256=
-abstract_utilities/file_utils/imports/imports.py,sha256=
-abstract_utilities/file_utils/imports/module_imports.py,sha256=
+abstract_utilities/file_utils/imports/file_functions.py,sha256=brQha7TV9DaJe-hZSuHoFZBUI_45hxrGOIBTAojPWU8,297
+abstract_utilities/file_utils/imports/imports.py,sha256=eDvLMtTQlExI1z7ddnPYoXWyrYtp48JuiAzBPqL5wWA,2057
+abstract_utilities/file_utils/imports/module_imports.py,sha256=BROjglIl217zEuU0kwRilkK9vLrYC9e44AS5HS8HwD0,513
 abstract_utilities/robust_reader/__init__.py,sha256=4i6qW4lwhdYuoO5-p9Xbt8Lpmr3hzCh9Rgb9y19QJwk,28
 abstract_utilities/robust_reader/file_reader2.py,sha256=U-5opkLu-bct091Eb-5CiNBTf0UFoSITYi8zR-Sz38w,25077
 abstract_utilities/robust_reader/file_readers.py,sha256=U-5opkLu-bct091Eb-5CiNBTf0UFoSITYi8zR-Sz38w,25077
 abstract_utilities/robust_reader/sadfsad.py,sha256=gH2ebI9KfiYFv78jzPGk8WPST_FGtojnd_yDwrcvQoM,25282
 abstract_utilities/robust_reader/imports/__init__.py,sha256=mp6T1rBZdOzj0IDkvlteTqtyKJiOZaJTlgrjTdHO1Qw,23
-abstract_utilities/robust_reader/imports/imports.py,sha256=
+abstract_utilities/robust_reader/imports/imports.py,sha256=4pqr_mOX7s5YFC3UekLlA7eakBgq_wACq2FYBlwCiec,106
 abstract_utilities/robust_readers/__init__.py,sha256=_1vhOG1FJnfrRK0ubkT1v6U6udHMIk3qiy1qajL1IiM,55
 abstract_utilities/robust_readers/imports.py,sha256=FtNxdPoLeeNycDnl-6rBGxBfYjhQ7VhmI5guj8XKFcU,355
 abstract_utilities/robust_readers/initFuncGen.py,sha256=nrQn1KpSlPNKoOngN1uizVwNMA4llrcd8aGKqlzpXzI,5436
-abstract_utilities/robust_readers/import_utils/__init__.py,sha256=
+abstract_utilities/robust_readers/import_utils/__init__.py,sha256=0XaHXUzvgMjSV-4VXcBB-sLcXzL3U3ssHZ95YkvgS9w,195
+abstract_utilities/robust_readers/import_utils/clean_imports.py,sha256=eQo7UvO9jiMY7ncFpHyT-BFLNvov6QA4IRoP7MITuls,7228
 abstract_utilities/robust_readers/import_utils/dot_utils.py,sha256=pmwnY461mOnDjIjgHD6H9MhQXFaF-q8kWerJDgJ1DuI,2364
 abstract_utilities/robust_readers/import_utils/function_utils.py,sha256=Q9NKvRov3uAaz2Aal3d6fb_opWNXHF9C8GSKOjgfO8Y,1622
 abstract_utilities/robust_readers/import_utils/import_utils.py,sha256=l0GYdtj5FEYX2yknL-8ru7_U2Sp9Hi1NpegqWPLRMc8,11705
@@ -78,7 +86,7 @@ abstract_utilities/ssh_utils/classes.py,sha256=3Q9BfLpyagNFYyiF4bt-5UCezeUJv9NK9
 abstract_utilities/ssh_utils/imports.py,sha256=oX8WAv-pkhizzko_h3fIUp9Vhsse4nR7RN2vwONxIx0,317
 abstract_utilities/ssh_utils/pexpect_utils.py,sha256=JBdOIXBTXAqE5TrsFjmPWJgwSaWyRJN8rbJ6y3_zKPY,10556
 abstract_utilities/ssh_utils/utils.py,sha256=smUWAx3nW1h0etTndJ_te9bkUX5YzQ8kYd9_gD1TXLk,4882
-abstract_utilities-0.2.2.
-abstract_utilities-0.2.2.
-abstract_utilities-0.2.2.
-abstract_utilities-0.2.2.
+abstract_utilities-0.2.2.486.dist-info/METADATA,sha256=0EqZTJGtthi0euiqfGHYcJ14puRMb_4ov_VZHODTT1Q,28108
+abstract_utilities-0.2.2.486.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+abstract_utilities-0.2.2.486.dist-info/top_level.txt,sha256=BF0GZ0xVFfN1K-hFIWPO3viNsOs1sSF86n1vHBg39FM,19
+abstract_utilities-0.2.2.486.dist-info/RECORD,,
{abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/WHEEL
RENAMED
File without changes
{abstract_utilities-0.2.2.453.dist-info → abstract_utilities-0.2.2.486.dist-info}/top_level.txt
RENAMED
File without changes