abstract-utilities 0.2.2.496__py3-none-any.whl → 0.2.2.504__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstract_utilities/__init__.py +5 -9
- abstract_utilities/class_utils/__init__.py +7 -0
- abstract_utilities/class_utils/abstract_classes.py +74 -0
- abstract_utilities/class_utils/caller_utils.py +35 -0
- abstract_utilities/class_utils/class_utils.py +109 -0
- abstract_utilities/class_utils/function_utils.py +153 -0
- abstract_utilities/class_utils/global_utils.py +56 -0
- abstract_utilities/class_utils/imports/__init__.py +2 -0
- abstract_utilities/class_utils/imports/imports.py +2 -0
- abstract_utilities/class_utils/imports/utils.py +40 -0
- abstract_utilities/class_utils/module_utils.py +63 -0
- abstract_utilities/env_utils/imports/imports.py +3 -2
- abstract_utilities/error_utils/__init__.py +2 -0
- abstract_utilities/error_utils/error_utils.py +25 -0
- abstract_utilities/error_utils/imports/__init__.py +2 -0
- abstract_utilities/error_utils/imports/imports.py +1 -0
- abstract_utilities/error_utils/imports/module_imports.py +1 -0
- abstract_utilities/file_utils/imports/imports.py +3 -18
- abstract_utilities/file_utils/imports/module_imports.py +3 -6
- abstract_utilities/file_utils/src/type_checks.py +0 -1
- abstract_utilities/hash_utils/__init__.py +2 -0
- abstract_utilities/hash_utils/hash_utils.py +5 -0
- abstract_utilities/hash_utils/imports/__init__.py +2 -0
- abstract_utilities/hash_utils/imports/imports.py +1 -0
- abstract_utilities/hash_utils/imports/module_imports.py +0 -0
- abstract_utilities/history_utils/__init__.py +2 -0
- abstract_utilities/history_utils/history_utils.py +37 -0
- abstract_utilities/history_utils/imports/__init__.py +2 -0
- abstract_utilities/history_utils/imports/imports.py +1 -0
- abstract_utilities/history_utils/imports/module_imports.py +0 -0
- abstract_utilities/import_utils/imports/imports.py +1 -1
- abstract_utilities/import_utils/imports/module_imports.py +1 -1
- abstract_utilities/import_utils/src/__init__.py +1 -1
- abstract_utilities/import_utils/src/clean_imports.py +31 -5
- abstract_utilities/import_utils/src/dot_utils.py +9 -0
- abstract_utilities/import_utils/src/package_utilss/__init__.py +139 -0
- abstract_utilities/import_utils/src/package_utilss/context_utils.py +27 -0
- abstract_utilities/import_utils/src/package_utilss/import_collectors.py +53 -0
- abstract_utilities/import_utils/src/package_utilss/path_utils.py +28 -0
- abstract_utilities/import_utils/src/package_utilss/safe_import.py +27 -0
- abstract_utilities/import_utils/src/pkg_utils.py +140 -0
- abstract_utilities/imports.py +18 -0
- abstract_utilities/json_utils/__init__.py +2 -0
- abstract_utilities/json_utils/imports/__init__.py +2 -0
- abstract_utilities/json_utils/imports/imports.py +2 -0
- abstract_utilities/json_utils/imports/module_imports.py +5 -0
- abstract_utilities/json_utils/json_utils.py +743 -0
- abstract_utilities/list_utils/__init__.py +2 -0
- abstract_utilities/list_utils/imports/__init__.py +2 -0
- abstract_utilities/list_utils/imports/imports.py +1 -0
- abstract_utilities/list_utils/imports/module_imports.py +0 -0
- abstract_utilities/list_utils/list_utils.py +199 -0
- abstract_utilities/log_utils/__init__.py +5 -0
- abstract_utilities/log_utils/abstractLogManager.py +64 -0
- abstract_utilities/log_utils/call_response.py +68 -0
- abstract_utilities/log_utils/imports/__init__.py +2 -0
- abstract_utilities/log_utils/imports/imports.py +7 -0
- abstract_utilities/log_utils/imports/module_imports.py +2 -0
- abstract_utilities/log_utils/log_file.py +56 -0
- abstract_utilities/log_utils/logger_callable.py +49 -0
- abstract_utilities/math_utils/__init__.py +2 -0
- abstract_utilities/math_utils/imports/__init__.py +2 -0
- abstract_utilities/math_utils/imports/imports.py +2 -0
- abstract_utilities/math_utils/imports/module_imports.py +1 -0
- abstract_utilities/math_utils/math_utils.py +208 -0
- abstract_utilities/parse_utils/__init__.py +2 -0
- abstract_utilities/parse_utils/imports/__init__.py +3 -0
- abstract_utilities/parse_utils/imports/constants.py +10 -0
- abstract_utilities/parse_utils/imports/imports.py +2 -0
- abstract_utilities/parse_utils/imports/module_imports.py +4 -0
- abstract_utilities/parse_utils/parse_utils.py +516 -0
- abstract_utilities/path_utils/__init__.py +2 -0
- abstract_utilities/path_utils/imports/__init__.py +2 -0
- abstract_utilities/path_utils/imports/imports.py +1 -0
- abstract_utilities/path_utils/imports/module_imports.py +6 -0
- abstract_utilities/path_utils/path_utils.py +715 -0
- abstract_utilities/path_utils.py +94 -2
- abstract_utilities/read_write_utils/__init__.py +1 -0
- abstract_utilities/read_write_utils/imports/__init__.py +2 -0
- abstract_utilities/read_write_utils/imports/imports.py +2 -0
- abstract_utilities/read_write_utils/imports/module_imports.py +5 -0
- abstract_utilities/read_write_utils/read_write_utils.py +338 -0
- abstract_utilities/read_write_utils.py +2 -4
- abstract_utilities/safe_utils/__init__.py +2 -0
- abstract_utilities/safe_utils/imports/__init__.py +3 -0
- abstract_utilities/safe_utils/imports/imports.py +1 -0
- abstract_utilities/safe_utils/imports/module_imports.py +2 -0
- abstract_utilities/safe_utils/safe_utils.py +130 -0
- abstract_utilities/ssh_utils/__init__.py +2 -1
- abstract_utilities/ssh_utils/classes.py +0 -1
- abstract_utilities/ssh_utils/cmd_utils.py +207 -0
- abstract_utilities/ssh_utils/imports/__init__.py +3 -0
- abstract_utilities/ssh_utils/imports/imports.py +5 -0
- abstract_utilities/ssh_utils/imports/module_imports.py +5 -0
- abstract_utilities/ssh_utils/imports/utils.py +189 -0
- abstract_utilities/ssh_utils/pexpect_utils.py +11 -18
- abstract_utilities/string_utils/__init__.py +4 -0
- abstract_utilities/string_utils/clean_utils.py +28 -0
- abstract_utilities/string_utils/eat_utils.py +103 -0
- abstract_utilities/string_utils/imports/__init__.py +3 -0
- abstract_utilities/string_utils/imports/imports.py +2 -0
- abstract_utilities/string_utils/imports/module_imports.py +2 -0
- abstract_utilities/string_utils/imports/utils.py +81 -0
- abstract_utilities/string_utils/replace_utils.py +27 -0
- abstract_utilities/thread_utils/__init__.py +2 -0
- abstract_utilities/thread_utils/imports/__init__.py +2 -0
- abstract_utilities/thread_utils/imports/imports.py +2 -0
- abstract_utilities/thread_utils/imports/module_imports.py +2 -0
- abstract_utilities/thread_utils/thread_utils.py +140 -0
- abstract_utilities/time_utils/__init__.py +2 -0
- abstract_utilities/time_utils/imports/__init__.py +2 -0
- abstract_utilities/time_utils/imports/imports.py +3 -0
- abstract_utilities/time_utils/imports/module_imports.py +1 -0
- abstract_utilities/time_utils/time_utils.py +392 -0
- abstract_utilities/type_utils/__init__.py +3 -0
- abstract_utilities/type_utils/alpha_utils.py +59 -0
- abstract_utilities/type_utils/imports/__init__.py +2 -0
- abstract_utilities/type_utils/imports/imports.py +4 -0
- abstract_utilities/type_utils/imports/module_imports.py +1 -0
- abstract_utilities/type_utils/num_utils.py +19 -0
- abstract_utilities/type_utils/type_utils.py +981 -0
- {abstract_utilities-0.2.2.496.dist-info → abstract_utilities-0.2.2.504.dist-info}/METADATA +1 -1
- abstract_utilities-0.2.2.504.dist-info/RECORD +229 -0
- abstract_utilities-0.2.2.496.dist-info/RECORD +0 -123
- {abstract_utilities-0.2.2.496.dist-info → abstract_utilities-0.2.2.504.dist-info}/WHEEL +0 -0
- {abstract_utilities-0.2.2.496.dist-info → abstract_utilities-0.2.2.504.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,743 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
json_utils.py
|
|
4
|
+
|
|
5
|
+
This script is a utility module providing functions for handling JSON data. It includes functionalities like:
|
|
6
|
+
1. Converting JSON strings to dictionaries and vice versa.
|
|
7
|
+
2. Merging, adding to, updating, and removing keys from dictionaries.
|
|
8
|
+
3. Retrieving keys, values, specific items, and key-value pairs from dictionaries.
|
|
9
|
+
4. Recursively displaying values of nested JSON data structures with indentation.
|
|
10
|
+
5. Loading from and saving dictionaries to JSON files.
|
|
11
|
+
6. Validating and cleaning up JSON strings.
|
|
12
|
+
7. Searching and modifying nested JSON structures based on specific keys, values, or paths.
|
|
13
|
+
8. Inverting JSON data structures.
|
|
14
|
+
9. Creating and reading from JSON files.
|
|
15
|
+
|
|
16
|
+
Each function is documented with Python docstrings for detailed usage instructions.
|
|
17
|
+
|
|
18
|
+
This module is part of the `abstract_utilities` package.
|
|
19
|
+
|
|
20
|
+
Author: putkoff
|
|
21
|
+
Date: 05/31/2023
|
|
22
|
+
Version: 0.1.2
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
from .imports import *
|
|
26
|
+
|
|
27
|
+
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
def convert_and_normalize_values(values):
    """Yield each value normalized for case-insensitive comparison.

    Numbers pass through unchanged; everything else (including strings)
    is stringified and lower-cased.
    """
    for item in values:
        if isinstance(item, (int, float)):
            yield item
        else:
            # str(s).lower() == s.lower() for strings, so one branch
            # covers both the string and the fallback case.
            yield str(item).lower()
|
|
37
|
+
def json_key_or_default(json_data, key, default_value):
    """Return json_data[key] when json_data (after safe_json_loads)
    is a dict containing key; otherwise return default_value."""
    parsed = safe_json_loads(json_data)
    if isinstance(parsed, dict) and key in parsed:
        return parsed[key]
    return default_value
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def is_valid_json(json_string: str) -> bool:
    """
    Checks whether a given string is a valid JSON string.

    Args:
        json_string (str): The string to check.

    Returns:
        bool: True if the string is valid JSON, False otherwise.
    """
    try:
        # The parsed object itself is not needed; the original bound it
        # to an unused local.
        json.loads(json_string)
        return True
    except (json.JSONDecodeError, TypeError):
        # TypeError added: a validity predicate should answer False for
        # non-string input (e.g. None) rather than raise.
        return False
|
|
60
|
+
def get_error_msg(error_msg, default_error_msg):
    """Return error_msg when truthy, else fall back to default_error_msg."""
    if error_msg:
        return error_msg
    return default_error_msg
|
|
62
|
+
def validate_file_path(file_path, is_read=False):
    """Return file_path when it is a usable path string, else None.

    A path is usable when it already exists (file or directory), or —
    for writes (is_read=False) — when its parent directory exists so
    the file can be created.
    """
    if not (file_path and isinstance(file_path, str)):
        return None
    if os.path.isfile(file_path) or os.path.isdir(file_path):
        return file_path
    if not is_read:
        # Bug fix: a bare filename has an empty dirname, which
        # os.path.isdir rejects even though the current directory is a
        # perfectly valid write target; treat '' as '.'.
        dirname = os.path.dirname(file_path) or '.'
        if os.path.isdir(dirname):
            return file_path
    return None
|
|
70
|
+
def get_file_path(*args, is_read=False, **kwargs):
    """Return the first candidate (positional args first, then keyword
    values) accepted by validate_file_path; None when nothing passes."""
    for candidate in list(args) + list(kwargs.values()):
        if validate_file_path(candidate, is_read=is_read):
            return candidate
    return None
|
|
78
|
+
def write_file(data, file_path):
    """Write str(data) to file_path as UTF-8 text, overwriting it."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(str(data))
|
|
81
|
+
def write_json(data, file_path, ensure_ascii=False, indent=4):
    """Serialize data as JSON to file_path (UTF-8, indented)."""
    with open(file_path, 'w', encoding='utf-8') as out:
        json.dump(data, out, ensure_ascii=ensure_ascii, indent=indent)
|
|
84
|
+
def safe_write_json(data, file_path, ensure_ascii=False, indent=4):
    """Persist data to file_path: JSON for dict/list/tuple payloads,
    plain str() text for everything else."""
    if not isinstance(data, (dict, list, tuple)):
        write_file(data, file_path)
    else:
        write_json(data, file_path, ensure_ascii=ensure_ascii, indent=indent)
|
|
89
|
+
def read_json(file_path):
    """Load and return the JSON document stored at file_path (UTF-8)."""
    with open(file_path, 'r', encoding='utf-8') as src:
        return json.load(src)
|
|
92
|
+
def output_read_write_error(e, function_name, file_path, valid_file_path=None, data=None, is_read=False):
    """Log a formatted error for a failed read/write helper.

    Args:
        e: The caught exception.
        function_name (str): Name of the helper that failed.
        file_path: The path the helper was operating on.
        valid_file_path: The validated path (or None if none was found).
        data: The payload being written (reported for writes only).
        is_read (bool): True for read failures (suppresses data output).
    """
    error_text = f"Error in {function_name};{e}\nFile path: {file_path} "
    # Idiom fix: `is None` instead of `== None`.
    # NOTE(review): this only reports the valid path when it is None
    # (i.e. documents that validation found nothing); confirm the
    # condition is not meant to be inverted.
    if valid_file_path is None:
        error_text += f"\nValid File path: {valid_file_path} "
    if not is_read:
        error_text += f"\nData: {data} "
    logger.error(error_text)
|
|
101
|
+
def safe_dump_to_file(data, file_path=None, ensure_ascii=False, indent=4, *args, **kwargs):
    """Best-effort persist of `data` to `file_path`.

    Validates the path candidates via get_file_path, tolerates the
    caller swapping (data, file_path) positionally, writes via
    safe_write_json, and logs (never raises) on failure.
    """
    is_read = False
    file_args = [file_path, data]
    # Both slots are offered as path candidates on purpose — see swap
    # handling below.
    valid_file_path = get_file_path(*file_args, *args, is_read=is_read, **kwargs)
    if valid_file_path:
        file_path = valid_file_path
        # If the validated path was actually the `data` argument, the
        # caller presumably passed (path, data) swapped; treat the
        # original `file_path` slot as the payload — TODO confirm.
        if file_path == file_args[-1]:
            data = file_args[0]
    if file_path is not None and data is not None:
        try:
            safe_write_json(data, file_path, ensure_ascii=ensure_ascii, indent=indent)
        except Exception as e:
            function_name = 'safe_dump_to_file'
            output_read_write_error(e, function_name, file_path, valid_file_path, is_read=is_read)
    else:
        logger.error("file_path and data must be provided to safe_dump_to_file")
|
|
118
|
+
|
|
119
|
+
def safe_read_from_json(*args, **kwargs):
    """Best-effort JSON read.

    Resolves the first valid path among args/kwargs via get_file_path,
    then reads it with read_json. Failures are logged (never raised)
    and None is returned.
    """
    is_read = True
    # Bug fix: the original did `file_path = args[0]` unconditionally,
    # raising IndexError when the path was supplied via kwargs only.
    file_path = args[0] if args else None
    valid_file_path = get_file_path(*args, is_read=is_read, **kwargs)
    if valid_file_path:
        file_path = valid_file_path
    try:
        return read_json(file_path)
    except Exception as e:
        function_name = 'safe_read_from_json'
        output_read_write_error(e, function_name, file_path, valid_file_path, is_read=is_read)
    return None
|
|
131
|
+
|
|
132
|
+
def create_and_read_json(*args, **kwargs) -> dict:
    """
    Create a JSON file if it does not exist, then read from it.

    Args:
        file_path (str): The path of the file to create and read from.
        json_data (dict): The content to write to the file if it does not exist.

    Returns:
        dict: The contents of the JSON file.
    """
    if not get_file_path(*args, is_read=True, **kwargs):
        # Nothing readable yet — materialize the file first.
        safe_dump_to_file(*args, **kwargs)
    return safe_read_from_json(*args, **kwargs)
|
|
148
|
+
# Backward-compatibility aliases: each of these historical entry
# points simply forwards to safe_read_from_json (readers) or
# safe_dump_to_file (writers).
def read_from_json(*args, **kwargs):
    # Alias for safe_read_from_json.
    return safe_read_from_json(*args, **kwargs)
def safe_load_from_json(*args, **kwargs):
    # Alias for safe_read_from_json.
    return safe_read_from_json(*args, **kwargs)
def safe_load_from_file(*args, **kwargs):
    # Alias for safe_read_from_json.
    return safe_read_from_json(*args, **kwargs)
def safe_read_from_file(*args, **kwargs):
    # Alias for safe_read_from_json.
    return safe_read_from_json(*args, **kwargs)
def safe_json_reads(*args, **kwargs):
    # Alias for safe_read_from_json.
    return safe_read_from_json(*args, **kwargs)

def safe_dump_to_json(*args, **kwargs):
    # Alias for safe_dump_to_file.
    return safe_dump_to_file(*args, **kwargs)
def safe_write_to_json(*args, **kwargs):
    # Alias for safe_dump_to_file.
    return safe_dump_to_file(*args, **kwargs)
def safe_write_to_file(*args, **kwargs):
    # Alias for safe_dump_to_file.
    return safe_dump_to_file(*args, **kwargs)
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def find_keys(data, target_keys):
    """Collect every value stored under any key in target_keys,
    searching nested dicts/lists recursively; returns the matches in
    discovery order."""
    found = []

    def _walk(node):
        if isinstance(node, dict):
            for key, value in node.items():
                if key in target_keys:
                    found.append(value)
                _walk(value)
        elif isinstance(node, list):
            for element in node:
                _walk(element)

    _walk(data)
    return found
|
|
182
|
+
def try_json_dumps_spec(obj, logger=True, level='error', file_path=None, **kwargs):
    """
    Attempts to serialize an object to JSON using json.dumps or json.dump.

    Args:
        obj: The Python object to serialize (e.g., dict, list, str, int, etc.).
        logger: Truthy to log serialization failures (default: True).
        level: Logging level name for errors (default: 'error').
        file_path: If provided, writes JSON to this file using json.dump.
        **kwargs: Additional arguments to pass to json.dumps or json.dump
            (e.g., indent, sort_keys).

    Returns:
        str: The JSON-serialized string if file_path is None and serialization succeeds.
        None: If serialization fails or file_path is provided (in which case it writes to the file).
    """
    try:
        if file_path:
            # Use json.dump to write to a file
            with open(file_path, 'w', encoding='utf-8') as f:
                json.dump(obj, f, **kwargs)
            return None
        # Bug fix: kwargs (indent, sort_keys, ...) were previously
        # dropped on the string path.
        return json.dumps(obj, **kwargs)
    except (TypeError, OverflowError, ValueError) as e:
        # Bug fix: the original referenced undefined names
        # (log_callable / print_or_log); route through logging instead.
        if logger:
            logging.getLogger(__name__).log(
                getattr(logging, str(level).upper(), logging.ERROR),
                "Exception in json.dumps/dump: %s", e)
        return None
|
|
214
|
+
def run_it(endpoint, **kwargs):
    """Send kwargs as the request payload to `endpoint` via
    make_request_link and return the response."""
    return make_request_link('typicaly', endpoint, data=kwargs)
|
|
217
|
+
def get_logNone(e):
    """Log the exception/message `e` and return None — the shared
    failure path for the try_json_* helpers.

    Bug fix: the original called `logger(...)`, but Logger instances
    are not callable (TypeError); log through the module logger's
    error() method instead.
    """
    logging.getLogger(__name__).error(f"{e}")
    return None
|
|
220
|
+
def try_json_loads(data):
    """Parse a JSON string, returning None instead of raising on any
    failure (invalid JSON, wrong input type, ...)."""
    try:
        return json.loads(data)
    except Exception:
        return None
|
|
226
|
+
def try_json_load(file):
    """json.load from an open file object; on any failure the error is
    logged via get_logNone and None is returned."""
    try:
        return json.load(file)
    except Exception as e:
        return get_logNone(e)
|
|
232
|
+
def try_json_dump(file):
    """Serialize the given object to a JSON string; on failure the
    error is logged via get_logNone and None is returned.

    Bug fix: the original called json.dump(file) without a file
    object, which unconditionally raised TypeError (json.dump requires
    an fp argument); json.dumps is the only sensible single-argument
    serialization here — NOTE(review): confirm intent.
    """
    try:
        return json.dumps(file)
    except Exception as e:
        return get_logNone(e)
|
|
238
|
+
def try_json_dumps(data):
    """json.dumps wrapper: returns the JSON string, or logs via
    get_logNone and returns None when serialization fails."""
    try:
        return json.dumps(data)
    except Exception as e:
        return get_logNone(e)
|
|
244
|
+
def safe_json_loads(data):
    """Parse `data` as JSON unless it is already a dict; when parsing
    fails (or yields a falsy value) the input is returned unchanged."""
    if isinstance(data, dict):
        return data
    return try_json_loads(data) or data
|
|
248
|
+
def safe_json_load(file):
    """json.load via try_json_load, falling back to the file object
    itself when loading fails or yields a falsy value."""
    return try_json_load(file) or file
|
|
251
|
+
def safe_json_dump(file):
    """Serialize via try_json_dump, falling back to the original
    object when serialization fails or yields a falsy value."""
    return try_json_dump(file) or file
|
|
254
|
+
def safe_json_dumps(data):
    """json.dumps via try_json_dumps, returning the input unchanged
    when serialization fails or yields a falsy result."""
    return try_json_dumps(data) or data
|
|
257
|
+
def unified_json_loader(file_path, default_value=None, encoding='utf-8'):
    """Load a JSON dict from file_path with two attempts — json.load on
    the open file, then json.loads on the raw text — returning
    default_value when neither yields a dict.

    Note: only dict results are accepted; top-level lists/scalars fall
    through to default_value.
    """
    # Try to load from the file
    with open(file_path, 'r', encoding=encoding) as file:
        content = all_try(data=file, function=try_json_load, error_value=json.JSONDecodeError, error=False)
    if isinstance(content, dict):
        return content
    # Try to load from the file as a string
    with open(file_path, 'r', encoding=encoding) as file:
        content_str = file.read()
        content = all_try(data=content_str, function=try_json_loads, error_value=json.JSONDecodeError, error=False)
    if isinstance(content, dict):
        return content
    print(f"Error reading JSON from '{file_path}'.")
    return default_value
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def get_key_values_from_path(json_data, path):
    """Return the keys of the dict reached via `path`.

    First tries get_value_from_path on path[:-1] (keys of the parent
    container); if that is not a dict, falls back to manually walking
    the FULL path, opportunistically parsing JSON-encoded strings
    along the way. NOTE(review): the two branches resolve different
    depths (parent vs. full path) — confirm this asymmetry is intended.
    Returns None when the path cannot be resolved to a dict.
    """
    try_path = get_value_from_path(json_data, path[:-1])
    if isinstance(try_path, dict):
        return list(try_path.keys())

    current_data = json_data
    for step in path:
        try:
            current_data = current_data[step]
            if isinstance(current_data, str):
                # A string node may itself be JSON text.
                try:
                    current_data = json.loads(current_data)
                except json.JSONDecodeError:
                    pass
        except (TypeError, KeyError, IndexError):
            # Step not applicable to the current node — unresolvable.
            return None

    if isinstance(current_data, dict):
        return list(current_data.keys())
    else:
        return None
|
|
298
|
+
def convert_to_json(obj):
    """Coerce obj toward a dict: dicts pass through, strings are
    parsed via safe_json_loads, anything else yields None."""
    if isinstance(obj, dict):
        return obj
    return safe_json_loads(obj) if isinstance(obj, str) else None
|
|
304
|
+
def get_any_key(data, key):
    """Resolve the first path that leads to `key` anywhere inside the
    (possibly JSON-encoded) structure and return the value there.
    When no path is found, the empty path list is returned."""
    paths = find_paths_to_key(safe_json_loads(data), key)
    if not paths:
        return paths
    value = safe_json_loads(data)
    for step in paths[0]:
        value = safe_json_loads(value[step])
    return value
|
|
312
|
+
|
|
313
|
+
def all_try(function=None, data=None, var_data=None, error=False, error_msg=None, error_value=Exception, attach=None, attach_var_data=None):
    """Invoke `function` defensively in one of several calling shapes.

    Shapes (mutually exclusive): var_data as a kwargs dict; data as a
    single positional argument (optionally chaining .attach(...) on the
    result); or no arguments at all. On a caught `error_value`
    exception: re-raise when `error` is truthy, otherwise optionally
    report via print_error_msg and return False.

    Note: truthiness (not None-ness) selects the shape, so falsy data
    such as '' or 0 routes to the no-argument call.
    """
    try:
        if not function:
            raise ValueError("Function is required")

        if var_data and not data:
            # Keyword-style invocation: var_data is a dict of kwargs.
            result = function(**var_data)
        elif data and not var_data:
            if attach and attach_var_data:
                # Chain .attach(**attach_var_data) on the call result.
                result = function(data).attach(**attach_var_data)
            else:
                result = function(data).attach() if attach else function(data)
        elif data and var_data:
            raise ValueError("Both data and var_data cannot be provided simultaneously")
        else:
            result = function()

        return result
    except error_value as e:
        if error:
            raise e
        elif error_msg:
            print_error_msg(error_msg, f': {e}')
        return False
|
|
337
|
+
def all_try_json_loads(data, error=False, error_msg=None, error_value=(json.JSONDecodeError, TypeError)):
    """Run json.loads through all_try, trapping decode/type errors by default."""
    return all_try(
        data=data,
        function=json.loads,
        error=error,
        error_msg=error_msg,
        error_value=error_value,
    )
|
|
339
|
+
|
|
340
|
+
def safe_json_loadss(data, default_value=None, error=False, error_msg=None):
    """ Safely attempts to load a JSON string. Returns the original data or a default value if parsing fails.
    Args:
        data (str): The JSON string to parse.
        default_value (any, optional): The value to return if parsing fails. Defaults to None.
        error (bool, optional): Whether to raise an error if parsing fails. Defaults to False.
        error_msg (str, optional): The error message to display if parsing fails. Defaults to None.

    Returns:
        any: The parsed JSON object, or the original data/default value if parsing fails.

    Bug fix: the original treated any falsy parse result as failure, so
    valid JSON such as "0", "false", "{}" or "null" was discarded in
    favor of the raw input; parsing success and failure are now
    distinguished by exception, not truthiness. A falsy-but-not-None
    default_value is also honored now.
    """
    if isinstance(data, dict):
        return data
    try:
        return json.loads(data)
    except (json.JSONDecodeError, TypeError) as e:
        if error:
            raise
        if error_msg:
            print_error_msg(error_msg, f': {e}')
    return default_value if default_value is not None else data
|
|
359
|
+
def clean_invalid_newlines(json_string: str,line_replacement_value='') -> str:
|
|
360
|
+
""" Removes invalid newlines from a JSON string that are not within double quotes.
|
|
361
|
+
Args:
|
|
362
|
+
json_string (str): The JSON string containing newlines.
|
|
363
|
+
|
|
364
|
+
Returns:
|
|
365
|
+
str: The JSON string with invalid newlines removed.
|
|
366
|
+
"""
|
|
367
|
+
pattern = r'(?<!\\)\n(?!([^"]*"[^"]*")*[^"]*$)'
|
|
368
|
+
return re.sub(pattern, line_replacement_value, json_string)
|
|
369
|
+
def get_value_from_path(json_data, path, line_replacement_value='*n*'):
    """ Traverses a nested JSON object using a specified path and returns the value at the end of that path.
    Args:
        json_data (dict/list): The JSON object to traverse.
        path (list): The path to follow in the JSON object.

    Returns:
        any: The value at the end of the specified path (string leaves
        are re-parsed via read_malformed_json).
    """
    node = safe_json_loads(json_data)
    for step in path:
        node = safe_json_loads(node[step])
    if isinstance(node, str):
        node = read_malformed_json(node, line_replacement_value=line_replacement_value)
    return node
|
|
384
|
+
def find_paths_to_key(json_data, key_to_find, line_replacement_value='*n*'):
    """ Searches a nested JSON object for all paths that lead to a specified key.
    Args:
        json_data (dict/list): The JSON object to search.
        key_to_find (str): The key to search for in the JSON object.

    Returns:
        list: A list of paths (each path is a list of keys/indices) leading to the specified key.
    """
    paths = []

    def _walk(node, trail):
        if isinstance(node, dict):
            for key, value in node.items():
                extended = trail + [key]
                if key == key_to_find:
                    paths.append(extended)
                if isinstance(value, str):
                    # String values may themselves contain JSON text.
                    try:
                        _walk(read_malformed_json(value, line_replacement_value=line_replacement_value), extended)
                    except json.JSONDecodeError:
                        pass
                _walk(value, extended)
        elif isinstance(node, list):
            for index, element in enumerate(node):
                _walk(element, trail + [index])

    _walk(json_data, [])
    return paths
|
|
414
|
+
def read_malformed_json(json_string, line_replacement_value="*n*"):
    """ Attempts to parse a malformed JSON string after cleaning it.
    Args:
        json_string (str): The malformed JSON string.

    Returns:
        any: The parsed JSON object (or the cleaned input when parsing fails).

    Bug fix: the default placeholder was "*n" while every sibling and
    caller (get_any_value, find_paths_to_key, get_value_from_path)
    uses "*n*"; get_any_value relies on this default and then strips
    "*n*" back out, so the mismatch corrupted round-tripped strings.
    """
    if isinstance(json_string, str):
        json_string = clean_invalid_newlines(json_string, line_replacement_value=line_replacement_value)
    return safe_json_loads(json_string)
|
|
425
|
+
def get_any_value(json_obj, key, line_replacement_value="*n*"):
    """ Fetches the value associated with a specified key from a JSON object or file. If the provided input is a file path, it reads the file first.
    Args:
        json_obj (dict/list/str): The JSON object or file path containing the JSON object.
        key (str): The key to search for in the JSON object.

    Returns:
        any: The value associated with the specified key — a single
        value when one path matches, a list for multiple matches, or
        None when no path matches.
    """
    if isinstance(json_obj, str):
        if os.path.isfile(json_obj):
            # A path was passed instead of JSON text — read the file.
            with open(json_obj, 'r', encoding='UTF-8') as f:
                json_obj = f.read()
    json_data = read_malformed_json(json_obj)
    paths_to_value = find_paths_to_key(json_data, key)
    if not isinstance(paths_to_value, list):
        paths_to_value = [paths_to_value]
    for i, path_to_value in enumerate(paths_to_value):
        # Resolve each discovered path to its concrete value.
        paths_to_value[i] = get_value_from_path(json_data, path_to_value)
        if isinstance(paths_to_value[i], str):
            # Restore the newlines that clean-up replaced with the
            # placeholder string.
            paths_to_value[i] = paths_to_value[i].replace(line_replacement_value, '\n')
    # Collapse the result: [] -> None, single match -> bare value.
    if isinstance(paths_to_value, list):
        if len(paths_to_value) == 0:
            paths_to_value = None
        elif len(paths_to_value) == 1:
            paths_to_value = paths_to_value[0]
    return paths_to_value
|
|
452
|
+
def format_json_key_values(json_data, indent=0):
    """Render a JSON object as an indented "key:" outline with one
    scalar per line. Accepts a dict or a JSON string; returns
    "Invalid JSON string" when a string argument does not parse."""
    # Check if the input is a string and try to parse it as JSON
    if isinstance(json_data, str):
        try:
            json_data = json.loads(json_data)
        except json.JSONDecodeError:
            return "Invalid JSON string"

    def render(value, depth):
        # Dicts recurse at the same depth; list elements gain one level;
        # scalars become a single indented line.
        if isinstance(value, dict):
            return format_json_key_values(value, depth)
        if isinstance(value, list):
            return "".join(render(element, depth + 1) for element in value)
        return ' ' * depth + str(value) + "\n"

    pieces = []
    for key, value in json_data.items():
        pieces.append(' ' * indent + f"{key}:\n")
        pieces.append(render(value, indent))
    return "".join(pieces)
|
|
487
|
+
|
|
488
|
+
def find_matching_dicts(dict_objs=None, keys=None, values=None):
    """Return every dict in dict_objs where, for each i, dict_obj
    contains keys[i] with a value equal to values[i].

    Args:
        dict_objs (dict or list): Candidate dict(s) to filter.
        keys (str or list): Key(s) that must be present.
        values (str or list): Required value(s), positionally paired with keys.

    Returns:
        list: The dicts satisfying every key/value requirement.
    """
    values = make_list(values) if values is not None else []
    dict_objs = make_list(dict_objs) if dict_objs is not None else [{}]
    keys = make_list(keys) if keys is not None else []
    found_dicts = []
    for dict_obj in dict_objs:
        # Bug fix: build a fresh flag list for each candidate — the
        # original aliased one shared list, so matches from earlier
        # dicts leaked into later ones.
        bool_list = [False] * len(keys)
        for i, key in enumerate(keys):
            if key in dict_obj and dict_obj[key] == values[i]:
                bool_list[i] = True
        if False not in bool_list:
            found_dicts.append(dict_obj)
    return found_dicts
|
|
503
|
+
|
|
504
|
+
def closest_dictionary(dict_objs:dict=None,values:(str or list)=None):
    """Return the first dict whose values contain a fuzzy match (via
    get_closest_match_from_list) for every entry of `values`; None when
    no dict qualifies.

    NOTE(review): this definition is shadowed by a second
    `closest_dictionary` defined later in the module, so it is dead
    code at import time.
    NOTE(review): `bool_list = bool_list_og` aliases one shared list
    across iterations, so match flags leak between candidate dicts.
    """
    values = make_list(values) if values is not None else []
    dict_objs = make_list(dict_objs) if dict_objs is not None else [{}]
    # Pool of every value across all candidates, used as the fuzzy-match corpus.
    total_values = [value for dict_obj in dict_objs for value in dict_obj.values()]
    matched_objs = [get_closest_match_from_list(value, total_values) for value in values]
    bool_list_og = [False for i in range(len(matched_objs))]
    for dict_obj in dict_objs:
        bool_list = bool_list_og
        for key, key_value in dict_obj.items():
            for i,matched_obj in enumerate(matched_objs):
                if key_value.lower() == matched_obj.lower():
                    bool_list[i]=True
        if False not in bool_list:
            return dict_obj
    return None
|
|
519
|
+
|
|
520
|
+
def get_dict_from_string(string, file_path=None):
    """Extract the first balanced {...} block from `string`, parse it
    with safe_json_loads, optionally persist it to file_path, and
    return the parsed data. Returns None when no balanced block exists."""
    depth = 0
    start = None
    for index, char in enumerate(string):
        if char == '{':
            depth += 1
            if start is None:
                start = index
        elif char == '}':
            depth -= 1
            if depth == 0 and start is not None:
                parsed = safe_json_loads(string[start:index + 1])
                if file_path:
                    safe_dump_to_file(file_path=makeAllDirs(file_path), data=parsed)
                return parsed
    return None
|
|
536
|
+
|
|
537
|
+
def closest_dictionary(dict_objs=None, values=None):
    """Return the first dict whose case-normalized values include a
    fuzzy match (get_closest_match_from_list) for every entry of
    `values`; None when nothing qualifies."""
    values = make_list(values) if values is not None else []
    dict_objs = make_list(dict_objs) if dict_objs is not None else [{}]
    # Fuzzy-match corpus: every value across all candidate dicts.
    total_values = [value for dict_obj in dict_objs for value in dict_obj.values()]
    matched_objs = [get_closest_match_from_list(value, total_values) for value in values]

    for dict_obj in dict_objs:
        normalized = list(convert_and_normalize_values(dict_obj.values()))
        if all(match in normalized for match in matched_objs):
            return dict_obj
    return None
|
|
548
|
+
|
|
549
|
+
def get_all_keys(dict_data, keys=None):
    """Recursively collect every key in a (possibly nested) dict.

    Args:
        dict_data: The value to inspect; non-dict values contribute no keys.
        keys (list | None): Optional accumulator list (fresh list by default).

    Returns:
        list: All keys found, in depth-first encounter order.
    """
    # Bug fix: the original used a mutable default (keys=[]), which is shared
    # across calls — every invocation kept appending to the same list.
    if keys is None:
        keys = []
    if isinstance(dict_data, dict):
        for key, value in dict_data.items():
            keys.append(key)
            keys = get_all_keys(value, keys=keys)
    return keys
|
|
555
|
+
|
|
556
|
+
def update_dict_value(data, paths, new_value):
    """
    Walk *data* along the key sequence *paths* and overwrite the final value.

    Args:
        data (dict): The dictionary to modify in place.
        paths (list): Keys leading to the target entry.
        new_value: Replacement value for the entry at the last key.

    Returns:
        dict: The same (mutated) dictionary.
    """
    cursor = data
    # Descend through every key except the last one
    for step in paths[:-1]:
        cursor = cursor[step]
    # Overwrite the entry addressed by the final key
    cursor[paths[-1]] = new_value
    return data
|
|
575
|
+
def get_all_key_values(keys=None, dict_obj=None):
    """Pick the entries of *dict_obj* for the requested *keys*.

    Only keys whose value is truthy are included (falsy values such as
    None, 0 and '' are filtered out, matching the original behavior).

    Returns:
        dict: The selected key/value pairs.
    """
    source = dict_obj or {}
    return {key: source[key] for key in (keys or []) if source.get(key)}
|
|
584
|
+
|
|
585
|
+
def get_all_values(keys=None, dict_obj=None):
    """Collect the values of *dict_obj* for the requested *keys*.

    Only truthy values are kept (falsy entries are skipped, matching the
    original behavior). Returns a list in key order.
    """
    source = dict_obj or {}
    return [source[key] for key in (keys or []) if source.get(key)]
|
|
594
|
+
|
|
595
|
+
def safe_update_json_datas(
    json_data: dict,
    update_data: dict,
    valid_keys: list[str] | None = None,
    invalid_keys: list[str] | None = None
) -> dict:
    """
    Merge *update_data* into *json_data* with key filtering.

    - With a non-empty valid_keys, only keys in that list are written.
    - Otherwise, with a non-empty invalid_keys, keys in that list are removed
      from json_data and every other key is written.
    - With neither, every key is written.
    Values are always overwritten unconditionally.
    """
    allowed = set(make_list(valid_keys or []))
    blocked = set(make_list(invalid_keys or []))

    for key, value in update_data.items():
        if allowed:
            # Whitelist mode: ignore anything not explicitly allowed
            if key in allowed:
                json_data[key] = value
            continue
        if key in blocked:
            # Blacklist mode: purge the key if it already exists
            json_data.pop(key, None)
        else:
            json_data[key] = value

    return json_data
|
|
624
|
+
|
|
625
|
+
def get_json_file_path(file_path, data=None):
    """Ensure a JSON file exists at *file_path*, seeding it when missing.

    Args:
        file_path: Target path for the JSON file.
        data (dict | None): Initial contents written when the file must be
            created (defaults to an empty dict).

    Returns:
        The same *file_path*.
    """
    data = data or {}
    if not os.path.isfile(file_path):
        # Bug fix: the original always dumped a literal {}, silently
        # discarding the caller-supplied seed data.
        safe_dump_to_file(data=data, file_path=file_path)
    return file_path
|
|
630
|
+
|
|
631
|
+
def get_json_file_data(file_path):
    """Load JSON from *file_path*; returns None when the file does not exist."""
    if not os.path.isfile(file_path):
        return None
    return safe_load_from_json(file_path)
|
|
634
|
+
|
|
635
|
+
def get_create_json_data(file_path, data=None):
    """Create the JSON file if needed (seeded with *data*) and return its contents."""
    # get_json_file_path returns file_path unchanged, so the calls chain
    return get_json_file_data(get_json_file_path(file_path, data=data))
|
|
638
|
+
|
|
639
|
+
def get_json_data(file_path):
    """Resolve *file_path* via get_file_path and read its JSON contents."""
    resolved = get_file_path(file_path)
    return safe_read_from_json(resolved)
|
|
643
|
+
|
|
644
|
+
def save_updated_json_data(data, file_path):
    """Merge *data* into the JSON stored at *file_path* and write it back.

    Args:
        data (dict | None): Updates to apply (None is treated as a no-op).
        file_path: Path of the JSON file to update.
    """
    updates = data or {}
    # Robustness fix: guard against the file being missing/unreadable and
    # the loader returning None, which previously crashed on .update().
    current = get_json_data(file_path) or {}
    current.update(updates)
    safe_dump_to_file(current, file_path)
|
|
649
|
+
|
|
650
|
+
def safe_updated_json_data(
    data,
    file_path,
    valid_keys=None,
    invalid_keys=None
):
    """Load (or create) the JSON file and apply a key-filtered update to it.

    The merged dict is returned; nothing is written back to disk here —
    see safe_save_updated_json_data for the persisting variant.
    """
    json_data = get_create_json_data(file_path, data={})
    return safe_update_json_datas(
        json_data=json_data,
        update_data=data or {},
        valid_keys=valid_keys,
        invalid_keys=invalid_keys,
    )
|
|
665
|
+
def safe_save_updated_json_data(data,
                                file_path,
                                valid_keys=None,
                                invalid_keys=None
                                ):
    """Apply a key-filtered JSON update and persist the merged result.

    Returns the merged dict after writing it to *file_path*.
    """
    merged = safe_updated_json_data(
        data=data,
        file_path=file_path,
        valid_keys=valid_keys,
        invalid_keys=invalid_keys,
    )
    safe_dump_to_file(merged, file_path)
    return merged
|
|
677
|
+
|
|
678
|
+
def get_result_from_data(key, func, **data):
    """Call *func* with *data* as keyword arguments and pull *key* from its result.

    Assumes the callable returns a mapping with a .get method.
    """
    return func(**data).get(key)
|
|
682
|
+
|
|
683
|
+
def dump_if_json(obj):
    """Convert a dictionary to a JSON string if the object is a dictionary."""
    return json.dumps(obj) if isinstance(obj, dict) else obj
|
|
688
|
+
def get_desired_key_values(obj, keys=None, defaults=None):
    """Build a dict of the desired *keys* from *obj*, falling back to *defaults*.

    Args:
        obj (dict | None): Source mapping; non-dict values contribute nothing.
        keys (list | None): Keys to extract; when None, *obj* is returned as-is.
        defaults (dict | None): Fallback values; every default key is included.

    Returns:
        dict: Selected values, using defaults for missing or falsy entries
        (falsy source values fall through to the default, matching the
        original `or`-based behavior).
    """
    defaults = defaults or {}
    if keys is None:  # idiom fix: identity comparison, not == None
        return obj
    new_dict = {}
    # Bug fix: the original called obj.get unconditionally in the defaults
    # loop, raising AttributeError when obj was None and defaults were given.
    usable = obj if isinstance(obj, dict) else {}
    for key in defaults:
        new_dict[key] = usable.get(key) or defaults.get(key)
    if usable:
        for key in keys:
            new_dict[key] = usable.get(key) or defaults.get(key)
    return new_dict
|
|
699
|
+
def makeParams(*arg, **kwargs):
    """Return the positional args as a list with a trailing dict of non-None kwargs."""
    params = make_list(arg)
    filtered = {key: value for key, value in kwargs.items() if value is not None}
    params.append(filtered)
    return params
|
|
703
|
+
|
|
704
|
+
def get_only_kwargs(varList, *args, **kwargs):
    """Map positional *args* onto *varList* names, then keep only listed kwargs.

    Positional arguments are assigned to varList names by position and
    override same-named keyword arguments. Only keys present in *varList*
    survive in the result.
    """
    for position, value in enumerate(args):
        kwargs[varList[position]] = value
    return {name: value for name, value in kwargs.items() if name in varList}
|
|
713
|
+
|
|
714
|
+
def flatten_json(data, parent_key='', sep='_'):
    """
    Flatten nested dicts/lists into a single-level dict.

    Keys of nested entries are joined with *sep*; list elements contribute
    their index as a key segment.

    Args:
        data: The structure to flatten (dict, list, or scalar).
        parent_key (str): Prefix carried through recursive calls.
        sep (str): Separator between key segments.

    Returns:
        dict: The flattened mapping.
    """
    flat = {}
    if isinstance(data, dict):
        for key, value in data.items():
            child_key = f"{parent_key}{sep}{key}" if parent_key else key
            if isinstance(value, (dict, list)):
                flat.update(flatten_json(value, child_key, sep=sep))
            else:
                flat[child_key] = value
    elif isinstance(data, list):
        for index, element in enumerate(data):
            flat.update(flatten_json(element, f"{parent_key}{sep}{index}", sep=sep))
    else:
        # Scalar leaf: keyed by whatever prefix has accumulated
        flat[parent_key] = data
    return flat
|