naeural-client 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- naeural_client/__init__.py +13 -0
- naeural_client/_ver.py +13 -0
- naeural_client/base/__init__.py +6 -0
- naeural_client/base/distributed_custom_code_presets.py +44 -0
- naeural_client/base/generic_session.py +1763 -0
- naeural_client/base/instance.py +616 -0
- naeural_client/base/payload/__init__.py +1 -0
- naeural_client/base/payload/payload.py +66 -0
- naeural_client/base/pipeline.py +1499 -0
- naeural_client/base/plugin_template.py +5209 -0
- naeural_client/base/responses.py +209 -0
- naeural_client/base/transaction.py +157 -0
- naeural_client/base_decentra_object.py +143 -0
- naeural_client/bc/__init__.py +3 -0
- naeural_client/bc/base.py +1046 -0
- naeural_client/bc/chain.py +0 -0
- naeural_client/bc/ec.py +324 -0
- naeural_client/certs/__init__.py +0 -0
- naeural_client/certs/r9092118.ala.eu-central-1.emqxsl.com.crt +22 -0
- naeural_client/code_cheker/__init__.py +1 -0
- naeural_client/code_cheker/base.py +520 -0
- naeural_client/code_cheker/checker.py +294 -0
- naeural_client/comm/__init__.py +2 -0
- naeural_client/comm/amqp_wrapper.py +338 -0
- naeural_client/comm/mqtt_wrapper.py +539 -0
- naeural_client/const/README.md +3 -0
- naeural_client/const/__init__.py +9 -0
- naeural_client/const/base.py +101 -0
- naeural_client/const/comms.py +80 -0
- naeural_client/const/environment.py +26 -0
- naeural_client/const/formatter.py +7 -0
- naeural_client/const/heartbeat.py +111 -0
- naeural_client/const/misc.py +20 -0
- naeural_client/const/payload.py +190 -0
- naeural_client/default/__init__.py +1 -0
- naeural_client/default/instance/__init__.py +4 -0
- naeural_client/default/instance/chain_dist_custom_job_01_plugin.py +54 -0
- naeural_client/default/instance/custom_web_app_01_plugin.py +118 -0
- naeural_client/default/instance/net_mon_01_plugin.py +45 -0
- naeural_client/default/instance/view_scene_01_plugin.py +28 -0
- naeural_client/default/session/mqtt_session.py +72 -0
- naeural_client/io_formatter/__init__.py +2 -0
- naeural_client/io_formatter/base/__init__.py +1 -0
- naeural_client/io_formatter/base/base_formatter.py +80 -0
- naeural_client/io_formatter/default/__init__.py +3 -0
- naeural_client/io_formatter/default/a_dummy.py +51 -0
- naeural_client/io_formatter/default/aixp1.py +113 -0
- naeural_client/io_formatter/default/default.py +22 -0
- naeural_client/io_formatter/io_formatter_manager.py +96 -0
- naeural_client/logging/__init__.py +1 -0
- naeural_client/logging/base_logger.py +2056 -0
- naeural_client/logging/logger_mixins/__init__.py +12 -0
- naeural_client/logging/logger_mixins/class_instance_mixin.py +92 -0
- naeural_client/logging/logger_mixins/computer_vision_mixin.py +443 -0
- naeural_client/logging/logger_mixins/datetime_mixin.py +344 -0
- naeural_client/logging/logger_mixins/download_mixin.py +421 -0
- naeural_client/logging/logger_mixins/general_serialization_mixin.py +242 -0
- naeural_client/logging/logger_mixins/json_serialization_mixin.py +481 -0
- naeural_client/logging/logger_mixins/pickle_serialization_mixin.py +301 -0
- naeural_client/logging/logger_mixins/process_mixin.py +63 -0
- naeural_client/logging/logger_mixins/resource_size_mixin.py +81 -0
- naeural_client/logging/logger_mixins/timers_mixin.py +501 -0
- naeural_client/logging/logger_mixins/upload_mixin.py +260 -0
- naeural_client/logging/logger_mixins/utils_mixin.py +675 -0
- naeural_client/logging/small_logger.py +93 -0
- naeural_client/logging/tzlocal/__init__.py +20 -0
- naeural_client/logging/tzlocal/unix.py +231 -0
- naeural_client/logging/tzlocal/utils.py +113 -0
- naeural_client/logging/tzlocal/win32.py +151 -0
- naeural_client/logging/tzlocal/windows_tz.py +718 -0
- naeural_client/plugins_manager_mixin.py +273 -0
- naeural_client/utils/__init__.py +2 -0
- naeural_client/utils/comm_utils.py +44 -0
- naeural_client/utils/dotenv.py +75 -0
- naeural_client-2.0.0.dist-info/METADATA +365 -0
- naeural_client-2.0.0.dist-info/RECORD +78 -0
- naeural_client-2.0.0.dist-info/WHEEL +4 -0
- naeural_client-2.0.0.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,2056 @@
|
|
1
|
+
import os
|
2
|
+
import sys
|
3
|
+
import json
|
4
|
+
import shutil
|
5
|
+
import codecs
|
6
|
+
import textwrap
|
7
|
+
import numpy as np
|
8
|
+
import traceback
|
9
|
+
import socket
|
10
|
+
import threading
|
11
|
+
import re
|
12
|
+
|
13
|
+
from time import time as tm
|
14
|
+
from time import strftime, localtime, strptime, mktime
|
15
|
+
from collections import OrderedDict
|
16
|
+
from datetime import datetime as dt
|
17
|
+
from datetime import timedelta, timezone, tzinfo
|
18
|
+
from dateutil import tz
|
19
|
+
from pathlib import Path
|
20
|
+
|
21
|
+
from .tzlocal import get_localzone_name
|
22
|
+
|
23
|
+
|
24
|
+
|
25
|
+
from .._ver import __VER__
|
26
|
+
|
27
|
+
# Wrapper markup emitted when HTML log output is enabled; the meta tag makes
# browsers auto-refresh the log view every 5 seconds.
_HTML_START = "<HEAD><meta http-equiv='refresh' content='5' ></HEAD><BODY><pre>"
_HTML_END = "</pre></BODY>"

# ANSI escape sequences used for colored console output; keys are the
# one-letter color codes accepted by the `color=` parameter of P()/print_color.
COLORS = {
  'n': "\x1b[1;37m",  # normal white
  'd': "\x1b[0;37m",  # grey white
  'r': "\x1b[1;31m",  # red
  'g': "\x1b[1;32m",  # green
  'y': "\x1b[1;33m",  # yellow
  'b': "\x1b[1;34m",  # blue
  'm': "\x1b[1;35m",  # magenta
  'a': "\x1b[41m",    # red background (alert)
  'e': "\x1b[41m",    # red background (error)
  'w': "\x1b[1;31m",  # warning == red

  '__end__': "\x1b[0m",  # reset all terminal attributes
}

# Key under which the logger's own print lock is registered in _lock_table.
_LOGGER_LOCK_ID = '_logger_print_lock'
|
46
|
+
|
47
|
+
|
48
|
+
class LockResource():
  """
  Context manager that conditionally acquires a named resource lock.

  The owner must expose ``lock_resource``/``unlock_resource``. When the
  condition flag is falsy the manager is a no-op, letting callers toggle
  locking without restructuring their ``with`` statements.
  """

  def __init__(self, owner, resource, condition):
    self.__owner = owner
    self.__name = resource
    self.__active = condition

  def __enter__(self):
    if not self.__active:
      return self
    self.__owner.lock_resource(self.__name)
    return self

  def __exit__(self, type, value, traceback):
    if not self.__active:
      return
    self.__owner.unlock_resource(self.__name)
    return
|
63
|
+
|
64
|
+
|
65
|
+
class BaseLogger(object):
|
66
|
+
|
67
|
+
def __init__(self, lib_name="",
             lib_ver="",
             config_file="",
             config_data=None,
             base_folder=None,
             app_folder=None,
             show_time=True,
             config_file_encoding=None,
             no_folders_no_save=False,
             max_lines=None,
             HTML=False,
             DEBUG=True,
             data_config_subfolder=None,
             check_additional_configs=False,
             append_spaces=True,
             default_color='n',
             ):
  """
  Initialize the base logger: console color support, lock table, folder
  configuration, platform/bundle detection and initial banner output.

  Parameters
  ----------
  lib_name : str - short library tag printed in every log prefix.
  lib_ver : str - library version; defaults to the package __VER__.
  config_file : str - optional JSON config file to load.
  config_data : dict or None - optional in-memory config. NOTE: the default
    was previously a mutable `{}` shared between all instances; it is now
    `None` (handled identically below), which is backward compatible.
  base_folder / app_folder : str - root folders for logs/data/models.
  show_time : bool - prepend the timestamp prefix to log lines.
  config_file_encoding : str - encoding for the config file.
  no_folders_no_save : bool - fully disable folder creation and persistence.
  max_lines : int - rotate in-memory/app log after this many lines.
  HTML : bool - persist logs as auto-refreshing HTML instead of text.
  DEBUG : bool - enable debug-only functionality.
  data_config_subfolder : str - subfolder for additional configs.
  check_additional_configs : bool - scan for extra config files.
  append_spaces : bool - pad console lines to a minimum width.
  default_color : str - one-letter default console color code.
  """
  super(BaseLogger, self).__init__()
  if os.name == 'nt':
    os.system('color')  # enable ANSI escape processing on Windows consoles
  self.__lib__ = lib_name
  self.append_spaces = append_spaces
  self.show_time = show_time
  self.no_folders_no_save = no_folders_no_save
  self.max_lines = max_lines
  self.HTML = HTML
  self.DEBUG = DEBUG
  self.log_suffix = lib_name
  self.default_color = default_color
  self.__first_print = False

  # per-resource lock registry; the logger's own print lock is pre-registered
  self._lock_table = OrderedDict({
    _LOGGER_LOCK_ID: threading.Lock(),
  })

  # protects _lock_table itself against concurrent inserts
  self._lock_table_mutex = threading.Lock()

  self._base_folder = base_folder
  self._app_folder = app_folder
  self._normalize_path_sep()

  self.check_additional_configs = check_additional_configs
  self.data_config_subfolder = data_config_subfolder

  self.__version__ = __VER__
  self.version = self.__version__
  self.file_prefix = None
  self.refresh_file_prefix()

  self.last_time = tm()
  self.start_timestamp = tm()
  self.utc_offset = self.get_utc_offset()

  try:
    self.timezone = get_localzone_name()
  except Exception as exc:
    # best-effort: keep the error text instead of failing initialization
    self.timezone = str(exc)

  self.app_log = list()
  self.err_log = list()
  self.split_part = 1
  self.split_err_part = 1
  self.config_data = None
  # FIX: `config_data` no longer defaults to a shared mutable dict
  self.__init_config_data = config_data if config_data is not None else {}
  self.MACHINE_NAME = None
  self.COMPUTER_NAME = None
  self.processor_platform = None
  self.python_version = sys.version.split(' ')[0]
  self.python_major = int(self.python_version.split('.')[0])
  self.python_minor = int(self.python_version.split('.')[1])
  if self.python_major < 3:
    msg = "ERROR: Python 2 or lower detected. Run will fail!"
    print(msg)
    raise ValueError(msg)

  _ = self.get_machine_name()

  # START: bundling -- se also properties
  if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
    print('  Running in a PyInstaller bundle')
    self.__is_bundled = True
    self.__bundle_path = sys._MEIPASS
  else:
    print('  Running in a normal Python process')
    self.__is_bundled = False
    self.__bundle_path = None
  # END: bundling -- se also properties

  self.analyze_processor_platform()

  self._save_enabled = False
  if not self.no_folders_no_save:
    try:
      self._configure_data_and_dirs(config_file, config_file_encoding)
      self._save_enabled = True
    except:
      self.P("Failed to configure data and dirs. No log persistance possible.\n{}".format(
        traceback.format_exc()
        ), color='r'
      )
      self.no_folders_no_save = True
    #endtry config data and dirs
  #endif

  self._generate_log_path()
  self._generate_error_log_path()
  self._check_additional_configs()

  self.git_branch = self.get_active_git_branch()
  self.conda_env = self.get_conda_env()

  if lib_ver == "":
    lib_ver = __VER__
  ver = "v{}".format(lib_ver) if lib_ver != "" else ""
  self.verbose_log(
    "PyE2 [{} {}] initialized on machine [{}][{}].".format(
      self.__lib__, ver, self.MACHINE_NAME, self.get_processor_platform(),
    ),
    color='green'
  )
  self.verbose_log("  Timezone: {}.".format(self.timezone), color='green')

  if self.DEBUG:
    self.P('  DEBUG is enabled in Logger', color='g')
  else:
    self.P('  WARNING: Debug is NOT enabled in Logger, some functionalities are DISABLED', color='y')

  return
|
198
|
+
|
199
|
+
def get_unique_id(self, size=8):
  """
  Efficient, low-collision generator for short random identifiers.

  Returns a string of `size` characters drawn from lowercase letters
  and digits.
  """
  import random
  import string
  pool = string.ascii_lowercase + string.digits
  return ''.join(random.choices(pool, k=size))
|
208
|
+
|
209
|
+
@property
|
210
|
+
def is_bundled(self):
|
211
|
+
return self.__is_bundled
|
212
|
+
|
213
|
+
@property
|
214
|
+
def bundle_path(self):
|
215
|
+
return self.__bundle_path
|
216
|
+
|
217
|
+
|
218
|
+
def is_running(self, verbose=True):
  """Convenience alias for `same_script_already_running`."""
  return self.same_script_already_running(verbose=verbose)
|
220
|
+
|
221
|
+
@staticmethod
|
222
|
+
def version_to_int(version):
|
223
|
+
comps = version.split('.')
|
224
|
+
val = 0
|
225
|
+
power = 3
|
226
|
+
for i, comp in enumerate(comps):
|
227
|
+
v = int(comp)
|
228
|
+
v = v * 100**power
|
229
|
+
power -= 1
|
230
|
+
val += v
|
231
|
+
return val
|
232
|
+
|
233
|
+
@staticmethod
|
234
|
+
def get_packages(as_text=False, indent=0, as_dict=False, mandatory={}):
|
235
|
+
"""
|
236
|
+
Will return the currently installed (and visible) packages
|
237
|
+
|
238
|
+
Parameters
|
239
|
+
----------
|
240
|
+
as_text : bool, optional
|
241
|
+
If true return as text. The default is False.
|
242
|
+
|
243
|
+
indent : int, optional
|
244
|
+
If return text then return it with indent. The default is 0.
|
245
|
+
|
246
|
+
mandatory : dict, optional
|
247
|
+
Will raise error if any packages from the dict of key:ver are missing. The default is {}.
|
248
|
+
|
249
|
+
as_dict: bool, optional
|
250
|
+
Return as package_name:ver dict the result. Default False
|
251
|
+
|
252
|
+
Returns
|
253
|
+
-------
|
254
|
+
packs : list/str/dict
|
255
|
+
the list of packages as list of str, a full text to show or a dict of name:ver.
|
256
|
+
|
257
|
+
"""
|
258
|
+
import pkg_resources
|
259
|
+
def ver_to_int(version, package=None):
|
260
|
+
comps = version.split('.')
|
261
|
+
val = 0
|
262
|
+
power = 3
|
263
|
+
try:
|
264
|
+
for i, comp in enumerate(comps):
|
265
|
+
v = int(comp)
|
266
|
+
v = v * 100**power
|
267
|
+
power -= 1
|
268
|
+
val += v
|
269
|
+
except:
|
270
|
+
BaseLogger.print_color("Failed to convert version '{}' to int for package `{}`, version so far: {}".format(version, package, val), color='r')
|
271
|
+
return val
|
272
|
+
|
273
|
+
raw_packs = [x for x in pkg_resources.working_set]
|
274
|
+
maxlen = max([len(x.key) for x in raw_packs]) + 1
|
275
|
+
lst_pack_ver = [(x.key, x.version) for x in raw_packs]
|
276
|
+
lst_pack_ver = sorted(lst_pack_ver, key=lambda x:x[0])
|
277
|
+
dct_packs = OrderedDict(lst_pack_ver)
|
278
|
+
|
279
|
+
if len(mandatory) > 0:
|
280
|
+
for mandatory_pack in mandatory:
|
281
|
+
if mandatory_pack not in dct_packs:
|
282
|
+
msg = "Mandatory package `{}:{}` is missing. Please check your deployment!".format(
|
283
|
+
mandatory_pack, mandatory[mandatory_pack])
|
284
|
+
BaseLogger.print_color(msg, color='r')
|
285
|
+
raise ValueError(msg)
|
286
|
+
mandatory_ver = ver_to_int(mandatory[mandatory_pack])
|
287
|
+
package_ver = ver_to_int(dct_packs[mandatory_pack], package=mandatory_pack)
|
288
|
+
if mandatory_ver > package_ver:
|
289
|
+
msg = "Mandatory installed package `{}:{}` ({}) below required version `{}` ({}). Please check your deployment!".format(
|
290
|
+
mandatory_pack, dct_packs[mandatory_pack], package_ver, mandatory[mandatory_pack], mandatory_ver)
|
291
|
+
BaseLogger.print_color(msg, color='r')
|
292
|
+
raise ValueError(msg)
|
293
|
+
#endif check for packages and versions
|
294
|
+
|
295
|
+
if as_dict:
|
296
|
+
result = dct_packs
|
297
|
+
else:
|
298
|
+
result = []
|
299
|
+
for x in lst_pack_ver:
|
300
|
+
result.append("{}{}".format(x[0] + ' ' * (maxlen - len(x[0])), x[1] + ' ' * (14 - len(x[1]))))
|
301
|
+
if x[0] in mandatory:
|
302
|
+
result[-1] = result[-1] + ' ==> OK ({} > {})'.format(x[1], mandatory[x[0]])
|
303
|
+
if as_text:
|
304
|
+
fmt = "\n{}".format(' ' * indent)
|
305
|
+
result = ' ' * indent + fmt.join(result)
|
306
|
+
return result
|
307
|
+
|
308
|
+
|
309
|
+
def same_script_already_running(self, verbose=True):
  """
  Return True when another python process is already executing this script.

  Scans the process table (via psutil) for a `python*` process whose first
  command-line argument contains this script's path and whose pid differs
  from ours.
  """
  import psutil
  PROC_PREFIX = 'python'
  this_script = sys.argv[0]
  if this_script == '':
    self.P("Cannot get script file name", color='r')
    return False
  my_pid = os.getpid()
  for proc in psutil.process_iter():
    if not proc.name().startswith(PROC_PREFIX):
      continue
    cmdline = proc.cmdline()
    if len(cmdline) > 1 and this_script in cmdline[1] and proc.pid != my_pid:
      if verbose:
        self.P("Python '{}' process is already running".format(this_script), color='m')
      return True
  return False
|
327
|
+
|
328
|
+
@staticmethod
|
329
|
+
def replace_secrets(dct_config, pattern='$EE_'):
|
330
|
+
matches = []
|
331
|
+
missing = []
|
332
|
+
stack = [dct_config]
|
333
|
+
|
334
|
+
while stack:
|
335
|
+
current = stack.pop()
|
336
|
+
if isinstance(current, dict):
|
337
|
+
for key, value in current.items():
|
338
|
+
if isinstance(value, str) and value.startswith(pattern):
|
339
|
+
matches.append(value)
|
340
|
+
env_var_name = value[1:]
|
341
|
+
if env_var_name not in os.environ:
|
342
|
+
missing.append(env_var_name)
|
343
|
+
else:
|
344
|
+
current[key] = os.environ[env_var_name]
|
345
|
+
elif isinstance(value, (dict, list)):
|
346
|
+
stack.append(value)
|
347
|
+
elif isinstance(current, list):
|
348
|
+
for item in current:
|
349
|
+
if isinstance(item, (dict, list)):
|
350
|
+
stack.append(item)
|
351
|
+
if len(missing) > 0:
|
352
|
+
raise ValueError('Required environment configuration for keys {} was not found in current envirnoment. Please setup your docker or bare-metal config to provide this missing key(s)'.format(
|
353
|
+
",".join(['"' + x + '"' for x in missing])
|
354
|
+
))
|
355
|
+
return matches
|
356
|
+
|
357
|
+
|
358
|
+
|
359
|
+
def lock_process(self, str_lock_name, nt_file_lock=False):
  """
  Acquire a system-wide single-instance lock for this process.

  Parameters
  ----------
  str_lock_name : str
    Identifier for the lock (mutex name, abstract socket name or lock-file stem).
  nt_file_lock : bool, optional
    On Windows, use a naive `.lock` file instead of a named mutex. Default False.

  Returns
  -------
  Platform-specific lock handle: the lock-file name (Windows file lock), a
  win32 mutex handle, a bound unix socket (Linux), or -1 (macOS, where the
  mutex is skipped); None when another process already holds the lock.
  """
  if os.name == 'nt':
    # windows
    if nt_file_lock:
      # naive lock ... (file presence check is racy; acceptable best-effort)
      self.P("Attempting to create file lock '{}'".format(str_lock_name), color='m')
      fn = str_lock_name + '.lock'
      if os.path.isfile(fn):
        self.P("Another Windows process has already acquired file lock '{}'".format(str_lock_name), color='r')
        return None
      else:
        str_stamp = self.time_to_str()
        with open(fn, "wt") as fh:
          fh.write("LOCKED AT {}".format(str_stamp))
        self.P("Current Windows process has acquired file lock '{}'".format(fn))
        return fn
    else:
      # nice lock but not always working (if not superuser...)
      from win32event import CreateMutex
      from win32api import GetLastError
      from winerror import ERROR_ALREADY_EXISTS
      # Global\ namespace makes the mutex visible across sessions
      str_lock_name = "Global\\" + str_lock_name.replace("\\", "")
      self.P("Attempting to create lock on current Windows process for id '{}'".format(str_lock_name), color='m')

      try:
        mutex_handle = CreateMutex(None, 1, str_lock_name)
        err = GetLastError()
      except:
        # treat any failure as "already locked" to stay on the safe side
        self.P("Exception in process locking id '{}'".format(str_lock_name), color='r')
        err = ERROR_ALREADY_EXISTS

      if err == ERROR_ALREADY_EXISTS:
        # maybe show some text
        self.P("Another Windows process has already acquired id '{}'".format(str_lock_name), color='r')
        return None
      else:
        # maybe show some text
        self.P("Current Windows process has acquired id '{}':{} ({})".format(
          str_lock_name, mutex_handle, err), color='g')
        return mutex_handle
  else:
    import platform
    str_platform = platform.system()
    if str_platform.lower() == 'darwin':
      # macos
      self.P("Running on MacOS. Skipping mutex and checking if script is running", color='m')
      if self.same_script_already_running():
        return None
      return -1
    else:
      import socket
      self.P("Attempting to create lock on current Linux process for id '{}'".format(str_lock_name), color='m')
      # abstract-namespace unix socket ('\0' prefix): released automatically
      # by the kernel when the owning process dies
      _lock_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
      try:
        _lock_socket.bind('\0' + str_lock_name)
        # maybe show some text
        self.P("Current Linux process has acquired id '{}': {}".format(
          str_lock_name, _lock_socket), color='g')
        return _lock_socket
      except Exception as err:
        # maybe show some text
        self.P("Another Linux process has already acquired id '{}'. Error: {}".format(
          str_lock_name, err), color='r')
        return None
    # end if platform
  # end if not windows
  return
|
426
|
+
|
427
|
+
def maybe_unlock_windows_file_lock(self, str_lock_name):
  """Delete a Windows lock file created by `lock_process`; no-op elsewhere."""
  if os.name == 'nt' and isinstance(str_lock_name, str):
    self.P("Attempting to unlock windows lock file...")
    if not os.path.isfile(str_lock_name):
      self.P("Unknown file lock '{}'".format(str_lock_name))
    else:
      os.remove(str_lock_name)
      self.P("Released windows file lock {}".format(str_lock_name))
  return
|
436
|
+
|
437
|
+
def analyze_processor_platform(self):
|
438
|
+
import platform
|
439
|
+
import subprocess
|
440
|
+
import re
|
441
|
+
str_system = platform.system()
|
442
|
+
if str_system == "Windows":
|
443
|
+
self.processor_platform = platform.processor()
|
444
|
+
elif str_system == "Darwin":
|
445
|
+
os.environ['PATH'] = os.environ['PATH'] + os.pathsep + '/usr/sbin'
|
446
|
+
command ="sysctl -n machdep.cpu.brand_string"
|
447
|
+
self.processor_platform = subprocess.check_output(command, shell=True).strip().decode('utf-8')
|
448
|
+
elif str_system == "Linux":
|
449
|
+
command = "cat /proc/cpuinfo"
|
450
|
+
all_info = subprocess.check_output(command, shell=True).decode().strip()
|
451
|
+
for line in all_info.split("\n"):
|
452
|
+
if "model name" in line:
|
453
|
+
self.processor_platform = re.sub( ".*model name.*:", "", line,1)
|
454
|
+
break
|
455
|
+
return
|
456
|
+
|
457
|
+
def get_processor_platform(self):
|
458
|
+
return self.processor_platform
|
459
|
+
|
460
|
+
|
461
|
+
def managed_lock_resource(self, str_res, condition=True):
  """
  Return a context manager that locks `str_res` for the duration of a
  `with` block, releasing it automatically on exit. When `condition` is
  False the returned manager does nothing, so callers can toggle locking
  via a flag without changing their code shape.

  Parameters
  ----------
  str_res : str
    The resource to lock.
  condition : bool, optional
    If False the lock will not be acquired. The default is True.

  Returns
  -------
  LockResource
    The lock resource object.

  Example
  -------
  ```
  with self.managed_lock_resource('my_resource'):
    # do something
  ```

  ```
  # will control if the following operation is locked or not based on this flag
  locking = False
  with self.managed_lock_resource('my_resource', condition=locking):
    # do something
  ```
  """
  return LockResource(self, str_res, condition)
|
494
|
+
|
495
|
+
def lock_resource(self, str_res):
  """
  Acquire (creating on first use) the named threading.Lock from the lock
  table and return it; returns None when the lock could not be created.

  Possible critical failure:

  1. base plugin runs try stuff etc
  2. plugin runs lock
  3. threading.Lock() fails
  4. base plugin runs except
  5. except locks in log (no output) due to _lock_table_mutex.acquire(blocking=True)
  6. any thread running lock_reource will hang with NO LOG OUTPUT
  """
  result = None
  # guard the lock table itself while (maybe) inserting a new lock
  self._lock_table_mutex.acquire(blocking=True)
  try:
    if str_res not in self._lock_table:
      self._lock_table[str_res] = threading.Lock()
  except:
    # plain print on purpose: self.P needs the logger lock and could deadlock here
    print("**************************************************************\nPANIC: Failed to create lock for resource '{}'\n**************************************************************".format(str_res))
  finally:
    self._lock_table_mutex.release()

  # acquire outside the table mutex so unrelated resources are not serialized
  if str_res in self._lock_table:
    self._lock_table[str_res].acquire(blocking=True)
    result = self._lock_table[str_res]

  return result
|
521
|
+
|
522
|
+
def unlock_resource(self, str_res):
|
523
|
+
if str_res in self._lock_table:
|
524
|
+
self._lock_table[str_res].release()
|
525
|
+
return
|
526
|
+
|
527
|
+
def managed_lock_logger(self):
  """Context manager that serializes console/file logging across threads."""
  return self.managed_lock_resource(_LOGGER_LOCK_ID)
|
529
|
+
|
530
|
+
def lock_logger(self):
  """Manually acquire the logger lock (pair with `unlock_logger`)."""
  self.lock_resource(_LOGGER_LOCK_ID)
  return
|
533
|
+
|
534
|
+
def unlock_logger(self):
  """Release the logger lock acquired via `lock_logger`."""
  self.unlock_resource(_LOGGER_LOCK_ID)
|
536
|
+
|
537
|
+
def get_file_path(self, fn, folder, subfolder_path=None, force=False):
  """
  Resolve `fn` inside a logger-managed folder ('data', 'models', etc.).

  Returns the full path when the file exists (or when `force` is True),
  otherwise None. When the target folder cannot be resolved, `fn` is used
  as-is.
  """
  root = self.get_target_folder(target=folder)
  if root is None:
    candidate = fn
  else:
    base = root if subfolder_path is None else os.path.join(root, subfolder_path.lstrip('/'))
    candidate = os.path.join(base, fn)
  if os.path.isfile(candidate) or force:
    return candidate
  return
|
549
|
+
|
550
|
+
|
551
|
+
@property
|
552
|
+
def session_id(self):
|
553
|
+
return self.file_prefix
|
554
|
+
|
555
|
+
|
556
|
+
def cleanup_timestamped_files(self, folder, keep=5):
  """
  Keep only the newest `keep` timestamp-prefixed files in `folder`
  (names starting with 6 digits), deleting the oldest ones.
  """
  if not os.path.isdir(folder):
    return
  stamped = sorted(x for x in os.listdir(folder) if len(x) > 6 and x[:6].isnumeric())
  surplus = len(stamped) - keep
  if surplus > 0:
    doomed = stamped[:surplus]
    self.P("Cleaning {} timestamped files between {} and {}, preserving {} in '{}'...".format(
      surplus, doomed[0], doomed[-1], keep, folder), color='y'
    )
    for name in doomed:
      os.remove(os.path.join(folder, name))
      print(".", flush=True, end='')
    print("Done.\r", flush=True, end='')
  #endif delete extra files
  return
|
573
|
+
|
574
|
+
|
575
|
+
def maybe_cleanup_timestamped_files(self, folder, keep=4):
  """Wrapper over `cleanup_timestamped_files` with a smaller default `keep`."""
  return self.cleanup_timestamped_files(folder=folder, keep=keep)
|
577
|
+
|
578
|
+
|
579
|
+
def cleanup_logs(self, archive_older_than_days=2, MAX_ARCHIVE_SIZE=5*1024**2):
  """
  Move .txt logs older than `archive_older_than_days` into `_logs_archive.zip`
  inside the logs dir. An archive larger than MAX_ARCHIVE_SIZE is first
  renamed aside (date-stamped) for manual deletion. No-op when persistence
  is disabled.
  """
  if self.no_folders_no_save:
    return
  self.P("Cleaning logs older than {} days...".format(archive_older_than_days), color='y')
  # logs are named with a leading YYYYMMDD stamp; compare them numerically
  str_old_date = (dt.today() - timedelta(days=archive_older_than_days)).strftime('%Y%m%d')
  int_old_date = int(str_old_date)
  logs = os.listdir(self._logs_dir)
  archive_list = []
  show_list = []
  base_fn = "_logs_archive"
  zip_fn = os.path.join(self._logs_dir, base_fn + '.zip')
  if os.path.isfile(zip_fn):
    stats = os.stat(zip_fn)
    if stats.st_size > MAX_ARCHIVE_SIZE:
      self.P(" Current archive larger than {:.1f} MB. Renaming and marking for manual deletion".format(
        MAX_ARCHIVE_SIZE / 1024**2), color='y'
      )
      # stamp the rename with the session date (first token of file_prefix)
      new_fn = os.path.join(self._logs_dir, base_fn + '_' + self.file_prefix.split('_')[0] + '.zip')
      if os.path.isfile(new_fn):
        self.P(" Something is strange, file already exists. Deleting...")
        try:
          os.unlink(new_fn)
        except:
          self.P(" Failed to remove {}:\n{}".format(new_fn, traceback.format_exc()), color='r')
      try:
        os.rename(zip_fn, new_fn)
      except:
        self.P(" Archiving logs file failed:\n{}".format(
          traceback.format_exc()
        ), color='r')
    #endif file to large
  #endif already exists
  for fn in logs:
    if fn[-4:] == '.txt':
      str_date = fn[:8]
      int_date = None
      if len(str_date) == 8:
        try:
          int_date = int(str_date)
        except:
          # non-timestamped .txt file: leave it alone
          pass
      if int_date is not None and int_date < int_old_date:
        full_fn = os.path.join(self._logs_dir, fn)
        archive_list.append(full_fn)
        show_list.append(fn)

  if len(archive_list) > 0:
    self.P(" Archiving {} logs...".format(len(archive_list)), color='y')
    self.add_files_to_zip(zip_fn, archive_list)
    # delete originals only after they were added to the archive
    for full_fn in archive_list:
      os.remove(full_fn)
  else:
    self.P(" Nothing to clean.")
  return
|
633
|
+
|
634
|
+
|
635
|
+
|
636
|
+
def _logger(self, logstr, show=True, noprefix=False, show_time=False, color=None):
  """
  log processing method

  Serializes all logging through the logger lock: appends `logstr` to the
  in-memory logs, persists both logs when saving is enabled, and triggers
  size-based rotation. Returns the elapsed seconds since the previous call.
  """
  with self.managed_lock_logger():
    # now that we have locking in place we no longer need to cancel in-thread logging
    # if not self.is_main_thread:
    #   return
    self.start_timer('_logger', section='LOGGER_internal')

    elapsed = tm() - self.last_time

    self.start_timer('_logger_add_log', section='LOGGER_internal')
    self._add_log(
      logstr, show=show,
      noprefix=noprefix,
      show_time=show_time,
      color=color
    )
    self.end_timer('_logger_add_log', section='LOGGER_internal')

    self.start_timer('_logger_save_log', section='LOGGER_internal')
    if self._save_enabled:
      # both the main log and the error-only log are rewritten on every call
      self._save_log(
        log=self.app_log,
        log_file=self.log_file
      )
      self._save_log(
        log=self.err_log,
        log_file=self.log_e_file
      )
    self.end_timer('_logger_save_log', section='LOGGER_internal')

    self.last_time = tm()
    self._check_log_size()

    self.end_timer('_logger', section='LOGGER_internal')
  # endwith lock
  return elapsed
|
675
|
+
|
676
|
+
def _normalize_path_sep(self):
|
677
|
+
if self._base_folder is not None:
|
678
|
+
if os.path.sep == '\\':
|
679
|
+
self._base_folder = self._base_folder.replace('/', '\\')
|
680
|
+
else:
|
681
|
+
self._base_folder = self._base_folder.replace('\\', '/')
|
682
|
+
#endif
|
683
|
+
#endif
|
684
|
+
|
685
|
+
if self._app_folder is not None:
|
686
|
+
if os.path.sep == '\\':
|
687
|
+
self._app_folder = self._app_folder.replace('/', '\\')
|
688
|
+
else:
|
689
|
+
self._app_folder = self._app_folder.replace('\\', '/')
|
690
|
+
#endif
|
691
|
+
#endif
|
692
|
+
|
693
|
+
return
|
694
|
+
|
695
|
+
def print_on_columns(self, *objects, nr_print_columns=4, nr_print_chars=12, header=None, color=None):
  """
  Print `objects` distributed column-major over `nr_print_columns` columns,
  each cell right-aligned and truncated to `nr_print_chars` characters.
  """
  if header:
    self.P(header, color=color)

  cell_fmt = "{:>" + str(nr_print_chars) + "}"
  columns = [[] for _ in range(nr_print_columns)]
  rows_per_column = int(np.ceil(len(objects) / nr_print_columns))

  col_idx = 0
  for i, item in enumerate(objects):
    # advance to the next column once the current one is full
    if i // rows_per_column != col_idx:
      col_idx += 1
    columns[col_idx].append(cell_fmt.format(item[:nr_print_chars]))
  # endfor

  for row in range(rows_per_column):
    line = ''.join(
      columns[c][row] + ' '
      for c in range(nr_print_columns)
      if row < len(columns[c])
    )
    self.P(line, noprefix=True, color=color)
  # endfor
  return
|
723
|
+
|
724
|
+
def _add_log(self, logstr, show=True, noprefix=False, show_time=False, color=None):
  """
  Format `logstr` with the lib/timestamp prefix, optionally print it
  colorized, and append it to `app_log` (error/red entries also go to
  `err_log`). Not thread-safe on its own — callers go through `_logger`,
  which holds the logger lock.
  """
  if type(logstr) != str:
    logstr = str(logstr)
  if logstr == "":
    logstr = " "
  # auto-colorize messages that look like warnings/errors
  if 'WARNING' in logstr and color is None:
    color = 'warning'
  if 'ERROR' in logstr and color is None:
    color = 'error'
  elapsed = tm() - self.last_time
  nowtime = dt.now()
  prefix = ""
  strnowtime = nowtime.strftime("[{}][%y-%m-%d %H:%M:%S]".format(self.__lib__))
  if self.show_time and (not noprefix):
    prefix = strnowtime
  if logstr[0] == "\n":
    # move a leading newline in front of the prefix rather than after it
    logstr = logstr[1:]
    prefix = "\n" + prefix
  res_log = logstr  # NOTE(review): res_log appears unused — confirm before removing
  if len(logstr) == 0 or logstr[0] != '[':
    prefix = prefix + ' '
  logstr = prefix + logstr
  if show_time:
    logstr += " [{:.2f}s]".format(elapsed)
  print_logstr = logstr
  if show:
    if self.append_spaces:
      # pad so colored lines fully overwrite shorter earlier console lines
      spaces = " " * max(60 - len(print_logstr), 0)
    else:
      spaces = ''
    print_logstr = print_logstr + spaces
    if color is None:
      color = self.default_color
      if not self.__first_print:
        # announce the default color exactly once per logger instance
        BaseLogger.print_color("<Logging with default color: {}>".format(color), color=color)
        self.__first_print = True
    #endif use default color
    BaseLogger.print_color(print_logstr, color=color)
    # 'e'/'error' and 'r'/red entries are mirrored into the error log
    if color.lower()[0] in ['e', 'r']:
      self.err_log.append(logstr)
  self.app_log.append(logstr)

  #endif
  return
|
768
|
+
|
769
|
+
def _save_log(self, log, log_file, DEBUG_ERRORS=False):
  """ Generic method that saves logs to a specific file

  Args:
    log (list): The log list to save
    log_file (str): The path to the desired file in which to save the log.
    DEBUG_ERRORS (bool, optional): Print exceptions regarding opening the file and writing to it. Defaults to False.
  """
  if self.no_folders_no_save or self._save_enabled is False:
    return

  nowtime = dt.now()
  strnowtime = nowtime.strftime("[{}][%Y-%m-%d %H:%M:%S] ".format(self.__lib__))
  # `stage` tracks progress through the write so failures can be localized
  stage = 0
  try:
    # FIX: use a context manager so the handle is closed even when a write
    # raises (the previous code leaked the handle on any mid-write error)
    with codecs.open(log_file, "w", "utf-8") as log_output:
      stage += 1
      if self.HTML:
        log_output.write(_HTML_START)
        stage += 1
        # HTML logs are written newest-first
        iter_list = reversed(log)
      else:
        iter_list = log
      for log_item in iter_list:
        log_output.write("{}\n".format(log_item))
      stage += 1
      if self.HTML:
        log_output.write(_HTML_END)
        stage += 1
    stage += 1
  except Exception:
    # best-effort save: swallow errors unless debugging was requested
    if DEBUG_ERRORS:
      print(strnowtime + "LogWErr S: {} [{}]".format(stage,
                                                     sys.exc_info()[0]), flush=True)
  return
|
808
|
+
|
809
|
+
def _check_log_size(self):
  # Roll the in-memory app/error logs over to new files once `max_lines`
  # is reached. No-op when rotation is disabled (max_lines is None).
  if self.max_lines is None:
    return

  if len(self.app_log) >= self.max_lines:
    # close current part: flush to disk, reset the buffer, bump the part
    # counter, compute the next file path, then start the new part
    self._add_log("Ending log part {}".format(self.split_part))
    self._save_log(
      log=self.app_log,
      log_file=self.log_file
    )
    self.app_log = []
    self.split_part += 1
    self._generate_log_path()
    self._add_log("Starting log part {}".format(self.split_part))
    self._save_log(
      log=self.app_log,
      log_file=self.log_file
    )
  if len(self.err_log) >= self.max_lines:
    # same rollover sequence for the error log
    self._add_log("Ending error log part {}".format(self.split_err_part))
    self._save_log(
      log=self.err_log,
      log_file=self.log_e_file
    )
    self.err_log = []
    self.split_err_part += 1
    self._generate_error_log_path()
    self._add_log("Starting error log part {}".format(self.split_err_part))
    self._save_log(
      log=self.err_log,
      log_file=self.log_e_file
    )
  return
|
842
|
+
|
843
|
+
def verbose_log(self, str_msg, show_time=False, noprefix=False, color=None):
  """Record `str_msg` and always echo it to the console."""
  return self._logger(str_msg, show=True, show_time=show_time, noprefix=noprefix, color=color)
|
850
|
+
|
851
|
+
def P(self, str_msg, show_time=False, noprefix=False, color=None, boxed=False, **kwargs):
  """Shorthand alias for `p` (print and record a log line)."""
  return self.p(
    str_msg,
    show_time=show_time,
    noprefix=noprefix,
    color=color,
    boxed=boxed,
    **kwargs
  )
|
853
|
+
|
854
|
+
def D(self, str_msg, show_time=False, noprefix=False, color=None, **kwargs):
  # Debug-print hook, intentionally disabled: the `if False` guard makes this a
  # no-op that always returns None. Flip the guard to re-enable debug output.
  if False:
    return self.P(str_msg, show_time=show_time, noprefix=noprefix, color=color, **kwargs)
|
857
|
+
|
858
|
+
@staticmethod
|
859
|
+
def Pr(str_msg, show_time=False, noprefix=False):
|
860
|
+
if type(str_msg) != str:
|
861
|
+
str_msg = str(str_msg)
|
862
|
+
print("\r" + str_msg, flush=True, end='')
|
863
|
+
|
864
|
+
def __convert_to_box(self, str_msg, box_char='#', indent=None, **kwargs):
  """Frame a (possibly multi-line) message inside a box drawn with `box_char`."""
  lines = str_msg.split('\n')
  widest = max(map(len, lines))

  # horizontal padding inside the box; tighter for wide messages
  center = 4 if widest > 80 else 10
  width = center + 1 + widest + 1 + center
  pad = indent if indent is not None else (4 if width > 100 else 20)
  left_margin = ' ' * pad

  out = box_char * 3 + 'IMPORTANT' + box_char * 3 + '\n\n'
  out += left_margin + box_char * width + '\n'
  out += left_margin + box_char + (width - 2) * ' ' + box_char + '\n'

  for line in lines:
    # right-pad each line so the right edge stays aligned
    filler = ' ' * (center + widest - len(line))
    out += left_margin + box_char + ' ' * center + line + filler + box_char + '\n'
  # end for

  out += left_margin + box_char + (width - 2) * ' ' + box_char + '\n'
  out += left_margin + box_char * width + '\n'

  return out
|
892
|
+
|
893
|
+
def p(self, str_msg, show_time=False, noprefix=False, color=None, boxed=False, **kwargs):
  """Print (and record) a message; optionally render it inside an ASCII box."""
  if not boxed:
    return self._logger(
      str_msg,
      show=True,
      show_time=show_time,
      noprefix=noprefix, color=color
    )
  boxed_msg = self.__convert_to_box(str_msg, **kwargs)
  self._logger(boxed_msg, show=True, noprefix=noprefix, color=color)
|
904
|
+
|
905
|
+
def Pmd(self, s=''):
  """Render `s` as Markdown when IPython is available, else fall back to plain P()."""
  print_func = None
  try:
    from IPython.display import Markdown, display
    def print_func(s):
      display(Markdown(s))
  except:
    pass
  if not isinstance(s, str):
    s = str(s)

  if print_func is None:
    self.P(s)
  else:
    # record without echoing, then render via IPython
    self._add_log(
      logstr=s,
      show=False,
      noprefix=False,
      show_time=False,
    )
    print_func(s)
  return
|
927
|
+
|
928
|
+
def Pmdc(self, s=''):
  """Like Pmd() but renders `s` bold (wrapped in <strong>) under IPython."""
  print_func = None
  try:
    from IPython.display import Markdown, display
    def print_func(s):
      display(Markdown(s))
  except:
    pass
  if not isinstance(s, str):
    s = str(s)

  if print_func is None:
    self.P(s)
  else:
    # record without echoing, then render emphasized via IPython
    self._add_log(
      logstr=s,
      show=False,
      noprefix=False,
      show_time=False,
    )
    print_func('<strong>' + s + '</strong>')
  return
|
950
|
+
|
951
|
+
def print_pad(self, str_msg, str_text, n=3):
  """Print `str_msg` followed by `str_text` indented by `n` spaces."""
  str_msg = str_msg if isinstance(str_msg, str) else str(str_msg)
  str_text = str_text if isinstance(str_text, str) else str(str_text)
  combined = str_msg + "\n" + textwrap.indent(str_text, n * " ")
  self._logger(combined, show=True, show_time=False)
  return
|
959
|
+
|
960
|
+
def log(self, str_msg, show=False, show_time=False, color=None):
  """Record a log line; echo to console only when `show` is True."""
  return self._logger(str_msg, show=show, show_time=show_time, color=color)
|
962
|
+
|
963
|
+
def _generate_log_path(self):
  # Compose the current log file path (<prefix>_<suffix>_<part>_log[.txt|_web.html])
  # and record it in the '<lib>.txt' tracker json inside the logs dir.
  if self.no_folders_no_save:
    return
  part = '{:03d}'.format(self.split_part)
  lp = self.file_prefix
  ls = self.log_suffix
  if self.HTML:
    self.log_file = lp + '_' + ls + '_' + part + '_log_web.html'
  else:
    self.log_file = lp + '_' + ls + '_' + part + '_log.txt'

  self.log_file = os.path.join(self._logs_dir, self.log_file)
  path_dict = {}
  path_dict['CURRENT_LOG'] = self.log_file
  file_path = os.path.join(self._logs_dir, self.__lib__ + '.txt')
  # NOTE(review): this rewrites the tracker file with only CURRENT_LOG, dropping
  # any CURRENT_E_LOG entry written by _generate_error_log_path — confirm intended.
  with open(file_path, 'w') as fp:
    json.dump(path_dict, fp, sort_keys=True, indent=4)
  self._add_log("{} log changed to {}...".format(file_path, self.log_file))
  return
|
982
|
+
|
983
|
+
def _generate_error_log_path(self):
  # Compose the current error-log file path and record it in the '<lib>.txt'
  # tracker json inside the logs dir (mirrors _generate_log_path).
  if self.no_folders_no_save:
    return
  part = '{:03d}'.format(self.split_err_part)
  lp = self.file_prefix
  ls = self.log_suffix
  if self.HTML:
    self.log_e_file = lp + '_' + ls + '_' + part + '_error_log_web.html'
  else:
    self.log_e_file = lp + '_' + ls + '_' + part + '_error_log.txt'

  self.log_e_file = os.path.join(self._logs_dir, self.log_e_file)
  path_dict = {}
  path_dict['CURRENT_E_LOG'] = self.log_e_file
  file_path = os.path.join(self._logs_dir, self.__lib__ + '.txt')
  # NOTE(review): overwrites the tracker json with only CURRENT_E_LOG, dropping
  # the CURRENT_LOG entry written by _generate_log_path — confirm intended.
  with open(file_path, 'w') as fp:
    json.dump(path_dict, fp, sort_keys=True, indent=4)
  self._add_log("{} error log changed to {}...".format(file_path, self.log_e_file))
  return
|
1002
|
+
|
1003
|
+
def _get_cloud_base_folder(self, base_folder):
  """Map a 'GOOGLE'/'DROPBOX' marker in `base_folder` to the actual cloud-drive path.

  Folders whose upper-cased path already contains '/DATA/' are left untouched.
  """
  upper = base_folder.upper()
  is_data_path = "/DATA/" in upper

  if "GOOGLE" in upper and not is_data_path:
    base_folder = self.get_google_drive()
  if "DROPBOX" in upper and not is_data_path:
    base_folder = self.get_dropbox_drive()
  return base_folder
|
1013
|
+
|
1014
|
+
def _configure_data_and_dirs(self, config_file, config_file_encoding=None):
  """Load the json config (or fall back to ctor defaults), resolve the base/app
  folders (including cloud-drive markers and '~'), and create the standard
  logs/output/data/models directory layout.
  """
  if self.no_folders_no_save:
    return

  if config_file != "":
    # FIX: use a context manager — the previous code never closed the handle
    try:
      if config_file_encoding is None:
        with open(config_file) as f:
          self.config_data = json.load(f, object_pairs_hook=OrderedDict)
      else:
        with open(config_file, encoding=config_file_encoding) as f:
          self.config_data = json.load(f, object_pairs_hook=OrderedDict)
    except Exception:
      msg = "Failed to load config file '{}'".format(config_file)
      self.P(msg, color='r', boxed=True)
      self.P("Exception details:\n{}".format(traceback.format_exc()), color='r')
      self.config_data = {}

    if self._base_folder is None and self._app_folder is None:
      # no ctor defaults: the config file must provide both folders
      assert ("BASE_FOLDER" in self.config_data.keys())
      assert ("APP_FOLDER" in self.config_data.keys())
      self._base_folder = self.config_data["BASE_FOLDER"]
      self._app_folder = self.config_data["APP_FOLDER"]
    #endif no defaults for base/app folders

    print("Loaded config [{}]".format(config_file), flush=True)
    self.config_file = config_file
  else:
    self.config_data = {
      'BASE_FOLDER' : self._base_folder,
      'APP_FOLDER' : self._app_folder
    }
    self.config_file = "default_config.txt"
  #endif have or not config file

  # ctor-supplied config overrides file-supplied keys
  self.config_data = {
    **self.config_data,
    **self.__init_config_data,
  }

  matches = self.replace_secrets(self.config_data)
  print(" Config modified with following env vars: {}".format(matches))

  self._base_folder = self.expand_tilda(self._base_folder)
  self._base_folder = self._get_cloud_base_folder(self._base_folder)
  self._root_folder = os.path.abspath(self._base_folder)
  self._base_folder = os.path.join(self._base_folder, self._app_folder)
  print("BASE: {}".format(self._base_folder), flush=True)

  self._normalize_path_sep()

  if not os.path.isdir(self._base_folder):
    print("{color_start}WARNING! Invalid app base folder '{base_folder}'! We create it automatically!{color_end}".format(
      color_start=COLORS['r'],
      base_folder=self._base_folder,
      color_end=COLORS['__end__']
    ), flush=True)
  #endif

  self._logs_dir = os.path.join(self._base_folder, self.get_logs_dir_name())
  self._outp_dir = os.path.join(self._base_folder, self.get_output_dir_name())
  self._data_dir = os.path.join(self._base_folder, self.get_data_dir_name())
  self._modl_dir = os.path.join(self._base_folder, self.get_models_dir_name())

  self._setup_folders([
    self._outp_dir,
    self._logs_dir,
    self._data_dir,
    self._modl_dir
  ])

  return
|
1086
|
+
|
1087
|
+
@staticmethod
|
1088
|
+
def get_logs_dir_name():
|
1089
|
+
return '_logs'
|
1090
|
+
|
1091
|
+
@staticmethod
|
1092
|
+
def get_output_dir_name():
|
1093
|
+
return '_output'
|
1094
|
+
|
1095
|
+
@staticmethod
|
1096
|
+
def get_data_dir_name():
|
1097
|
+
return '_data'
|
1098
|
+
|
1099
|
+
@staticmethod
|
1100
|
+
def get_models_dir_name():
|
1101
|
+
return '_models'
|
1102
|
+
|
1103
|
+
|
1104
|
+
@staticmethod
|
1105
|
+
def get_all_files_in_folder(root_folder=None):
|
1106
|
+
"""
|
1107
|
+
Walks through all directories and sub-directories of the given root_folder and returns a list of all file paths.
|
1108
|
+
|
1109
|
+
Parameters
|
1110
|
+
----------
|
1111
|
+
root_folder : str
|
1112
|
+
The path of the folder you want to walk through. Default `None` will generate
|
1113
|
+
all files in current folder
|
1114
|
+
|
1115
|
+
Returns
|
1116
|
+
-------
|
1117
|
+
List[str]
|
1118
|
+
A list containing the paths of all files in the folder and its subfolders.
|
1119
|
+
"""
|
1120
|
+
|
1121
|
+
if root_folder is None:
|
1122
|
+
root_folder = os.getcwd()
|
1123
|
+
|
1124
|
+
file_paths = []
|
1125
|
+
|
1126
|
+
for dirpath, dirnames, filenames in os.walk(root_folder):
|
1127
|
+
for filename in filenames:
|
1128
|
+
full_path = os.path.join(dirpath, filename)
|
1129
|
+
file_paths.append(full_path)
|
1130
|
+
|
1131
|
+
return file_paths
|
1132
|
+
|
1133
|
+
@staticmethod
|
1134
|
+
def get_all_subfolders(root_folder=None, as_package=False):
|
1135
|
+
"""
|
1136
|
+
Walks through all directories and sub-directories of the given root_folder and returns a list of all subfolder paths.
|
1137
|
+
|
1138
|
+
Parameters
|
1139
|
+
----------
|
1140
|
+
root_folder : str
|
1141
|
+
The path of the folder you want to walk through. Default `None` will generate
|
1142
|
+
all subfolders in current folder
|
1143
|
+
|
1144
|
+
Returns
|
1145
|
+
-------
|
1146
|
+
List[str]
|
1147
|
+
A list containing the paths of all files in the folder and its subfolders.
|
1148
|
+
"""
|
1149
|
+
|
1150
|
+
if root_folder is None:
|
1151
|
+
root_folder = os.getcwd()
|
1152
|
+
|
1153
|
+
folder_paths = []
|
1154
|
+
|
1155
|
+
for dirpath, dirnames, filenames in os.walk(root_folder):
|
1156
|
+
for dirname in dirnames:
|
1157
|
+
full_path = os.path.join(dirpath, dirname)
|
1158
|
+
if as_package:
|
1159
|
+
full_path = full_path.replace('/', '.').replace('\\', '.')
|
1160
|
+
folder_paths.append(full_path)
|
1161
|
+
|
1162
|
+
return folder_paths
|
1163
|
+
|
1164
|
+
|
1165
|
+
|
1166
|
+
def get_code_base_folder(self):
  """Folder holding the running code: PyInstaller temp dir when frozen, else root folder."""
  if getattr(sys, 'frozen', False):
    # Running in a bundled exe
    return sys._MEIPASS
  # Running in a normal Python environment
  return self.root_folder
|
1174
|
+
|
1175
|
+
@property
def code_base_folder(self):
  # Property alias for get_code_base_folder().
  return self.get_code_base_folder()
|
1178
|
+
|
1179
|
+
def _setup_folders(self, folder_list):
  """Remember `folder_list` and create any listed folder that does not exist yet."""
  self.folder_list = folder_list
  missing = [f for f in folder_list if not os.path.isdir(f)]
  for folder in missing:
    print("Creating folder [{}]".format(folder))
    os.makedirs(folder)
  return
|
1186
|
+
|
1187
|
+
def update_config(self, dict_newdata=None):
  """
  saves config file with current config_data dictionary

  Optionally merges `dict_newdata` into config_data first.
  """
  if dict_newdata is not None:
    self.config_data.update(dict_newdata)
  with open(self.config_file, 'w') as fp:
    json.dump(self.config_data, fp, indent=4)
  self.P("Config file '{}' has been updated.".format(self.config_file))
  return
|
1198
|
+
|
1199
|
+
|
1200
|
+
def update_config_values(self, dct_newdata):
  """Update only the given keys in the on-disk config file (read-modify-write)."""
  self.P("Selective update of config file '{}' on {}".format(self.config_file, list(dct_newdata.keys())))
  with open(self.config_file, 'r') as fp:
    dct_cache_config = json.load(fp, object_pairs_hook=OrderedDict)
  for key, new_value in dct_newdata.items():
    previous = dct_cache_config[key]
    dct_cache_config[key] = new_value
    self.P(" Modified '{}'='{}' => '{}'='{}'".format(key, previous, key, new_value))
  with open(self.config_file, 'w') as fp:
    json.dump(dct_cache_config, fp, indent=4)
  self.P("Config file '{}' has been updated.".format(self.config_file))
  return
|
1212
|
+
|
1213
|
+
|
1214
|
+
def _check_additional_configs(self):
  # Scan the data folder (or a configured subfolder of it) for extra *.txt/*json
  # config-like files and merge their keys into self.config_data.
  additional_configs = []

  if not self.check_additional_configs:
    return

  check_dir = self.get_data_folder()
  if self.data_folder_additional_configs is not None:
    check_dir = self.get_data_subfolder(self.data_folder_additional_configs)
    if check_dir is None:
      self.P("Additional configs folder '{}' not found in '{}'"
             .format(self.data_folder_additional_configs, self.get_data_folder()[-50:]))
      return

  # keep only plain files whose extension suggests a json/text payload
  data_files = list(filter(lambda x: os.path.isfile(os.path.join(check_dir, x)), os.listdir(check_dir)))
  data_files = list(filter(lambda x: any(ext in x for ext in ['.txt', 'json']), data_files))

  for f in data_files:
    # only names that look like configs ('config'/'cfg'/'conf') are loaded
    if any(x in f for x in ['config', 'cfg', 'conf']):
      fn = self.get_data_file(f)
      self.P("Found additional config in '{}'".format(fn))
      dct_config = json.load(open(fn), object_pairs_hook=OrderedDict)
      dct_config = self.replace_secrets(dct_config)
      additional_configs.append(dct_config)

  if len(additional_configs) > 0:
    # later files win when keys collide; existing config keys are overridden
    dct_final = {}
    for d in additional_configs:
      dct_final.update(d)
    for k, v in dct_final.items():
      if k in self.config_data:
        self.P("[WARNING] Overriding key '{}'".format(k))
      self.config_data[k] = v
  return
|
1248
|
+
|
1249
|
+
def raise_error(self, error_message):
  """Log `error_message` to the console then raise it as a ValueError."""
  self.P("ERROR: {}".format(error_message))
  raise ValueError(str(error_message))
|
1252
|
+
|
1253
|
+
def get_config_value(self, key, default=0):
  """Return config value for `key`; a missing key is created with `default`."""
  if key not in self.config_data.keys():
    # create key if does not exist so later saves persist the default
    self.config_data[key] = default
  return self.config_data[key]
|
1261
|
+
|
1262
|
+
def clear_folder(self, folder, include_subfolders=False):
  """Delete every file in `folder`; also remove subfolders when requested.

  Errors on individual entries are logged and skipped (best effort).
  """
  self.P("Clearing {}".format(folder))
  for entry in os.listdir(folder):
    full_path = os.path.join(folder, entry)
    try:
      if os.path.isfile(full_path):
        self.P(" Deleting {}".format(full_path[-30:]))
        os.unlink(full_path)
      elif include_subfolders and os.path.isdir(full_path):
        self.P(" Removing ...{} subfolder".format(full_path[-30:]))
        shutil.rmtree(full_path)
    except Exception as e:
      self.P("{}".format(e))
|
1275
|
+
|
1276
|
+
def clear_model_folder(self, include_subfolders=False):
  """Delete all files (and optionally subfolders) in the models folder."""
  self.clear_folder(self.get_models_folder(), include_subfolders=include_subfolders)
|
1279
|
+
|
1280
|
+
def clear_log_folder(self, include_subfolders=False):
  """Delete all files (and optionally subfolders) in the logs folder."""
  self.clear_folder(self._logs_dir, include_subfolders=include_subfolders)
|
1283
|
+
|
1284
|
+
def clear_output_folder(self, include_subfolders=False):
  """Delete all files (and optionally subfolders) in the output folder."""
  self.clear_folder(self.get_output_folder(), include_subfolders=include_subfolders)
|
1287
|
+
|
1288
|
+
def clear_all_results(self):
  """Wipe the logs, models and output folders (top-level files only)."""
  self.P("WARNING: removing all files from models, logs and output!")
  self.clear_log_folder()
  self.clear_model_folder()
  self.clear_output_folder()
|
1293
|
+
|
1294
|
+
def get_base_folder(self):
  """Return the app base folder, or '' before folders are configured."""
  return getattr(self, '_base_folder', '')
|
1296
|
+
|
1297
|
+
@property
def base_folder(self):
  # Property alias for get_base_folder().
  return self.get_base_folder()
|
1300
|
+
|
1301
|
+
@property
def root_folder(self):
  # Absolute path of the configured base folder root
  # (set by _configure_data_and_dirs).
  return self._root_folder
|
1304
|
+
|
1305
|
+
@property
def app_folder(self):
  # Application subfolder (joined under the base folder at configure time).
  return self._app_folder
|
1308
|
+
|
1309
|
+
def get_data_folder(self):
  """Return the data folder, or '' before folders are configured."""
  return getattr(self, '_data_dir', '')
|
1311
|
+
|
1312
|
+
def get_logs_folder(self):
  """Return the logs folder, or '' before folders are configured."""
  return getattr(self, '_logs_dir', '')
|
1314
|
+
|
1315
|
+
def get_output_folder(self):
  """Return the output folder, or '' before folders are configured."""
  return getattr(self, '_outp_dir', '')
|
1317
|
+
|
1318
|
+
def get_models_folder(self):
  """Return the models folder, or '' before folders are configured."""
  return getattr(self, '_modl_dir', '')
|
1320
|
+
|
1321
|
+
def get_target_folder(self, target):
  """Resolve a folder alias ('data'/'logs'/'models'/'output' + variants) to its path.

  Unknown aliases are logged and resolve to None; `target=None` returns None.
  """
  if target is None:
    return

  key = target.lower()
  if key in ('data', '_data', 'data_dir', 'dat'):
    return self.get_data_folder()
  if key in ('logs', 'log', 'logs_dir', 'log_dir', '_log', '_logs'):
    return self.get_logs_folder()
  if key in ('models', 'model', '_models', '_model', 'model_dir', 'models_dir', 'modl'):
    return self.get_models_folder()
  if key in ('output', '_output', 'output_dir', 'outp', '_outp'):
    return self.get_output_folder()

  self.P("Inner folder of type '{}' not found".format(target))
  return
|
1339
|
+
|
1340
|
+
|
1341
|
+
def get_subfolder(self, where, subfolder, force_create=False):
  """Return the path of `subfolder` under the aliased folder `where`.

  Creates it when `force_create`; returns None if the folder does not exist.
  """
  base = self.get_target_folder(target=where)
  path = os.path.join(base, subfolder)
  if force_create:
    os.makedirs(path, exist_ok=True)
  return path if os.path.isdir(path) else None
|
1349
|
+
|
1350
|
+
|
1351
|
+
def get_data_subfolder(self, _dir, force_create=False):
  """Return (optionally creating) subfolder `_dir` of the data folder."""
  return self.get_subfolder(where='data', subfolder=_dir, force_create=force_create)
|
1353
|
+
|
1354
|
+
def get_models_subfolder(self, _dir, force_create=False):
  """Return (optionally creating) subfolder `_dir` of the models folder."""
  return self.get_subfolder(where='models', subfolder=_dir, force_create=force_create)
|
1356
|
+
|
1357
|
+
def get_output_subfolder(self, _dir, force_create=False):
  """Return (optionally creating) subfolder `_dir` of the output folder."""
  return self.get_subfolder(where='output', subfolder=_dir, force_create=force_create)
|
1359
|
+
|
1360
|
+
def get_path_from_node(self, dct):
|
1361
|
+
if 'PARENT' in dct:
|
1362
|
+
path = self.get_path_from_node(dct['PARENT'])
|
1363
|
+
os.path.join(path, dct['PATH'])
|
1364
|
+
return path
|
1365
|
+
elif 'USE_DROPBOX' in dct and int(dct['USE_DROPBOX']) == 1:
|
1366
|
+
return os.path.join(self.get_base_folder(), dct['PATH'])
|
1367
|
+
else:
|
1368
|
+
return dct['PATH']
|
1369
|
+
|
1370
|
+
def get_root_subfolder(self, folder):
  """Return root-folder/`folder` if that directory exists, else None."""
  candidate = os.path.join(self._root_folder, folder)
  return candidate if os.path.isdir(candidate) else None
|
1376
|
+
|
1377
|
+
def get_base_subfolder(self, folder):
  """Return base-folder/`folder` if that directory exists, else None."""
  candidate = os.path.join(self._base_folder, folder)
  return candidate if os.path.isdir(candidate) else None
|
1383
|
+
|
1384
|
+
def get_root_file(self, str_file):
  """Full path of `str_file` under the root folder; asserts the file exists."""
  fn = os.path.join(self._root_folder, str_file)
  assert os.path.isfile(fn), "File not found: {}".format(fn)
  return fn
|
1388
|
+
|
1389
|
+
def get_base_file(self, str_file):
  """Full path of `str_file` under the base folder; asserts the file exists."""
  fn = os.path.join(self.get_base_folder(), str_file)
  assert os.path.isfile(fn), "File not found: {}".format(fn)
  return fn
|
1393
|
+
|
1394
|
+
def get_file_from_folder(self, s_folder, s_file):
  """Full path of `s_file` inside base-folder/`s_folder`, or None if missing."""
  candidate = os.path.join(self.get_base_folder(), s_folder, s_file)
  return candidate if os.path.isfile(candidate) else None
|
1399
|
+
|
1400
|
+
def get_data_file(self, s_file):
  """
  Return the full path of a data file, or None if the file does not exist.
  """
  candidate = os.path.join(self.get_data_folder(), s_file)
  return candidate if os.path.isfile(candidate) else None
|
1408
|
+
|
1409
|
+
def get_model_file(self, s_file):
  """
  Return the full path of a model file, or None if the file does not exist.
  """
  candidate = os.path.join(self.get_models_folder(), s_file)
  return candidate if os.path.isfile(candidate) else None
|
1417
|
+
|
1418
|
+
def get_models_file(self, s_file):
  """Alias for get_model_file()."""
  return self.get_model_file(s_file)
|
1420
|
+
|
1421
|
+
def get_output_file(self, s_file):
  """Return the full path of an output file, or None if the file does not exist."""
  candidate = os.path.join(self.get_output_folder(), s_file)
  return candidate if os.path.isfile(candidate) else None
|
1426
|
+
|
1427
|
+
def check_folder(self, sub_folder, root=None):
  """Ensure `root/sub_folder` exists (creating it if needed), register it in
  folder_list, and return its full path. `root` defaults to the base folder."""
  root = self.get_base_folder() if root is None else root
  sfolder = os.path.join(root, sub_folder)
  if sfolder not in self.folder_list:
    self.folder_list.append(sfolder)

  if not os.path.isdir(sfolder):
    self.verbose_log(" Creating folder [...{}]".format(sfolder[-40:]))
    os.makedirs(sfolder)
  return sfolder
|
1438
|
+
|
1439
|
+
def check_folder_data(self, sub_folder):
  """Ensure a subfolder exists under the data folder and return its path."""
  return self.check_folder(sub_folder, self.get_data_folder())
|
1442
|
+
|
1443
|
+
def check_folder_models(self, sub_folder):
  """Ensure a subfolder exists under the models folder and return its path."""
  return self.check_folder(sub_folder, self.get_models_folder())
|
1446
|
+
|
1447
|
+
def check_folder_output(self, sub_folder):
  """Ensure a subfolder exists under the output folder and return its path."""
  return self.check_folder(sub_folder, self.get_output_folder())
|
1450
|
+
|
1451
|
+
@staticmethod
|
1452
|
+
def is_url_friendly(s):
|
1453
|
+
"""
|
1454
|
+
Check if a string is URL-friendly.
|
1455
|
+
|
1456
|
+
Parameters
|
1457
|
+
----------
|
1458
|
+
s : str
|
1459
|
+
The string to be checked for URL-friendliness.
|
1460
|
+
|
1461
|
+
Returns
|
1462
|
+
-------
|
1463
|
+
bool
|
1464
|
+
True if the string is URL-friendly, False otherwise.
|
1465
|
+
"""
|
1466
|
+
# Regular expression for matching only letters, numbers, underscores, and hyphens
|
1467
|
+
pattern = r'^[a-zA-Z0-9_-]+$'
|
1468
|
+
return bool(re.match(pattern, s))
|
1469
|
+
|
1470
|
+
@staticmethod
|
1471
|
+
def get_folders(path):
|
1472
|
+
lst = [os.path.join(path, x) for x in os.listdir(path)]
|
1473
|
+
return [x for x in lst if os.path.isdir(x)]
|
1474
|
+
|
1475
|
+
@staticmethod
|
1476
|
+
def expand_tilda(path):
|
1477
|
+
if '~' in path:
|
1478
|
+
path = path.replace('~', os.path.expanduser('~'))
|
1479
|
+
return path
|
1480
|
+
|
1481
|
+
def refresh_file_prefix(self):
  """Reset the log-file name prefix to the current 'YYYYMMDD_HHMMSS' timestamp."""
  self.file_prefix = dt.now().strftime("%Y%m%d_%H%M%S")
  return
|
1484
|
+
|
1485
|
+
@staticmethod
|
1486
|
+
def now_str(nice_print=False, short=False):
|
1487
|
+
if nice_print:
|
1488
|
+
if short:
|
1489
|
+
return dt.now().strftime("%Y-%m-%d %H:%M:%S")
|
1490
|
+
else:
|
1491
|
+
return dt.now().strftime("%Y-%m-%d %H:%M:%S.%f")
|
1492
|
+
else:
|
1493
|
+
if short:
|
1494
|
+
return dt.now().strftime("%Y%m%d%H%M%S")
|
1495
|
+
else:
|
1496
|
+
return dt.now().strftime("%Y%m%d%H%M%S%f")
|
1497
|
+
|
1498
|
+
@staticmethod
def get_utc_offset(as_string=True):
  """Local UTC offset, as a 'UTC+H'/'UTC-H' string or as a float number of hours."""
  # Get the local timezone and the current local datetime
  local_timezone = tz.tzlocal()
  now = dt.now(local_timezone)
  offset_hours = now.utcoffset().total_seconds() / 3600
  if as_string:
    # FIX: format the magnitude separately — the previous code concatenated
    # the sign with int(offset_hours), producing "UTC--5" for negative offsets
    sign = "+" if offset_hours >= 0 else "-"
    result = "UTC{}{}".format(sign, int(abs(offset_hours)))
  else:
    result = offset_hours
  return result
|
1514
|
+
|
1515
|
+
|
1516
|
+
@staticmethod
def utc_to_local(remote_datetime, remote_utc, fmt='%Y-%m-%d %H:%M:%S', as_string=False):
  """
  Given a "remote" datetime (in datetime or str format) and a string or int denoting an offset
  will return local datetime as a datetime object.

  Parameters
  ----------
  remote_datetime: datetime or str or tzinfo
    The remote datetime; str values are parsed with `fmt`

  remote_utc: int or str or tzinfo or None
    The UTC offset of the remote datetime as int or as string - i.e. "UTC+3" or even "+3".
    None falls back to 'UTC+3'.

  fmt: str, optional
    strptime/strftime format used for str conversions.

  as_string: bool, optional
    Return the local time formatted with `fmt` instead of a datetime.

  Returns
  -------
  datetime (naive, local time) or str when `as_string`
  """
  if remote_utc is None:
    remote_utc = 'UTC+3'  # NOTE(review): hard-coded default offset — confirm intended
  # normalize remote_utc into a tzinfo instance
  if isinstance(remote_utc, str):
    remote_utc = tz.gettz(remote_utc)
  elif isinstance(remote_utc, int):
    utc_offset = remote_utc
    remote_utc = tz.tzoffset(None, timedelta(hours=utc_offset))
  elif not isinstance(remote_utc, tzinfo):
    raise ValueError("Unknown remote_utc type: {}".format(type(remote_utc)))

  if isinstance(remote_datetime, str):
    remote_datetime = dt.strptime(remote_datetime, fmt)

  # attach the remote tz, convert to local, then strip tzinfo (naive result)
  remote_datetime = remote_datetime.replace(tzinfo=remote_utc)
  local_timezone = tz.tzlocal()
  local_datetime = remote_datetime.astimezone(local_timezone)
  local_datetime = local_datetime.replace(tzinfo=None)
  if as_string:
    local_datetime = local_datetime.strftime(fmt)
  return local_datetime
|
1554
|
+
|
1555
|
+
@staticmethod
|
1556
|
+
def str_to_sec(s):
|
1557
|
+
res = None
|
1558
|
+
try:
|
1559
|
+
import time
|
1560
|
+
x = time.strptime(s,'%H:%M:%S')
|
1561
|
+
res = timedelta(hours=x.tm_hour,minutes=x.tm_min,seconds=x.tm_sec).total_seconds()
|
1562
|
+
except:
|
1563
|
+
pass
|
1564
|
+
return res
|
1565
|
+
|
1566
|
+
|
1567
|
+
@staticmethod
|
1568
|
+
def time_to_str(t=None, fmt='%Y-%m-%d %H:%M:%S'):
|
1569
|
+
if t is None:
|
1570
|
+
t = tm()
|
1571
|
+
return strftime(fmt, localtime(t))
|
1572
|
+
|
1573
|
+
|
1574
|
+
def elapsed_to_str(self, elapsed=None, show_days=False):
  """
  Pretty format a number of seconds

  Parameters
  ----------
  elapsed : float, optional
    The amount of time in seconds. The default is None and will use Logger init start as reference.
  show_days : bool, optional
    Show in days, hours, etc instead of HH:MM:SS. The default is False.

  Returns
  -------
  s : str
    Formatted time.
  """
  if elapsed is None:
    elapsed = tm() - self.start_timestamp

  if show_days:
    return str(timedelta(seconds=int(elapsed)))

  total = int(elapsed)
  hours, rem = divmod(total, 3600)
  minutes, seconds = divmod(rem, 60)
  return "{:0>2}:{:0>2}:{:0>2}".format(hours, minutes, seconds)
|
1601
|
+
|
1602
|
+
|
1603
|
+
@staticmethod
|
1604
|
+
def str_to_time(s, fmt='%Y-%m-%d %H:%M:%S'):
|
1605
|
+
return mktime(strptime(s, fmt))
|
1606
|
+
|
1607
|
+
|
1608
|
+
@staticmethod
|
1609
|
+
def str_to_date(s, fmt='%Y-%m-%d %H:%M:%S'):
|
1610
|
+
return dt.strptime(s, fmt)
|
1611
|
+
|
1612
|
+
|
1613
|
+
@staticmethod
|
1614
|
+
def now_str_fmt(fmt=None):
|
1615
|
+
if fmt is None:
|
1616
|
+
fmt = '%Y-%m-%d %H:%M:%S.%f'
|
1617
|
+
|
1618
|
+
return dt.now().strftime(fmt)
|
1619
|
+
|
1620
|
+
def get_error_info(self, return_err_val=False):
  """
  Returns error_type, file, method, line for last error if available.

  Parameters
  ----------
  return_err_val: boolean, optional
    Whether the method returns or not the error message (err_val)

  Returns
  -------
  if not return_err_val:
    (tuple) str, str, str, str : errortype, file, method, line
  else:
    (tuple) str, str, str, str, str : errortype, file, method, line, err message
  """
  err_type, err_val, err_trace = sys.exc_info()
  # NOTE: deliberately NOT storing (err_type, err_val, err_trace) on `self`:
  # traceback objects cannot be pickled, which would break pickling of a
  # logger instance after this method ran.
  if err_type is None:
    # No active exception: return empty strings with the arity the caller
    # asked for (the original always returned a 5-tuple here, which broke
    # 4-value unpacking promised by the docstring when return_err_val=False).
    if not return_err_val:
      return "", "", "", ""
    return "", "", "", "", ""

  str_err = err_type.__name__
  # Use the innermost frame of the traceback: where the error actually occurred.
  stack_summary = traceback.extract_tb(err_trace)
  last_error_frame = stack_summary[-1]
  fn = os.path.splitext(os.path.split(last_error_frame.filename)[-1])[0]
  lineno = last_error_frame.lineno
  func_name = last_error_frame.name
  if not return_err_val:
    return str_err, 'File: ' + fn, 'Func: ' + func_name, 'Line: ' + str(lineno)
  return str_err, 'File: ' + fn, 'Func: ' + func_name, 'Line: ' + str(lineno), str(err_val)
|
1656
|
+
|
1657
|
+
@staticmethod
def tqdm_enumerate(_iter):
  """Yield (index, item) pairs from `_iter` while showing a tqdm progress bar."""
  from tqdm import tqdm
  for idx, item in enumerate(tqdm(_iter)):
    yield idx, item
|
1664
|
+
|
1665
|
+
@staticmethod
|
1666
|
+
def set_nice_prints(linewidth=500,
|
1667
|
+
precision=3,
|
1668
|
+
np_precision=None,
|
1669
|
+
df_precision=None,
|
1670
|
+
suppress=False):
|
1671
|
+
|
1672
|
+
if np_precision is None:
|
1673
|
+
np_precision = precision
|
1674
|
+
if df_precision is None:
|
1675
|
+
df_precision = precision
|
1676
|
+
np.set_printoptions(precision=np_precision)
|
1677
|
+
np.set_printoptions(floatmode='fixed')
|
1678
|
+
np.set_printoptions(linewidth=linewidth)
|
1679
|
+
np.set_printoptions(suppress=suppress)
|
1680
|
+
|
1681
|
+
try:
|
1682
|
+
import pandas as pd
|
1683
|
+
pd.set_option('display.max_rows', 500)
|
1684
|
+
pd.set_option('display.max_columns', 500)
|
1685
|
+
pd.set_option('display.width', 1000)
|
1686
|
+
pd.set_option('display.max_colwidth', 1000)
|
1687
|
+
_format = '{:.' + str(df_precision) + 'f}'
|
1688
|
+
pd.set_option('display.float_format', lambda x: _format.format(x))
|
1689
|
+
except:
|
1690
|
+
pass
|
1691
|
+
|
1692
|
+
return
|
1693
|
+
|
1694
|
+
@staticmethod
|
1695
|
+
def get_google_drive():
|
1696
|
+
home_dir = os.path.expanduser("~")
|
1697
|
+
valid_paths = [
|
1698
|
+
os.path.join(home_dir, "Google Drive"),
|
1699
|
+
os.path.join(home_dir, "GoogleDrive"),
|
1700
|
+
os.path.join(os.path.join(home_dir, "Desktop"), "Google Drive"),
|
1701
|
+
os.path.join(os.path.join(home_dir, "Desktop"), "GoogleDrive"),
|
1702
|
+
os.path.join("C:/", "GoogleDrive"),
|
1703
|
+
os.path.join("C:/", "Google Drive"),
|
1704
|
+
os.path.join("D:/", "GoogleDrive"),
|
1705
|
+
os.path.join("D:/", "Google Drive"),
|
1706
|
+
]
|
1707
|
+
|
1708
|
+
drive_path = None
|
1709
|
+
for path in valid_paths:
|
1710
|
+
if os.path.isdir(path):
|
1711
|
+
drive_path = path
|
1712
|
+
break
|
1713
|
+
|
1714
|
+
if drive_path is None:
|
1715
|
+
raise Exception("Couldn't find google drive folder!")
|
1716
|
+
|
1717
|
+
return drive_path
|
1718
|
+
|
1719
|
+
@staticmethod
|
1720
|
+
def get_dropbox_drive():
|
1721
|
+
# TODO: change this to not be restricted by the folder name
|
1722
|
+
home_dir = os.path.expanduser("~")
|
1723
|
+
valid_paths = [
|
1724
|
+
os.path.join(home_dir, "Lummetry.AI Dropbox/DATA"),
|
1725
|
+
os.path.join(home_dir, "Lummetry.AIDropbox/DATA"),
|
1726
|
+
os.path.join(os.path.join(home_dir, "Desktop"), "Lummetry.AI Dropbox/DATA"),
|
1727
|
+
os.path.join(os.path.join(home_dir, "Desktop"), "Lummetry.AIDropbox/DATA"),
|
1728
|
+
os.path.join("C:/", "Lummetry.AI Dropbox/DATA"),
|
1729
|
+
os.path.join("C:/", "Lummetry.AIDropbox/DATA"),
|
1730
|
+
os.path.join("D:/", "Lummetry.AI Dropbox/DATA"),
|
1731
|
+
os.path.join("D:/", "Lummetry.AIDropbox/DATA"),
|
1732
|
+
os.path.join(home_dir, "Dropbox/DATA"),
|
1733
|
+
os.path.join(os.path.join(home_dir, "Desktop"), "Dropbox/DATA"),
|
1734
|
+
os.path.join("C:/", "Dropbox/DATA"),
|
1735
|
+
os.path.join("D:/", "Dropbox/DATA"),
|
1736
|
+
]
|
1737
|
+
|
1738
|
+
drive_path = None
|
1739
|
+
for path in valid_paths:
|
1740
|
+
if os.path.isdir(path):
|
1741
|
+
drive_path = path
|
1742
|
+
break
|
1743
|
+
|
1744
|
+
if drive_path is None:
|
1745
|
+
raise Exception("Couldn't find google drive folder!")
|
1746
|
+
|
1747
|
+
return drive_path
|
1748
|
+
|
1749
|
+
@staticmethod
def get_dropbox_subfolder(sub_folder):
  """Return the full path of `sub_folder` under the Dropbox root, or None
  when that subfolder does not exist."""
  dropbox_root = BaseLogger.get_dropbox_drive()
  candidate = os.path.join(dropbox_root, sub_folder)
  return candidate if os.path.isdir(candidate) else None
|
1757
|
+
|
1758
|
+
@staticmethod
def print_color(s, color=None):
  """Print `s` wrapped in the terminal escape codes from the module-level
  COLORS table; `color` is matched by its first letter, defaulting to 'n'."""
  color_key = (color or 'n').lower()[0]
  start_code = COLORS[color_key] if color_key in COLORS else COLORS['n']
  end_code = COLORS['__end__']
  print('\r' + start_code + s + end_code, flush=True)
  return
|
1766
|
+
|
1767
|
+
@staticmethod
|
1768
|
+
def import_string(dotted_path):
|
1769
|
+
"""
|
1770
|
+
Import a dotted module path and return the attribute/class designated by the
|
1771
|
+
last name in the path. Raise ImportError if the import failed.
|
1772
|
+
Credits: django 3.1
|
1773
|
+
"""
|
1774
|
+
from importlib import import_module
|
1775
|
+
try:
|
1776
|
+
module_path, class_name = dotted_path.rsplit('.', 1)
|
1777
|
+
except ValueError as err:
|
1778
|
+
raise ImportError("%s doesn't look like a module path" % dotted_path) from err
|
1779
|
+
|
1780
|
+
module = import_module(module_path)
|
1781
|
+
|
1782
|
+
try:
|
1783
|
+
return getattr(module, class_name)
|
1784
|
+
except AttributeError as err:
|
1785
|
+
raise ImportError(
|
1786
|
+
'Module "%s" does not define a "%s" attribute/class' % \
|
1787
|
+
(module_path, class_name)
|
1788
|
+
) from err
|
1789
|
+
|
1790
|
+
def get_machine_name(self):
  """
  Resolve the current host name, cache it on the instance as both
  `MACHINE_NAME` and `COMPUTER_NAME`, and return it.

  NOTE(review): an alternative using socket.gethostbyaddr for dotted names
  was present in the original docstring but never enabled; plain
  gethostname() is what actually runs.
  """
  host_name = socket.gethostname()
  self.MACHINE_NAME = host_name
  self.COMPUTER_NAME = host_name
  return host_name
|
1801
|
+
|
1802
|
+
|
1803
|
+
def _link(self, src_path, target_subpath, is_dir, target=None):
  """
  Creates a symbolic link.

  Parameters:
  ----------
  src_path: str, mandatory
    Symlink src full path

  target_subpath: str, mandatory
    Subpath in the target directory of the logger

  is_dir: bool, mandatory
    Whether is directory or file

  target: str, optional
    Target directory of the logger (data, models, output or logs)
    The default is None ('data')
  """
  if target is None:
    target = 'data'

  # source must exist; log and bail out otherwise (best-effort, no raise)
  if not os.path.exists(src_path):
    self.verbose_log("ERROR! Could not create symlink, because '{}' does not exist".format(src_path))
    return

  base_folder = self.get_target_folder(target)
  if base_folder is None:
    return

  link_path = os.path.join(base_folder, target_subpath)
  # never overwrite an existing file/dir/link at the destination
  if os.path.exists(link_path):
    return

  # make sure the destination's parent directory chain exists
  link_parent = Path(link_path).parent
  if not os.path.exists(link_parent):
    os.makedirs(link_parent)

  os.symlink(src_path, link_path, target_is_directory=is_dir)
  return
|
1847
|
+
|
1848
|
+
def link_file(self, src_path, target_subpath, target=None):
  """Create a symlink to a file inside the logger's `target` folder (default 'data')."""
  self._link(src_path, target_subpath, is_dir=False, target=target)
  return
|
1851
|
+
|
1852
|
+
def link_folder(self, src_path, target_subpath, target=None):
  """Create a symlink to a directory inside the logger's `target` folder (default 'data')."""
  self._link(src_path, target_subpath, is_dir=True, target=target)
  return
|
1855
|
+
|
1856
|
+
@property
def is_main_thread(self):
  """True when the caller is executing on the interpreter's main thread."""
  return threading.main_thread() is threading.current_thread()
|
1859
|
+
|
1860
|
+
@staticmethod
|
1861
|
+
def get_os_name():
|
1862
|
+
import platform
|
1863
|
+
return platform.platform()
|
1864
|
+
|
1865
|
+
@staticmethod
|
1866
|
+
def get_conda_env():
|
1867
|
+
folder = os.environ.get("CONDA_PREFIX", None)
|
1868
|
+
env = None
|
1869
|
+
if folder is not None and len(folder) > 0:
|
1870
|
+
try:
|
1871
|
+
env = os.path.split(folder)[-1]
|
1872
|
+
except:
|
1873
|
+
env = None
|
1874
|
+
return env
|
1875
|
+
|
1876
|
+
@staticmethod
|
1877
|
+
def get_active_git_branch():
|
1878
|
+
fn = './.git/HEAD'
|
1879
|
+
if os.path.isfile(fn):
|
1880
|
+
with open(fn, 'r') as f:
|
1881
|
+
content = f.readlines()
|
1882
|
+
for line in content:
|
1883
|
+
if line.startswith('ref:'):
|
1884
|
+
return line.partition('refs/heads/')[2].replace('\n','')
|
1885
|
+
else:
|
1886
|
+
return None
|
1887
|
+
|
1888
|
+
|
1889
|
+
def dict_show(self, d):
  """Pretty-print dict `d` through the logger (alias of `show_dict`)."""
  self.dict_pretty_format(d=d, display=True)
  return
|
1892
|
+
|
1893
|
+
def show_dict(self, d):
  """Pretty-print dict `d` through the logger (alias of `dict_show`)."""
  self.dict_pretty_format(d=d, display=True)
  return
|
1896
|
+
|
1897
|
+
def dict_pretty_format(self, d, indent=4, as_str=True, display_callback=None, display=False, limit_str=250):
  """
  Render dict `d` as an indented, JSON-like multi-line string.

  Parameters
  ----------
  d : dict
    The dictionary to render (mandatory, asserted).
  indent : int, optional
    Spaces added per nesting level. Default 4.
  as_str : bool, optional
    Return the joined string (True, default) or the raw list of lines.
  display_callback : callable, optional
    When set, the rendered text is passed to it (prefixed with a header).
  display : bool, optional
    When True and no callback given, `self.P` is used as the callback.
  limit_str : int, optional
    String values are truncated to this many characters. Default 250.

  Returns
  -------
  str or list
    The rendered text, or the list of lines when `as_str` is False.
  """
  assert isinstance(d, dict), "`d` must be dict"
  if display and display_callback is None:
    display_callback = self.P
  out_lines = []

  def _walk(node, ind=indent):
    # `node` may be a dict or a list; iterate its items/elements in order
    for pos, key in enumerate(node):
      pretty_key = '"{}"'.format(key) if isinstance(key, str) else str(key)
      if isinstance(node, dict):
        value = node[key]
        out_lines.append(' ' * ind + str(pretty_key) + ' : ')
      else:
        value = key
      if isinstance(value, dict):
        # open a brace: on its own line after '[' or ',', inline otherwise
        if out_lines[-1][-1] in ['[', ',']:
          out_lines.append(' ' * ind + '{')
        else:
          out_lines[-1] = out_lines[-1] + '{'
        _walk(value, ind=ind + indent)
        out_lines.append(' ' * ind + '}')
      elif isinstance(value, list) and len(value) > 0 and isinstance(value[0], dict):
        # list of dicts: render as a bracketed block
        out_lines[-1] = out_lines[-1] + '['
        _walk(value, ind=ind + indent)
        out_lines.append(' ' * ind + ']')
      else:
        rendered = '"{}"'.format(value) if isinstance(value, str) else str(value)
        if isinstance(value, str) and len(rendered) > limit_str:
          rendered = rendered[:limit_str]
        out_lines[-1] = out_lines[-1] + rendered

      if pos < (len(node) - 1):
        out_lines[-1] = out_lines[-1] + ','
    return

  _walk(d, ind=0)

  rendered_text = "{\n"
  for line in out_lines:
    rendered_text += ' ' + line + '\n'
  rendered_text += "}"

  if display_callback is not None:
    rendered_text = "Dict pretty formatter:\n" + rendered_text
    display_callback(rendered_text)
  if as_str:
    return rendered_text
  else:
    return out_lines
|
1946
|
+
|
1947
|
+
def get_log_files(self):
  """Return full paths of all '.txt' files (case-insensitive) in the logs dir."""
  logs_dir = self._logs_dir
  found = []
  for entry in os.listdir(logs_dir):
    if '.txt' in entry.lower():
      found.append(os.path.join(logs_dir, entry))
  return found
|
1949
|
+
|
1950
|
+
|
1951
|
+
def camel_to_snake(self, s):
  """Convert a CamelCase string to snake_case; fully-uppercase strings are
  simply lowercased (e.g. 'ABC' -> 'abc')."""
  import re
  if s.isupper():
    return s.lower()
  # insert '_' before every interior uppercase letter, then normalize
  snake = re.sub(r'(?<!^)(?=[A-Z])', '_', s).lower()
  return snake.replace('__', '_')
|
1960
|
+
|
1961
|
+
@staticmethod
|
1962
|
+
def match_template(json_data: dict, template: dict) -> bool:
|
1963
|
+
"""
|
1964
|
+
Check if all keys (including subkeys) within the template can be found with the same values in the given JSON.
|
1965
|
+
|
1966
|
+
Parameters
|
1967
|
+
----------
|
1968
|
+
json_data : dict
|
1969
|
+
The JSON (dict) to check against the template.
|
1970
|
+
template : dict
|
1971
|
+
The template JSON (dict) containing the keys and values to match.
|
1972
|
+
|
1973
|
+
Returns
|
1974
|
+
-------
|
1975
|
+
bool
|
1976
|
+
True if the JSON matches the template, False otherwise.
|
1977
|
+
"""
|
1978
|
+
# Initialize the stack with the top-level dictionaries from json_data and template
|
1979
|
+
stack = [(json_data, template)]
|
1980
|
+
|
1981
|
+
# Process each pair of current data and template dictionaries/lists from the stack
|
1982
|
+
while stack:
|
1983
|
+
current_data, current_tmpl = stack.pop()
|
1984
|
+
|
1985
|
+
# Check if current_tmpl is a dictionary
|
1986
|
+
if isinstance(current_tmpl, dict):
|
1987
|
+
for key, value in current_tmpl.items():
|
1988
|
+
# If the key is not in current_data, return False
|
1989
|
+
if key not in current_data:
|
1990
|
+
return False
|
1991
|
+
|
1992
|
+
# If the value in the template is a dictionary, add the corresponding pair to the stack
|
1993
|
+
if isinstance(value, dict):
|
1994
|
+
if not isinstance(current_data[key], dict):
|
1995
|
+
return False
|
1996
|
+
stack.append((current_data[key], value))
|
1997
|
+
|
1998
|
+
# If the value in the template is a list, process each item in the list
|
1999
|
+
elif isinstance(value, list):
|
2000
|
+
if not isinstance(current_data[key], list):
|
2001
|
+
return False
|
2002
|
+
|
2003
|
+
tmpl_list = value
|
2004
|
+
data_list = current_data[key]
|
2005
|
+
|
2006
|
+
# For each item in the template list, ensure there is a matching item in the data list
|
2007
|
+
for tmpl_item in tmpl_list:
|
2008
|
+
matched = False
|
2009
|
+
for data_item in data_list:
|
2010
|
+
# If both are dictionaries, add them to the stack for further processing
|
2011
|
+
if isinstance(tmpl_item, dict) and isinstance(data_item, dict):
|
2012
|
+
stack.append((data_item, tmpl_item))
|
2013
|
+
matched = True
|
2014
|
+
break
|
2015
|
+
# If both are lists, add them to the stack for further processing
|
2016
|
+
elif isinstance(tmpl_item, list) and isinstance(data_item, list):
|
2017
|
+
stack.append((data_item, tmpl_item))
|
2018
|
+
matched = True
|
2019
|
+
break
|
2020
|
+
# If they are of the same type and equal, mark as matched
|
2021
|
+
elif tmpl_item == data_item:
|
2022
|
+
matched = True
|
2023
|
+
break
|
2024
|
+
# If no matching item is found, return False
|
2025
|
+
if not matched:
|
2026
|
+
return False
|
2027
|
+
|
2028
|
+
# If the value is not a dictionary or list, directly compare the values
|
2029
|
+
elif current_data[key] != value:
|
2030
|
+
return False
|
2031
|
+
|
2032
|
+
# Check if current_tmpl is a list
|
2033
|
+
elif isinstance(current_tmpl, list):
|
2034
|
+
for tmpl_item in current_tmpl:
|
2035
|
+
matched = False
|
2036
|
+
for data_item in current_data:
|
2037
|
+
# If both are dictionaries, add them to the stack for further processing
|
2038
|
+
if isinstance(tmpl_item, dict) and isinstance(data_item, dict):
|
2039
|
+
stack.append((data_item, tmpl_item))
|
2040
|
+
matched = True
|
2041
|
+
break
|
2042
|
+
# If both are lists, add them to the stack for further processing
|
2043
|
+
elif isinstance(tmpl_item, list) and isinstance(data_item, list):
|
2044
|
+
stack.append((data_item, tmpl_item))
|
2045
|
+
matched = True
|
2046
|
+
break
|
2047
|
+
# If they are of the same type and equal, mark as matched
|
2048
|
+
elif tmpl_item == data_item:
|
2049
|
+
matched = True
|
2050
|
+
break
|
2051
|
+
# If no matching item is found, return False
|
2052
|
+
if not matched:
|
2053
|
+
return False
|
2054
|
+
|
2055
|
+
# If all checks passed, return True
|
2056
|
+
return True
|