naeural-client 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- naeural_client/__init__.py +13 -0
- naeural_client/_ver.py +13 -0
- naeural_client/base/__init__.py +6 -0
- naeural_client/base/distributed_custom_code_presets.py +44 -0
- naeural_client/base/generic_session.py +1763 -0
- naeural_client/base/instance.py +616 -0
- naeural_client/base/payload/__init__.py +1 -0
- naeural_client/base/payload/payload.py +66 -0
- naeural_client/base/pipeline.py +1499 -0
- naeural_client/base/plugin_template.py +5209 -0
- naeural_client/base/responses.py +209 -0
- naeural_client/base/transaction.py +157 -0
- naeural_client/base_decentra_object.py +143 -0
- naeural_client/bc/__init__.py +3 -0
- naeural_client/bc/base.py +1046 -0
- naeural_client/bc/chain.py +0 -0
- naeural_client/bc/ec.py +324 -0
- naeural_client/certs/__init__.py +0 -0
- naeural_client/certs/r9092118.ala.eu-central-1.emqxsl.com.crt +22 -0
- naeural_client/code_cheker/__init__.py +1 -0
- naeural_client/code_cheker/base.py +520 -0
- naeural_client/code_cheker/checker.py +294 -0
- naeural_client/comm/__init__.py +2 -0
- naeural_client/comm/amqp_wrapper.py +338 -0
- naeural_client/comm/mqtt_wrapper.py +539 -0
- naeural_client/const/README.md +3 -0
- naeural_client/const/__init__.py +9 -0
- naeural_client/const/base.py +101 -0
- naeural_client/const/comms.py +80 -0
- naeural_client/const/environment.py +26 -0
- naeural_client/const/formatter.py +7 -0
- naeural_client/const/heartbeat.py +111 -0
- naeural_client/const/misc.py +20 -0
- naeural_client/const/payload.py +190 -0
- naeural_client/default/__init__.py +1 -0
- naeural_client/default/instance/__init__.py +4 -0
- naeural_client/default/instance/chain_dist_custom_job_01_plugin.py +54 -0
- naeural_client/default/instance/custom_web_app_01_plugin.py +118 -0
- naeural_client/default/instance/net_mon_01_plugin.py +45 -0
- naeural_client/default/instance/view_scene_01_plugin.py +28 -0
- naeural_client/default/session/mqtt_session.py +72 -0
- naeural_client/io_formatter/__init__.py +2 -0
- naeural_client/io_formatter/base/__init__.py +1 -0
- naeural_client/io_formatter/base/base_formatter.py +80 -0
- naeural_client/io_formatter/default/__init__.py +3 -0
- naeural_client/io_formatter/default/a_dummy.py +51 -0
- naeural_client/io_formatter/default/aixp1.py +113 -0
- naeural_client/io_formatter/default/default.py +22 -0
- naeural_client/io_formatter/io_formatter_manager.py +96 -0
- naeural_client/logging/__init__.py +1 -0
- naeural_client/logging/base_logger.py +2056 -0
- naeural_client/logging/logger_mixins/__init__.py +12 -0
- naeural_client/logging/logger_mixins/class_instance_mixin.py +92 -0
- naeural_client/logging/logger_mixins/computer_vision_mixin.py +443 -0
- naeural_client/logging/logger_mixins/datetime_mixin.py +344 -0
- naeural_client/logging/logger_mixins/download_mixin.py +421 -0
- naeural_client/logging/logger_mixins/general_serialization_mixin.py +242 -0
- naeural_client/logging/logger_mixins/json_serialization_mixin.py +481 -0
- naeural_client/logging/logger_mixins/pickle_serialization_mixin.py +301 -0
- naeural_client/logging/logger_mixins/process_mixin.py +63 -0
- naeural_client/logging/logger_mixins/resource_size_mixin.py +81 -0
- naeural_client/logging/logger_mixins/timers_mixin.py +501 -0
- naeural_client/logging/logger_mixins/upload_mixin.py +260 -0
- naeural_client/logging/logger_mixins/utils_mixin.py +675 -0
- naeural_client/logging/small_logger.py +93 -0
- naeural_client/logging/tzlocal/__init__.py +20 -0
- naeural_client/logging/tzlocal/unix.py +231 -0
- naeural_client/logging/tzlocal/utils.py +113 -0
- naeural_client/logging/tzlocal/win32.py +151 -0
- naeural_client/logging/tzlocal/windows_tz.py +718 -0
- naeural_client/plugins_manager_mixin.py +273 -0
- naeural_client/utils/__init__.py +2 -0
- naeural_client/utils/comm_utils.py +44 -0
- naeural_client/utils/dotenv.py +75 -0
- naeural_client-2.0.0.dist-info/METADATA +365 -0
- naeural_client-2.0.0.dist-info/RECORD +78 -0
- naeural_client-2.0.0.dist-info/WHEEL +4 -0
- naeural_client-2.0.0.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,481 @@
import json
import yaml
import os
import numpy as np
import traceback
import datetime

from collections import OrderedDict

from copy import deepcopy

def copy_docstring(original):
  """
  Decorator to copy the docstring of another function to the decorated function.

  Parameters
  ----------
  original : function
    The function from which to copy the docstring.

  Returns
  -------
  callable
    A decorator that assigns the original docstring to the decorated function.
  """
  def decorator(target):
    target.__doc__ = original.__doc__
    return target
  return decorator


def replace_nan_inf(data, inplace=False):
  assert isinstance(data, (dict, list)), "Only dictionaries and lists are supported"
  if inplace:
    d = data
  else:
    d = deepcopy(data)
  stack = [d]
  while stack:
    current = stack.pop()
    for key, value in current.items():
      if isinstance(value, dict):
        stack.append(value)
      elif isinstance(value, list):
        for item in value:
          if isinstance(item, dict):
            stack.append(item)
      elif isinstance(value, float) and (np.isnan(value) or np.isinf(value)):
        current[key] = None
  return d

class SimpleNPJson(json.JSONEncoder):
  """
  Used to help jsonify numpy arrays or lists that contain numpy data types,
  and handle datetime.
  """
  def default(self, obj):
    if isinstance(obj, np.integer):
      return int(obj)
    elif isinstance(obj, np.floating):
      return float(obj)
    elif isinstance(obj, np.ndarray):
      return obj.tolist()
    elif isinstance(obj, datetime.datetime):
      return obj.strftime("%Y-%m-%d %H:%M:%S")
    elif "torch" in str(type(obj)):
      return str(obj)
    else:
      return super(SimpleNPJson, self).default(obj)

class NPJson(json.JSONEncoder):
  def default(self, obj):
    if isinstance(obj, np.integer):
      return int(obj)
    elif isinstance(obj, np.floating):
      return float(obj)
    elif isinstance(obj, np.ndarray):
      return obj.tolist()
    elif isinstance(obj, datetime.datetime):
      return obj.strftime("%Y-%m-%d %H:%M:%S")
    elif "torch" in str(type(obj)):
      return str(obj)
    else:
      return super(NPJson, self).default(obj)

  def iterencode(self, o, _one_shot=False):
    """Encode the given object and yield each string representation as available."""
    if self.check_circular:
      markers = {}
    else:
      markers = None
    if self.ensure_ascii:
      _encoder = json.encoder.encode_basestring_ascii
    else:
      _encoder = json.encoder.encode_basestring

    def floatstr(o, allow_nan=self.allow_nan, _repr=float.__repr__, _inf=json.encoder.INFINITY, _neginf=-json.encoder.INFINITY):
      if o != o:  # Check for NaN
        text = 'null'
      elif o == _inf:
        text = 'null'
      elif o == _neginf:
        text = 'null'
      else:
        return repr(o).rstrip('0').rstrip('.') if '.' in repr(o) else repr(o)

      if not allow_nan:
        raise ValueError("Out of range float values are not JSON compliant: " + repr(o))

      return text

    _iterencode = json.encoder._make_iterencode(
      markers, self.default, _encoder, self.indent, floatstr,
      self.key_separator, self.item_separator, self.sort_keys,
      self.skipkeys, _one_shot
    )
    return _iterencode(o, 0)


class _JSONSerializationMixin(object):
  """
  Mixin for json serialization functionalities that are attached to `pye2.Logger`.

  This mixin cannot be instantiated because it is built just to provide some additional
  functionalities for `pye2.Logger`

  In this mixin we can use any attribute/method of the Logger.
  """

  def __init__(self):
    super(_JSONSerializationMixin, self).__init__()
    return

  def load_json(self,
                fname,
                folder=None,
                numeric_keys=True,
                verbose=True,
                subfolder_path=None,
                locking=True,
                replace_environment_secrets=None,
                ):
    assert folder in [None, 'data', 'output', 'models']
    lfld = self.get_target_folder(target=folder)

    if folder is not None:
      if subfolder_path is not None:
        datafile = os.path.join(lfld, subfolder_path.lstrip('/'), fname)
        if verbose:
          self.verbose_log("Loading json '{}' from '{}'/'{}'".format(fname, folder, subfolder_path))
        #endif
      else:
        datafile = os.path.join(lfld, fname)
        if verbose:
          self.verbose_log("Loading json '{}' from '{}'".format(fname, folder))
        #endif
      #endif
    else:
      datafile = fname
      if verbose:
        self.verbose_log("Loading json '{}'".format(fname))
      #endif

    if os.path.isfile(datafile):
      with self.managed_lock_resource(datafile, condition=locking):
        try:
          with open(datafile) as f:
            if not numeric_keys:
              data = json.load(f)
            else:
              data = json.load(f, object_hook=lambda d: {int(k) if k.isnumeric() else k: v for k, v in d.items()})
        except Exception as e:
          self.P("JSON load failed: {}".format(e), color='r')
          data = None
      # endwith conditional lock
      if isinstance(replace_environment_secrets, str) and len(replace_environment_secrets) > 0:
        matches = self.replace_secrets(data)
        if matches is not None and len(matches) > 0:
          self.P(" JSON modified with following env vars: {}".format(matches))
      return data
    else:
      if verbose:
        self.verbose_log(" File not found!", color='r')
    return


  @staticmethod
  def replace_nan(data, inplace=False):
    return replace_nan_inf(data, inplace=inplace)


  @staticmethod
  def safe_json_dumps(
    dct,
    replace_nan=False,
    inplace=False,
    sort_keys=True,
    separators=(',',':'),
    **kwargs
  ):
    """
    Safely dumps a dictionary to json string, replacing nan/inf with None.
    The method also uses deterministic sorting of keys and custom separators.

    Parameters
    ----------
    dct : dict
      The dictionary to be dumped to json

    replace_nan : bool, optional
      If True, replaces nan/inf with None. The default is False.

    inplace : bool, optional
      If True, replaces nan/inf with None in the original dictionary. The default is False.

    sort_keys : bool, optional
      If True, sorts the keys of the dictionary. The default is True.

    separators : tuple, optional
      The separators to be used for json serialization. The default is (',',':').

    **kwargs : dict, optional
      Additional arguments to be passed to json.dumps

    Returns
    -------
    str
      The json string representing the dictionary.
    """
    data = dct
    if replace_nan:
      # NPjson will actually handle inf/nan -> null but we might
      # need to replace directly in the received dict if `inplace=True`
      if inplace:
        data = _JSONSerializationMixin.replace_nan(dct, inplace=inplace)
      return json.dumps(
        data,
        cls=NPJson,
        sort_keys=sort_keys,
        separators=separators,
        **kwargs
      )
    else:
      return json.dumps(
        data,
        cls=SimpleNPJson,
        sort_keys=sort_keys,
        separators=separators,
        **kwargs
      )


  @staticmethod
  @copy_docstring(safe_json_dumps)
  def safe_dumps_json(
    dct,
    replace_nan=False,
    inplace=False,
    sort_keys=True,
    separators=(',',':'),
    **kwargs
  ):
    return _JSONSerializationMixin.safe_json_dumps(
      dct, inplace=inplace, replace_nan=replace_nan,
      sort_keys=sort_keys, separators=separators,
      **kwargs
    )

  @staticmethod
  @copy_docstring(safe_json_dumps)
  def json_dumps(
    dct,
    replace_nan=False,
    inplace=False,
    sort_keys=True,
    separators=(',',':'),
    **kwargs
  ):
    return _JSONSerializationMixin.safe_json_dumps(
      dct, inplace=inplace, replace_nan=replace_nan,
      sort_keys=sort_keys, separators=separators,
      **kwargs
    )


  def load_config_file(self, fn):
    """
    Loads a json/yaml config file and returns the dictionary.
    """
    dct_config = None
    if os.path.isfile(fn):
      if fn.endswith('.json'):
        self.P("Loading JSON config file: {}".format(fn), color='n')
        with open(fn, 'r') as f:
          dct_config = json.load(f)
      elif fn.endswith('.yaml') or fn.endswith('.yml'):
        self.P("Loading YAML config file: {}".format(fn), color='n')
        with open(fn, 'r') as f:
          dct_config = yaml.safe_load(f)
      elif fn.endswith('.txt'):
        self.P("Loading JSON config file from .TXT: {}".format(fn), color='n')
        with open(fn, 'r') as f:
          dct_config = json.load(f)
      else:
        raise ValueError("Unknown config file extension: {}".format(fn))
      #endif json/yaml
      dct_config = OrderedDict(dct_config)
    return dct_config


  def load_dict(self, **kwargs):
    return self.load_json(**kwargs)


  def load_data_json(self, fname, **kwargs):
    return self.load_json(fname, folder='data', **kwargs)

  def load_json_from_data(self, fname, subfolder_path=None, **kwargs):
    return self.load_json(fname, folder='data', subfolder_path=subfolder_path, **kwargs)

  def thread_safe_save(self, datafile, data_json, folder=None, locking=True, indent=True):
    lfld = ''
    if folder is not None:
      lfld = self.get_target_folder(folder)

    path = os.path.join(lfld, datafile)
    os.makedirs(os.path.split(path)[0], exist_ok=True)

    with self.managed_lock_resource(path, condition=locking):
      try:
        with open(path, 'w') as fp:
          json.dump(
            data_json,
            fp,
            sort_keys=True,
            indent=4 if indent else None,
            cls=NPJson
          )
      except Exception as e:
        self.verbose_log("Exception while saving json '{}':\n{}".format(datafile, traceback.format_exc()), color='r')
    # endwith conditional locking
    return path


  def save_data_json(self,
                     data_json,
                     fname,
                     subfolder_path=None,
                     verbose=True,
                     locking=True):
    save_dir = self._data_dir
    if subfolder_path is not None:
      save_dir = os.path.join(save_dir, subfolder_path.lstrip('/'))
      os.makedirs(save_dir, exist_ok=True)

    datafile = os.path.join(save_dir, fname)
    if verbose:
      self.verbose_log('Saving data json: {}'.format(datafile))
    self.thread_safe_save(datafile=datafile, data_json=data_json, locking=locking)
    return datafile

  def load_output_json(self, fname, **kwargs):
    return self.load_json(fname, folder='output', **kwargs)

  def save_output_json(self,
                       data_json,
                       fname,
                       subfolder_path=None,
                       verbose=True,
                       locking=True,
                       indent=True,
                       ):
    save_dir = self._outp_dir
    if subfolder_path is not None:
      save_dir = os.path.join(save_dir, subfolder_path.lstrip('/'))
      os.makedirs(save_dir, exist_ok=True)

    datafile = os.path.join(save_dir, fname)
    if verbose:
      self.verbose_log('Saving output json: {}'.format(datafile))
    self.thread_safe_save(
      datafile=datafile,
      data_json=data_json,
      locking=locking,
      indent=indent,
    )
    return datafile

  def load_models_json(self, fname, **kwargs):
    return self.load_json(fname, folder='models', **kwargs)

  def save_models_json(self,
                       data_json,
                       fname,
                       subfolder_path=None,
                       verbose=True,
                       locking=True):
    save_dir = self._modl_dir
    if subfolder_path is not None:
      save_dir = os.path.join(save_dir, subfolder_path.lstrip('/'))
      os.makedirs(save_dir, exist_ok=True)

    datafile = os.path.join(save_dir, fname)
    if verbose:
      self.verbose_log('Saving models json: {}'.format(datafile))
    self.thread_safe_save(datafile=datafile, data_json=data_json, locking=locking)
    return datafile

  def save_json(self, dct, fname, locking=True):
    return self.thread_safe_save(datafile=fname, data_json=dct, locking=locking)

  def save_json_to_data(self, dct, fname, subfolder_path=None, locking=True):
    return self.save_data_json(data_json=dct, fname=fname, subfolder_path=subfolder_path, locking=locking)

  def load_dict_from_data(self, fn):
    return self.load_data_json(fn)

  def load_dict_from_models(self, fn):
    return self.load_models_json(fn)

  def load_dict_from_output(self, fn):
    return self.load_output_json(fn)

  @staticmethod
  def save_dict_txt(path, dct, indent=True):
    json.dump(dct, open(path, 'w'), sort_keys=True, indent=4 if indent else None)
    return

  @staticmethod
  def load_dict_txt(path):
    """
    This function is NOT thread safe
    """
    with open(path) as f:
      data = json.load(f)
    return data


  def update_data_json(self,
                       fname,
                       update_callback,
                       subfolder_path=None,
                       verbose=False,
                       ):
    assert update_callback is not None, "update_callback must be defined!"
    datafile = self.get_file_path(
      fn=fname,
      folder='data',
      subfolder_path=subfolder_path,
    )
    if datafile is None:
      self.P("update_data_json failed due to missing {}".format(datafile), color='error')
      return False
    with self.managed_lock_resource(datafile):
      result = None
      try:
        data = self.load_data_json(
          fname=fname,
          verbose=verbose,
          subfolder_path=subfolder_path,
          locking=False,
        )

        if data is not None:
          data = update_callback(data)

          self.save_data_json(
            data_json=data,
            fname=fname,
            verbose=verbose,
            subfolder_path=subfolder_path,
            locking=False,
          )
          result = True
      except Exception as e:
        self.P("update_data_json failed: {}".format(e), color='error')
        result = False

    # endwith lock
    return result
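As a quick orientation for the hunk above: it adds the module-level JSON helpers (replace_nan_inf, SimpleNPJson, NPJson) and the _JSONSerializationMixin that the package's Logger composes in. The sketch below is illustrative only and is not part of the package diff; it assumes the wheel is installed and that the module listed in the RECORD is importable as naeural_client.logging.logger_mixins.json_serialization_mixin, and the payload values are made up for the example.

# Illustrative usage sketch -- not part of the wheel diff above.
import datetime
import numpy as np

from naeural_client.logging.logger_mixins.json_serialization_mixin import (
  NPJson, _JSONSerializationMixin, replace_nan_inf,
)

payload = {
  "count": np.int64(3),                            # numpy integer -> int
  "scores": np.array([0.5, float("nan")]),         # ndarray -> list, NaN -> null
  "ratio": float("inf"),                           # inf -> None / null
  "ts": datetime.datetime(2024, 1, 1, 12, 0, 0),   # datetime -> "2024-01-01 12:00:00"
}

# Standalone helper: returns a copy with NaN/inf float values replaced by None.
cleaned = replace_nan_inf(payload, inplace=False)
assert cleaned["ratio"] is None

# The dump helpers are staticmethods, so no Logger instance is needed here;
# NPJson's iterencode emits NaN/inf as JSON null and keys are sorted deterministically.
as_json = _JSONSerializationMixin.safe_json_dumps(payload, replace_nan=True)
print(as_json)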