naeural_client-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- naeural_client/__init__.py +13 -0
- naeural_client/_ver.py +13 -0
- naeural_client/base/__init__.py +6 -0
- naeural_client/base/distributed_custom_code_presets.py +44 -0
- naeural_client/base/generic_session.py +1763 -0
- naeural_client/base/instance.py +616 -0
- naeural_client/base/payload/__init__.py +1 -0
- naeural_client/base/payload/payload.py +66 -0
- naeural_client/base/pipeline.py +1499 -0
- naeural_client/base/plugin_template.py +5209 -0
- naeural_client/base/responses.py +209 -0
- naeural_client/base/transaction.py +157 -0
- naeural_client/base_decentra_object.py +143 -0
- naeural_client/bc/__init__.py +3 -0
- naeural_client/bc/base.py +1046 -0
- naeural_client/bc/chain.py +0 -0
- naeural_client/bc/ec.py +324 -0
- naeural_client/certs/__init__.py +0 -0
- naeural_client/certs/r9092118.ala.eu-central-1.emqxsl.com.crt +22 -0
- naeural_client/code_cheker/__init__.py +1 -0
- naeural_client/code_cheker/base.py +520 -0
- naeural_client/code_cheker/checker.py +294 -0
- naeural_client/comm/__init__.py +2 -0
- naeural_client/comm/amqp_wrapper.py +338 -0
- naeural_client/comm/mqtt_wrapper.py +539 -0
- naeural_client/const/README.md +3 -0
- naeural_client/const/__init__.py +9 -0
- naeural_client/const/base.py +101 -0
- naeural_client/const/comms.py +80 -0
- naeural_client/const/environment.py +26 -0
- naeural_client/const/formatter.py +7 -0
- naeural_client/const/heartbeat.py +111 -0
- naeural_client/const/misc.py +20 -0
- naeural_client/const/payload.py +190 -0
- naeural_client/default/__init__.py +1 -0
- naeural_client/default/instance/__init__.py +4 -0
- naeural_client/default/instance/chain_dist_custom_job_01_plugin.py +54 -0
- naeural_client/default/instance/custom_web_app_01_plugin.py +118 -0
- naeural_client/default/instance/net_mon_01_plugin.py +45 -0
- naeural_client/default/instance/view_scene_01_plugin.py +28 -0
- naeural_client/default/session/mqtt_session.py +72 -0
- naeural_client/io_formatter/__init__.py +2 -0
- naeural_client/io_formatter/base/__init__.py +1 -0
- naeural_client/io_formatter/base/base_formatter.py +80 -0
- naeural_client/io_formatter/default/__init__.py +3 -0
- naeural_client/io_formatter/default/a_dummy.py +51 -0
- naeural_client/io_formatter/default/aixp1.py +113 -0
- naeural_client/io_formatter/default/default.py +22 -0
- naeural_client/io_formatter/io_formatter_manager.py +96 -0
- naeural_client/logging/__init__.py +1 -0
- naeural_client/logging/base_logger.py +2056 -0
- naeural_client/logging/logger_mixins/__init__.py +12 -0
- naeural_client/logging/logger_mixins/class_instance_mixin.py +92 -0
- naeural_client/logging/logger_mixins/computer_vision_mixin.py +443 -0
- naeural_client/logging/logger_mixins/datetime_mixin.py +344 -0
- naeural_client/logging/logger_mixins/download_mixin.py +421 -0
- naeural_client/logging/logger_mixins/general_serialization_mixin.py +242 -0
- naeural_client/logging/logger_mixins/json_serialization_mixin.py +481 -0
- naeural_client/logging/logger_mixins/pickle_serialization_mixin.py +301 -0
- naeural_client/logging/logger_mixins/process_mixin.py +63 -0
- naeural_client/logging/logger_mixins/resource_size_mixin.py +81 -0
- naeural_client/logging/logger_mixins/timers_mixin.py +501 -0
- naeural_client/logging/logger_mixins/upload_mixin.py +260 -0
- naeural_client/logging/logger_mixins/utils_mixin.py +675 -0
- naeural_client/logging/small_logger.py +93 -0
- naeural_client/logging/tzlocal/__init__.py +20 -0
- naeural_client/logging/tzlocal/unix.py +231 -0
- naeural_client/logging/tzlocal/utils.py +113 -0
- naeural_client/logging/tzlocal/win32.py +151 -0
- naeural_client/logging/tzlocal/windows_tz.py +718 -0
- naeural_client/plugins_manager_mixin.py +273 -0
- naeural_client/utils/__init__.py +2 -0
- naeural_client/utils/comm_utils.py +44 -0
- naeural_client/utils/dotenv.py +75 -0
- naeural_client-2.0.0.dist-info/METADATA +365 -0
- naeural_client-2.0.0.dist-info/RECORD +78 -0
- naeural_client-2.0.0.dist-info/WHEEL +4 -0
- naeural_client-2.0.0.dist-info/licenses/LICENSE +201 -0
naeural_client/logging/logger_mixins/pickle_serialization_mixin.py
@@ -0,0 +1,301 @@
+import os
+import bz2
+import pickle
+
+class _PickleSerializationMixin(object):
+  """
+  Mixin for pickle serialization functionalities that are attached to `pye2.Logger`.
+
+  This mixin cannot be instantiated because it is built just to provide some additional
+  functionalities for `pye2.Logger`
+
+  In this mixin we can use any attribute/method of the Logger.
+  """
+
+  def __init__(self):
+    super(_PickleSerializationMixin, self).__init__()
+    return
+
+  def _save_compressed_pickle(self, full_filename, myobj, locking=False):
+    """
+    save object to file using pickle
+
+    @param full_filename: name of destination file
+    @param myobj: object to save (has to be pickleable)
+    """
+    with self.managed_lock_resource(full_filename, condition=locking):
+      try:
+        fhandle = bz2.BZ2File(full_filename, 'wb')
+        pickle.dump(myobj, fhandle, protocol=pickle.HIGHEST_PROTOCOL)
+        fhandle.close()
+      except:
+        self.P('ERROR: File ' + full_filename + ' cannot be written!')
+        return False
+    # endwith conditional lock
+    return True
+
+
+  def _load_compressed_pickle(self, full_filename, locking=False):
+    """
+    Load from filename using pickle
+
+    @param full_filename: name of file to load from
+    """
+    with self.managed_lock_resource(full_filename, condition=locking):
+      try:
+        fhandle = bz2.BZ2File(full_filename, 'rb')
+        myobj = pickle.load(fhandle)
+        fhandle.close()
+      except:
+        self.P('ERROR: File ' + full_filename + ' cannot be read!')
+        return None
+    # endwith conditional lock
+
+    return myobj
+
+
+  def save_pickle(self, data, fn, folder=None,
+                  use_prefix=False, verbose=True,
+                  compressed=False,
+                  subfolder_path=None,
+                  locking=True,
+                  ):
+    """
+    compressed: True if compression is required OR you can just add '.pklz' to `fn`
+    """
+
+    def P(s):
+      if verbose:
+        self.P(s)
+      return
+
+    # enddef
+
+    lfld = self.get_target_folder(target=folder)
+
+    if lfld is None:
+      P("Assuming `fn` param ({}) is a full path".format(fn))
+      datafile = fn
+    else:
+      if use_prefix:
+        fn = self.file_prefix + '_' + fn
+      datafolder = lfld
+      if subfolder_path is not None:
+        datafolder = os.path.join(datafolder, subfolder_path.lstrip('/'))
+        os.makedirs(datafolder, exist_ok=True)
+      datafile = os.path.join(datafolder, fn)
+
+    os.makedirs(os.path.split(datafile)[0], exist_ok=True)
+
+    if compressed or '.pklz' in fn:
+      if not compressed:
+        P("Saving pickle with compression=True forced due to extension")
+      else:
+        P("Saving pickle with compression...")
+      if self._save_compressed_pickle(datafile, myobj=data, locking=locking):
+        P(" Compressed pickle {} saved in {}".format(fn, folder))
+      else:
+        P(" FAILED compressed pickle save!")
+    else:
+      P("Saving uncompressed pikle (lock:{}) : {} ".format(locking, datafile))
+      with self.managed_lock_resource(datafile, condition=locking):
+        try:
+          with open(datafile, 'wb') as fhandle:
+            pickle.dump(data, fhandle, protocol=pickle.HIGHEST_PROTOCOL)
+        except:
+          pass
+      # endwith conditional lock
+      if verbose:
+        P(" Saved pickle '{}' in '{}' folder".format(fn, folder))
+    return datafile
+
+
+  def save_pickle_to_data(self, data, fn, compressed=False, verbose=True,
+                          subfolder_path=None, locking=True):
+    """
+    compressed: True if compression is required OR you can just add '.pklz' to `fn`
+    """
+    return self.save_pickle(
+      data, fn,
+      folder='data',
+      compressed=compressed,
+      subfolder_path=subfolder_path,
+      verbose=verbose,
+      locking=locking,
+    )
+
+
+  def save_pickle_to_models(self, data, fn, compressed=False, verbose=True,
+                            subfolder_path=None, locking=True):
+    """
+    compressed: True if compression is required OR you can just add '.pklz' to `fn`
+    """
+    return self.save_pickle(
+      data, fn,
+      folder='models',
+      compressed=compressed,
+      subfolder_path=subfolder_path,
+      verbose=verbose,
+      locking=locking,
+    )
+
+
+  def save_pickle_to_output(self, data, fn, compressed=False, verbose=True,
+                            subfolder_path=None, locking=True):
+    """
+    compressed: True if compression is required OR you can just add '.pklz' to `fn`
+    """
+    return self.save_pickle(
+      data, fn,
+      folder='output',
+      compressed=compressed,
+      subfolder_path=subfolder_path,
+      verbose=verbose,
+      locking=locking,
+    )
+
+
+  def load_pickle_from_models(self, fn, decompress=False, verbose=True,
+                              subfolder_path=None, locking=True):
+    """
+    decompressed : True if the file was saved with `compressed=True` or you can just use '.pklz'
+    """
+    return self.load_pickle(
+      fn,
+      folder='models',
+      decompress=decompress,
+      verbose=verbose,
+      subfolder_path=subfolder_path,
+      locking=locking,
+    )
+
+
+  def load_pickle_from_data(self, fn, decompress=False, verbose=True,
+                            subfolder_path=None, locking=True):
+    """
+    decompressed : True if the file was saved with `compressed=True` or you can just use '.pklz'
+    """
+    return self.load_pickle(
+      fn,
+      folder='data',
+      decompress=decompress,
+      verbose=verbose,
+      subfolder_path=subfolder_path,
+      locking=locking,
+    )
+
+
+  def load_pickle_from_output(self, fn, decompress=False, verbose=True,
+                              subfolder_path=None, locking=True):
+    """
+    decompressed : True if the file was saved with `compressed=True` or you can just use '.pklz'
+    """
+    return self.load_pickle(
+      fn,
+      folder='output',
+      decompress=decompress,
+      verbose=verbose,
+      subfolder_path=subfolder_path,
+      locking=locking,
+    )
+
+
+  def load_pickle(self, fn, folder=None, decompress=False, verbose=True,
+                  subfolder_path=None, locking=True):
+    """
+    load_from: 'data', 'output', 'models'
+    decompressed : True if the file was saved with `compressed=True` or you can just use '.pklz'
+    """
+    if verbose:
+      P = self.P
+    else:
+      P = lambda x, color=None: x
+
+    lfld = self.get_target_folder(target=folder)
+
+    if lfld is None:
+      P("Loading pickle ... Assuming `fn` param ({}) is a full path".format(fn))
+      datafile = fn
+    else:
+      datafolder = lfld
+      if subfolder_path is not None:
+        datafolder = os.path.join(datafolder, subfolder_path.lstrip('/'))
+      datafile = os.path.join(datafolder, fn)
+      P("Loading pickle (locked:{}) from {}".format(locking, datafile))
+    #endif full path or not
+    data = None
+    exc = None
+    if os.path.isfile(datafile):
+      if decompress or '.pklz' in datafile:
+        if not decompress:
+          P("Loading pickle with decompress=True forced due to extension")
+        else:
+          P("Loading pickle with decompression...")
+        data = self._load_compressed_pickle(datafile)
+      else:
+        with self.managed_lock_resource(datafile, condition=locking):
+          try:
+            with open(datafile, "rb") as f:
+              data = pickle.load(f)
+          except Exception as _exc:
+            data = None
+            exc = _exc
+        # endwith conditional lock
+      #endif decompress or not
+      if data is None:
+        P(" {} load failed with error {}".format(datafile, exc), color='r')
+      else:
+        P(" Loaded: {}".format(datafile))
+      #endif data is None
+    else:
+      P(" File not found! Pickle load failed.", color='r')
+    return data
+
+
+  def update_pickle_from_data(self,
+                              fn,
+                              update_callback,
+                              decompress=False,
+                              verbose=False,
+                              subfolder_path=None,
+                              force_update=False):
+    assert update_callback is not None, "update_callback must be defined!"
+    datafile = self.get_file_path(
+      fn=fn,
+      folder='data',
+      subfolder_path=subfolder_path,
+      force=True
+    )
+    if datafile is None:
+      self.P("update_pickle_from_data failed due to missing {}".format(datafile), color='error')
+      return False
+
+    with self.managed_lock_resource(datafile):
+      result = None
+      try:
+        data = self.load_pickle_from_data(
+          fn=fn,
+          decompress=decompress,
+          verbose=verbose,
+          subfolder_path=subfolder_path,
+          locking=False,
+        )
+
+        if data is not None or force_update:
+          data = update_callback(data)
+
+          self.save_pickle_to_data(
+            data=data,
+            fn=fn,
+            compressed=decompress,
+            verbose=verbose,
+            subfolder_path=subfolder_path,
+            locking=False,
+          )
+        result = True
+      except Exception as e:
+        self.P("update_pickle_from_data failed: {}".format(e), color='error')
+        result = False
+    # endwith lock
+    return result
+
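Usage note: the mixin above is written to be composed into the package's Logger, which supplies P, get_target_folder, managed_lock_resource and file_prefix. The sketch below is a minimal illustration of that contract and is not part of the wheel; the DemoHost stand-ins and the import path are assumptions inferred from the file listing and from the calls the mixin makes, not documented naeural_client API.

# Illustrative sketch only, not shipped in the wheel. DemoHost fakes the
# Logger services the mixin calls (P, get_target_folder,
# managed_lock_resource, file_prefix); the import path is inferred from
# the file listing above.
import os
from contextlib import contextmanager

from naeural_client.logging.logger_mixins.pickle_serialization_mixin import (
  _PickleSerializationMixin,
)


class DemoHost(object):
  file_prefix = 'demo'

  def P(self, msg, color=None):
    print(msg)  # the real Logger adds timestamps/colors

  def get_target_folder(self, target=None):
    # 'data' / 'models' / 'output' map to local folders; None means fn is a full path
    return os.path.join('_local_cache', target) if target else None

  @contextmanager
  def managed_lock_resource(self, name, condition=True):
    yield  # the real implementation serializes concurrent access


class DemoLogger(DemoHost, _PickleSerializationMixin):
  pass


if __name__ == '__main__':
  log = DemoLogger()
  # '.pklz' in the filename forces bz2 compression even with compressed=False
  path = log.save_pickle_to_data({'answer': 42}, 'state.pklz')
  print(log.load_pickle_from_data('state.pklz'), path)
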
naeural_client/logging/logger_mixins/process_mixin.py
@@ -0,0 +1,63 @@
+import os
+import sys
+
+class _ProcessMixin(object):
+  """
+  Mixin for process functionalities that are attached to `pye2.Logger`.
+
+  This mixin cannot be instantiated because it is built just to provide some additional
+  functionalities for `pye2.Logger`
+
+  In this mixin we can use any attribute/method of the Logger.
+  """
+
+  def __init__(self):
+    super(_ProcessMixin, self).__init__()
+    self._close_callback = None
+    return
+
+  @staticmethod
+  def runs_from_ipython():
+    try:
+      __IPYTHON__
+      return True
+    except NameError:
+      return False
+
+  @staticmethod
+  def runs_with_debugger():
+    gettrace = getattr(sys, 'gettrace', None)
+    if gettrace is None:
+      return False
+    else:
+      return not gettrace() is None
+
+  @staticmethod
+  def get_current_process_memory(mb=True):
+    import psutil
+    process = psutil.Process(os.getpid())
+    mem = process.memory_info().rss / pow(1024, 2 if mb else 3)
+    return mem
+
+  def _default_close_callback(self, sig, frame):
+    self.P("SIGINT/Ctrl-C received. Script closing")
+    if self._close_callback is None:
+      self.P(
+        "WARNING: `register_close_callback` received and will force close. Please provide a callback where you can stop the script loop and deallocate nicely.")
+      sys.exit(0)
+    else:
+      self._close_callback()
+    return
+
+  def register_close_callback(self, func=None):
+    """
+    will register a SIGINT/Ctrl-C callback or will default to the one in Logger
+    """
+    import signal
+    if func is None:
+      self.P(
+        "WARNING: register_close_callback received NO callback. The script will not behave nice. Please provide a callback where you can stop the script nicely. ")
+    self._close_callback = func
+    signal.signal(signal.SIGINT, self._default_close_callback)
+    self.P("Registered {} SIGINT/Ctrl-C callback".format('custom' if func else 'default'))
+    return
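Usage note: _ProcessMixin only needs the host to expose P; everything else is the standard library plus psutil for get_current_process_memory. The sketch below is a hedged illustration of registering a graceful Ctrl-C handler through register_close_callback; DemoProc is a hypothetical host and the import path is again inferred from the file listing, not guaranteed API.

# Illustrative sketch only, not shipped in the wheel. Requires psutil for the
# memory call; the import path is inferred from the file listing above.
import time

from naeural_client.logging.logger_mixins.process_mixin import _ProcessMixin


class DemoProc(_ProcessMixin):
  def P(self, msg, color=None):
    print(msg)  # stand-in for the Logger's print method


if __name__ == '__main__':
  proc = DemoProc()
  state = {'running': True}

  def on_close():
    # invoked on SIGINT instead of the default hard sys.exit(0)
    state['running'] = False

  proc.register_close_callback(on_close)
  print('RSS memory (MB):', proc.get_current_process_memory(mb=True))
  while state['running']:
    time.sleep(0.5)  # main loop; Ctrl-C flips the flag via on_close
  print('clean shutdown')
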
naeural_client/logging/logger_mixins/resource_size_mixin.py
@@ -0,0 +1,81 @@
+import os
+
+class _ResourceSizeMixin(object):
+  """
+  Mixin for resource size functionalities that are attached to `pye2.Logger`.
+
+  This mixin cannot be instantiated because it is built just to provide some additional
+  functionalities for `pye2.Logger`
+
+  In this mixin we can use any attribute/method of the Logger.
+  """
+
+  def __init__(self):
+    super(_ResourceSizeMixin, self).__init__()
+
+  @staticmethod
+  def compute_size_units(size):
+    units = ['B', 'KB', 'MB', 'GB', 'TB']
+    i = 0
+    unit = units[i]
+    while size >= 1024 and i < len(units) - 1:
+      i += 1
+      size /= 1024
+      unit = units[i]
+
+    size = round(size, 3)
+    return size, unit
+
+  @staticmethod
+  def cast_size_units(size,
+                      initial_unit,
+                      target_unit):
+    assert initial_unit in ['B', 'KB', 'MB', 'GB', 'TB']
+    assert target_unit in ['B', 'KB', 'MB', 'GB', 'TB']
+
+    if initial_unit == 'TB':
+      size_bytes = size * (2 ** 40)
+    elif initial_unit == 'GB':
+      size_bytes = size * (2 ** 30)
+    elif initial_unit == 'MB':
+      size_bytes = size * (2 ** 20)
+    elif initial_unit == 'KB':
+      size_bytes = size * (2 ** 10)
+    elif initial_unit == 'B':
+      size_bytes = size
+
+    if target_unit == 'TB':
+      target_size = size_bytes / (2 ** 40)
+    elif target_unit == 'GB':
+      target_size = size_bytes / (2 ** 30)
+    elif target_unit == 'MB':
+      target_size = size_bytes / (2 ** 20)
+    elif target_unit == 'KB':
+      target_size = size_bytes / (2 ** 10)
+    elif target_unit == 'B':
+      target_size = size_bytes
+
+    return target_size
+
+  def get_file_size(self, fn=None, target='data'):
+    file_path = os.path.join(self.get_target_folder(target=target), fn)
+    size = os.path.getsize(file_path)
+    return self.compute_size_units(size)
+
+  @staticmethod
+  def get_folder_size(start_path=None):
+    if start_path is None:
+      start_path = '.'
+
+    total_size = 0
+    for dirpath, dirnames, filenames in os.walk(start_path):
+      for f in filenames:
+        fp = os.path.join(dirpath, f)
+        # skip if it is symbolic link
+        if not os.path.islink(fp):
+          try:
+            total_size += os.path.getsize(fp)
+          except:
+            pass
+
+    return _ResourceSizeMixin.compute_size_units(total_size)
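Usage note: the size helpers in _ResourceSizeMixin are static and need no Logger at all, so they can be exercised directly. compute_size_units divides by 1024 until the value drops below 1024, and cast_size_units converts through bytes using binary (2**10) multiples. A small sketch, with the import path inferred from the file listing rather than documented API:

# Illustrative sketch only, not shipped in the wheel; import path inferred
# from the file listing above.
from naeural_client.logging.logger_mixins.resource_size_mixin import _ResourceSizeMixin

if __name__ == '__main__':
  # 5_400_000 B -> /1024 -> /1024 -> (5.15, 'MB') after rounding to 3 decimals
  print(_ResourceSizeMixin.compute_size_units(5_400_000))

  # 2 GB -> bytes -> MB using binary multiples: 2 * 2**30 / 2**20 == 2048.0
  print(_ResourceSizeMixin.cast_size_units(2, initial_unit='GB', target_unit='MB'))

  # walks the tree, skips symlinks, returns a (size, unit) tuple
  print(_ResourceSizeMixin.get_folder_size('.'))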