naeural-client 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. naeural_client/__init__.py +13 -0
  2. naeural_client/_ver.py +13 -0
  3. naeural_client/base/__init__.py +6 -0
  4. naeural_client/base/distributed_custom_code_presets.py +44 -0
  5. naeural_client/base/generic_session.py +1763 -0
  6. naeural_client/base/instance.py +616 -0
  7. naeural_client/base/payload/__init__.py +1 -0
  8. naeural_client/base/payload/payload.py +66 -0
  9. naeural_client/base/pipeline.py +1499 -0
  10. naeural_client/base/plugin_template.py +5209 -0
  11. naeural_client/base/responses.py +209 -0
  12. naeural_client/base/transaction.py +157 -0
  13. naeural_client/base_decentra_object.py +143 -0
  14. naeural_client/bc/__init__.py +3 -0
  15. naeural_client/bc/base.py +1046 -0
  16. naeural_client/bc/chain.py +0 -0
  17. naeural_client/bc/ec.py +324 -0
  18. naeural_client/certs/__init__.py +0 -0
  19. naeural_client/certs/r9092118.ala.eu-central-1.emqxsl.com.crt +22 -0
  20. naeural_client/code_cheker/__init__.py +1 -0
  21. naeural_client/code_cheker/base.py +520 -0
  22. naeural_client/code_cheker/checker.py +294 -0
  23. naeural_client/comm/__init__.py +2 -0
  24. naeural_client/comm/amqp_wrapper.py +338 -0
  25. naeural_client/comm/mqtt_wrapper.py +539 -0
  26. naeural_client/const/README.md +3 -0
  27. naeural_client/const/__init__.py +9 -0
  28. naeural_client/const/base.py +101 -0
  29. naeural_client/const/comms.py +80 -0
  30. naeural_client/const/environment.py +26 -0
  31. naeural_client/const/formatter.py +7 -0
  32. naeural_client/const/heartbeat.py +111 -0
  33. naeural_client/const/misc.py +20 -0
  34. naeural_client/const/payload.py +190 -0
  35. naeural_client/default/__init__.py +1 -0
  36. naeural_client/default/instance/__init__.py +4 -0
  37. naeural_client/default/instance/chain_dist_custom_job_01_plugin.py +54 -0
  38. naeural_client/default/instance/custom_web_app_01_plugin.py +118 -0
  39. naeural_client/default/instance/net_mon_01_plugin.py +45 -0
  40. naeural_client/default/instance/view_scene_01_plugin.py +28 -0
  41. naeural_client/default/session/mqtt_session.py +72 -0
  42. naeural_client/io_formatter/__init__.py +2 -0
  43. naeural_client/io_formatter/base/__init__.py +1 -0
  44. naeural_client/io_formatter/base/base_formatter.py +80 -0
  45. naeural_client/io_formatter/default/__init__.py +3 -0
  46. naeural_client/io_formatter/default/a_dummy.py +51 -0
  47. naeural_client/io_formatter/default/aixp1.py +113 -0
  48. naeural_client/io_formatter/default/default.py +22 -0
  49. naeural_client/io_formatter/io_formatter_manager.py +96 -0
  50. naeural_client/logging/__init__.py +1 -0
  51. naeural_client/logging/base_logger.py +2056 -0
  52. naeural_client/logging/logger_mixins/__init__.py +12 -0
  53. naeural_client/logging/logger_mixins/class_instance_mixin.py +92 -0
  54. naeural_client/logging/logger_mixins/computer_vision_mixin.py +443 -0
  55. naeural_client/logging/logger_mixins/datetime_mixin.py +344 -0
  56. naeural_client/logging/logger_mixins/download_mixin.py +421 -0
  57. naeural_client/logging/logger_mixins/general_serialization_mixin.py +242 -0
  58. naeural_client/logging/logger_mixins/json_serialization_mixin.py +481 -0
  59. naeural_client/logging/logger_mixins/pickle_serialization_mixin.py +301 -0
  60. naeural_client/logging/logger_mixins/process_mixin.py +63 -0
  61. naeural_client/logging/logger_mixins/resource_size_mixin.py +81 -0
  62. naeural_client/logging/logger_mixins/timers_mixin.py +501 -0
  63. naeural_client/logging/logger_mixins/upload_mixin.py +260 -0
  64. naeural_client/logging/logger_mixins/utils_mixin.py +675 -0
  65. naeural_client/logging/small_logger.py +93 -0
  66. naeural_client/logging/tzlocal/__init__.py +20 -0
  67. naeural_client/logging/tzlocal/unix.py +231 -0
  68. naeural_client/logging/tzlocal/utils.py +113 -0
  69. naeural_client/logging/tzlocal/win32.py +151 -0
  70. naeural_client/logging/tzlocal/windows_tz.py +718 -0
  71. naeural_client/plugins_manager_mixin.py +273 -0
  72. naeural_client/utils/__init__.py +2 -0
  73. naeural_client/utils/comm_utils.py +44 -0
  74. naeural_client/utils/dotenv.py +75 -0
  75. naeural_client-2.0.0.dist-info/METADATA +365 -0
  76. naeural_client-2.0.0.dist-info/RECORD +78 -0
  77. naeural_client-2.0.0.dist-info/WHEEL +4 -0
  78. naeural_client-2.0.0.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,421 @@
1
+ import os
2
+ import sys
3
+ import zipfile
4
+ from time import time
5
+
6
+
7
class _DownloadMixin(object):
  """
  Mixin for download functionalities that are attached to `pye2.Logger`.

  This mixin cannot be instantiated because it is built just to provide some additional
  functionalities for `pye2.Logger`

  In this mixin we can use any attribute/method of the Logger.
  """

  def __init__(self):
    # cooperative multiple-inheritance init; the mixin keeps no state of its own
    super(_DownloadMixin, self).__init__()
    return
20
+
21
+ def maybe_download_model(self,
22
+ url,
23
+ model_file,
24
+ force_download=False,
25
+ url_model_cfg=None,
26
+ **kwargs,
27
+ ):
28
+ """
29
+ url11 = 'https://www.dropbox.com/s/t6qfxiopcr8yvlq/60_xomv_employee01_002_e142_acc0.985.pb?dl=1'
30
+ url12 = 'https://www.dropbox.com/s/akzyk9vcuqluzup/60_xomv_employee01_002_e142_acc0.985.pb.txt?dl=1'
31
+ # use maybe_download_model
32
+ log.maybe_download_model(url11, 'model1_dup.pb',
33
+ force_download=False,
34
+ url_model_cfg=url12)
35
+ """
36
+ urls = [url]
37
+ fn = [model_file]
38
+ if url_model_cfg is not None:
39
+ urls += [url_model_cfg]
40
+ fn += [model_file + '.txt']
41
+
42
+ return self.maybe_download(
43
+ url=urls,
44
+ fn=fn,
45
+ force_download=force_download,
46
+ target='models',
47
+ **kwargs,
48
+ )
49
+
50
  def maybe_download(self,
                     url,
                     fn=None,
                     force_download=False,
                     target=None,
                     print_progress=True,
                     publish_func=None,
                     publish_only_value=False,
                     verbose=True,
                     unzip=False,
                     **kwargs
                     ):
    """
    Download one or more files into a Logger-managed folder, skipping files that
    already exist locally (unless `force_download`).

    NEW VERSION: if url starts with 'minio:' the function will retrieve minio conn
    params from **kwargs and use minio_download (if needed or forced)

    will (maybe) download a from a given (full) url a file and save to
    target folder in `model_file`.

    The parameter named `publish_func` is a function that publish a message
    through a channel. We do not want to parametrize more than necessary this
    method, so the `publish_func` function should be already charged with
    the channel via a partial.

    The `unzip` parameter comes in when the url(s) is/are zipped folder(s).
    In this case, put `fn` as the name of the local folder, not as the name of the local zip file

    Parameters
    ----------
    url : str, list of str, or dict {local_name: url}
      source(s); 'minio:<object>' routes through `minio_download`, 'http...' uses
      urllib, any existing local path is copied.
    fn : str or list of str, optional
      local file name(s); required unless `url` is a dict.
    force_download : bool, optional
      re-download even if the file exists (the stale copy is removed first).
    target : str or None
      one of the Logger folders 'models', 'data', 'output', or None for ''.
    print_progress, publish_func, publish_only_value : progress reporting knobs.
    verbose : bool, optional
      log each step.
    unzip : bool, optional
      treat each download as a zip archive, extract it and delete the archive.

    Returns
    -------
    (saved_files, msgs) : list of local paths (None on per-item failure) and
      status messages.

    Examples:

      url11 = 'https://www.dropbox.com/s/t6qfxiopcr8yvlq/60_xomv_employee01_002_e142_acc0.985.pb?dl=1'
      fn1 = 'model1.pb'
      url12 = 'https://www.dropbox.com/s/akzyk9vcuqluzup/60_xomv_employee01_002_e142_acc0.985.pb.txt?dl=1'
      fn2 = 'model1.txt'
      url21 = 'https://www.dropbox.com/s/tuywpfzv6ueknj6/70_xfgc03_007_e092_acc0.9413.pb?dl=1'
      fn3 = 'model2.pb'
      url22 = 'https://www.dropbox.com/s/5wrvohffl14qfd3/70_xfgc03_007_e092_acc0.9413.pb.txt?dl=1'
      fn4 = 'model2.txt'
      log = Logger(lib_name='MDL', config_file='config/duplex_config.txt', TF_KERAS=False)

      # download two files in output
      log.maybe_download(url=[url11, url12],
                         fn=[fn1,fn2],
                         target='output'
                         )

      # download a txt in data
      log.maybe_download(url=url12,
                         fn='model1_dup.txt',
                         target='data'
                         )

      # download another two files in models with other signature
      log.maybe_download(url={
                           fn3 : url21,
                           fn4 : url22
                         },
                         force_download=True,
                         target='models'
                         )

      # use maybe_download_model
      log.maybe_download_model(url11, 'model1_dup.pb',
                               force_download=False,
                               url_model_cfg=url12)

    """
    import urllib.request
    assert target in ['models', 'data', 'output', None], "target must be either 'models', 'data', 'output' or None"

    # normalize (url, fn) into two parallel lists
    if type(url) is dict:
      urls = [v for k, v in url.items()]
      fns = [k for k in url]
    else:
      if fn is None:
        self.raise_error("fn must be a string or a list if url param does not have file:url dict")
      urls = url
      fns = fn
      if type(urls) is str:
        urls = [urls]
      if type(fns) is str:
        fns = [fns]
    if len(fns) != len(urls):
      self.raise_error("must provided same nr of urls and file names")

    if verbose:
      str_log = "Maybe dl '{}' to '{}' from '{}'".format(fns, target, urls)
      self.P(str_log)
    # endif

    def _print_download_progress(count, block_size, total_size):
      """
      Function used for printing the download progress.
      Used as a call-back function in maybe_download_and_extract().
      """

      # Percentage completion.
      pct_complete = float(count * block_size) / total_size

      # Limit it because rounding errors may cause it to exceed 100%.
      pct_complete = min(1.0, pct_complete)

      if publish_func is not None:
        if publish_only_value:
          publish_func(round(pct_complete * 100, 2))
        else:
          publish_func("progress:{:.1%}".format(pct_complete))

      if print_progress:
        # Status-message. Note the \r which means the line should overwrite itself.
        msg = "\r- Download progress: {0:.1%}".format(pct_complete)

        # Print it.
        sys.stdout.write(msg)
        sys.stdout.flush()
      return

    def _copy_to_target(src, dst):
      # local-filesystem "download": copy a file or a whole tree to dst
      import shutil
      if os.path.isfile(src):
        shutil.copy(src, dst)
      elif os.path.isdir(src):
        if not os.path.exists(dst):
          shutil.copytree(src, dst)
      else:
        self.P("ERROR: unknown source type: {}".format(src), color='error')
      return

    # Path for local file.
    if target is not None:
      download_dir = self.get_target_folder(target=target)
    else:
      download_dir = ''

    saved_files = []
    msgs = []
    for _fn, _url in zip(fns, urls):
      if _fn is None or _url is None:
        # keep positional correspondence between inputs and outputs
        msg = "Cannot download '{}' from '{}'".format(_fn, _url)
        msgs.append(msg)
        saved_files.append(None)
        self.P(msg, color='error')
        continue
      # useful if _fn is a hierarchy not a filename
      _append_to_download_dir, _fn = os.path.split(_fn)
      _crt_download_dir = os.path.join(download_dir, _append_to_download_dir)
      save_path = os.path.join(_crt_download_dir, _fn)

      # Check if the file already exists, otherwise we need to download it now.
      has_file = os.path.exists(save_path)
      if not has_file or force_download:
        file_path = None
        # handle http standard download
        # automatically add .zip in this corner case
        if unzip and not save_path.endswith('.zip'):
          save_path += '.zip'

        # Check if the download directory exists, otherwise create it.
        if not os.path.exists(_crt_download_dir):
          if verbose:
            self.P("Download folder not found - creating")
          os.makedirs(_crt_download_dir)
        if has_file:
          if verbose:
            self.P("Forced download: removing ...{}".format(save_path[-40:]))
          os.remove(save_path)

        if _url.startswith('minio:'):
          # handle MinIO url; conn params (endpoint, keys, bucket...) come via **kwargs
          _url = _url.replace('minio:', '')
          file_path = self.minio_download(
            local_file_path=save_path,
            object_name=_url,
            **kwargs,
          )

        elif _url.startswith('http'):
          # Download the file from the internet.
          if verbose:
            self.P("Downloading {} from {}...".format(_fn, _url[:40]))
          reporthook = _print_download_progress
          import ssl
          # NOTE(review): disables certificate verification process-wide — security risk; confirm intent
          ssl._create_default_https_context = ssl._create_unverified_context
          file_path, msg = urllib.request.urlretrieve(  # this has errors!!! if this fails, the path is none
            url=_url,
            filename=save_path,
            reporthook=reporthook
          )

          msgs.append(msg)
          print("", flush=True)
          if verbose:
            self.P("Download done and saved in ...{}".format(file_path[-40:]))
          # endif
        elif os.path.exists(_url):
          if verbose:
            self.P("Found file in local file system at {}".format(_url))
          _copy_to_target(_url, save_path)
          file_path = save_path

          if verbose:
            self.P("Copied file from given location to {}".format(save_path))
        else:
          self.P("ERROR: unknown url type: {}".format(_url), color='error')

        if file_path is not None:
          if unzip:
            # extract next to the archive, under the archive name without extension
            _directory_to_extract_to = os.path.splitext(save_path)[0]
            if verbose:
              self.P("Unzipping '...{}' ...".format(file_path[-40:]))
            if not os.path.exists(_directory_to_extract_to):
              os.makedirs(_directory_to_extract_to)

            with zipfile.ZipFile(file_path, 'r') as zip_ref:
              zip_ref.extractall(_directory_to_extract_to)

            # remove the downloaded zip file as it was already extracted, so it occupies space without any use
            os.remove(file_path)
            saved_files.append(_directory_to_extract_to)
          else:
            saved_files.append(file_path)

      else:
        if verbose:
          self.P("File {} found. Skipping.".format(_fn))
        saved_files.append(save_path)
        msgs.append("'{}' already downloaded.".format(save_path))
    # endfor

    return saved_files, msgs
279
+
280
+ def minio_get_dowload_url(self,
281
+ endpoint,
282
+ access_key,
283
+ secret_key,
284
+ bucket_name,
285
+ object_name,
286
+ ):
287
+ """
288
+ Retreives a 7 day url for a particular bucket/object
289
+
290
+ Parameters
291
+ ----------
292
+ endpoint : str
293
+ address of the MinIO server.
294
+ access_key : str
295
+ user.
296
+ secret_key : str
297
+ password.
298
+ bucket_name : str
299
+ preconfigureg bucket name.
300
+ object_name : str
301
+ the existing Minio object name
302
+
303
+ Returns
304
+ -------
305
+ URL
306
+
307
+ """
308
+ from minio import Minio
309
+
310
+ try:
311
+ client = Minio(
312
+ endpoint=endpoint,
313
+ access_key=access_key,
314
+ secret_key=secret_key,
315
+ secure=False,
316
+ )
317
+
318
+ url = client.presigned_get_object(
319
+ bucket_name=bucket_name,
320
+ object_name=object_name,
321
+ )
322
+ except Exception as e:
323
+ self.P(str(e), color='error')
324
+ return None
325
+
326
+ return url
327
+
328
  def minio_download(self,
                     local_file_path,
                     endpoint,
                     access_key,
                     secret_key,
                     bucket_name,
                     object_name,
                     secure=False,
                     SSL_CERT_FILE=None,
                     **kwargs,
                     ):
    """
    Download a single object from a MinIO server to a local file.

    Parameters
    ----------
    local_file_path : str
      relative or full path to the (future) local file.
    endpoint : str
      address of the MinIO server.
    access_key : str
      user.
    secret_key : str
      password.
    bucket_name : str
      preconfigureg bucket name.
    object_name : str
      a object name - can be None and will be auto-generated
    secure : bool, optional
      use TLS; default False.
    SSL_CERT_FILE : str, optional
      CA bundle used when `secure` is True; when None and secure, certificate
      checking is disabled ('CERT_NONE').

    Returns
    -------
    saved file name, or None on any failure (the exception is logged, not raised)

    """
    from minio import Minio
    import urllib3
    from datetime import timedelta

    try:
      start_up = time()

      # if SSL_CERT_FILE is not None:
      #   os.environ['SSL_CERT_FILE'] = SSL_CERT_FILE
      cert_reqs = None
      http_client = None
      if secure:
        # choose certificate policy: require certs only when a CA file is supplied
        if SSL_CERT_FILE is not None:
          if not os.path.isfile(SSL_CERT_FILE):
            raise ValueError("Invalid SSL_CERT_FILE in config")
          else:
            cert_reqs = 'CERT_REQUIRED'
        else:
          cert_reqs = 'CERT_NONE'

        # custom pool with 5-minute timeouts and retry on transient 5xx
        timeout = timedelta(minutes=5).seconds
        http_client = urllib3.PoolManager(
          timeout=urllib3.util.Timeout(connect=timeout, read=timeout),
          maxsize=10,
          cert_reqs=cert_reqs,
          ca_certs=SSL_CERT_FILE,
          retries=urllib3.Retry(
            total=5,
            backoff_factor=0.2,
            status_forcelist=[500, 502, 503, 504]
          )
        )
      # NOTE(review): this log line prints the secret_key in clear — confirm this is intended
      self.P("Downloading from Minio: <{} {} @{}>, secure:{}, SSL_CERT_FILE:'{}', cert_reqs:'{}' using http_client: {}...".format(
        access_key, secret_key, endpoint, secure, SSL_CERT_FILE,
        cert_reqs,
        http_client,
        )
      )
      client = Minio(
        endpoint=endpoint,
        access_key=access_key,
        secret_key=secret_key,
        secure=secure,
        http_client=http_client,
      )

      res = client.fget_object(
        bucket_name=bucket_name,
        object_name=object_name,
        file_path=local_file_path,
      )

      self.P("Downloaded '{}' from {}/{}/{} in {:.2f}s".format(
        local_file_path, endpoint, bucket_name, object_name,
        time() - start_up), color='y')
    except Exception as e:
      self.P(str(e), color='error')
      return None

    return local_file_path
@@ -0,0 +1,242 @@
1
+ import os
2
+ import pickle
3
+ import numpy as np
4
+ import zlib
5
+ import base64
6
+
7
class _GeneralSerializationMixin(object):
  """
  Mixin for general serialization functionalities that are attached to `pye2.Logger`:
    - zip
    - csr
    - numpy
    - xml


  This mixin cannot be instantiated because it is built just to provide some additional
  functionalities for `pye2.Logger`

  In this mixin we can use any attribute/method of the Logger.
  """

  def __init__(self):
    # cooperative multiple-inheritance init; the mixin keeps no state of its own
    super(_GeneralSerializationMixin, self).__init__()
    return
25
+
26
+ def unzip(self, path_source, path_dest):
27
+ import zipfile
28
+ if not zipfile.is_zipfile(path_source):
29
+ self.P('File provided is not a .zip file!', color='r')
30
+ return
31
+
32
+ with zipfile.ZipFile(path_source, 'r') as zip_ref:
33
+ zip_ref.extractall(path_dest)
34
+ return
35
+
36
+ def save_csr(self, fn, csr_matrix, folder='data', use_prefix=True, verbose=True):
37
+ from scipy import sparse
38
+ lfld = self.get_target_folder(target=folder)
39
+
40
+ if lfld is None:
41
+ raise ValueError("Uknown save folder '{}' - valid options are `data`, `output`, `models`".format(
42
+ folder))
43
+ if use_prefix:
44
+ fn = self.file_prefix + '_' + fn
45
+ datafile = os.path.join(lfld, fn)
46
+ sparse.save_npz(datafile, csr_matrix)
47
+ if verbose:
48
+ self.P("Saved sparse csr matrix '{}' in '{}' folder".format(
49
+ fn, folder))
50
+ return
51
+
52
+ def load_csr(self, fn, folder='data'):
53
+ """
54
+ load_from: 'data', 'output', 'models'
55
+ """
56
+ from scipy import sparse
57
+ lfld = self.get_target_folder(target=folder)
58
+
59
+ if lfld is None:
60
+ raise ValueError("Uknown load folder '{}' - valid options are data, output, models".format(
61
+ folder))
62
+ datafile = os.path.join(lfld, fn)
63
+ self.verbose_log("Loading csr sparse matrix '{}' from '{}'".format(fn, folder))
64
+ data = None
65
+ if os.path.isfile(datafile):
66
+ data = sparse.load_npz(datafile)
67
+ else:
68
+ self.P(" File not found!", color='r')
69
+ return data
70
+
71
+ def save_np(self, fn, arr_or_arrs, folder='data', use_prefix=True, verbose=True):
72
+ lfld = self.get_target_folder(target=folder)
73
+
74
+ if lfld is None:
75
+ raise ValueError("Uknown save folder '{}' - valid options are `data`, `output`, `models`".format(
76
+ folder))
77
+ if use_prefix:
78
+ fn = self.file_prefix + '_' + fn
79
+ datafile = os.path.join(lfld, fn)
80
+ if type(arr_or_arrs) == list:
81
+ np.savez(datafile, arr_or_arrs)
82
+ elif type(arr_or_arrs) == np.ndarray:
83
+ np.save(datafile, arr_or_arrs)
84
+ else:
85
+ raise ValueError("Unknown `arr_or_arrs` - must provide either list of ndarrays or a single ndarray")
86
+ if verbose:
87
+ self.P("Saved sparse numpy data '{}' in '{}' folder".format(
88
+ fn, folder)
89
+ )
90
+ return
91
+
92
+ def load_np(self, fn, folder='data'):
93
+ """
94
+ `folder`: 'data', 'output', 'models'
95
+ """
96
+ lfld = self.get_target_folder(target=folder)
97
+
98
+ if lfld is None:
99
+ raise ValueError("Uknown load folder '{}' - valid options are data, output, models".format(
100
+ folder))
101
+ datafile = os.path.join(lfld, fn)
102
+ self.verbose_log("Loading numpy data '{}' from '{}'".format(fn, folder))
103
+ data = None
104
+ if os.path.isfile(datafile):
105
+ data = np.load(datafile)
106
+ else:
107
+ self.P(" File not found!", color='r')
108
+ return data
109
+
110
+ @staticmethod
111
+ def load_xml(fn):
112
+ import xml.etree.ElementTree as ET
113
+ doc = ET.parse(fn)
114
+ root = doc.getroot()
115
+ return doc, root
116
+
117
+ def read_from_path(self, path):
118
+ import pandas as pd
119
+ from os.path import splitext
120
+ file_name, extension = splitext(path)
121
+ if extension == '.csv':
122
+ self.P('Reading from {}'.format(path))
123
+ df = pd.read_csv(path)
124
+ self.P('Done reading from {}'.format(path), show_time=True)
125
+ return df
126
+ elif extension == '.xls' or extension == '.xlsx':
127
+ self.P('Reading from {}'.format(path))
128
+ df = pd.read_excel(path)
129
+ self.P('Done reading from {}'.format(path), show_time=True)
130
+ return df
131
+ elif extension == '.pkl':
132
+ self.P('Reading from {}'.format(path))
133
+ with open(path, 'rb') as handle:
134
+ df = pickle.load(handle)
135
+ self.P('Done reading from {}'.format(path), show_time=True)
136
+ return df
137
+ raise ValueError('Extension {} not understood!'.format(extension))
138
+
139
+ @staticmethod
140
+ def write_to_path(path, data):
141
+ import pandas as pd
142
+ from os.path import splitext
143
+ file_name, extension = splitext(path)
144
+ if extension == '.csv':
145
+ if isinstance(data, np.ndarray):
146
+ data = pd.DataFrame(data)
147
+ data.to_csv(path, index=False)
148
+ elif extension == '.xls' or extension == '.xlsx':
149
+ if isinstance(data, np.ndarray):
150
+ data = pd.DataFrame(data)
151
+ data.to_excel(path, index=False)
152
+ elif extension == '.pkl':
153
+ with open(path, 'wb') as handle:
154
+ pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)
155
+
156
+
157
+ def add_file_to_zip(self, path_zip, path_file):
158
+ try:
159
+ from zipfile import ZipFile, ZIP_DEFLATED
160
+ if not os.path.isfile(path_file):
161
+ self.P("Adding to zip '{}' failed: missing '{}' ".format(path_zip, path_file), color='r')
162
+ return
163
+ zip = ZipFile(
164
+ file=path_zip,
165
+ mode='a',
166
+ compression=ZIP_DEFLATED
167
+ )
168
+ self.P("Archiving (zip) '{}' => {}".format(path_file, path_zip), color='y')
169
+ zip.write(path_file, arcname=os.path.basename(path_file))
170
+ zip.close()
171
+ except Exception as e:
172
+ self.P("Exception occured while archiving '{}' in '{}': {}".format(
173
+ path_file, path_zip, e), color='r'
174
+ )
175
+ return
176
+ return path_zip
177
+
178
+ def add_files_to_zip(self, path_zip, files):
179
+ try:
180
+ from zipfile import ZipFile, ZIP_DEFLATED
181
+ zip = ZipFile(
182
+ file=path_zip,
183
+ mode='a',
184
+ compression=ZIP_DEFLATED
185
+ )
186
+ self.P(" Adding {} files to archive (zip) {}".format(
187
+ len(files),
188
+ path_zip,
189
+ ), color='y'
190
+ )
191
+ written = 0
192
+ for path_file in files:
193
+ if not os.path.isfile(path_file):
194
+ self.P(" Adding to zip '{}' failed: missing '{}' ".format(path_zip, path_file), color='r')
195
+ continue
196
+ zip.write(path_file, arcname=os.path.basename(path_file))
197
+ print(".", flush=True, end='')
198
+ written += 1
199
+ zip.close()
200
+ self.P(" Added {} files.".format(written), color='y')
201
+ except Exception as e:
202
+ self.P("Exception occured while archiving {} files in '{}': {}".format(
203
+ len(files), path_zip, e), color='r'
204
+ )
205
+ return
206
+ return path_zip
207
+
208
+
209
+ def compress_bytes(self, data):
210
+ if not isinstance(data, bytes):
211
+ data = bytes(str(data), 'utf-8')
212
+ zip_data = zlib.compress(data)
213
+ return zip_data
214
+
215
+ def decompress_bytes(self, zip_data):
216
+ if not isinstance(zip_data, bytes):
217
+ raise ValueError('`decompress_bytes` input must be bytes type')
218
+ data = zlib.decompress(zip_data)
219
+ return data
220
+
221
+
222
+ def compress_text(self, text):
223
+ b_text = bytes(text, 'utf-8')
224
+ b_code = zlib.compress(b_text, level=9)
225
+ b_encoded = base64.b64encode(b_code)
226
+ str_encoded = b_encoded.decode('utf-8')
227
+ return str_encoded
228
+
229
+
230
+ def decompress_text(self, b64text):
231
+ decoded = None
232
+ try:
233
+ b_decoded = base64.b64decode(b64text)
234
+ b_decoded = zlib.decompress(b_decoded)
235
+ s_decoded = b_decoded.decode('utf-8')
236
+ decoded = s_decoded
237
+ except:
238
+ pass
239
+ return decoded
240
+
241
+
242
+