kaqing 2.0.110-py3-none-any.whl → 2.0.184-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kaqing might be problematic.

Files changed (204)
  1. adam/__init__.py +0 -2
  2. adam/app_session.py +9 -12
  3. adam/apps.py +18 -4
  4. adam/batch.py +5 -5
  5. adam/checks/check_utils.py +16 -46
  6. adam/checks/cpu.py +7 -1
  7. adam/checks/cpu_metrics.py +52 -0
  8. adam/checks/disk.py +2 -3
  9. adam/columns/columns.py +3 -1
  10. adam/columns/cpu.py +3 -1
  11. adam/columns/cpu_metrics.py +22 -0
  12. adam/columns/memory.py +3 -4
  13. adam/commands/__init__.py +24 -0
  14. adam/commands/alter_tables.py +33 -48
  15. adam/commands/app/__init__.py +0 -0
  16. adam/commands/app/app.py +38 -0
  17. adam/commands/{app_ping.py → app/app_ping.py} +7 -13
  18. adam/commands/app/show_app_actions.py +49 -0
  19. adam/commands/{show → app}/show_app_id.py +8 -11
  20. adam/commands/{show → app}/show_app_queues.py +7 -14
  21. adam/commands/app/utils_app.py +98 -0
  22. adam/commands/audit/audit.py +27 -31
  23. adam/commands/audit/audit_repair_tables.py +14 -18
  24. adam/commands/audit/audit_run.py +16 -23
  25. adam/commands/audit/show_last10.py +4 -17
  26. adam/commands/audit/show_slow10.py +4 -17
  27. adam/commands/audit/show_top10.py +4 -16
  28. adam/commands/audit/utils_show_top10.py +15 -3
  29. adam/commands/bash/__init__.py +5 -0
  30. adam/commands/bash/bash.py +36 -0
  31. adam/commands/bash/bash_completer.py +93 -0
  32. adam/commands/bash/utils_bash.py +16 -0
  33. adam/commands/cat.py +36 -0
  34. adam/commands/cd.py +11 -95
  35. adam/commands/check.py +15 -24
  36. adam/commands/cli_commands.py +2 -3
  37. adam/commands/clipboard_copy.py +86 -0
  38. adam/commands/code.py +57 -0
  39. adam/commands/command.py +198 -40
  40. adam/commands/commands_utils.py +12 -27
  41. adam/commands/cql/cql_completions.py +27 -10
  42. adam/commands/cql/cqlsh.py +12 -30
  43. adam/commands/cql/utils_cql.py +297 -0
  44. adam/commands/deploy/code_start.py +7 -10
  45. adam/commands/deploy/code_stop.py +4 -21
  46. adam/commands/deploy/code_utils.py +3 -3
  47. adam/commands/deploy/deploy.py +4 -27
  48. adam/commands/deploy/deploy_frontend.py +14 -17
  49. adam/commands/deploy/deploy_pg_agent.py +3 -6
  50. adam/commands/deploy/deploy_pod.py +65 -73
  51. adam/commands/deploy/deploy_utils.py +14 -24
  52. adam/commands/deploy/undeploy.py +4 -27
  53. adam/commands/deploy/undeploy_frontend.py +4 -7
  54. adam/commands/deploy/undeploy_pg_agent.py +6 -8
  55. adam/commands/deploy/undeploy_pod.py +11 -12
  56. adam/commands/devices/__init__.py +0 -0
  57. adam/commands/devices/device.py +123 -0
  58. adam/commands/devices/device_app.py +163 -0
  59. adam/commands/devices/device_auit_log.py +49 -0
  60. adam/commands/devices/device_cass.py +179 -0
  61. adam/commands/devices/device_export.py +84 -0
  62. adam/commands/devices/device_postgres.py +150 -0
  63. adam/commands/devices/devices.py +25 -0
  64. adam/commands/download_file.py +47 -0
  65. adam/commands/exit.py +1 -4
  66. adam/commands/export/__init__.py +0 -0
  67. adam/commands/export/clean_up_all_export_sessions.py +37 -0
  68. adam/commands/export/clean_up_export_sessions.py +39 -0
  69. adam/commands/export/download_export_session.py +39 -0
  70. adam/commands/export/drop_export_database.py +39 -0
  71. adam/commands/export/drop_export_databases.py +37 -0
  72. adam/commands/export/export.py +53 -0
  73. adam/commands/export/export_databases.py +245 -0
  74. adam/commands/export/export_select.py +59 -0
  75. adam/commands/export/export_select_x.py +54 -0
  76. adam/commands/export/export_sessions.py +209 -0
  77. adam/commands/export/export_use.py +49 -0
  78. adam/commands/export/exporter.py +332 -0
  79. adam/commands/export/import_files.py +44 -0
  80. adam/commands/export/import_session.py +44 -0
  81. adam/commands/export/importer.py +81 -0
  82. adam/commands/export/importer_athena.py +177 -0
  83. adam/commands/export/importer_sqlite.py +67 -0
  84. adam/commands/export/show_column_counts.py +45 -0
  85. adam/commands/export/show_export_databases.py +38 -0
  86. adam/commands/export/show_export_session.py +39 -0
  87. adam/commands/export/show_export_sessions.py +37 -0
  88. adam/commands/export/utils_export.py +343 -0
  89. adam/commands/find_files.py +51 -0
  90. adam/commands/find_processes.py +76 -0
  91. adam/commands/head.py +36 -0
  92. adam/commands/help.py +5 -3
  93. adam/commands/intermediate_command.py +49 -0
  94. adam/commands/issues.py +11 -43
  95. adam/commands/kubectl.py +38 -0
  96. adam/commands/login.py +22 -24
  97. adam/commands/logs.py +3 -6
  98. adam/commands/ls.py +11 -116
  99. adam/commands/medusa/medusa.py +4 -22
  100. adam/commands/medusa/medusa_backup.py +20 -27
  101. adam/commands/medusa/medusa_restore.py +38 -37
  102. adam/commands/medusa/medusa_show_backupjobs.py +16 -18
  103. adam/commands/medusa/medusa_show_restorejobs.py +13 -18
  104. adam/commands/nodetool.py +11 -17
  105. adam/commands/param_get.py +11 -14
  106. adam/commands/param_set.py +8 -12
  107. adam/commands/postgres/postgres.py +45 -46
  108. adam/commands/postgres/postgres_databases.py +269 -0
  109. adam/commands/postgres/postgres_ls.py +4 -8
  110. adam/commands/postgres/postgres_preview.py +5 -9
  111. adam/commands/postgres/psql_completions.py +4 -3
  112. adam/commands/postgres/utils_postgres.py +70 -0
  113. adam/commands/preview_table.py +8 -44
  114. adam/commands/pwd.py +14 -46
  115. adam/commands/reaper/reaper.py +4 -27
  116. adam/commands/reaper/reaper_forward.py +49 -56
  117. adam/commands/reaper/reaper_forward_session.py +6 -0
  118. adam/commands/reaper/reaper_forward_stop.py +10 -16
  119. adam/commands/reaper/reaper_restart.py +7 -14
  120. adam/commands/reaper/reaper_run_abort.py +8 -33
  121. adam/commands/reaper/reaper_runs.py +43 -58
  122. adam/commands/reaper/reaper_runs_abort.py +29 -49
  123. adam/commands/reaper/reaper_schedule_activate.py +9 -32
  124. adam/commands/reaper/reaper_schedule_start.py +9 -32
  125. adam/commands/reaper/reaper_schedule_stop.py +9 -32
  126. adam/commands/reaper/reaper_schedules.py +4 -14
  127. adam/commands/reaper/reaper_status.py +8 -16
  128. adam/commands/reaper/utils_reaper.py +194 -0
  129. adam/commands/repair/repair.py +4 -22
  130. adam/commands/repair/repair_log.py +5 -11
  131. adam/commands/repair/repair_run.py +27 -34
  132. adam/commands/repair/repair_scan.py +32 -38
  133. adam/commands/repair/repair_stop.py +5 -11
  134. adam/commands/report.py +27 -29
  135. adam/commands/restart.py +25 -26
  136. adam/commands/rollout.py +19 -24
  137. adam/commands/shell.py +12 -4
  138. adam/commands/show/show.py +10 -25
  139. adam/commands/show/show_adam.py +3 -3
  140. adam/commands/show/show_cassandra_repairs.py +35 -0
  141. adam/commands/show/show_cassandra_status.py +33 -51
  142. adam/commands/show/show_cassandra_version.py +5 -18
  143. adam/commands/show/show_commands.py +20 -25
  144. adam/commands/show/show_host.py +1 -1
  145. adam/commands/show/show_login.py +20 -27
  146. adam/commands/show/show_params.py +2 -5
  147. adam/commands/show/show_processes.py +15 -19
  148. adam/commands/show/show_storage.py +10 -20
  149. adam/commands/watch.py +26 -29
  150. adam/config.py +5 -14
  151. adam/embedded_params.py +1 -1
  152. adam/log.py +4 -4
  153. adam/pod_exec_result.py +6 -3
  154. adam/repl.py +69 -115
  155. adam/repl_commands.py +52 -19
  156. adam/repl_state.py +161 -40
  157. adam/sql/sql_completer.py +52 -27
  158. adam/sql/sql_state_machine.py +131 -19
  159. adam/sso/authn_ad.py +6 -8
  160. adam/sso/authn_okta.py +4 -6
  161. adam/sso/cred_cache.py +3 -5
  162. adam/sso/idp.py +9 -12
  163. adam/utils.py +511 -9
  164. adam/utils_athena.py +145 -0
  165. adam/utils_audits.py +12 -103
  166. adam/utils_issues.py +32 -0
  167. adam/utils_k8s/app_clusters.py +28 -0
  168. adam/utils_k8s/app_pods.py +36 -0
  169. adam/utils_k8s/cassandra_clusters.py +30 -19
  170. adam/utils_k8s/cassandra_nodes.py +3 -3
  171. adam/utils_k8s/custom_resources.py +16 -17
  172. adam/utils_k8s/ingresses.py +2 -2
  173. adam/utils_k8s/jobs.py +7 -11
  174. adam/utils_k8s/k8s.py +87 -0
  175. adam/utils_k8s/kube_context.py +2 -2
  176. adam/utils_k8s/pods.py +89 -78
  177. adam/utils_k8s/secrets.py +4 -4
  178. adam/utils_k8s/service_accounts.py +5 -4
  179. adam/utils_k8s/services.py +2 -2
  180. adam/utils_k8s/statefulsets.py +1 -12
  181. adam/utils_local.py +4 -0
  182. adam/utils_net.py +4 -4
  183. adam/utils_repl/__init__.py +0 -0
  184. adam/utils_repl/automata_completer.py +48 -0
  185. adam/utils_repl/repl_completer.py +46 -0
  186. adam/utils_repl/state_machine.py +173 -0
  187. adam/utils_sqlite.py +137 -0
  188. adam/version.py +1 -1
  189. {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/METADATA +1 -1
  190. kaqing-2.0.184.dist-info/RECORD +244 -0
  191. adam/commands/app.py +0 -67
  192. adam/commands/bash.py +0 -150
  193. adam/commands/cp.py +0 -95
  194. adam/commands/cql/cql_utils.py +0 -112
  195. adam/commands/devices.py +0 -118
  196. adam/commands/postgres/postgres_context.py +0 -239
  197. adam/commands/postgres/postgres_utils.py +0 -31
  198. adam/commands/reaper/reaper_session.py +0 -159
  199. adam/commands/show/show_app_actions.py +0 -56
  200. adam/commands/show/show_repairs.py +0 -47
  201. kaqing-2.0.110.dist-info/RECORD +0 -187
  202. {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/WHEEL +0 -0
  203. {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/entry_points.txt +0 -0
  204. {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/top_level.txt +0 -0
adam/utils.py CHANGED
@@ -1,3 +1,4 @@
+ from concurrent.futures import Future, ThreadPoolExecutor
  from contextlib import redirect_stdout
  import copy
  import csv
@@ -9,6 +10,9 @@ import os
  from pathlib import Path
  import random
  import string
+ import threading
+ import traceback
+ from typing import Callable, Iterator, TypeVar, Union
  from dateutil import parser
  import subprocess
  import sys
@@ -18,10 +22,28 @@ import yaml
 
  from . import __version__
 
- def to_tabular(lines: str, header: str = None, dashed_line = False):
-     return lines_to_tabular(lines.split('\n'), header, dashed_line)
+ T = TypeVar('T')
+
+ log_state = threading.local()
+
+ class LogConfig:
+     is_debug = lambda: False
+     is_debug_timing = lambda: False
+     is_display_help = True
+
+ NO_SORT = 0
+ SORT = 1
+ REVERSE_SORT = -1
+
+ def tabulize(lines: list[T], fn: Callable[..., T] = None, header: str = None, dashed_line = False, separator = ' ', to: int = 1, sorted: int = NO_SORT):
+     if fn:
+         lines = list(map(fn, lines))
+
+     if sorted == SORT:
+         lines.sort()
+     elif sorted == REVERSE_SORT:
+         lines.sort(reverse=True)
 
- def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False, separator = ' '):
      maxes = []
      nls = []
 
@@ -52,7 +74,14 @@ def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False,
      for line in lines:
          format_line(line)
 
-     return '\n'.join(nls)
+     table = '\n'.join(nls)
+
+     if to == 1:
+         log(table)
+     elif to == 2:
+         log2(table)
+
+     return table
 
  def convert_seconds(total_seconds_float):
      total_seconds_int = int(total_seconds_float) # Convert float to integer seconds
@@ -69,18 +98,28 @@ def epoch(timestamp_string: str):
      return parser.parse(timestamp_string).timestamp()
 
  def log(s = None):
+     if not loggable():
+         return False
+
      # want to print empty line for False or empty collection
      if s == None:
          print()
      else:
          click.echo(s)
 
+     return True
+
  def log2(s = None, nl = True):
+     if not loggable():
+         return False
+
      if s:
          click.echo(s, err=True, nl=nl)
      else:
          print(file=sys.stderr)
 
+     return True
+
  def elapsed_time(start_time: float):
      end_time = time.time()
      elapsed_time = end_time - start_time
@@ -95,7 +134,7 @@ def duration(start_time: float, end_time: float = None):
          end_time = time.time()
      d = convert_seconds(end_time - start_time)
      t = []
-     if d[0]:
+     if d:
          t.append(f'{d[0]}h')
      if t or d[1]:
          t.append(f'{d[1]}m')
@@ -159,6 +198,9 @@ def get_deep_keys(d, current_path=""):
      return keys
 
  def display_help(replace_arg = False):
+     if not LogConfig.is_display_help:
+         return
+
      args = copy.copy(sys.argv)
      if replace_arg:
          args[len(args) - 1] = '--help'
@@ -203,12 +245,13 @@ def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
          with redirect_stdout(body) as f:
              dict_writer = csv.DictWriter(f, keys, delimiter=delimiter)
              dict_writer.writerows(flattened_data)
+
          return header.getvalue().strip('\r\n'), [l.strip('\r') for l in body.getvalue().split('\n')]
      else:
          return None
 
  def log_to_file(config: dict[any, any]):
-     try:
+     with log_exc():
          base = f"/kaqing/logs"
          os.makedirs(base, exist_ok=True)
 
@@ -223,8 +266,6 @@
                  f.write(config)
              else:
                  f.write(config)
-     except:
-         pass
 
  def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
      dir = f'{Path.home()}/.kaqing'
@@ -240,4 +281,465 @@ def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out
      return path
 
  def idp_token_from_env():
-     return os.getenv('IDP_TOKEN')
+     return os.getenv('IDP_TOKEN')
+
+ def is_lambda(func):
+     return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
+
+ def debug(s = None):
+     if LogConfig.is_debug():
+         log2(f'DEBUG {s}')
+
+ def debug_trace():
+     if LogConfig.is_debug():
+         log2(traceback.format_exc())
+
+ def in_docker() -> bool:
+     if os.path.exists('/.dockerenv'):
+         return True
+
+     try:
+         with open('/proc/1/cgroup', 'rt') as f:
+             for line in f:
+                 if 'docker' in line or 'lxc' in line:
+                     return True
+     except FileNotFoundError:
+         pass
+
+     return False
+
+ class Ing:
+     def __init__(self, msg: str, suppress_log=False):
+         self.msg = msg
+         self.suppress_log = suppress_log
+
+     def __enter__(self):
+         if not hasattr(log_state, 'ing_cnt'):
+             log_state.ing_cnt = 0
+
+         try:
+             if not log_state.ing_cnt:
+                 if not self.suppress_log and not LogConfig.is_debug():
+                     log2(f'{self.msg}...', nl=False)
+
+             return None
+         finally:
+             log_state.ing_cnt += 1
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         log_state.ing_cnt -= 1
+         if not log_state.ing_cnt:
+             if not self.suppress_log and not LogConfig.is_debug():
+                 log2(' OK')
+
+         return False
+
+ def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
+     if not body:
+         return Ing(msg, suppress_log=suppress_log)
+
+     r = None
+
+     t = Ing(msg, suppress_log=suppress_log)
+     t.__enter__()
+     try:
+         r = body()
+     finally:
+         t.__exit__(None, None, None)
+
+     return r
+
+ def loggable():
+     return LogConfig.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
+
+ class TimingNode:
+     def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
+         self.depth = depth
+         self.s0 = s0
+         self.line = line
+         self.children = []
+
+     def __str__(self):
+         return f'[{self.depth}: {self.line}, children={len(self.children)}]'
+
+     def tree(self):
+         lines = []
+         if self.line:
+             lines.append(self.line)
+
+         for child in self.children:
+             if child.line:
+                 lines.append(child.tree())
+         return '\n'.join(lines)
+
+ class LogTiming:
+     def __init__(self, msg: str, s0: time.time = None):
+         self.msg = msg
+         self.s0 = s0
+
+     def __enter__(self):
+         if not LogConfig.is_debug_timing():
+             return
+
+         if not hasattr(log_state, 'timings'):
+             log_state.timings = TimingNode(0)
+
+         self.me = log_state.timings
+         log_state.timings = TimingNode(self.me.depth+1)
+         if not self.s0:
+             self.s0 = time.time()
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if not LogConfig.is_debug_timing():
+             return False
+
+         child = log_state.timings
+         log_state.timings.line = timing_log_line(self.me.depth, self.msg, self.s0)
+
+         if child and child.line:
+             self.me.children.append(child)
+         log_state.timings = self.me
+
+         if not self.me.depth:
+             log2(self.me.tree())
+             log_state.timings = TimingNode(0)
+
+         return False
+
+ def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
+     if not s0 and not body:
+         return LogTiming(msg, s0=s0)
+
+     if not LogConfig.is_debug_timing():
+         if body:
+             return body()
+
+         return
+
+     r = None
+
+     t = LogTiming(msg, s0=s0)
+     t.__enter__()
+     try:
+         if body:
+             r = body()
+     finally:
+         t.__exit__(None, None, None)
+
+     return r
+
+ def timing_log_line(depth: int, msg: str, s0: time.time):
+     elapsed = time.time() - s0
+     prefix = '[timings] '
+     if depth:
+         if elapsed > 0.01:
+             prefix = (' ' * (depth-1)) + '* '
+         else:
+             prefix = ' ' * depth
+
+     return f'{prefix}{msg}: {elapsed:.2f} sec'
+
+ class WaitLog:
+     wait_log_flag = False
+
+ def wait_log(msg: str):
+     if not WaitLog.wait_log_flag:
+         log2(msg)
+         WaitLog.wait_log_flag = True
+
+ def clear_wait_log_flag():
+     WaitLog.wait_log_flag = False
+
+ def bytes_generator_from_file(file_path, chunk_size=4096):
+     with open(file_path, 'rb') as f:
+         while True:
+             chunk = f.read(chunk_size)
+             if not chunk:
+                 break
+             yield chunk
+
+ class GeneratorStream(io.RawIOBase):
+     def __init__(self, generator):
+         self._generator = generator
+         self._buffer = b'' # Buffer to store leftover bytes from generator yields
+
+     def readable(self):
+         return True
+
+     def _read_from_generator(self):
+         try:
+             chunk = next(self._generator)
+             if isinstance(chunk, str):
+                 chunk = chunk.encode('utf-8') # Encode if generator yields strings
+             self._buffer += chunk
+         except StopIteration:
+             pass # Generator exhausted
+
+     def readinto(self, b):
+         # Fill the buffer if necessary
+         while len(self._buffer) < len(b):
+             old_buffer_len = len(self._buffer)
+             self._read_from_generator()
+             if len(self._buffer) == old_buffer_len: # Generator exhausted and buffer empty
+                 break
+
+         bytes_to_read = min(len(b), len(self._buffer))
+         b[:bytes_to_read] = self._buffer[:bytes_to_read]
+         self._buffer = self._buffer[bytes_to_read:]
+         return bytes_to_read
+
+     def read(self, size=-1):
+         if size == -1: # Read all remaining data
+             while True:
+                 old_buffer_len = len(self._buffer)
+                 self._read_from_generator()
+                 if len(self._buffer) == old_buffer_len:
+                     break
+             data = self._buffer
+             self._buffer = b''
+             return data
+         else:
+             # Ensure enough data in buffer
+             while len(self._buffer) < size:
+                 old_buffer_len = len(self._buffer)
+                 self._read_from_generator()
+                 if len(self._buffer) == old_buffer_len:
+                     break
+
+             data = self._buffer[:size]
+             self._buffer = self._buffer[size:]
+             return data
+
+ class LogTrace:
+     def __init__(self, err_msg: Union[str, callable, bool] = None):
+         self.err_msg = err_msg
+
+     def __enter__(self):
+         return None
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if exc_type is not None:
+             if self.err_msg is True:
+                 log2(str(exc_val))
+             elif callable(self.err_msg):
+                 log2(self.err_msg(exc_val))
+             elif self.err_msg is not False and self.err_msg:
+                 log2(self.err_msg)
+
+             if self.err_msg is not False and LogConfig.is_debug():
+                 traceback.print_exception(exc_type, exc_val, exc_tb, file=sys.stderr)
+
+         # swallow exception
+         return True
+
+ def log_exc(err_msg: Union[str, callable, bool] = None):
+     return LogTrace(err_msg=err_msg)
+
+ class ParallelService:
+     def __init__(self, handler: 'ParallelMapHandler'):
+         self.handler = handler
+
+     def map(self, fn: Callable[..., T]) -> Iterator[T]:
+         executor = self.handler.executor
+         collection = self.handler.collection
+         collect = self.handler.collect
+         samples_cnt = self.handler.samples
+
+         iterator = None
+         if executor:
+             iterator = executor.map(fn, collection)
+         elif samples_cnt < sys.maxsize:
+             samples = []
+
+             for elem in collection:
+                 if not samples_cnt:
+                     break
+
+                 samples.append(fn(elem))
+                 samples_cnt -= 1
+
+             iterator = iter(samples)
+         else:
+             iterator = map(fn, collection)
+
+         if collect:
+             return list(iterator)
+         else:
+             return iterator
+
+ class ParallelMapHandler:
+     def __init__(self, collection: list, workers: int, samples: int = sys.maxsize, msg: str = None, collect = True):
+         self.collection = collection
+         self.workers = workers
+         self.executor = None
+         self.samples = samples
+         self.msg = msg
+         if msg and msg.startswith('d`'):
+             if LogConfig.is_debug():
+                 self.msg = msg.replace('d`', '', 1)
+             else:
+                 self.msg = None
+         self.collect = collect
+
+         self.begin = []
+         self.end = []
+         self.start_time = None
+
+     def __enter__(self):
+         self.calc_msgs()
+
+         if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+             self.start_time = time.time()
+
+             self.executor = ThreadPoolExecutor(max_workers=self.workers)
+             self.executor.__enter__()
+
+         return ParallelService(self)
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if self.executor:
+             self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+         if self.end:
+             log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+         return False
+
+     def size(self):
+         if not self.collection:
+             return 0
+
+         return len(self.collection)
+
+     def calc_msgs(self):
+         if not self.msg:
+             return
+
+         size = self.size()
+         offloaded = False
+         serially = False
+         sampling = False
+         if size == 0:
+             offloaded = True
+             self.msg = self.msg.replace('{size}', '1')
+         elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
+             self.msg = self.msg.replace('{size}', f'{size}')
+         elif self.samples < sys.maxsize:
+             sampling = True
+             if self.samples > size:
+                 self.samples = size
+             self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+         else:
+             serially = True
+             self.msg = self.msg.replace('{size}', f'{size}')
+
+         for token in self.msg.split(' '):
+             if '|' in token:
+                 self.begin.append(token.split('|')[0])
+                 if not sampling and not serially and not offloaded:
+                     self.end.append(token.split('|')[1])
+             else:
+                 self.begin.append(token)
+                 if not sampling and not serially and not offloaded:
+                     self.end.append(token)
+
+         if offloaded:
+             log2(f'{" ".join(self.begin)} offloaded...')
+         elif sampling or serially:
+             log2(f'{" ".join(self.begin)} serially...')
+         else:
+             log2(f'{" ".join(self.begin)} with {self.workers} workers...')
+
+ def parallelize(collection: list, workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True):
+     return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect)
+
+ class OffloadService:
+     def __init__(self, handler: 'OffloadHandler'):
+         self.handler = handler
+
+     def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
+         executor = self.handler.executor
+
+         if executor:
+             return executor.submit(fn, *args, **kwargs)
+         else:
+             future = Future()
+
+             future.set_result(fn(*args, **kwargs))
+
+             return future
+
+ class OffloadHandler(ParallelMapHandler):
+     def __init__(self, max_workers: int, msg: str = None):
+         super().__init__(None, max_workers, msg=msg, collect=False )
+
+     def __enter__(self):
+         self.calc_msgs()
+
+         if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+             self.start_time = time.time()
+
+             self.executor = ThreadPoolExecutor(max_workers=self.workers)
+             self.executor.__enter__()
+
+         return OffloadService(self)
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if self.executor:
+             self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+         if self.end:
+             log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+         return False
+
+     def size(self):
+         if not self.collection:
+             return 0
+
+         return len(self.collection)
+
+     def calc_msgs(self):
+         if not self.msg:
+             return
+
+         size = self.size()
+         # return
+
+         offloaded = False
+         serially = False
+         sampling = False
+         if size == 0:
+             offloaded = True
+             self.msg = self.msg.replace('{size}', '1')
+         elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
+             self.msg = self.msg.replace('{size}', f'{size}')
+         elif self.samples < sys.maxsize:
+             sampling = True
+             if self.samples > size:
+                 self.samples = size
+             self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+         else:
+             serially = True
+             self.msg = self.msg.replace('{size}', f'{size}')
+         # return
+
+         for token in self.msg.split(' '):
+             if '|' in token:
+                 self.begin.append(token.split('|')[0])
+                 if not sampling and not serially and not offloaded:
+                     self.end.append(token.split('|')[1])
+             else:
+                 self.begin.append(token)
+                 if not sampling and not serially and not offloaded:
+                     self.end.append(token)
+
+         if offloaded:
+             log2(f'{" ".join(self.begin)} offloaded...')
+         elif sampling or serially:
+             log2(f'{" ".join(self.begin)} serially...')
+         else:
+             log2(f'{" ".join(self.begin)} with {self.workers} workers...')
+
+ def offload(max_workers: int = 3, msg: str = None):
+     return OffloadHandler(max_workers, msg = msg)
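For orientation, here is a minimal usage sketch of the helpers this release adds to adam/utils.py (parallelize, offload, ing, log_exc, tabulize). Only the adam.utils APIs shown in the diff above are real; the fetch function and pod names are hypothetical placeholders.

# Minimal sketch of the new adam.utils helpers; fetch() and the pod names are
# illustrative assumptions, the imported helpers come from the diff above.
from adam.utils import ing, log_exc, offload, parallelize, tabulize

def fetch(pod: str) -> str:
    # placeholder work unit; a real caller would query the pod
    return f'{pod} ok'

pods = ['pod-0', 'pod-1', 'pod-2']

# fan work out over a thread pool; 'checking|checked' supplies the begin/end messages
with parallelize(pods, workers=3, msg='checking|checked {size} pods') as p:
    results = p.map(fetch)

# render the results as an aligned table; to=1 routes the output through log()
tabulize(results, header='RESULT', to=1)

# run a single call on a background thread and get a Future back
with offload(max_workers=2, msg='collecting|collected {size} report') as o:
    future = o.submit(fetch, 'pod-0')
    print(future.result())

# print "restarting pod... OK" around a block, and swallow/log any error inside it
with ing('restarting pod'):
    with log_exc('restart failed'):
        fetch('pod-0')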