kaqing 2.0.115__py3-none-any.whl → 2.0.172__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (187)
  1. adam/__init__.py +0 -2
  2. adam/app_session.py +8 -11
  3. adam/batch.py +3 -3
  4. adam/checks/check_utils.py +14 -46
  5. adam/checks/cpu.py +7 -1
  6. adam/checks/cpu_metrics.py +52 -0
  7. adam/checks/disk.py +2 -3
  8. adam/columns/columns.py +3 -1
  9. adam/columns/cpu.py +3 -1
  10. adam/columns/cpu_metrics.py +22 -0
  11. adam/columns/memory.py +3 -4
  12. adam/commands/__init__.py +18 -0
  13. adam/commands/alter_tables.py +43 -47
  14. adam/commands/audit/audit.py +24 -25
  15. adam/commands/audit/audit_repair_tables.py +14 -17
  16. adam/commands/audit/audit_run.py +15 -23
  17. adam/commands/audit/show_last10.py +10 -13
  18. adam/commands/audit/show_slow10.py +10 -13
  19. adam/commands/audit/show_top10.py +10 -14
  20. adam/commands/audit/utils_show_top10.py +2 -3
  21. adam/commands/bash/__init__.py +5 -0
  22. adam/commands/bash/bash.py +8 -96
  23. adam/commands/bash/utils_bash.py +16 -0
  24. adam/commands/cat.py +14 -19
  25. adam/commands/cd.py +12 -100
  26. adam/commands/check.py +20 -21
  27. adam/commands/cli_commands.py +2 -3
  28. adam/commands/code.py +20 -23
  29. adam/commands/command.py +123 -39
  30. adam/commands/commands_utils.py +8 -17
  31. adam/commands/cp.py +33 -39
  32. adam/commands/cql/cql_completions.py +28 -10
  33. adam/commands/cql/cqlsh.py +10 -30
  34. adam/commands/cql/utils_cql.py +343 -0
  35. adam/commands/deploy/code_start.py +7 -10
  36. adam/commands/deploy/code_stop.py +4 -21
  37. adam/commands/deploy/code_utils.py +3 -3
  38. adam/commands/deploy/deploy.py +4 -27
  39. adam/commands/deploy/deploy_frontend.py +14 -17
  40. adam/commands/deploy/deploy_pg_agent.py +2 -5
  41. adam/commands/deploy/deploy_pod.py +65 -73
  42. adam/commands/deploy/deploy_utils.py +14 -24
  43. adam/commands/deploy/undeploy.py +4 -27
  44. adam/commands/deploy/undeploy_frontend.py +4 -7
  45. adam/commands/deploy/undeploy_pg_agent.py +5 -7
  46. adam/commands/deploy/undeploy_pod.py +11 -12
  47. adam/commands/devices/__init__.py +0 -0
  48. adam/commands/devices/device.py +118 -0
  49. adam/commands/devices/device_app.py +173 -0
  50. adam/commands/devices/device_auit_log.py +49 -0
  51. adam/commands/devices/device_cass.py +185 -0
  52. adam/commands/devices/device_export.py +86 -0
  53. adam/commands/devices/device_postgres.py +144 -0
  54. adam/commands/devices/devices.py +25 -0
  55. adam/commands/exit.py +1 -4
  56. adam/commands/export/clean_up_all_export_sessions.py +37 -0
  57. adam/commands/export/clean_up_export_sessions.py +51 -0
  58. adam/commands/export/drop_export_database.py +55 -0
  59. adam/commands/export/drop_export_databases.py +43 -0
  60. adam/commands/export/export.py +19 -26
  61. adam/commands/export/export_databases.py +174 -0
  62. adam/commands/export/export_handlers.py +71 -0
  63. adam/commands/export/export_select.py +48 -22
  64. adam/commands/export/export_select_x.py +54 -0
  65. adam/commands/export/export_use.py +19 -23
  66. adam/commands/export/exporter.py +353 -0
  67. adam/commands/export/import_session.py +40 -0
  68. adam/commands/export/importer.py +67 -0
  69. adam/commands/export/importer_athena.py +77 -0
  70. adam/commands/export/importer_sqlite.py +39 -0
  71. adam/commands/export/show_column_counts.py +54 -0
  72. adam/commands/export/show_export_databases.py +36 -0
  73. adam/commands/export/show_export_session.py +48 -0
  74. adam/commands/export/show_export_sessions.py +44 -0
  75. adam/commands/export/utils_export.py +223 -162
  76. adam/commands/help.py +1 -1
  77. adam/commands/intermediate_command.py +49 -0
  78. adam/commands/issues.py +11 -43
  79. adam/commands/kubectl.py +3 -6
  80. adam/commands/login.py +22 -24
  81. adam/commands/logs.py +3 -6
  82. adam/commands/ls.py +11 -128
  83. adam/commands/medusa/medusa.py +4 -22
  84. adam/commands/medusa/medusa_backup.py +20 -24
  85. adam/commands/medusa/medusa_restore.py +29 -33
  86. adam/commands/medusa/medusa_show_backupjobs.py +14 -18
  87. adam/commands/medusa/medusa_show_restorejobs.py +11 -18
  88. adam/commands/nodetool.py +6 -15
  89. adam/commands/param_get.py +11 -12
  90. adam/commands/param_set.py +9 -10
  91. adam/commands/postgres/postgres.py +41 -34
  92. adam/commands/postgres/postgres_context.py +57 -24
  93. adam/commands/postgres/postgres_ls.py +4 -8
  94. adam/commands/postgres/postgres_preview.py +5 -9
  95. adam/commands/postgres/psql_completions.py +1 -1
  96. adam/commands/postgres/utils_postgres.py +66 -0
  97. adam/commands/preview_table.py +5 -44
  98. adam/commands/pwd.py +14 -47
  99. adam/commands/reaper/reaper.py +4 -27
  100. adam/commands/reaper/reaper_forward.py +48 -55
  101. adam/commands/reaper/reaper_forward_session.py +6 -0
  102. adam/commands/reaper/reaper_forward_stop.py +10 -16
  103. adam/commands/reaper/reaper_restart.py +7 -14
  104. adam/commands/reaper/reaper_run_abort.py +11 -30
  105. adam/commands/reaper/reaper_runs.py +42 -57
  106. adam/commands/reaper/reaper_runs_abort.py +29 -49
  107. adam/commands/reaper/reaper_schedule_activate.py +11 -30
  108. adam/commands/reaper/reaper_schedule_start.py +10 -29
  109. adam/commands/reaper/reaper_schedule_stop.py +10 -29
  110. adam/commands/reaper/reaper_schedules.py +4 -14
  111. adam/commands/reaper/reaper_status.py +8 -16
  112. adam/commands/reaper/utils_reaper.py +196 -0
  113. adam/commands/repair/repair.py +4 -22
  114. adam/commands/repair/repair_log.py +5 -11
  115. adam/commands/repair/repair_run.py +27 -34
  116. adam/commands/repair/repair_scan.py +32 -38
  117. adam/commands/repair/repair_stop.py +5 -11
  118. adam/commands/report.py +27 -29
  119. adam/commands/restart.py +25 -26
  120. adam/commands/rollout.py +19 -24
  121. adam/commands/shell.py +10 -4
  122. adam/commands/show/show.py +10 -25
  123. adam/commands/show/show_cassandra_repairs.py +35 -0
  124. adam/commands/show/show_cassandra_status.py +32 -43
  125. adam/commands/show/show_cassandra_version.py +5 -18
  126. adam/commands/show/show_commands.py +19 -24
  127. adam/commands/show/show_host.py +1 -1
  128. adam/commands/show/show_login.py +20 -27
  129. adam/commands/show/show_processes.py +15 -19
  130. adam/commands/show/show_storage.py +10 -20
  131. adam/commands/watch.py +26 -29
  132. adam/config.py +5 -14
  133. adam/embedded_params.py +1 -1
  134. adam/log.py +4 -4
  135. adam/pod_exec_result.py +3 -3
  136. adam/repl.py +40 -103
  137. adam/repl_commands.py +32 -16
  138. adam/repl_state.py +57 -28
  139. adam/sql/sql_completer.py +44 -28
  140. adam/sql/sql_state_machine.py +89 -28
  141. adam/sso/authn_ad.py +6 -8
  142. adam/sso/authn_okta.py +4 -6
  143. adam/sso/cred_cache.py +3 -5
  144. adam/sso/idp.py +9 -12
  145. adam/utils.py +435 -6
  146. adam/utils_athena.py +57 -37
  147. adam/utils_audits.py +12 -14
  148. adam/utils_issues.py +32 -0
  149. adam/utils_k8s/app_clusters.py +13 -18
  150. adam/utils_k8s/app_pods.py +2 -0
  151. adam/utils_k8s/cassandra_clusters.py +22 -19
  152. adam/utils_k8s/cassandra_nodes.py +2 -2
  153. adam/utils_k8s/custom_resources.py +16 -17
  154. adam/utils_k8s/ingresses.py +2 -2
  155. adam/utils_k8s/jobs.py +7 -11
  156. adam/utils_k8s/k8s.py +87 -0
  157. adam/utils_k8s/pods.py +40 -77
  158. adam/utils_k8s/secrets.py +4 -4
  159. adam/utils_k8s/service_accounts.py +5 -4
  160. adam/utils_k8s/services.py +2 -2
  161. adam/utils_k8s/statefulsets.py +1 -12
  162. adam/utils_net.py +4 -4
  163. adam/utils_repl/__init__.py +0 -0
  164. adam/utils_repl/automata_completer.py +48 -0
  165. adam/utils_repl/repl_completer.py +46 -0
  166. adam/utils_repl/state_machine.py +173 -0
  167. adam/utils_sqlite.py +137 -0
  168. adam/version.py +1 -1
  169. {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/METADATA +1 -1
  170. kaqing-2.0.172.dist-info/RECORD +230 -0
  171. adam/commands/app.py +0 -67
  172. adam/commands/app_ping.py +0 -44
  173. adam/commands/cql/cql_utils.py +0 -204
  174. adam/commands/devices.py +0 -147
  175. adam/commands/export/export_on_x.py +0 -76
  176. adam/commands/export/export_rmdbs.py +0 -65
  177. adam/commands/postgres/postgres_utils.py +0 -31
  178. adam/commands/reaper/reaper_session.py +0 -159
  179. adam/commands/show/show_app_actions.py +0 -56
  180. adam/commands/show/show_app_id.py +0 -47
  181. adam/commands/show/show_app_queues.py +0 -45
  182. adam/commands/show/show_repairs.py +0 -47
  183. adam/utils_export.py +0 -42
  184. kaqing-2.0.115.dist-info/RECORD +0 -203
  185. {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/WHEEL +0 -0
  186. {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/entry_points.txt +0 -0
  187. {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/top_level.txt +0 -0
adam/sso/cred_cache.py CHANGED
@@ -1,9 +1,9 @@
 import os
 from pathlib import Path
-import traceback
 from dotenv import load_dotenv
 
 from adam.config import Config
+from adam.utils import debug, log_exc
 from adam.utils_k8s.kube_context import KubeContext
 
 class CredCache:
@@ -34,10 +34,8 @@ class CredCache:
     def cache(self, username: str, password: str = None):
         if os.path.exists(self.env_f):
             with open(self.env_f, 'w') as file:
-                try:
+                with log_exc():
                     file.truncate()
-                except:
-                    Config().debug(traceback.format_exc())
 
         updated = []
         updated.append(f'IDP_USERNAME={username}')
@@ -56,4 +54,4 @@
         if password:
             self.overrides['IDP_PASSWORD'] = password
 
-        Config().debug(f'Cached username: {username}, password: {password}, try load: {self.get_username()}')
+        debug(f'Cached username: {username}, password: {password}, try load: {self.get_username()}')
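
The pattern in this file recurs throughout the release: ad-hoc try/except blocks that routed tracebacks to Config().debug(...) are replaced by the log_exc() context manager added to adam/utils.py (see the LogTrace class in that file's diff below). A minimal usage sketch, assuming log_exc is imported from adam.utils as the new code does; the file path is hypothetical:

from adam.utils import log_exc

# Before: try / except / Config().debug(traceback.format_exc())
# After: one construct that suppresses the exception so the surrounding code
# keeps going, printing the traceback to stderr only when debug mode is on.
with log_exc():
    with open('/tmp/example.env', 'w') as f:  # hypothetical path, illustration only
        f.truncate()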
adam/sso/idp.py CHANGED
@@ -3,7 +3,6 @@ import getpass
 import os
 import sys
 import termios
-import traceback
 from typing import Callable, TypeVar
 import requests
 from kubernetes import config
@@ -15,7 +14,7 @@ from .cred_cache import CredCache
 from .idp_session import IdpSession
 from .idp_login import IdpLogin
 from adam.config import Config
-from adam.utils import log, log2
+from adam.utils import debug, log, log_exc
 
 T = TypeVar('T')
 
@@ -31,6 +30,8 @@ class Idp:
     def login(app_host: str, username: str = None, idp_uri: str = None, forced = False, use_token_from_env = True, use_cached_creds = True, verify = True) -> IdpLogin:
         session: IdpSession = IdpSession.create(username, app_host, app_host, idp_uri=idp_uri)
 
+        debug(f'Idp.login({username})')
+
         if use_token_from_env:
             if l0 := session.login_from_env_var():
                 return l0
@@ -39,11 +40,9 @@
             token_server = Config().get('app.login.token-server-url', 'http://localhost:{port}').replace('{port}', port)
             res: requests.Response = requests.get(token_server)
             if res.status_code == 200 and res.text:
-                try:
+                with log_exc():
                     # may fail if the idp token is not complete
                     return session.login_from_token(res.text)
-                except:
-                    pass
 
         r: IdpLogin = None
         try:
@@ -57,10 +56,11 @@
         default_user: str = None
         if use_cached_creds:
             default_user = CredCache().get_username()
-            Config().debug(f'User read from cache: {default_user}')
+            debug(f'User read from cache: {default_user}')
 
-        if from_env := os.getenv('USERNAME'):
-            default_user = from_env
+        # no value in using USERNAME
+        # if from_env := os.getenv('USERNAME') and in_docker():
+        #     default_user = from_env
         if default_user and default_user != username:
             session = IdpSession.create(default_user, app_host, app_host)
@@ -125,7 +125,7 @@
             termios.tcsetattr(fd, termios.TCSADRAIN, old)
 
     def try_kubeconfig(username: str, kubeconfig: str):
-        try:
+        with log_exc():
             if kubeconfig[0] == '\t':
                 kubeconfig = kubeconfig[1:]
             kubeconfig_string = base64.b64decode(kubeconfig.encode('ascii') + b'==').decode('utf-8')
@@ -136,8 +136,5 @@
             Secrets.list_secrets(os.getenv('NAMESPACE'))
 
             return IdpLogin(None, None, None, username)
-        except:
-            Config().debug(traceback.format_exc())
-            pass
 
         return None
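
The Config().debug(...) calls removed here now go through the module-level debug() helper added to adam/utils.py, which only emits when LogConfig.is_debug() returns True and writes to stderr via log2(). A minimal sketch of that gate, assuming the LogConfig.is_debug callable is swapped at startup (the actual wiring into adam/config.py is not part of this diff):

from adam.utils import LogConfig, debug

debug('not printed')               # LogConfig.is_debug defaults to lambda: False

LogConfig.is_debug = lambda: True  # hypothetical wiring; the real hook lives elsewhere
debug('Idp.login(alice)')          # -> "DEBUG Idp.login(alice)" on stderr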
adam/utils.py CHANGED
@@ -1,3 +1,4 @@
+from concurrent.futures import Future, ThreadPoolExecutor
 from contextlib import redirect_stdout
 import copy
 import csv
@@ -9,6 +10,9 @@ import os
 from pathlib import Path
 import random
 import string
+import threading
+import traceback
+from typing import Callable, Iterator, TypeVar, Union
 from dateutil import parser
 import subprocess
 import sys
@@ -18,6 +22,13 @@ import yaml
 
 from . import __version__
 
+log_state = threading.local()
+
+class LogConfig:
+    is_debug = lambda: False
+    is_debug_timing = lambda: False
+    is_display_help = True
+
 def to_tabular(lines: str, header: str = None, dashed_line = False):
     return lines_to_tabular(lines.split('\n'), header, dashed_line)
 
@@ -69,18 +80,28 @@ def epoch(timestamp_string: str):
     return parser.parse(timestamp_string).timestamp()
 
 def log(s = None):
+    if not loggable():
+        return False
+
     # want to print empty line for False or empty collection
     if s == None:
         print()
     else:
         click.echo(s)
 
+    return True
+
 def log2(s = None, nl = True):
+    if not loggable():
+        return False
+
     if s:
         click.echo(s, err=True, nl=nl)
     else:
         print(file=sys.stderr)
 
+    return True
+
 def elapsed_time(start_time: float):
     end_time = time.time()
     elapsed_time = end_time - start_time
@@ -95,8 +116,8 @@ def duration(start_time: float, end_time: float = None):
         end_time = time.time()
     d = convert_seconds(end_time - start_time)
     t = []
-    if d[0]:
-        t.append(f'{d[0]}h')
+    if d:
+        t.append(f'{d}h')
     if t or d[1]:
         t.append(f'{d[1]}m')
     t.append(f'{d[2]}s')
@@ -159,6 +180,9 @@ def get_deep_keys(d, current_path=""):
     return keys
 
 def display_help(replace_arg = False):
+    if not LogConfig.is_display_help:
+        return
+
     args = copy.copy(sys.argv)
     if replace_arg:
         args[len(args) - 1] = '--help'
@@ -203,12 +227,13 @@ def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
         with redirect_stdout(body) as f:
             dict_writer = csv.DictWriter(f, keys, delimiter=delimiter)
             dict_writer.writerows(flattened_data)
+
         return header.getvalue().strip('\r\n'), [l.strip('\r') for l in body.getvalue().split('\n')]
     else:
         return None
 
 def log_to_file(config: dict[any, any]):
-    try:
+    with log_exc():
         base = f"/kaqing/logs"
         os.makedirs(base, exist_ok=True)
 
@@ -223,8 +248,6 @@
                 f.write(config)
             else:
                 f.write(config)
-    except:
-        pass
 
 def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
     dir = f'{Path.home()}/.kaqing'
@@ -240,4 +263,410 @@ def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
     return path
 
 def idp_token_from_env():
-    return os.getenv('IDP_TOKEN')
+    return os.getenv('IDP_TOKEN')
+
+def is_lambda(func):
+    return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
+
+def debug(s = None):
+    if LogConfig.is_debug():
+        log2(f'DEBUG {s}')
+
+def debug_trace():
+    if LogConfig.is_debug():
+        log2(traceback.format_exc())
+
+def in_docker() -> bool:
+    if os.path.exists('/.dockerenv'):
+        return True
+
+    try:
+        with open('/proc/1/cgroup', 'rt') as f:
+            for line in f:
+                if 'docker' in line or 'lxc' in line:
+                    return True
+    except FileNotFoundError:
+        pass
+
+    return False
+
+class Ing:
+    def __init__(self, msg: str, suppress_log=False):
+        self.msg = msg
+        self.suppress_log = suppress_log
+
+    def __enter__(self):
+        if not hasattr(log_state, 'ing_cnt'):
+            log_state.ing_cnt = 0
+
+        try:
+            if not log_state.ing_cnt:
+                if not self.suppress_log and not LogConfig.is_debug():
+                    log2(f'{self.msg}...', nl=False)
+
+            return None
+        finally:
+            log_state.ing_cnt += 1
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        log_state.ing_cnt -= 1
+        if not log_state.ing_cnt:
+            if not self.suppress_log and not LogConfig.is_debug():
+                log2(' OK')
+
+        return False
+
+def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
+    if not body:
+        return Ing(msg, suppress_log=suppress_log)
+
+    r = None
+
+    t = Ing(msg, suppress_log=suppress_log)
+    t.__enter__()
+    try:
+        r = body()
+    finally:
+        t.__exit__(None, None, None)
+
+    return r
+
+def loggable():
+    return LogConfig.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
+
+class TimingNode:
+    def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
+        self.depth = depth
+        self.s0 = s0
+        self.line = line
+        self.children = []
+
+    def __str__(self):
+        return f'[{self.depth}: {self.line}, children={len(self.children)}]'
+
+    def tree(self):
+        lines = []
+        if self.line:
+            lines.append(self.line)
+
+        for child in self.children:
+            if child.line:
+                lines.append(child.tree())
+        return '\n'.join(lines)
+
+class LogTiming:
+    def __init__(self, msg: str, s0: time.time = None):
+        self.msg = msg
+        self.s0 = s0
+
+    def __enter__(self):
+        if not LogConfig.is_debug_timing():
+            return
+
+        if not hasattr(log_state, 'timings'):
+            log_state.timings = TimingNode(0)
+
+        self.me = log_state.timings
+        log_state.timings = TimingNode(self.me.depth+1)
+        if not self.s0:
+            self.s0 = time.time()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if not LogConfig.is_debug_timing():
+            return False
+
+        child = log_state.timings
+        log_state.timings.line = timing_log_line(self.me.depth, self.msg, self.s0)
+
+        if child and child.line:
+            self.me.children.append(child)
+        log_state.timings = self.me
+
+        if not self.me.depth:
+            log2(self.me.tree())
+            log_state.timings = TimingNode(0)
+
+        return False
+
+def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
+    if not s0 and not body:
+        return LogTiming(msg, s0=s0)
+
+    if not LogConfig.is_debug_timing():
+        if body:
+            return body()
+
+        return
+
+    r = None
+
+    t = LogTiming(msg, s0=s0)
+    t.__enter__()
+    try:
+        if body:
+            r = body()
+    finally:
+        t.__exit__(None, None, None)
+
+    return r
+
+def timing_log_line(depth: int, msg: str, s0: time.time):
+    elapsed = time.time() - s0
+    prefix = '[timings] '
+    if depth:
+        if elapsed > 0.01:
+            prefix = (' ' * (depth-1)) + '* '
+        else:
+            prefix = ' ' * depth
+
+    return f'{prefix}{msg}: {elapsed:.2f} sec'
+
+class WaitLog:
+    wait_log_flag = False
+
+def wait_log(msg: str):
+    if not WaitLog.wait_log_flag:
+        log2(msg)
+        WaitLog.wait_log_flag = True
+
+def clear_wait_log_flag():
+    WaitLog.wait_log_flag = False
+
+class LogTrace:
+    def __init__(self, err_msg: Union[str, callable, bool] = None):
+        self.err_msg = err_msg
+
+    def __enter__(self):
+        return None
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is not None:
+            if self.err_msg is True:
+                log2(str(exc_val))
+            elif callable(self.err_msg):
+                log2(self.err_msg(exc_val))
+            elif self.err_msg is not False and self.err_msg:
+                log2(self.err_msg)
+
+            if self.err_msg is not False and LogConfig.is_debug():
+                traceback.print_exception(exc_type, exc_val, exc_tb, file=sys.stderr)
+
+        # swallow exception
+        return True
+
+def log_exc(err_msg: Union[str, callable, bool] = None):
+    return LogTrace(err_msg=err_msg)
+
+T = TypeVar('T')
+
+class ParallelService:
+    def __init__(self, handler: 'ParallelMapHandler'):
+        self.handler = handler
+
+    def map(self, fn: Callable[..., T]) -> Iterator[T]:
+        executor = self.handler.executor
+        collection = self.handler.collection
+        collect = self.handler.collect
+        samples_cnt = self.handler.samples
+
+        iterator = None
+        if executor:
+            iterator = executor.map(fn, collection)
+        elif samples_cnt < sys.maxsize:
+            samples = []
+
+            for elem in collection:
+                if not samples_cnt:
+                    break
+
+                samples.append(fn(elem))
+                samples_cnt -= 1
+
+            iterator = iter(samples)
+        else:
+            iterator = map(fn, collection)
+
+        if collect:
+            return list(iterator)
+        else:
+            return iterator
+
+class ParallelMapHandler:
+    def __init__(self, collection: list, max_workers: int, samples: int = sys.maxsize, msg: str = None, collect = True):
+        self.collection = collection
+        self.max_workers = max_workers
+        self.executor = None
+        self.samples = samples
+        self.msg = msg
+        if msg and msg.startswith('d`'):
+            if LogConfig.is_debug():
+                self.msg = msg.replace('d`', '', 1)
+            else:
+                self.msg = None
+        self.collect = collect
+
+        self.begin = []
+        self.end = []
+        self.start_time = None
+
+    def __enter__(self):
+        self.calc_msgs()
+
+        if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+            self.start_time = time.time()
+
+            self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
+            self.executor.__enter__()
+
+        return ParallelService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.executor:
+            self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+        if self.end:
+            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+        return False
+
+    def size(self):
+        if not self.collection:
+            return 0
+
+        return len(self.collection)
+
+    def calc_msgs(self):
+        if not self.msg:
+            return
+
+        size = self.size()
+        # return
+
+        offloaded = False
+        serially = False
+        sampling = False
+        if size == 0:
+            offloaded = True
+            self.msg = self.msg.replace('{size}', '1')
+        elif self.max_workers > 1 and size > 1 and self.samples == sys.maxsize:
+            self.msg = self.msg.replace('{size}', f'{size}')
+        elif self.samples < sys.maxsize:
+            sampling = True
+            if self.samples > size:
+                self.samples = size
+            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+        else:
+            serially = True
+            self.msg = self.msg.replace('{size}', f'{size}')
+        # return
+
+        for token in self.msg.split(' '):
+            if '|' in token:
+                self.begin.append(token.split('|')[0])
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token.split('|')[1])
+            else:
+                self.begin.append(token)
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token)
+
+        if offloaded:
+            log2(f'{" ".join(self.begin)} offloaded...')
+        elif sampling or serially:
+            log2(f'{" ".join(self.begin)} serially...')
+        else:
+            log2(f'{" ".join(self.begin)} with {self.max_workers} workers...')
+
+class OffloadService:
+    def __init__(self, handler: 'OffloadHandler'):
+        self.handler = handler
+
+    def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
+        executor = self.handler.executor
+
+        if executor:
+            return executor.submit(fn, *args, **kwargs)
+        else:
+            future = Future()
+
+            future.set_result(fn(*args, **kwargs))
+
+            return future
+
+class OffloadHandler(ParallelMapHandler):
+    def __init__(self, max_workers: int, msg: str = None):
+        super().__init__(None, max_workers, msg=msg, collect=False )
+
+    def __enter__(self):
+        self.calc_msgs()
+
+        if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+            self.start_time = time.time()
+
+            self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
+            self.executor.__enter__()
+
+        return OffloadService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.executor:
+            self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+        if self.end:
+            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+        return False
+
+    def size(self):
+        if not self.collection:
+            return 0
+
+        return len(self.collection)
+
+    def calc_msgs(self):
+        if not self.msg:
+            return
+
+        size = self.size()
+        # return
+
+        offloaded = False
+        serially = False
+        sampling = False
+        if size == 0:
+            offloaded = True
+            self.msg = self.msg.replace('{size}', '1')
+        elif self.max_workers > 1 and size > 1 and self.samples == sys.maxsize:
+            self.msg = self.msg.replace('{size}', f'{size}')
+        elif self.samples < sys.maxsize:
+            sampling = True
+            if self.samples > size:
+                self.samples = size
+            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+        else:
+            serially = True
+            self.msg = self.msg.replace('{size}', f'{size}')
+        # return
+
+        for token in self.msg.split(' '):
+            if '|' in token:
+                self.begin.append(token.split('|')[0])
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token.split('|')[1])
+            else:
+                self.begin.append(token)
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token)
+
+        if offloaded:
+            log2(f'{" ".join(self.begin)} offloaded...')
+        elif sampling or serially:
+            log2(f'{" ".join(self.begin)} serially...')
+        else:
+            log2(f'{" ".join(self.begin)} with {self.max_workers} workers...')
+
+def parallelize(collection: list, max_workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True):
+    return ParallelMapHandler(collection, max_workers, samples = samples, msg = msg, collect = collect)
+
+def offload(max_workers: int = 3, msg: str = None):
+    return OffloadHandler(max_workers, msg = msg)
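
Most of the new adam/utils.py code is a small logging and concurrency toolkit used by the refactored commands: ing() wraps a block of work with a progress message, parallelize() maps a function over a collection on a thread pool, and offload() submits background work and returns concurrent.futures.Future objects. A usage sketch under the assumption that these helpers are imported straight from adam.utils; the pod names and messages are made up for illustration:

from adam.utils import ing, offload, parallelize

pods = ['pod-0', 'pod-1', 'pod-2']  # hypothetical inputs

# Progress message around a block: "Collecting pod status..." is printed on
# entry and " OK" is appended when the block exits.
with ing('Collecting pod status'):
    pass  # work goes here

# Thread-pooled map; a msg token may carry a begin|end pair and a {size}
# placeholder, e.g. "checking 3 pods with 2 workers..." / "checked 3 pods in ...".
with parallelize(pods, max_workers=2, msg='checking|checked {size} pods') as p:
    results = p.map(lambda pod: pod.upper())

# Background submission; with no collection the handler reports the work as
# "offloaded" and submit() returns a Future.
with offload(max_workers=2, msg='exporting|exported {size} session') as o:
    fut = o.submit(len, pods)
print(fut.result())  # -> 3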