kaqing 2.0.14__py3-none-any.whl → 2.0.189__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of kaqing might be problematic. Click here for more details.

Files changed (228)
  1. adam/__init__.py +0 -2
  2. adam/app_session.py +9 -12
  3. adam/apps.py +20 -6
  4. adam/batch.py +16 -6
  5. adam/checks/check_utils.py +19 -49
  6. adam/checks/compactionstats.py +1 -1
  7. adam/checks/cpu.py +9 -3
  8. adam/checks/cpu_metrics.py +52 -0
  9. adam/checks/disk.py +3 -4
  10. adam/checks/gossip.py +1 -1
  11. adam/checks/memory.py +3 -3
  12. adam/checks/status.py +1 -1
  13. adam/columns/columns.py +3 -1
  14. adam/columns/cpu.py +3 -1
  15. adam/columns/cpu_metrics.py +22 -0
  16. adam/columns/memory.py +3 -4
  17. adam/commands/__init__.py +24 -0
  18. adam/commands/alter_tables.py +66 -0
  19. adam/commands/app/app.py +38 -0
  20. adam/commands/{app_ping.py → app/app_ping.py} +8 -14
  21. adam/commands/app/show_app_actions.py +49 -0
  22. adam/commands/{show → app}/show_app_id.py +9 -12
  23. adam/commands/{show → app}/show_app_queues.py +8 -14
  24. adam/commands/app/utils_app.py +106 -0
  25. adam/commands/audit/__init__.py +0 -0
  26. adam/commands/audit/audit.py +67 -0
  27. adam/commands/audit/audit_repair_tables.py +72 -0
  28. adam/commands/audit/audit_run.py +50 -0
  29. adam/commands/audit/completions_l.py +15 -0
  30. adam/commands/audit/show_last10.py +36 -0
  31. adam/commands/audit/show_slow10.py +36 -0
  32. adam/commands/audit/show_top10.py +36 -0
  33. adam/commands/audit/utils_show_top10.py +71 -0
  34. adam/commands/bash/__init__.py +5 -0
  35. adam/commands/bash/bash.py +36 -0
  36. adam/commands/bash/bash_completer.py +93 -0
  37. adam/commands/bash/utils_bash.py +16 -0
  38. adam/commands/cat.py +36 -0
  39. adam/commands/cd.py +14 -88
  40. adam/commands/check.py +18 -21
  41. adam/commands/cli_commands.py +11 -7
  42. adam/commands/clipboard_copy.py +87 -0
  43. adam/commands/code.py +57 -0
  44. adam/commands/command.py +220 -19
  45. adam/commands/commands_utils.py +28 -31
  46. adam/commands/cql/__init__.py +0 -0
  47. adam/commands/cql/completions_c.py +28 -0
  48. adam/commands/{cqlsh.py → cql/cqlsh.py} +13 -32
  49. adam/commands/cql/utils_cql.py +305 -0
  50. adam/commands/deploy/code_start.py +7 -10
  51. adam/commands/deploy/code_stop.py +4 -21
  52. adam/commands/deploy/code_utils.py +5 -5
  53. adam/commands/deploy/deploy.py +4 -40
  54. adam/commands/deploy/deploy_frontend.py +15 -18
  55. adam/commands/deploy/deploy_pg_agent.py +4 -7
  56. adam/commands/deploy/deploy_pod.py +74 -77
  57. adam/commands/deploy/deploy_utils.py +16 -26
  58. adam/commands/deploy/undeploy.py +4 -40
  59. adam/commands/deploy/undeploy_frontend.py +5 -8
  60. adam/commands/deploy/undeploy_pg_agent.py +7 -8
  61. adam/commands/deploy/undeploy_pod.py +16 -17
  62. adam/commands/devices/__init__.py +0 -0
  63. adam/commands/devices/device.py +149 -0
  64. adam/commands/devices/device_app.py +163 -0
  65. adam/commands/devices/device_auit_log.py +49 -0
  66. adam/commands/devices/device_cass.py +179 -0
  67. adam/commands/devices/device_export.py +87 -0
  68. adam/commands/devices/device_postgres.py +160 -0
  69. adam/commands/devices/devices.py +25 -0
  70. adam/commands/download_file.py +47 -0
  71. adam/commands/exit.py +1 -4
  72. adam/commands/export/__init__.py +0 -0
  73. adam/commands/export/clean_up_all_export_sessions.py +37 -0
  74. adam/commands/export/clean_up_export_sessions.py +39 -0
  75. adam/commands/export/completions_x.py +11 -0
  76. adam/commands/export/download_export_session.py +40 -0
  77. adam/commands/export/drop_export_database.py +39 -0
  78. adam/commands/export/drop_export_databases.py +37 -0
  79. adam/commands/export/export.py +37 -0
  80. adam/commands/export/export_databases.py +246 -0
  81. adam/commands/export/export_select.py +34 -0
  82. adam/commands/export/export_sessions.py +209 -0
  83. adam/commands/export/export_use.py +49 -0
  84. adam/commands/export/export_x_select.py +48 -0
  85. adam/commands/export/exporter.py +332 -0
  86. adam/commands/export/import_files.py +44 -0
  87. adam/commands/export/import_session.py +44 -0
  88. adam/commands/export/importer.py +81 -0
  89. adam/commands/export/importer_athena.py +148 -0
  90. adam/commands/export/importer_sqlite.py +67 -0
  91. adam/commands/export/show_column_counts.py +45 -0
  92. adam/commands/export/show_export_databases.py +39 -0
  93. adam/commands/export/show_export_session.py +39 -0
  94. adam/commands/export/show_export_sessions.py +37 -0
  95. adam/commands/export/utils_export.py +344 -0
  96. adam/commands/find_files.py +51 -0
  97. adam/commands/find_processes.py +76 -0
  98. adam/commands/head.py +36 -0
  99. adam/commands/help.py +14 -9
  100. adam/commands/intermediate_command.py +52 -0
  101. adam/commands/issues.py +14 -40
  102. adam/commands/kubectl.py +38 -0
  103. adam/commands/login.py +26 -25
  104. adam/commands/logs.py +5 -7
  105. adam/commands/ls.py +11 -115
  106. adam/commands/medusa/medusa.py +4 -46
  107. adam/commands/medusa/medusa_backup.py +22 -29
  108. adam/commands/medusa/medusa_restore.py +51 -49
  109. adam/commands/medusa/medusa_show_backupjobs.py +20 -21
  110. adam/commands/medusa/medusa_show_restorejobs.py +16 -21
  111. adam/commands/medusa/utils_medusa.py +15 -0
  112. adam/commands/nodetool.py +8 -17
  113. adam/commands/param_get.py +11 -14
  114. adam/commands/param_set.py +9 -13
  115. adam/commands/postgres/completions_p.py +22 -0
  116. adam/commands/postgres/postgres.py +49 -73
  117. adam/commands/postgres/postgres_databases.py +270 -0
  118. adam/commands/postgres/postgres_ls.py +4 -8
  119. adam/commands/postgres/postgres_preview.py +5 -9
  120. adam/commands/postgres/utils_postgres.py +79 -0
  121. adam/commands/preview_table.py +10 -69
  122. adam/commands/pwd.py +14 -43
  123. adam/commands/reaper/reaper.py +6 -49
  124. adam/commands/reaper/reaper_forward.py +49 -56
  125. adam/commands/reaper/reaper_forward_session.py +6 -0
  126. adam/commands/reaper/reaper_forward_stop.py +10 -16
  127. adam/commands/reaper/reaper_restart.py +8 -15
  128. adam/commands/reaper/reaper_run_abort.py +8 -33
  129. adam/commands/reaper/reaper_runs.py +43 -58
  130. adam/commands/reaper/reaper_runs_abort.py +29 -49
  131. adam/commands/reaper/reaper_schedule_activate.py +14 -33
  132. adam/commands/reaper/reaper_schedule_start.py +9 -33
  133. adam/commands/reaper/reaper_schedule_stop.py +9 -33
  134. adam/commands/reaper/reaper_schedules.py +4 -14
  135. adam/commands/reaper/reaper_status.py +8 -16
  136. adam/commands/reaper/utils_reaper.py +203 -0
  137. adam/commands/repair/repair.py +4 -46
  138. adam/commands/repair/repair_log.py +6 -12
  139. adam/commands/repair/repair_run.py +29 -36
  140. adam/commands/repair/repair_scan.py +33 -41
  141. adam/commands/repair/repair_stop.py +6 -13
  142. adam/commands/report.py +25 -21
  143. adam/commands/restart.py +27 -28
  144. adam/commands/rollout.py +20 -25
  145. adam/commands/shell.py +12 -4
  146. adam/commands/show/show.py +15 -46
  147. adam/commands/show/show_adam.py +3 -3
  148. adam/commands/show/show_cassandra_repairs.py +37 -0
  149. adam/commands/show/show_cassandra_status.py +48 -52
  150. adam/commands/show/show_cassandra_version.py +5 -18
  151. adam/commands/show/show_cli_commands.py +56 -0
  152. adam/commands/show/show_host.py +33 -0
  153. adam/commands/show/show_login.py +23 -27
  154. adam/commands/show/show_params.py +2 -5
  155. adam/commands/show/show_processes.py +18 -21
  156. adam/commands/show/show_storage.py +11 -20
  157. adam/commands/watch.py +27 -30
  158. adam/config.py +8 -6
  159. adam/embedded_params.py +1 -1
  160. adam/log.py +4 -4
  161. adam/pod_exec_result.py +13 -5
  162. adam/repl.py +136 -120
  163. adam/repl_commands.py +66 -24
  164. adam/repl_session.py +8 -1
  165. adam/repl_state.py +343 -73
  166. adam/sql/__init__.py +0 -0
  167. adam/sql/lark_completer.py +284 -0
  168. adam/sql/lark_parser.py +604 -0
  169. adam/sql/sql_completer.py +118 -0
  170. adam/sql/sql_state_machine.py +630 -0
  171. adam/sql/term_completer.py +76 -0
  172. adam/sso/authn_ad.py +7 -9
  173. adam/sso/authn_okta.py +4 -6
  174. adam/sso/cred_cache.py +4 -6
  175. adam/sso/idp.py +10 -13
  176. adam/utils.py +539 -11
  177. adam/utils_athena.py +145 -0
  178. adam/utils_audits.py +102 -0
  179. adam/utils_issues.py +32 -0
  180. adam/utils_k8s/__init__.py +0 -0
  181. adam/utils_k8s/app_clusters.py +28 -0
  182. adam/utils_k8s/app_pods.py +36 -0
  183. adam/utils_k8s/cassandra_clusters.py +44 -0
  184. adam/{k8s_utils → utils_k8s}/cassandra_nodes.py +12 -5
  185. adam/{k8s_utils → utils_k8s}/custom_resources.py +16 -17
  186. adam/{k8s_utils → utils_k8s}/deployment.py +2 -2
  187. adam/{k8s_utils → utils_k8s}/ingresses.py +2 -2
  188. adam/{k8s_utils → utils_k8s}/jobs.py +7 -11
  189. adam/utils_k8s/k8s.py +96 -0
  190. adam/{k8s_utils → utils_k8s}/kube_context.py +3 -3
  191. adam/{k8s_utils → utils_k8s}/pods.py +132 -83
  192. adam/{k8s_utils → utils_k8s}/secrets.py +7 -3
  193. adam/{k8s_utils → utils_k8s}/service_accounts.py +5 -4
  194. adam/{k8s_utils → utils_k8s}/services.py +2 -2
  195. adam/{k8s_utils → utils_k8s}/statefulsets.py +9 -16
  196. adam/utils_local.py +4 -0
  197. adam/utils_net.py +24 -0
  198. adam/utils_repl/__init__.py +0 -0
  199. adam/utils_repl/appendable_completer.py +6 -0
  200. adam/utils_repl/automata_completer.py +48 -0
  201. adam/utils_repl/repl_completer.py +172 -0
  202. adam/utils_repl/state_machine.py +173 -0
  203. adam/utils_sqlite.py +137 -0
  204. adam/version.py +1 -1
  205. {kaqing-2.0.14.dist-info → kaqing-2.0.189.dist-info}/METADATA +1 -1
  206. kaqing-2.0.189.dist-info/RECORD +253 -0
  207. kaqing-2.0.189.dist-info/top_level.txt +2 -0
  208. teddy/__init__.py +0 -0
  209. teddy/lark_parser.py +436 -0
  210. teddy/lark_parser2.py +618 -0
  211. adam/commands/app.py +0 -67
  212. adam/commands/bash.py +0 -87
  213. adam/commands/cp.py +0 -95
  214. adam/commands/cql_utils.py +0 -53
  215. adam/commands/devices.py +0 -89
  216. adam/commands/postgres/postgres_session.py +0 -247
  217. adam/commands/reaper/reaper_session.py +0 -159
  218. adam/commands/show/show_app_actions.py +0 -53
  219. adam/commands/show/show_commands.py +0 -61
  220. adam/commands/show/show_repairs.py +0 -47
  221. adam/k8s_utils/cassandra_clusters.py +0 -48
  222. kaqing-2.0.14.dist-info/RECORD +0 -167
  223. kaqing-2.0.14.dist-info/top_level.txt +0 -1
  224. /adam/{k8s_utils → commands/app}/__init__.py +0 -0
  225. /adam/{k8s_utils → utils_k8s}/config_maps.py +0 -0
  226. /adam/{k8s_utils → utils_k8s}/volumes.py +0 -0
  227. {kaqing-2.0.14.dist-info → kaqing-2.0.189.dist-info}/WHEEL +0 -0
  228. {kaqing-2.0.14.dist-info → kaqing-2.0.189.dist-info}/entry_points.txt +0 -0
adam/utils.py CHANGED
@@ -1,3 +1,4 @@
1
+ from concurrent.futures import Future, ThreadPoolExecutor
1
2
  from contextlib import redirect_stdout
2
3
  import copy
3
4
  import csv
@@ -9,7 +10,9 @@ import os
9
10
  from pathlib import Path
10
11
  import random
11
12
  import string
12
- from typing import cast
13
+ import threading
14
+ import traceback
15
+ from typing import Callable, Iterator, TypeVar, Union
13
16
  from dateutil import parser
14
17
  import subprocess
15
18
  import sys
@@ -17,12 +20,33 @@ import time
17
20
  import click
18
21
  import yaml
19
22
 
23
+ from prompt_toolkit.completion import Completer
24
+
20
25
  from . import __version__
21
26
 
22
- def to_tabular(lines: str, header: str = None, dashed_line = False):
23
- return lines_to_tabular(lines.split('\n'), header, dashed_line)
27
+ T = TypeVar('T')
28
+
29
+ log_state = threading.local()
30
+
31
+ class LogConfig:
32
+ is_debug = lambda: False
33
+ is_debug_timing = lambda: False
34
+ is_debug_complete = lambda: False
35
+ is_display_help = True
36
+
37
+ NO_SORT = 0
38
+ SORT = 1
39
+ REVERSE_SORT = -1
40
+
41
+ def tabulize(lines: list[T], fn: Callable[..., T] = None, header: str = None, dashed_line = False, separator = ' ', to: int = 1, sorted: int = NO_SORT):
42
+ if fn:
43
+ lines = list(map(fn, lines))
44
+
45
+ if sorted == SORT:
46
+ lines.sort()
47
+ elif sorted == REVERSE_SORT:
48
+ lines.sort(reverse=True)
24
49
 
25
- def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False, separator = ' '):
26
50
  maxes = []
27
51
  nls = []
28
52
 
@@ -53,7 +77,14 @@ def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False,
53
77
  for line in lines:
54
78
  format_line(line)
55
79
 
56
- return '\n'.join(nls)
80
+ table = '\n'.join(nls)
81
+
82
+ if to == 1:
83
+ log(table)
84
+ elif to == 2:
85
+ log2(table)
86
+
87
+ return table
57
88
 
58
89
  def convert_seconds(total_seconds_float):
59
90
  total_seconds_int = int(total_seconds_float) # Convert float to integer seconds
@@ -70,18 +101,28 @@ def epoch(timestamp_string: str):
70
101
  return parser.parse(timestamp_string).timestamp()
71
102
 
72
103
  def log(s = None):
104
+ if not loggable():
105
+ return False
106
+
73
107
  # want to print empty line for False or empty collection
74
108
  if s == None:
75
109
  print()
76
110
  else:
77
111
  click.echo(s)
78
112
 
113
+ return True
114
+
79
115
  def log2(s = None, nl = True):
116
+ if not loggable():
117
+ return False
118
+
80
119
  if s:
81
120
  click.echo(s, err=True, nl=nl)
82
121
  else:
83
122
  print(file=sys.stderr)
84
123
 
124
+ return True
125
+
85
126
  def elapsed_time(start_time: float):
86
127
  end_time = time.time()
87
128
  elapsed_time = end_time - start_time
@@ -96,7 +137,7 @@ def duration(start_time: float, end_time: float = None):
96
137
  end_time = time.time()
97
138
  d = convert_seconds(end_time - start_time)
98
139
  t = []
99
- if d[0]:
140
+ if d:
100
141
  t.append(f'{d[0]}h')
101
142
  if t or d[1]:
102
143
  t.append(f'{d[1]}m')
@@ -122,9 +163,25 @@ def deep_merge_dicts(dict1, dict2):
122
163
  merged_dict[key] = deep_merge_dicts(merged_dict[key], value)
123
164
  elif key not in merged_dict or value:
124
165
  # Otherwise, overwrite or add the value from dict2
125
- merged_dict[key] = value
166
+ if key in merged_dict and isinstance(merged_dict[key], Completer):
167
+ print('SEAN completer found, ignoring', key, value)
168
+ else:
169
+ merged_dict[key] = value
126
170
  return merged_dict
127
171
 
172
+ def deep_sort_dict(d):
173
+ """
174
+ Recursively sorts a dictionary by its keys, and any nested lists by their elements.
175
+ """
176
+ if not isinstance(d, (dict, list)):
177
+ return d
178
+
179
+ if isinstance(d, dict):
180
+ return {k: deep_sort_dict(d[k]) for k in sorted(d)}
181
+
182
+ if isinstance(d, list):
183
+ return sorted([deep_sort_dict(item) for item in d])
184
+
128
185
  def get_deep_keys(d, current_path=""):
129
186
  """
130
187
  Recursively collects all combined keys (paths) from a deep dictionary.
@@ -147,6 +204,9 @@ def get_deep_keys(d, current_path=""):
147
204
  return keys
148
205
 
149
206
  def display_help(replace_arg = False):
207
+ if not LogConfig.is_display_help:
208
+ return
209
+
150
210
  args = copy.copy(sys.argv)
151
211
  if replace_arg:
152
212
  args[len(args) - 1] = '--help'
@@ -191,12 +251,13 @@ def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
191
251
  with redirect_stdout(body) as f:
192
252
  dict_writer = csv.DictWriter(f, keys, delimiter=delimiter)
193
253
  dict_writer.writerows(flattened_data)
254
+
194
255
  return header.getvalue().strip('\r\n'), [l.strip('\r') for l in body.getvalue().split('\n')]
195
256
  else:
196
257
  return None
197
258
 
198
259
  def log_to_file(config: dict[any, any]):
199
- try:
260
+ with log_exc():
200
261
  base = f"/kaqing/logs"
201
262
  os.makedirs(base, exist_ok=True)
202
263
 
@@ -211,8 +272,6 @@ def log_to_file(config: dict[any, any]):
211
272
  f.write(config)
212
273
  else:
213
274
  f.write(config)
214
- except:
215
- pass
216
275
 
217
276
  def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
218
277
  dir = f'{Path.home()}/.kaqing'
@@ -228,4 +287,473 @@ def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out
228
287
  return path
229
288
 
230
289
  def idp_token_from_env():
231
- return os.getenv('IDP_TOKEN')
290
+ return os.getenv('IDP_TOKEN')
291
+
292
+ def is_lambda(func):
293
+ return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
294
+
295
+ def debug(s = None):
296
+ if LogConfig.is_debug():
297
+ log2(f'DEBUG {s}')
298
+
299
+ def debug_complete(s = None):
300
+ if LogConfig.is_debug_complete():
301
+ log2(f'DEBUG {s}')
302
+
303
+ def debug_trace():
304
+ if LogConfig.is_debug():
305
+ log2(traceback.format_exc())
306
+
307
+ def in_docker() -> bool:
308
+ if os.path.exists('/.dockerenv'):
309
+ return True
310
+
311
+ try:
312
+ with open('/proc/1/cgroup', 'rt') as f:
313
+ for line in f:
314
+ if 'docker' in line or 'lxc' in line:
315
+ return True
316
+ except FileNotFoundError:
317
+ pass
318
+
319
+ return False
320
+
321
+ class Ing:
322
+ def __init__(self, msg: str, suppress_log=False):
323
+ self.msg = msg
324
+ self.suppress_log = suppress_log
325
+
326
+ def __enter__(self):
327
+ if not hasattr(log_state, 'ing_cnt'):
328
+ log_state.ing_cnt = 0
329
+
330
+ try:
331
+ if not log_state.ing_cnt:
332
+ if not self.suppress_log and not LogConfig.is_debug():
333
+ log2(f'{self.msg}...', nl=False)
334
+
335
+ return None
336
+ finally:
337
+ log_state.ing_cnt += 1
338
+
339
+ def __exit__(self, exc_type, exc_val, exc_tb):
340
+ log_state.ing_cnt -= 1
341
+ if not log_state.ing_cnt:
342
+ if not self.suppress_log and not LogConfig.is_debug():
343
+ log2(' OK')
344
+
345
+ return False
346
+
347
+ def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
348
+ if not body:
349
+ return Ing(msg, suppress_log=suppress_log)
350
+
351
+ r = None
352
+
353
+ t = Ing(msg, suppress_log=suppress_log)
354
+ t.__enter__()
355
+ try:
356
+ r = body()
357
+ finally:
358
+ t.__exit__(None, None, None)
359
+
360
+ return r
361
+
362
+ def loggable():
363
+ return LogConfig.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
364
+
365
+ class TimingNode:
366
+ def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
367
+ self.depth = depth
368
+ self.s0 = s0
369
+ self.line = line
370
+ self.children = []
371
+
372
+ def __str__(self):
373
+ return f'[{self.depth}: {self.line}, children={len(self.children)}]'
374
+
375
+ def tree(self):
376
+ lines = []
377
+ if self.line:
378
+ lines.append(self.line)
379
+
380
+ for child in self.children:
381
+ if child.line:
382
+ lines.append(child.tree())
383
+ return '\n'.join(lines)
384
+
385
+ class LogTiming:
386
+ def __init__(self, msg: str, s0: time.time = None):
387
+ self.msg = msg
388
+ self.s0 = s0
389
+
390
+ def __enter__(self):
391
+ if not LogConfig.is_debug_timing():
392
+ return
393
+
394
+ if not hasattr(log_state, 'timings'):
395
+ log_state.timings = TimingNode(0)
396
+
397
+ self.me = log_state.timings
398
+ log_state.timings = TimingNode(self.me.depth+1)
399
+ if not self.s0:
400
+ self.s0 = time.time()
401
+
402
+ def __exit__(self, exc_type, exc_val, exc_tb):
403
+ if not LogConfig.is_debug_timing():
404
+ return False
405
+
406
+ child = log_state.timings
407
+ log_state.timings.line = timing_log_line(self.me.depth, self.msg, self.s0)
408
+
409
+ if child and child.line:
410
+ self.me.children.append(child)
411
+ log_state.timings = self.me
412
+
413
+ if not self.me.depth:
414
+ log2(self.me.tree())
415
+ log_state.timings = TimingNode(0)
416
+
417
+ return False
418
+
419
+ def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
420
+ if not s0 and not body:
421
+ return LogTiming(msg, s0=s0)
422
+
423
+ if not LogConfig.is_debug_timing():
424
+ if body:
425
+ return body()
426
+
427
+ return
428
+
429
+ r = None
430
+
431
+ t = LogTiming(msg, s0=s0)
432
+ t.__enter__()
433
+ try:
434
+ if body:
435
+ r = body()
436
+ finally:
437
+ t.__exit__(None, None, None)
438
+
439
+ return r
440
+
441
+ def timing_log_line(depth: int, msg: str, s0: time.time):
442
+ # print('SEAN log timing', msg, threading.current_thread().name)
443
+ elapsed = time.time() - s0
444
+ offloaded = '-' if threading.current_thread().name.startswith('offload') or threading.current_thread().name.startswith('async') else '+'
445
+ prefix = f'[{offloaded} timings] '
446
+
447
+ if depth:
448
+ if elapsed > 0.01:
449
+ prefix = (' ' * (depth-1)) + '* '
450
+ else:
451
+ prefix = ' ' * depth
452
+
453
+ return f'{prefix}{msg}: {elapsed:.2f} sec'
454
+
455
+ class WaitLog:
456
+ wait_log_flag = False
457
+
458
+ def wait_log(msg: str):
459
+ if not WaitLog.wait_log_flag:
460
+ log2(msg)
461
+ WaitLog.wait_log_flag = True
462
+
463
+ def clear_wait_log_flag():
464
+ WaitLog.wait_log_flag = False
465
+
466
+ def bytes_generator_from_file(file_path, chunk_size=4096):
467
+ with open(file_path, 'rb') as f:
468
+ while True:
469
+ chunk = f.read(chunk_size)
470
+ if not chunk:
471
+ break
472
+ yield chunk
473
+
474
+ class GeneratorStream(io.RawIOBase):
475
+ def __init__(self, generator):
476
+ self._generator = generator
477
+ self._buffer = b'' # Buffer to store leftover bytes from generator yields
478
+
479
+ def readable(self):
480
+ return True
481
+
482
+ def _read_from_generator(self):
483
+ try:
484
+ chunk = next(self._generator)
485
+ if isinstance(chunk, str):
486
+ chunk = chunk.encode('utf-8') # Encode if generator yields strings
487
+ self._buffer += chunk
488
+ except StopIteration:
489
+ pass # Generator exhausted
490
+
491
+ def readinto(self, b):
492
+ # Fill the buffer if necessary
493
+ while len(self._buffer) < len(b):
494
+ old_buffer_len = len(self._buffer)
495
+ self._read_from_generator()
496
+ if len(self._buffer) == old_buffer_len: # Generator exhausted and buffer empty
497
+ break
498
+
499
+ bytes_to_read = min(len(b), len(self._buffer))
500
+ b[:bytes_to_read] = self._buffer[:bytes_to_read]
501
+ self._buffer = self._buffer[bytes_to_read:]
502
+ return bytes_to_read
503
+
504
+ def read(self, size=-1):
505
+ if size == -1: # Read all remaining data
506
+ while True:
507
+ old_buffer_len = len(self._buffer)
508
+ self._read_from_generator()
509
+ if len(self._buffer) == old_buffer_len:
510
+ break
511
+ data = self._buffer
512
+ self._buffer = b''
513
+ return data
514
+ else:
515
+ # Ensure enough data in buffer
516
+ while len(self._buffer) < size:
517
+ old_buffer_len = len(self._buffer)
518
+ self._read_from_generator()
519
+ if len(self._buffer) == old_buffer_len:
520
+ break
521
+
522
+ data = self._buffer[:size]
523
+ self._buffer = self._buffer[size:]
524
+ return data
525
+
526
+ class LogTrace:
527
+ def __init__(self, err_msg: Union[str, callable, bool] = None):
528
+ self.err_msg = err_msg
529
+
530
+ def __enter__(self):
531
+ return None
532
+
533
+ def __exit__(self, exc_type, exc_val, exc_tb):
534
+ if exc_type is not None:
535
+ if self.err_msg is True:
536
+ log2(str(exc_val))
537
+ elif callable(self.err_msg):
538
+ log2(self.err_msg(exc_val))
539
+ elif self.err_msg is not False and self.err_msg:
540
+ log2(self.err_msg)
541
+
542
+ if self.err_msg is not False and LogConfig.is_debug():
543
+ traceback.print_exception(exc_type, exc_val, exc_tb, file=sys.stderr)
544
+
545
+ # swallow exception
546
+ return True
547
+
548
+ def log_exc(err_msg: Union[str, callable, bool] = None):
549
+ return LogTrace(err_msg=err_msg)
550
+
551
+ class ParallelService:
552
+ def __init__(self, handler: 'ParallelMapHandler'):
553
+ self.handler = handler
554
+
555
+ def map(self, fn: Callable[..., T]) -> Iterator[T]:
556
+ executor = self.handler.executor
557
+ collection = self.handler.collection
558
+ collect = self.handler.collect
559
+ samples_cnt = self.handler.samples
560
+
561
+ iterator = None
562
+ if executor:
563
+ iterator = executor.map(fn, collection)
564
+ elif samples_cnt < sys.maxsize:
565
+ samples = []
566
+
567
+ for elem in collection:
568
+ if not samples_cnt:
569
+ break
570
+
571
+ samples.append(fn(elem))
572
+ samples_cnt -= 1
573
+
574
+ iterator = iter(samples)
575
+ else:
576
+ iterator = map(fn, collection)
577
+
578
+ if collect:
579
+ return list(iterator)
580
+ else:
581
+ return iterator
582
+
583
+ class ParallelMapHandler:
584
+ def __init__(self, collection: list, workers: int, samples: int = sys.maxsize, msg: str = None, collect = True):
585
+ self.collection = collection
586
+ self.workers = workers
587
+ self.executor = None
588
+ self.samples = samples
589
+ self.msg = msg
590
+ if msg and msg.startswith('d`'):
591
+ if LogConfig.is_debug():
592
+ self.msg = msg.replace('d`', '', 1)
593
+ else:
594
+ self.msg = None
595
+ self.collect = collect
596
+
597
+ self.begin = []
598
+ self.end = []
599
+ self.start_time = None
600
+
601
+ def __enter__(self):
602
+ self.calc_msgs()
603
+
604
+ if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
605
+ self.start_time = time.time()
606
+
607
+ self.executor = ThreadPoolExecutor(max_workers=self.workers)
608
+ self.executor.__enter__()
609
+
610
+ return ParallelService(self)
611
+
612
+ def __exit__(self, exc_type, exc_val, exc_tb):
613
+ if self.executor:
614
+ self.executor.__exit__(exc_type, exc_val, exc_tb)
615
+
616
+ if self.end:
617
+ log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
618
+
619
+ return False
620
+
621
+ def size(self):
622
+ if not self.collection:
623
+ return 0
624
+
625
+ return len(self.collection)
626
+
627
+ def calc_msgs(self):
628
+ if not self.msg:
629
+ return
630
+
631
+ size = self.size()
632
+ offloaded = False
633
+ serially = False
634
+ sampling = False
635
+ if size == 0:
636
+ offloaded = True
637
+ self.msg = self.msg.replace('{size}', '1')
638
+ elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
639
+ self.msg = self.msg.replace('{size}', f'{size}')
640
+ elif self.samples < sys.maxsize:
641
+ sampling = True
642
+ if self.samples > size:
643
+ self.samples = size
644
+ self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
645
+ else:
646
+ serially = True
647
+ self.msg = self.msg.replace('{size}', f'{size}')
648
+
649
+ for token in self.msg.split(' '):
650
+ if '|' in token:
651
+ self.begin.append(token.split('|')[0])
652
+ if not sampling and not serially and not offloaded:
653
+ self.end.append(token.split('|')[1])
654
+ else:
655
+ self.begin.append(token)
656
+ if not sampling and not serially and not offloaded:
657
+ self.end.append(token)
658
+
659
+ if offloaded:
660
+ log2(f'{" ".join(self.begin)} offloaded...')
661
+ elif sampling or serially:
662
+ log2(f'{" ".join(self.begin)} serially...')
663
+ else:
664
+ log2(f'{" ".join(self.begin)} with {self.workers} workers...')
665
+
666
+ def parallelize(collection: list, workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True):
667
+ return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect)
668
+
669
+ class OffloadService:
670
+ def __init__(self, handler: 'OffloadHandler'):
671
+ self.handler = handler
672
+
673
+ def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
674
+ executor = self.handler.executor
675
+
676
+ if executor:
677
+ return executor.submit(fn, *args, **kwargs)
678
+ else:
679
+ future = Future()
680
+
681
+ future.set_result(fn(*args, **kwargs))
682
+
683
+ return future
684
+
685
+ class OffloadHandler(ParallelMapHandler):
686
+ def __init__(self, max_workers: int, msg: str = None):
687
+ super().__init__(None, max_workers, msg=msg, collect=False )
688
+
689
+ def __enter__(self):
690
+ self.calc_msgs()
691
+
692
+ if self.workers > 1:
693
+ # if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
694
+ self.start_time = time.time()
695
+
696
+ self.executor = ThreadPoolExecutor(max_workers=self.workers, thread_name_prefix='offload')
697
+ self.executor.__enter__()
698
+
699
+ return OffloadService(self)
700
+
701
+ def __exit__(self, exc_type, exc_val, exc_tb):
702
+ if self.executor:
703
+ self.executor.__exit__(exc_type, exc_val, exc_tb)
704
+
705
+ if self.end:
706
+ log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
707
+
708
+ return False
709
+
710
+ # def size(self):
711
+ # if not self.collection:
712
+ # return 0
713
+
714
+ # return len(self.collection)
715
+
716
+ def calc_msgs(self):
717
+ if not self.msg:
718
+ return
719
+
720
+ size = self.size()
721
+ # return
722
+
723
+ offloaded = False
724
+ serially = False
725
+ sampling = False
726
+ if size == 0:
727
+ offloaded = True
728
+ self.msg = self.msg.replace('{size}', '1')
729
+ elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
730
+ self.msg = self.msg.replace('{size}', f'{size}')
731
+ elif self.samples < sys.maxsize:
732
+ sampling = True
733
+ if self.samples > size:
734
+ self.samples = size
735
+ self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
736
+ else:
737
+ serially = True
738
+ self.msg = self.msg.replace('{size}', f'{size}')
739
+ # return
740
+
741
+ for token in self.msg.split(' '):
742
+ if '|' in token:
743
+ self.begin.append(token.split('|')[0])
744
+ if not sampling and not serially and not offloaded:
745
+ self.end.append(token.split('|')[1])
746
+ else:
747
+ self.begin.append(token)
748
+ if not sampling and not serially and not offloaded:
749
+ self.end.append(token)
750
+
751
+ if offloaded:
752
+ log2(f'{" ".join(self.begin)} offloaded...')
753
+ elif sampling or serially:
754
+ log2(f'{" ".join(self.begin)} serially...')
755
+ else:
756
+ log2(f'{" ".join(self.begin)} with {self.workers} workers...')
757
+
758
+ def offload(max_workers: int = 3, msg: str = None):
759
+ return OffloadHandler(max_workers, msg = msg)