kaqing 2.0.98__py3-none-any.whl → 2.0.203__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (254)
  1. adam/__init__.py +0 -2
  2. adam/app_session.py +9 -12
  3. adam/apps.py +18 -4
  4. adam/batch.py +11 -25
  5. adam/checks/check_utils.py +16 -46
  6. adam/checks/cpu.py +7 -1
  7. adam/checks/cpu_metrics.py +52 -0
  8. adam/checks/disk.py +2 -3
  9. adam/columns/columns.py +3 -1
  10. adam/columns/cpu.py +3 -1
  11. adam/columns/cpu_metrics.py +22 -0
  12. adam/columns/memory.py +3 -4
  13. adam/commands/__init__.py +24 -0
  14. adam/commands/alter_tables.py +37 -63
  15. adam/commands/app/app.py +38 -0
  16. adam/commands/{app_ping.py → app/app_ping.py} +8 -14
  17. adam/commands/app/show_app_actions.py +49 -0
  18. adam/commands/{show → app}/show_app_id.py +8 -11
  19. adam/commands/{show → app}/show_app_queues.py +8 -14
  20. adam/commands/app/utils_app.py +106 -0
  21. adam/commands/audit/audit.py +31 -35
  22. adam/commands/audit/audit_repair_tables.py +26 -48
  23. adam/commands/audit/audit_run.py +50 -0
  24. adam/commands/audit/completions_l.py +15 -0
  25. adam/commands/audit/show_last10.py +36 -0
  26. adam/commands/audit/show_slow10.py +36 -0
  27. adam/commands/audit/show_top10.py +36 -0
  28. adam/commands/audit/utils_show_top10.py +71 -0
  29. adam/commands/bash/__init__.py +5 -0
  30. adam/commands/bash/bash.py +36 -0
  31. adam/commands/bash/bash_completer.py +93 -0
  32. adam/commands/bash/utils_bash.py +16 -0
  33. adam/commands/cassandra/__init__.py +0 -0
  34. adam/commands/cassandra/download_cassandra_log.py +45 -0
  35. adam/commands/cassandra/nodetool.py +64 -0
  36. adam/commands/cassandra/nodetool_commands.py +120 -0
  37. adam/commands/{restart.py → cassandra/restart_cluster.py} +12 -26
  38. adam/commands/cassandra/restart_node.py +51 -0
  39. adam/commands/cassandra/restart_nodes.py +47 -0
  40. adam/commands/cassandra/rollout.py +88 -0
  41. adam/commands/cat.py +36 -0
  42. adam/commands/cd.py +14 -92
  43. adam/commands/check.py +18 -21
  44. adam/commands/cli_commands.py +8 -4
  45. adam/commands/clipboard_copy.py +87 -0
  46. adam/commands/code.py +57 -0
  47. adam/commands/command.py +212 -39
  48. adam/commands/commands_utils.py +20 -28
  49. adam/commands/cql/alter_tables.py +66 -0
  50. adam/commands/cql/completions_c.py +29 -0
  51. adam/commands/cql/cqlsh.py +10 -29
  52. adam/commands/cql/utils_cql.py +305 -0
  53. adam/commands/debug/__init__.py +0 -0
  54. adam/commands/debug/debug.py +22 -0
  55. adam/commands/debug/debug_completes.py +35 -0
  56. adam/commands/debug/debug_timings.py +35 -0
  57. adam/commands/deploy/code_start.py +7 -10
  58. adam/commands/deploy/code_stop.py +4 -21
  59. adam/commands/deploy/code_utils.py +3 -3
  60. adam/commands/deploy/deploy.py +4 -21
  61. adam/commands/deploy/deploy_frontend.py +14 -17
  62. adam/commands/deploy/deploy_pg_agent.py +3 -6
  63. adam/commands/deploy/deploy_pod.py +65 -73
  64. adam/commands/deploy/deploy_utils.py +14 -24
  65. adam/commands/deploy/undeploy.py +4 -21
  66. adam/commands/deploy/undeploy_frontend.py +4 -7
  67. adam/commands/deploy/undeploy_pg_agent.py +6 -8
  68. adam/commands/deploy/undeploy_pod.py +11 -12
  69. adam/commands/devices/__init__.py +0 -0
  70. adam/commands/devices/device.py +149 -0
  71. adam/commands/devices/device_app.py +163 -0
  72. adam/commands/devices/device_auit_log.py +49 -0
  73. adam/commands/devices/device_cass.py +179 -0
  74. adam/commands/devices/device_export.py +87 -0
  75. adam/commands/devices/device_postgres.py +160 -0
  76. adam/commands/devices/devices.py +25 -0
  77. adam/commands/download_cassandra_log.py +45 -0
  78. adam/commands/download_file.py +47 -0
  79. adam/commands/exit.py +1 -4
  80. adam/commands/export/__init__.py +0 -0
  81. adam/commands/export/clean_up_all_export_sessions.py +37 -0
  82. adam/commands/export/clean_up_export_sessions.py +39 -0
  83. adam/commands/export/completions_x.py +11 -0
  84. adam/commands/export/download_export_session.py +40 -0
  85. adam/commands/export/drop_export_database.py +39 -0
  86. adam/commands/export/drop_export_databases.py +37 -0
  87. adam/commands/export/export.py +37 -0
  88. adam/commands/export/export_databases.py +247 -0
  89. adam/commands/export/export_select.py +34 -0
  90. adam/commands/export/export_sessions.py +211 -0
  91. adam/commands/export/export_use.py +49 -0
  92. adam/commands/export/export_x_select.py +48 -0
  93. adam/commands/export/exporter.py +361 -0
  94. adam/commands/export/import_files.py +44 -0
  95. adam/commands/export/import_session.py +44 -0
  96. adam/commands/export/importer.py +82 -0
  97. adam/commands/export/importer_athena.py +150 -0
  98. adam/commands/export/importer_sqlite.py +69 -0
  99. adam/commands/export/show_column_counts.py +45 -0
  100. adam/commands/export/show_export_databases.py +39 -0
  101. adam/commands/export/show_export_session.py +39 -0
  102. adam/commands/export/show_export_sessions.py +37 -0
  103. adam/commands/export/utils_export.py +366 -0
  104. adam/commands/find_files.py +51 -0
  105. adam/commands/find_processes.py +76 -0
  106. adam/commands/generate_report.py +52 -0
  107. adam/commands/head.py +36 -0
  108. adam/commands/help.py +12 -8
  109. adam/commands/intermediate_command.py +52 -0
  110. adam/commands/issues.py +14 -40
  111. adam/commands/kubectl.py +38 -0
  112. adam/commands/login.py +26 -25
  113. adam/commands/ls.py +11 -116
  114. adam/commands/medusa/medusa.py +4 -22
  115. adam/commands/medusa/medusa_backup.py +20 -27
  116. adam/commands/medusa/medusa_restore.py +35 -48
  117. adam/commands/medusa/medusa_show_backupjobs.py +17 -18
  118. adam/commands/medusa/medusa_show_restorejobs.py +13 -18
  119. adam/commands/medusa/utils_medusa.py +15 -0
  120. adam/commands/nodetool.py +8 -19
  121. adam/commands/os/__init__.py +0 -0
  122. adam/commands/os/cat.py +36 -0
  123. adam/commands/os/download_file.py +47 -0
  124. adam/commands/os/find_files.py +51 -0
  125. adam/commands/os/find_processes.py +76 -0
  126. adam/commands/os/head.py +36 -0
  127. adam/commands/os/shell.py +41 -0
  128. adam/commands/param_get.py +11 -14
  129. adam/commands/param_set.py +8 -12
  130. adam/commands/postgres/completions_p.py +22 -0
  131. adam/commands/postgres/postgres.py +47 -55
  132. adam/commands/postgres/postgres_databases.py +269 -0
  133. adam/commands/postgres/postgres_ls.py +4 -8
  134. adam/commands/postgres/postgres_preview.py +5 -9
  135. adam/commands/postgres/utils_postgres.py +79 -0
  136. adam/commands/preview_table.py +10 -61
  137. adam/commands/pwd.py +14 -46
  138. adam/commands/reaper/reaper.py +4 -24
  139. adam/commands/reaper/reaper_forward.py +49 -56
  140. adam/commands/reaper/reaper_forward_session.py +6 -0
  141. adam/commands/reaper/reaper_forward_stop.py +10 -16
  142. adam/commands/reaper/reaper_restart.py +7 -14
  143. adam/commands/reaper/reaper_run_abort.py +8 -33
  144. adam/commands/reaper/reaper_runs.py +43 -58
  145. adam/commands/reaper/reaper_runs_abort.py +29 -49
  146. adam/commands/reaper/reaper_schedule_activate.py +14 -33
  147. adam/commands/reaper/reaper_schedule_start.py +9 -33
  148. adam/commands/reaper/reaper_schedule_stop.py +9 -33
  149. adam/commands/reaper/reaper_schedules.py +4 -14
  150. adam/commands/reaper/reaper_status.py +8 -16
  151. adam/commands/reaper/utils_reaper.py +203 -0
  152. adam/commands/repair/repair.py +4 -22
  153. adam/commands/repair/repair_log.py +5 -11
  154. adam/commands/repair/repair_run.py +27 -34
  155. adam/commands/repair/repair_scan.py +32 -40
  156. adam/commands/repair/repair_stop.py +5 -12
  157. adam/commands/restart_cluster.py +47 -0
  158. adam/commands/restart_node.py +51 -0
  159. adam/commands/restart_nodes.py +47 -0
  160. adam/commands/rollout.py +19 -24
  161. adam/commands/shell.py +12 -4
  162. adam/commands/show/show.py +10 -23
  163. adam/commands/show/show_adam.py +3 -3
  164. adam/commands/show/show_cassandra_repairs.py +37 -0
  165. adam/commands/show/show_cassandra_status.py +47 -51
  166. adam/commands/show/show_cassandra_version.py +5 -18
  167. adam/commands/show/show_cli_commands.py +56 -0
  168. adam/commands/show/show_host.py +1 -1
  169. adam/commands/show/show_login.py +23 -27
  170. adam/commands/show/show_params.py +2 -5
  171. adam/commands/show/show_processes.py +18 -21
  172. adam/commands/show/show_storage.py +11 -20
  173. adam/commands/watch.py +26 -29
  174. adam/config.py +5 -15
  175. adam/embedded_params.py +1 -1
  176. adam/log.py +4 -4
  177. adam/repl.py +105 -133
  178. adam/repl_commands.py +68 -28
  179. adam/repl_session.py +9 -1
  180. adam/repl_state.py +300 -62
  181. adam/sql/async_executor.py +44 -0
  182. adam/sql/lark_completer.py +286 -0
  183. adam/sql/lark_parser.py +604 -0
  184. adam/sql/qingl.lark +1076 -0
  185. adam/sql/sql_completer.py +104 -64
  186. adam/sql/sql_state_machine.py +630 -0
  187. adam/sql/term_completer.py +3 -0
  188. adam/sso/authn_ad.py +6 -8
  189. adam/sso/authn_okta.py +4 -6
  190. adam/sso/cred_cache.py +3 -5
  191. adam/sso/idp.py +9 -12
  192. adam/utils.py +640 -10
  193. adam/utils_athena.py +140 -87
  194. adam/utils_audits.py +102 -0
  195. adam/utils_issues.py +32 -0
  196. adam/utils_k8s/app_clusters.py +28 -0
  197. adam/utils_k8s/app_pods.py +35 -0
  198. adam/utils_k8s/cassandra_clusters.py +34 -21
  199. adam/utils_k8s/cassandra_nodes.py +9 -6
  200. adam/utils_k8s/custom_resources.py +16 -17
  201. adam/utils_k8s/ingresses.py +2 -2
  202. adam/utils_k8s/jobs.py +7 -11
  203. adam/utils_k8s/k8s.py +96 -0
  204. adam/utils_k8s/kube_context.py +3 -6
  205. adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +11 -5
  206. adam/utils_k8s/pods.py +146 -75
  207. adam/utils_k8s/secrets.py +4 -4
  208. adam/utils_k8s/service_accounts.py +5 -4
  209. adam/utils_k8s/services.py +2 -2
  210. adam/utils_k8s/statefulsets.py +6 -14
  211. adam/utils_local.py +42 -0
  212. adam/utils_net.py +4 -4
  213. adam/utils_repl/__init__.py +0 -0
  214. adam/utils_repl/appendable_completer.py +6 -0
  215. adam/utils_repl/automata_completer.py +48 -0
  216. adam/utils_repl/repl_completer.py +89 -0
  217. adam/utils_repl/state_machine.py +173 -0
  218. adam/utils_sqlite.py +137 -0
  219. adam/version.py +1 -1
  220. {kaqing-2.0.98.dist-info → kaqing-2.0.203.dist-info}/METADATA +1 -1
  221. kaqing-2.0.203.dist-info/RECORD +277 -0
  222. kaqing-2.0.203.dist-info/top_level.txt +2 -0
  223. teddy/__init__.py +0 -0
  224. teddy/lark_parser.py +436 -0
  225. teddy/lark_parser2.py +618 -0
  226. adam/commands/app.py +0 -67
  227. adam/commands/bash.py +0 -92
  228. adam/commands/cp.py +0 -95
  229. adam/commands/cql/cql_completions.py +0 -11
  230. adam/commands/cql/cql_table_completer.py +0 -8
  231. adam/commands/cql/cql_utils.py +0 -115
  232. adam/commands/describe/describe.py +0 -47
  233. adam/commands/describe/describe_keyspace.py +0 -60
  234. adam/commands/describe/describe_keyspaces.py +0 -49
  235. adam/commands/describe/describe_schema.py +0 -49
  236. adam/commands/describe/describe_table.py +0 -60
  237. adam/commands/describe/describe_tables.py +0 -49
  238. adam/commands/devices.py +0 -118
  239. adam/commands/logs.py +0 -39
  240. adam/commands/postgres/postgres_session.py +0 -240
  241. adam/commands/postgres/postgres_utils.py +0 -31
  242. adam/commands/postgres/psql_completions.py +0 -10
  243. adam/commands/postgres/psql_table_completer.py +0 -11
  244. adam/commands/reaper/reaper_session.py +0 -159
  245. adam/commands/report.py +0 -57
  246. adam/commands/show/show_app_actions.py +0 -53
  247. adam/commands/show/show_commands.py +0 -61
  248. adam/commands/show/show_repairs.py +0 -47
  249. adam/sql/state_machine.py +0 -460
  250. kaqing-2.0.98.dist-info/RECORD +0 -191
  251. kaqing-2.0.98.dist-info/top_level.txt +0 -1
  252. /adam/commands/{describe → app}/__init__.py +0 -0
  253. {kaqing-2.0.98.dist-info → kaqing-2.0.203.dist-info}/WHEEL +0 -0
  254. {kaqing-2.0.98.dist-info → kaqing-2.0.203.dist-info}/entry_points.txt +0 -0
adam/utils.py CHANGED
@@ -1,3 +1,5 @@
+from abc import ABC
+from concurrent.futures import Future, ThreadPoolExecutor
 from contextlib import redirect_stdout
 import copy
 import csv
@@ -9,19 +11,59 @@ import os
 from pathlib import Path
 import random
 import string
+import threading
+import traceback
+from typing import Callable, Iterator, TypeVar, Union
 from dateutil import parser
 import subprocess
 import sys
 import time
 import click
 import yaml
+from prompt_toolkit.completion import Completer
 
 from . import __version__
 
-def to_tabular(lines: str, header: str = None, dashed_line = False):
-    return lines_to_tabular(lines.split('\n'), header, dashed_line)
+T = TypeVar('T')
+
+log_state = threading.local()
+
+class ConfigReadable:
+    def is_debug() -> bool:
+        pass
+
+    def get(self, key: str, default: T) -> T:
+        pass
+
+class ConfigHolder:
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ConfigHolder, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self):
+        if not hasattr(self, 'config'):
+            # set by Config
+            self.config: 'ConfigReadable' = None
+            # only for testing
+            self.is_display_help = True
+            # set by ReplSession
+            self.append_command_history = lambda entry: None
+
+NO_SORT = 0
+SORT = 1
+REVERSE_SORT = -1
+
+def tabulize(lines: list[T], fn: Callable[..., T] = None, header: str = None, dashed_line = False, separator = ' ', to: int = 1, sorted: int = NO_SORT):
+    if fn:
+        lines = list(map(fn, lines))
+
+    if sorted == SORT:
+        lines.sort()
+    elif sorted == REVERSE_SORT:
+        lines.sort(reverse=True)
 
-def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False, separator = ' '):
     maxes = []
     nls = []
 
@@ -52,7 +94,14 @@ def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False,
     for line in lines:
         format_line(line)
 
-    return '\n'.join(nls)
+    table = '\n'.join(nls)
+
+    if to == 1:
+        log(table)
+    elif to == 2:
+        log2(table)
+
+    return table
 
 def convert_seconds(total_seconds_float):
     total_seconds_int = int(total_seconds_float) # Convert float to integer seconds
@@ -69,18 +118,28 @@ def epoch(timestamp_string: str):
     return parser.parse(timestamp_string).timestamp()
 
 def log(s = None):
+    if not loggable():
+        return False
+
     # want to print empty line for False or empty collection
     if s == None:
         print()
     else:
         click.echo(s)
 
+    return True
+
 def log2(s = None, nl = True):
+    if not loggable():
+        return False
+
     if s:
         click.echo(s, err=True, nl=nl)
     else:
         print(file=sys.stderr)
 
+    return True
+
 def elapsed_time(start_time: float):
     end_time = time.time()
     elapsed_time = end_time - start_time
@@ -95,7 +154,7 @@ def duration(start_time: float, end_time: float = None):
         end_time = time.time()
     d = convert_seconds(end_time - start_time)
     t = []
-    if d[0]:
+    if d:
         t.append(f'{d[0]}h')
     if t or d[1]:
         t.append(f'{d[1]}m')
@@ -121,7 +180,11 @@ def deep_merge_dicts(dict1, dict2):
             merged_dict[key] = deep_merge_dicts(merged_dict[key], value)
         elif key not in merged_dict or value:
             # Otherwise, overwrite or add the value from dict2
-            merged_dict[key] = value
+            if key in merged_dict and isinstance(merged_dict[key], Completer):
+                pass
+                # print('SEAN completer found, ignoring', key, value)
+            else:
+                merged_dict[key] = value
     return merged_dict
 
 def deep_sort_dict(d):
@@ -159,6 +222,9 @@ def get_deep_keys(d, current_path=""):
     return keys
 
 def display_help(replace_arg = False):
+    if not ConfigHolder().is_display_help:
+        return
+
     args = copy.copy(sys.argv)
     if replace_arg:
         args[len(args) - 1] = '--help'
@@ -203,12 +269,13 @@ def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
         with redirect_stdout(body) as f:
             dict_writer = csv.DictWriter(f, keys, delimiter=delimiter)
             dict_writer.writerows(flattened_data)
+
         return header.getvalue().strip('\r\n'), [l.strip('\r') for l in body.getvalue().split('\n')]
     else:
         return None
 
 def log_to_file(config: dict[any, any]):
-    try:
+    with log_exc():
         base = f"/kaqing/logs"
         os.makedirs(base, exist_ok=True)
 
@@ -223,8 +290,6 @@
                 f.write(config)
             else:
                 f.write(config)
-    except:
-        pass
 
 def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
     dir = f'{Path.home()}/.kaqing'
@@ -240,4 +305,569 @@ def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out
     return path
 
 def idp_token_from_env():
-    return os.getenv('IDP_TOKEN')
+    return os.getenv('IDP_TOKEN')
+
+def is_lambda(func):
+    return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
+
+def debug(s = None):
+    if ConfigHolder().config.is_debug():
+        log2(f'DEBUG {s}')
+
+def debug_complete(s = None):
+    CommandLog.log(f'DEBUG {s}', config=ConfigHolder().config.get('debugs.complete', 'off'))
+
+def debug_trace():
+    if ConfigHolder().config.is_debug():
+    # if LogConfig.is_debug():
+        log2(traceback.format_exc())
+
+def in_docker() -> bool:
+    if os.path.exists('/.dockerenv'):
+        return True
+
+    try:
+        with open('/proc/1/cgroup', 'rt') as f:
+            for line in f:
+                if 'docker' in line or 'lxc' in line:
+                    return True
+    except FileNotFoundError:
+        pass
+
+    return False
+
+class Ing:
+    def __init__(self, msg: str, suppress_log=False):
+        self.msg = msg
+        self.suppress_log = suppress_log
+
+    def __enter__(self):
+        if not hasattr(log_state, 'ing_cnt'):
+            log_state.ing_cnt = 0
+
+        try:
+            if not log_state.ing_cnt:
+                if not self.suppress_log and not ConfigHolder().config.is_debug():
+                    log2(f'{self.msg}...', nl=False)
+
+            return None
+        finally:
+            log_state.ing_cnt += 1
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        log_state.ing_cnt -= 1
+        if not log_state.ing_cnt:
+            if not self.suppress_log and not ConfigHolder().config.is_debug():
+                log2(' OK')
+
+        return False
+
+def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
+    if not body:
+        return Ing(msg, suppress_log=suppress_log)
+
+    r = None
+
+    t = Ing(msg, suppress_log=suppress_log)
+    t.__enter__()
+    try:
+        r = body()
+    finally:
+        t.__exit__(None, None, None)
+
+    return r
+
+def loggable():
+    return ConfigHolder().config and ConfigHolder().config.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
+
+class TimingNode:
+    def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
+        self.depth = depth
+        self.s0 = s0
+        self.line = line
+        self.children = []
+
+    def __str__(self):
+        return f'[{self.depth}: {self.line}, children={len(self.children)}]'
+
+    def tree(self):
+        lines = []
+        if self.line:
+            lines.append(self.line)
+
+        for child in self.children:
+            if child.line:
+                lines.append(child.tree())
+        return '\n'.join(lines)
+
+class LogTiming:
+    def __init__(self, msg: str, s0: time.time = None):
+        self.msg = msg
+        self.s0 = s0
+
+    def __enter__(self):
+        if (config := ConfigHolder().config.get('debugs.timings', 'off')) not in ['on', 'file']:
+            return
+
+        if not hasattr(log_state, 'timings'):
+            log_state.timings = TimingNode(0)
+
+        self.me = log_state.timings
+        log_state.timings = TimingNode(self.me.depth+1)
+        if not self.s0:
+            self.s0 = time.time()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if (config := ConfigHolder().config.get('debugs.timings', 'off')) not in ['on', 'file']:
+            return False
+
+        child = log_state.timings
+        log_state.timings.line = timing_log_line(self.me.depth, self.msg, self.s0)
+
+        if child and child.line:
+            self.me.children.append(child)
+        log_state.timings = self.me
+
+        if not self.me.depth:
+            # log timings finally
+            CommandLog.log(self.me.tree(), config)
+
+            log_state.timings = TimingNode(0)
+
+        return False
+
+def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
+    if not s0 and not body:
+        return LogTiming(msg, s0=s0)
+
+    if not ConfigHolder().config.get('debugs.timings', False):
+        if body:
+            return body()
+
+        return
+
+    r = None
+
+    t = LogTiming(msg, s0=s0)
+    t.__enter__()
+    try:
+        if body:
+            r = body()
+    finally:
+        t.__exit__(None, None, None)
+
+    return r
+
+def timing_log_line(depth: int, msg: str, s0: time.time):
+    elapsed = time.time() - s0
+    offloaded = '-' if threading.current_thread().name.startswith('offload') or threading.current_thread().name.startswith('async') else '+'
+    prefix = f'[{offloaded} timings] '
+
+    if depth:
+        if elapsed > 0.01:
+            prefix = (' ' * (depth-1)) + '* '
+        else:
+            prefix = ' ' * depth
+
+    return f'{prefix}{msg}: {elapsed:.2f} sec'
+
+class WaitLog:
+    wait_log_flag = False
+
+    def wait_log(msg: str):
+        if not WaitLog.wait_log_flag:
+            log2(msg)
+            WaitLog.wait_log_flag = True
+
+    def clear_wait_log_flag():
+        WaitLog.wait_log_flag = False
+
+def bytes_generator_from_file(file_path, chunk_size=4096):
+    with open(file_path, 'rb') as f:
+        while True:
+            chunk = f.read(chunk_size)
+            if not chunk:
+                break
+            yield chunk
+
+class GeneratorStream(io.RawIOBase):
+    def __init__(self, generator):
+        self._generator = generator
+        self._buffer = b''  # Buffer to store leftover bytes from generator yields
+
+    def readable(self):
+        return True
+
+    def _read_from_generator(self):
+        try:
+            chunk = next(self._generator)
+            if isinstance(chunk, str):
+                chunk = chunk.encode('utf-8')  # Encode if generator yields strings
+            self._buffer += chunk
+        except StopIteration:
+            pass  # Generator exhausted
+
+    def readinto(self, b):
+        # Fill the buffer if necessary
+        while len(self._buffer) < len(b):
+            old_buffer_len = len(self._buffer)
+            self._read_from_generator()
+            if len(self._buffer) == old_buffer_len:  # Generator exhausted and buffer empty
+                break
+
+        bytes_to_read = min(len(b), len(self._buffer))
+        b[:bytes_to_read] = self._buffer[:bytes_to_read]
+        self._buffer = self._buffer[bytes_to_read:]
+        return bytes_to_read
+
+    def read(self, size=-1):
+        if size == -1:  # Read all remaining data
+            while True:
+                old_buffer_len = len(self._buffer)
+                self._read_from_generator()
+                if len(self._buffer) == old_buffer_len:
+                    break
+            data = self._buffer
+            self._buffer = b''
+            return data
+        else:
+            # Ensure enough data in buffer
+            while len(self._buffer) < size:
+                old_buffer_len = len(self._buffer)
+                self._read_from_generator()
+                if len(self._buffer) == old_buffer_len:
+                    break
+
+            data = self._buffer[:size]
+            self._buffer = self._buffer[size:]
+            return data
+
+class LogTrace:
+    def __init__(self, err_msg: Union[str, callable, bool] = None):
+        self.err_msg = err_msg
+
+    def __enter__(self):
+        return None
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is not None:
+            if self.err_msg is True:
+                log2(str(exc_val))
+            elif callable(self.err_msg):
+                log2(self.err_msg(exc_val))
+            elif self.err_msg is not False and self.err_msg:
+                log2(self.err_msg)
+
+            if self.err_msg is not False and ConfigHolder().config.is_debug():
+                traceback.print_exception(exc_type, exc_val, exc_tb, file=sys.stderr)
+
+        # swallow exception
+        return True
+
+def log_exc(err_msg: Union[str, callable, bool] = None):
+    return LogTrace(err_msg=err_msg)
+
+class ParallelService:
+    def __init__(self, handler: 'ParallelMapHandler'):
+        self.handler = handler
+
+    def map(self, fn: Callable[..., T]) -> Iterator[T]:
+        executor = self.handler.executor
+        collection = self.handler.collection
+        collect = self.handler.collect
+        samples_cnt = self.handler.samples
+
+        iterator = None
+        if executor:
+            iterator = executor.map(fn, collection)
+        elif samples_cnt < sys.maxsize:
+            samples = []
+
+            for elem in collection:
+                if not samples_cnt:
+                    break
+
+                samples.append(fn(elem))
+                samples_cnt -= 1
+
+            iterator = iter(samples)
+        else:
+            iterator = map(fn, collection)
+
+        if collect:
+            return list(iterator)
+        else:
+            return iterator
+
+thread_pools: dict[str, ThreadPoolExecutor] = {}
+thread_pool_lock = threading.Lock()
+
+class ParallelMapHandler:
+    def __init__(self, collection: list, workers: int, samples: int = sys.maxsize, msg: str = None, collect = True, name = None):
+        self.collection = collection
+        self.workers = workers
+        self.executor = None
+        self.samples = samples
+        self.msg = msg
+        if msg and msg.startswith('d`'):
+            if ConfigHolder().config.is_debug():
+                self.msg = msg.replace('d`', '', 1)
+            else:
+                self.msg = None
+        self.collect = collect
+        self.name = name
+
+        self.begin = []
+        self.end = []
+        self.start_time = None
+
+    def __enter__(self):
+        self.start_time = None
+
+        self.calc_msgs()
+
+        if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+            self.start_time = time.time()
+
+            self.executor = self.pool()
+            # self.executor = ThreadPoolExecutor(max_workers=self.workers)
+            self.executor.__enter__()
+
+        return ParallelService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if not self.name and self.executor:
+            self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+        if self.end:
+            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+        return False
+
+    def pool(self, thread_name_prefix: str = None):
+        if not self.name:
+            return ThreadPoolExecutor(max_workers=self.workers)
+
+        if self.name not in thread_pools:
+            thread_pools[self.name] = ThreadPoolExecutor(max_workers=self.workers, thread_name_prefix=thread_name_prefix)
+
+        return thread_pools[self.name]
+
+    def size(self):
+        if not self.collection:
+            return 0
+
+        return len(self.collection)
+
+    def calc_msgs(self):
+        if not self.msg:
+            return
+
+        self.begin = []
+        self.end = []
+        size = self.size()
+        offloaded = False
+        serially = False
+        sampling = False
+        if size == 0:
+            offloaded = True
+            msg = self.msg.replace('{size}', '1')
+        elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
+            msg = self.msg.replace('{size}', f'{size}')
+        elif self.samples < sys.maxsize:
+            sampling = True
+            samples = self.samples
+            if self.samples > size:
+                samples = size
+            msg = self.msg.replace('{size}', f'{samples}/{size} sample')
+        else:
+            serially = True
+            msg = self.msg.replace('{size}', f'{size}')
+
+        for token in msg.split(' '):
+            if '|' in token:
+                self.begin.append(token.split('|')[0])
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token.split('|')[1])
+            else:
+                self.begin.append(token)
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token)
+
+        if offloaded:
+            log2(f'{" ".join(self.begin)} offloaded...')
+        elif sampling or serially:
+            log2(f'{" ".join(self.begin)} serially...')
+        else:
+            log2(f'{" ".join(self.begin)} with {self.workers} workers...')
+
+# parallelizers: dict[str, ParallelMapHandler] = {}
+# parallelizer_lock = threading.Lock()
+
+def parallelize(collection: list, workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True, name = None):
+    return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect, name = name)
+    # if not name:
+    #     return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect)
+
+    # with parallelizer_lock:
+    #     if name not in parallelizers:
+    #         parallelizers[name] = ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect, name = name)
+
+    #     return parallelizers[name]
+
+class OffloadService:
+    def __init__(self, handler: 'OffloadHandler'):
+        self.handler = handler
+
+    def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
+        executor = self.handler.executor
+
+        if executor:
+            return executor.submit(fn, *args, **kwargs)
+        else:
+            future = Future()
+
+            future.set_result(fn(*args, **kwargs))
+
+            return future
+
+class OffloadHandler(ParallelMapHandler):
+    def __init__(self, max_workers: int, msg: str = None, name: str = None):
+        super().__init__(None, max_workers, msg=msg, collect=False, name=f'offload-{name}')
+
+    def __enter__(self):
+        self.start_time = None
+        self.calc_msgs()
+
+        if self.workers > 1:
+            self.start_time = time.time()
+
+            self.executor = self.pool(thread_name_prefix='offload')
+            # self.executor = ThreadPoolExecutor(max_workers=self.workers, thread_name_prefix='offload')
+            self.executor.__enter__()
+
+        return OffloadService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if not self.name and self.executor:
+            self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+        if self.end:
+            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+        return False
+
+    def calc_msgs(self):
+        if not self.msg:
+            return
+
+        self.begin = []
+        self.end = []
+        size = self.size()
+
+        offloaded = False
+        serially = False
+        sampling = False
+        if size == 0:
+            offloaded = True
+            msg = self.msg.replace('{size}', '1')
+        elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
+            msg = self.msg.replace('{size}', f'{size}')
+        elif self.samples < sys.maxsize:
+            sampling = True
+            samples = self.samples
+            if samples > size:
+                samples = size
+            msg = self.msg.replace('{size}', f'{samples}/{size} sample')
+        else:
+            serially = True
+            msg = self.msg.replace('{size}', f'{size}')
+
+        for token in msg.split(' '):
+            if '|' in token:
+                self.begin.append(token.split('|')[0])
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token.split('|')[1])
+            else:
+                self.begin.append(token)
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token)
+
+        if offloaded:
+            log2(f'{" ".join(self.begin)} offloaded...')
+        elif sampling or serially:
+            log2(f'{" ".join(self.begin)} serially...')
+        else:
+            log2(f'{" ".join(self.begin)} with {self.workers} workers...')
+
+# offloaders: dict[str, OffloadHandler] = {}
+# offloaders_lock = threading.Lock()
+
+def offload(max_workers: int = 3, msg: str = None, name: str = None):
+    return OffloadHandler(max_workers, msg = msg, name = name)
+    # if not name:
+    #     return OffloadHandler(max_workers, msg = msg)
+
+    # with offloaders_lock:
+    #     if name not in offloaders:
+    #         offloaders[name] = OffloadHandler(max_workers, msg = msg, name = name)
+
+    #     return offloaders[name]
+
+def kaqing_log_file_name(suffix = 'log'):
+    return f"{ConfigHolder().config.get('log-prefix', '/tmp/qing')}-{datetime.now().strftime('%d%H%M%S')}.{suffix}"
+
+class LogFileHandler:
+    def __init__(self, suffix = 'log'):
+        self.suffix = suffix
+
+    def __enter__(self):
+        self.f = open(kaqing_log_file_name(), 'w')
+        self.f.__enter__()
+
+        return self.f
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.f.__exit__(exc_type, exc_val, exc_tb)
+
+        if ConfigHolder().append_command_history:
+            ConfigHolder().append_command_history(f':sh cat {self.f.name}')
+
+        return False
+
+def kaqing_log_file(suffix = 'log'):
+    return LogFileHandler(suffix = suffix)
+
+class CommandLog:
+    log_file = None
+
+    def log(line: str, config: str = 'off'):
+        if config == 'file':
+            if not CommandLog.log_file:
+                try:
+                    CommandLog.log_file = open(kaqing_log_file_name(suffix='cmd.log'), 'w')
+                except:
+                    pass
+
+            try:
+                CommandLog.log_file.write(line + '\n')
+            except:
+                pass
+        elif config == 'on':
+            log2(line)
+
+    def close_log_file():
+        if CommandLog.log_file:
+            try:
+                CommandLog.log_file.close()
+            except:
+                pass
+
+            if ConfigHolder().append_command_history:
+                ConfigHolder().append_command_history(f':sh cat {CommandLog.log_file.name}')
+
+            CommandLog.log_file = None
+
+class ExecResult(ABC):
+    def exit_code(self) -> int:
+        pass
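
The bulk of the utils.py additions are context-manager helpers (Ing, LogTrace, ParallelMapHandler, OffloadHandler) exposed through the ing(), log_exc(), parallelize() and offload() factory functions. A minimal usage sketch, inferred from the signatures added above; the node list, worker counts and progress messages are hypothetical, and ConfigHolder().config is assumed to have already been set by Config so the is_debug() checks work:

from adam.utils import ing, log_exc, offload, parallelize

nodes = ['node-0', 'node-1', 'node-2']  # hypothetical work items

# prints 'Checking nodes...' once, then ' OK' when the block exits (unless debug is on)
with ing('Checking nodes'):
    # fan the work out over a named, reusable thread pool;
    # '{size}' in the message is replaced with the collection size,
    # and the 'begin|end' halves of each token pick the start/finish wording
    with parallelize(nodes, workers=3, msg='Checking|Checked {size} nodes', name='checks') as p:
        results = p.map(lambda n: n.upper())  # a list, because collect=True by default

# swallow the exception, log the message, and print a traceback only in debug mode
with log_exc('could not read optional file'):
    open('/does/not/exist').read()

# run a single call on a background 'offload' pool and get a Future back
with offload(max_workers=2, msg='Collecting|Collected {size} report', name='reports') as o:
    future = o.submit(sum, [1, 2, 3])
    print(future.result())  # 6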