kaqing 2.0.184-py3-none-any.whl → 2.0.227-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of kaqing might be problematic.
Files changed (205)
  1. adam/app_session.py +1 -1
  2. adam/batch.py +15 -15
  3. adam/checks/compactionstats.py +2 -1
  4. adam/checks/cpu.py +2 -1
  5. adam/checks/disk.py +6 -5
  6. adam/checks/gossip.py +2 -1
  7. adam/checks/memory.py +2 -1
  8. adam/checks/status.py +2 -1
  9. adam/commands/app/app.py +4 -4
  10. adam/commands/app/app_ping.py +2 -2
  11. adam/commands/{login.py → app/login.py} +2 -2
  12. adam/commands/app/show_app_actions.py +3 -3
  13. adam/commands/app/show_app_id.py +2 -2
  14. adam/commands/app/show_app_queues.py +2 -2
  15. adam/commands/{show → app}/show_login.py +3 -3
  16. adam/commands/app/utils_app.py +9 -1
  17. adam/commands/audit/audit.py +8 -24
  18. adam/commands/audit/audit_repair_tables.py +3 -3
  19. adam/commands/audit/audit_run.py +3 -3
  20. adam/commands/audit/completions_l.py +15 -0
  21. adam/commands/audit/show_last10.py +2 -3
  22. adam/commands/audit/show_slow10.py +2 -2
  23. adam/commands/audit/show_top10.py +2 -2
  24. adam/commands/bash/bash.py +3 -3
  25. adam/commands/bash/utils_bash.py +1 -1
  26. adam/commands/cassandra/download_cassandra_log.py +45 -0
  27. adam/commands/cassandra/restart_cluster.py +47 -0
  28. adam/commands/cassandra/restart_node.py +51 -0
  29. adam/commands/cassandra/restart_nodes.py +47 -0
  30. adam/commands/{rollout.py → cassandra/rollout.py} +3 -3
  31. adam/commands/{show → cassandra}/show_cassandra_repairs.py +7 -5
  32. adam/commands/{show → cassandra}/show_cassandra_status.py +24 -17
  33. adam/commands/{show → cassandra}/show_cassandra_version.py +2 -2
  34. adam/commands/cassandra/show_processes.py +50 -0
  35. adam/commands/cassandra/show_storage.py +44 -0
  36. adam/commands/{watch.py → cassandra/watch.py} +2 -2
  37. adam/commands/cli/__init__.py +0 -0
  38. adam/commands/{cli_commands.py → cli/cli_commands.py} +6 -1
  39. adam/commands/{clipboard_copy.py → cli/clipboard_copy.py} +4 -4
  40. adam/commands/{show/show_commands.py → cli/show_cli_commands.py} +5 -5
  41. adam/commands/code.py +2 -2
  42. adam/commands/command.py +54 -14
  43. adam/commands/commands_utils.py +14 -6
  44. adam/commands/config/__init__.py +0 -0
  45. adam/commands/{param_get.py → config/param_get.py} +2 -2
  46. adam/commands/{param_set.py → config/param_set.py} +2 -2
  47. adam/commands/{show → config}/show_params.py +3 -3
  48. adam/commands/{alter_tables.py → cql/alter_tables.py} +3 -3
  49. adam/commands/cql/completions_c.py +29 -0
  50. adam/commands/cql/cqlsh.py +4 -8
  51. adam/commands/cql/utils_cql.py +36 -17
  52. adam/commands/debug/__init__.py +0 -0
  53. adam/commands/debug/debug.py +22 -0
  54. adam/commands/debug/debug_completes.py +35 -0
  55. adam/commands/debug/debug_timings.py +35 -0
  56. adam/commands/debug/show_offloaded_completes.py +45 -0
  57. adam/commands/deploy/code_start.py +2 -2
  58. adam/commands/deploy/code_stop.py +2 -2
  59. adam/commands/deploy/deploy_frontend.py +2 -2
  60. adam/commands/deploy/deploy_pg_agent.py +2 -2
  61. adam/commands/deploy/deploy_pod.py +2 -2
  62. adam/commands/deploy/undeploy_frontend.py +2 -2
  63. adam/commands/deploy/undeploy_pg_agent.py +2 -2
  64. adam/commands/deploy/undeploy_pod.py +2 -2
  65. adam/commands/devices/device.py +37 -11
  66. adam/commands/devices/device_app.py +7 -7
  67. adam/commands/devices/device_auit_log.py +2 -2
  68. adam/commands/devices/device_cass.py +6 -6
  69. adam/commands/devices/device_export.py +7 -4
  70. adam/commands/devices/device_postgres.py +19 -9
  71. adam/commands/devices/devices.py +1 -1
  72. adam/commands/diag/__init__.py +0 -0
  73. adam/commands/{check.py → diag/check.py} +3 -3
  74. adam/commands/diag/generate_report.py +52 -0
  75. adam/commands/{issues.py → diag/issues.py} +3 -2
  76. adam/commands/exit.py +2 -2
  77. adam/commands/export/clean_up_all_export_sessions.py +2 -2
  78. adam/commands/export/clean_up_export_sessions.py +2 -2
  79. adam/commands/export/completions_x.py +11 -0
  80. adam/commands/export/download_export_session.py +5 -5
  81. adam/commands/export/drop_export_database.py +2 -2
  82. adam/commands/export/drop_export_databases.py +2 -2
  83. adam/commands/export/export.py +3 -19
  84. adam/commands/export/export_databases.py +20 -11
  85. adam/commands/export/export_select.py +9 -34
  86. adam/commands/export/export_sessions.py +13 -11
  87. adam/commands/export/export_use.py +6 -6
  88. adam/commands/export/export_x_select.py +48 -0
  89. adam/commands/export/exporter.py +140 -53
  90. adam/commands/export/import_files.py +3 -7
  91. adam/commands/export/import_session.py +2 -6
  92. adam/commands/export/importer.py +12 -13
  93. adam/commands/export/importer_athena.py +15 -35
  94. adam/commands/export/importer_sqlite.py +19 -8
  95. adam/commands/export/show_column_counts.py +11 -12
  96. adam/commands/export/show_export_databases.py +4 -4
  97. adam/commands/export/show_export_session.py +5 -5
  98. adam/commands/export/show_export_sessions.py +4 -4
  99. adam/commands/export/utils_export.py +40 -25
  100. adam/commands/fs/__init__.py +0 -0
  101. adam/commands/{cat.py → fs/cat.py} +4 -4
  102. adam/commands/fs/cat_local.py +42 -0
  103. adam/commands/{cd.py → fs/cd.py} +4 -4
  104. adam/commands/{download_file.py → fs/download_file.py} +7 -7
  105. adam/commands/{find_files.py → fs/find_files.py} +7 -7
  106. adam/commands/{find_processes.py → fs/find_processes.py} +14 -22
  107. adam/commands/{head.py → fs/head.py} +5 -5
  108. adam/commands/fs/head_local.py +46 -0
  109. adam/commands/{ls.py → fs/ls.py} +4 -4
  110. adam/commands/fs/ls_local.py +40 -0
  111. adam/commands/{pwd.py → fs/pwd.py} +2 -2
  112. adam/commands/fs/rm.py +18 -0
  113. adam/commands/fs/rm_downloads.py +39 -0
  114. adam/commands/fs/rm_logs.py +44 -0
  115. adam/commands/fs/rm_logs_local.py +38 -0
  116. adam/commands/{shell.py → fs/shell.py} +2 -2
  117. adam/commands/{show → fs}/show_adam.py +3 -3
  118. adam/commands/{show → fs}/show_host.py +2 -2
  119. adam/commands/fs/show_last_results.py +39 -0
  120. adam/commands/fs/tail.py +36 -0
  121. adam/commands/fs/tail_local.py +46 -0
  122. adam/commands/fs/utils_fs.py +192 -0
  123. adam/commands/help.py +2 -2
  124. adam/commands/intermediate_command.py +3 -0
  125. adam/commands/kubectl.py +2 -2
  126. adam/commands/medusa/medusa_backup.py +2 -2
  127. adam/commands/medusa/medusa_restore.py +4 -18
  128. adam/commands/medusa/medusa_show_backupjobs.py +2 -2
  129. adam/commands/medusa/medusa_show_restorejobs.py +2 -2
  130. adam/commands/medusa/utils_medusa.py +15 -0
  131. adam/commands/nodetool/__init__.py +0 -0
  132. adam/commands/nodetool/nodetool.py +87 -0
  133. adam/commands/nodetool/utils_nodetool.py +44 -0
  134. adam/commands/postgres/completions_p.py +22 -0
  135. adam/commands/postgres/postgres.py +10 -20
  136. adam/commands/postgres/postgres_databases.py +3 -3
  137. adam/commands/postgres/postgres_ls.py +3 -3
  138. adam/commands/postgres/postgres_preview.py +2 -2
  139. adam/commands/postgres/utils_postgres.py +12 -2
  140. adam/commands/preview_table.py +3 -4
  141. adam/commands/reaper/reaper_forward.py +2 -2
  142. adam/commands/reaper/reaper_forward_stop.py +2 -2
  143. adam/commands/reaper/reaper_restart.py +2 -2
  144. adam/commands/reaper/reaper_run_abort.py +2 -2
  145. adam/commands/reaper/reaper_runs.py +14 -12
  146. adam/commands/reaper/reaper_runs_abort.py +2 -2
  147. adam/commands/reaper/reaper_schedule_activate.py +8 -4
  148. adam/commands/reaper/reaper_schedule_start.py +3 -4
  149. adam/commands/reaper/reaper_schedule_stop.py +3 -4
  150. adam/commands/reaper/reaper_schedules.py +2 -2
  151. adam/commands/reaper/reaper_status.py +2 -2
  152. adam/commands/reaper/utils_reaper.py +41 -6
  153. adam/commands/repair/repair_log.py +2 -2
  154. adam/commands/repair/repair_run.py +2 -2
  155. adam/commands/repair/repair_scan.py +2 -4
  156. adam/commands/repair/repair_stop.py +2 -3
  157. adam/commands/{show/show.py → show.py} +12 -11
  158. adam/config.py +4 -5
  159. adam/embedded_params.py +1 -1
  160. adam/repl.py +24 -10
  161. adam/repl_commands.py +68 -45
  162. adam/repl_session.py +16 -1
  163. adam/repl_state.py +16 -1
  164. adam/sql/async_executor.py +62 -0
  165. adam/sql/lark_completer.py +286 -0
  166. adam/sql/lark_parser.py +604 -0
  167. adam/sql/qingl.lark +1075 -0
  168. adam/sso/cred_cache.py +2 -5
  169. adam/utils.py +259 -82
  170. adam/utils_async_job.py +73 -0
  171. adam/utils_k8s/app_clusters.py +11 -4
  172. adam/utils_k8s/app_pods.py +10 -5
  173. adam/utils_k8s/cassandra_clusters.py +19 -7
  174. adam/utils_k8s/cassandra_nodes.py +16 -6
  175. adam/utils_k8s/k8s.py +9 -0
  176. adam/utils_k8s/kube_context.py +1 -4
  177. adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +8 -2
  178. adam/utils_k8s/pods.py +189 -29
  179. adam/utils_k8s/statefulsets.py +5 -2
  180. adam/utils_local.py +78 -2
  181. adam/utils_repl/appendable_completer.py +6 -0
  182. adam/utils_repl/repl_completer.py +51 -4
  183. adam/utils_sqlite.py +3 -8
  184. adam/version.py +1 -1
  185. {kaqing-2.0.184.dist-info → kaqing-2.0.227.dist-info}/METADATA +1 -1
  186. kaqing-2.0.227.dist-info/RECORD +280 -0
  187. kaqing-2.0.227.dist-info/top_level.txt +2 -0
  188. teddy/__init__.py +0 -0
  189. teddy/lark_parser.py +436 -0
  190. teddy/lark_parser2.py +618 -0
  191. adam/commands/cql/cql_completions.py +0 -32
  192. adam/commands/export/export_select_x.py +0 -54
  193. adam/commands/logs.py +0 -37
  194. adam/commands/nodetool.py +0 -69
  195. adam/commands/postgres/psql_completions.py +0 -11
  196. adam/commands/report.py +0 -61
  197. adam/commands/restart.py +0 -60
  198. adam/commands/show/show_processes.py +0 -49
  199. adam/commands/show/show_storage.py +0 -42
  200. kaqing-2.0.184.dist-info/RECORD +0 -244
  201. kaqing-2.0.184.dist-info/top_level.txt +0 -1
  202. /adam/commands/{show → cassandra}/__init__.py +0 -0
  203. /adam/commands/{nodetool_commands.py → nodetool/nodetool_commands.py} +0 -0
  204. {kaqing-2.0.184.dist-info → kaqing-2.0.227.dist-info}/WHEEL +0 -0
  205. {kaqing-2.0.184.dist-info → kaqing-2.0.227.dist-info}/entry_points.txt +0 -0
adam/sso/cred_cache.py CHANGED
@@ -2,8 +2,7 @@ import os
 from pathlib import Path
 from dotenv import load_dotenv

-from adam.config import Config
-from adam.utils import debug, log_exc
+from adam.utils import creating_dir, debug, log_exc
 from adam.utils_k8s.kube_context import KubeContext

 class CredCache:
@@ -15,7 +14,7 @@ class CredCache:

     def __init__(self):
         if not hasattr(self, 'env_f'):
-            self.dir = f'{Path.home()}/.kaqing'
+            self.dir = creating_dir(f'{Path.home()}/.kaqing')
             self.env_f = f'{self.dir}/.credentials'
             # immutable - cannot reload with different file content
             load_dotenv(dotenv_path=self.env_f)
@@ -44,8 +43,6 @@ class CredCache:
             updated.append(f'IDP_PASSWORD={password}')

         if updated:
-            if not os.path.exists(self.env_f):
-                os.makedirs(self.dir, exist_ok=True)
             with open(self.env_f, 'w') as file:
                 file.write('\n'.join(updated))

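The cred_cache.py change above drops the manual os.path.exists / os.makedirs guard in favor of the new creating_dir() helper added to adam/utils.py in this release (its definition appears further down in this diff). A minimal sketch of the resulting idiom, for illustration only:

# Sketch only: the directory-creation idiom CredCache now relies on;
# creating_dir() is the helper added to adam/utils.py below.
from pathlib import Path
from adam.utils import creating_dir

cred_dir = creating_dir(f'{Path.home()}/.kaqing')   # directory is created on first use
env_file = f'{cred_dir}/.credentials'

# the credentials write no longer needs its own exists/makedirs check
with open(env_file, 'w') as f:
    f.write('IDP_PASSWORD=...')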
adam/utils.py CHANGED
@@ -1,11 +1,12 @@
+from abc import ABC
 from concurrent.futures import Future, ThreadPoolExecutor
 from contextlib import redirect_stdout
 import copy
 import csv
 from datetime import datetime
+import html
 import importlib
 import io
-import json
 import os
 from pathlib import Path
 import random
@@ -19,6 +20,8 @@ import sys
 import time
 import click
 import yaml
+from prompt_toolkit import print_formatted_text, HTML
+from prompt_toolkit.completion import Completer

 from . import __version__

@@ -26,10 +29,31 @@ T = TypeVar('T')

 log_state = threading.local()

-class LogConfig:
-    is_debug = lambda: False
-    is_debug_timing = lambda: False
-    is_display_help = True
+class Color:
+    gray = 'gray'
+
+class ConfigReadable:
+    def is_debug() -> bool:
+        pass
+
+    def get(self, key: str, default: T) -> T:
+        pass
+
+class ConfigHolder:
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ConfigHolder, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self):
+        if not hasattr(self, 'config'):
+            # set by Config
+            self.config: 'ConfigReadable' = None
+            # only for testing
+            self.is_display_help = True
+            # set by ReplSession
+            self.append_command_history = lambda entry: None

 NO_SORT = 0
 SORT = 1
@@ -97,26 +121,42 @@ def convert_seconds(total_seconds_float):
 def epoch(timestamp_string: str):
     return parser.parse(timestamp_string).timestamp()

-def log(s = None):
+def log(s = None, text_color: str = None):
     if not loggable():
         return False

     # want to print empty line for False or empty collection
     if s == None:
         print()
+    elif text_color:
+        print_formatted_text(HTML(f'<ansi{text_color}>{html.escape(s)}</ansi{text_color}>'))
     else:
         click.echo(s)

     return True

-def log2(s = None, nl = True):
+def log2(s = None, nl = True, file: str = None, text_color: str = None):
     if not loggable():
         return False

     if s:
-        click.echo(s, err=True, nl=nl)
+        if file:
+            with open(file, 'at') as f:
+                f.write(s)
+                if nl:
+                    f.write('\n')
+        elif text_color:
+            print_formatted_text(HTML(f'<ansi{text_color}>{html.escape(s)}</ansi{text_color}>'))
+        else:
+            click.echo(s, err=True, nl=nl)
     else:
-        print(file=sys.stderr)
+        if file:
+            with open(file, 'at') as f:
+                f.write('\n')
+        elif text_color:
+            print_formatted_text(HTML(f'<ansi{text_color}>{html.escape(s)}</ansi{text_color}>'), file=sys.stderr)
+        else:
+            print(file=sys.stderr)

     return True

@@ -160,7 +200,11 @@ def deep_merge_dicts(dict1, dict2):
             merged_dict[key] = deep_merge_dicts(merged_dict[key], value)
         elif key not in merged_dict or value:
             # Otherwise, overwrite or add the value from dict2
-            merged_dict[key] = value
+            if key in merged_dict and isinstance(merged_dict[key], Completer):
+                pass
+                # print('SEAN completer found, ignoring', key, value)
+            else:
+                merged_dict[key] = value
     return merged_dict

 def deep_sort_dict(d):
@@ -198,7 +242,7 @@ def get_deep_keys(d, current_path=""):
     return keys

 def display_help(replace_arg = False):
-    if not LogConfig.is_display_help:
+    if not ConfigHolder().is_display_help:
         return

     args = copy.copy(sys.argv)
@@ -250,28 +294,10 @@ def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
     else:
         return None

-def log_to_file(config: dict[any, any]):
-    with log_exc():
-        base = f"/kaqing/logs"
-        os.makedirs(base, exist_ok=True)
-
-        now = datetime.now()
-        timestamp_str = now.strftime("%Y%m%d-%H%M%S")
-        filename = f"{base}/login.{timestamp_str}.txt"
-        with open(filename, 'w') as f:
-            if isinstance(config, dict):
-                try:
-                    json.dump(config, f, indent=4)
-                except:
-                    f.write(config)
-            else:
-                f.write(config)
-
 def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
-    dir = f'{Path.home()}/.kaqing'
+    dir = creating_dir(f'{Path.home()}/.kaqing')
     path = f'{dir}/{rel_path}'
     if not os.path.exists(path):
-        os.makedirs(dir, exist_ok=True)
         module = importlib.import_module(module)
         with open(path, 'w') as f:
             yaml.dump(module.config(), f, default_flow_style=False)
@@ -287,12 +313,15 @@ def is_lambda(func):
     return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'

 def debug(s = None):
-    if LogConfig.is_debug():
-        log2(f'DEBUG {s}')
+    if ConfigHolder().config.is_debug():
+        log2(f'DEBUG {s}', text_color=Color.gray)
+
+def debug_complete(s = None):
+    CommandLog.log(f'DEBUG {s}', config=ConfigHolder().config.get('debugs.complete', 'off'))

 def debug_trace():
-    if LogConfig.is_debug():
-        log2(traceback.format_exc())
+    if ConfigHolder().config.is_debug():
+        log2(traceback.format_exc(), text_color=Color.gray)

 def in_docker() -> bool:
     if os.path.exists('/.dockerenv'):
@@ -309,34 +338,42 @@ def in_docker() -> bool:
     return False

 class Ing:
-    def __init__(self, msg: str, suppress_log=False):
+    def __init__(self, msg: str, suppress_log=False, job_log: str = None, condition = True):
         self.msg = msg
         self.suppress_log = suppress_log
+        self.job_log = job_log
+        self.condition = condition

     def __enter__(self):
+        if not self.condition:
+            return None
+
         if not hasattr(log_state, 'ing_cnt'):
             log_state.ing_cnt = 0

         try:
             if not log_state.ing_cnt:
-                if not self.suppress_log and not LogConfig.is_debug():
-                    log2(f'{self.msg}...', nl=False)
+                if not self.suppress_log and not ConfigHolder().config.is_debug():
+                    log2(f'{self.msg}...', nl=False, file=self.job_log)

             return None
         finally:
             log_state.ing_cnt += 1

     def __exit__(self, exc_type, exc_val, exc_tb):
+        if not self.condition:
+            return False
+
         log_state.ing_cnt -= 1
         if not log_state.ing_cnt:
-            if not self.suppress_log and not LogConfig.is_debug():
-                log2(' OK')
+            if not self.suppress_log and not ConfigHolder().config.is_debug():
+                log2(' OK', file=self.job_log)

         return False

-def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
+def ing(msg: str, body: Callable[[], None]=None, suppress_log=False, job_log: str = None, condition = True):
     if not body:
-        return Ing(msg, suppress_log=suppress_log)
+        return Ing(msg, suppress_log=suppress_log, job_log=job_log, condition=condition)

     r = None

@@ -350,7 +387,7 @@ def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
     return r

 def loggable():
-    return LogConfig.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
+    return ConfigHolder().config and ConfigHolder().config.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt

 class TimingNode:
     def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
@@ -378,7 +415,7 @@ class LogTiming:
         self.s0 = s0

     def __enter__(self):
-        if not LogConfig.is_debug_timing():
+        if (config := ConfigHolder().config.get('debugs.timings', 'off')) not in ['on', 'file']:
             return

         if not hasattr(log_state, 'timings'):
@@ -390,7 +427,7 @@ class LogTiming:
         self.s0 = time.time()

     def __exit__(self, exc_type, exc_val, exc_tb):
-        if not LogConfig.is_debug_timing():
+        if (config := ConfigHolder().config.get('debugs.timings', 'off')) not in ['on', 'file']:
             return False

         child = log_state.timings
@@ -401,7 +438,9 @@ class LogTiming:
         log_state.timings = self.me

         if not self.me.depth:
-            log2(self.me.tree())
+            # log timings finally
+            CommandLog.log(self.me.tree(), config)
+
             log_state.timings = TimingNode(0)

         return False
@@ -410,7 +449,7 @@ def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
     if not s0 and not body:
         return LogTiming(msg, s0=s0)

-    if not LogConfig.is_debug_timing():
+    if not ConfigHolder().config.get('debugs.timings', False):
         if body:
             return body()

@@ -430,7 +469,9 @@ def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):

 def timing_log_line(depth: int, msg: str, s0: time.time):
     elapsed = time.time() - s0
-    prefix = '[timings] '
+    offloaded = '-' if threading.current_thread().name.startswith('offload') or threading.current_thread().name.startswith('async') else '+'
+    prefix = f'[{offloaded} timings] '
+
     if depth:
         if elapsed > 0.01:
             prefix = (' ' * (depth-1)) + '* '
@@ -526,7 +567,7 @@ class LogTrace:
         elif self.err_msg is not False and self.err_msg:
             log2(self.err_msg)

-        if self.err_msg is not False and LogConfig.is_debug():
+        if self.err_msg is not False and ConfigHolder().config.is_debug():
             traceback.print_exception(exc_type, exc_val, exc_tb, file=sys.stderr)

         # swallow exception
@@ -567,37 +608,44 @@ class ParallelService:
         else:
             return iterator

+thread_pools: dict[str, ThreadPoolExecutor] = {}
+thread_pool_lock = threading.Lock()
+
 class ParallelMapHandler:
-    def __init__(self, collection: list, workers: int, samples: int = sys.maxsize, msg: str = None, collect = True):
+    def __init__(self, collection: list, workers: int, samples: int = sys.maxsize, msg: str = None, collect = True, name = None):
         self.collection = collection
         self.workers = workers
         self.executor = None
         self.samples = samples
         self.msg = msg
         if msg and msg.startswith('d`'):
-            if LogConfig.is_debug():
+            if ConfigHolder().config.is_debug():
                 self.msg = msg.replace('d`', '', 1)
             else:
                 self.msg = None
         self.collect = collect
+        self.name = name

         self.begin = []
         self.end = []
         self.start_time = None

     def __enter__(self):
+        self.start_time = None
+
         self.calc_msgs()

         if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
             self.start_time = time.time()

-            self.executor = ThreadPoolExecutor(max_workers=self.workers)
+            self.executor = self.pool()
+            # self.executor = ThreadPoolExecutor(max_workers=self.workers)
             self.executor.__enter__()

         return ParallelService(self)

     def __exit__(self, exc_type, exc_val, exc_tb):
-        if self.executor:
+        if not self.name and self.executor:
             self.executor.__exit__(exc_type, exc_val, exc_tb)

         if self.end:
@@ -605,6 +653,15 @@ class ParallelMapHandler:

         return False

+    def pool(self, thread_name_prefix: str = None):
+        if not self.name:
+            return ThreadPoolExecutor(max_workers=self.workers)
+
+        if self.name not in thread_pools:
+            thread_pools[self.name] = ThreadPoolExecutor(max_workers=self.workers, thread_name_prefix=thread_name_prefix)
+
+        return thread_pools[self.name]
+
     def size(self):
         if not self.collection:
             return 0
@@ -615,25 +672,28 @@ class ParallelMapHandler:
         if not self.msg:
             return

+        self.begin = []
+        self.end = []
         size = self.size()
         offloaded = False
         serially = False
         sampling = False
         if size == 0:
             offloaded = True
-            self.msg = self.msg.replace('{size}', '1')
+            msg = self.msg.replace('{size}', '1')
         elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
-            self.msg = self.msg.replace('{size}', f'{size}')
+            msg = self.msg.replace('{size}', f'{size}')
         elif self.samples < sys.maxsize:
             sampling = True
+            samples = self.samples
             if self.samples > size:
-                self.samples = size
-            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+                samples = size
+            msg = self.msg.replace('{size}', f'{samples}/{size} sample')
         else:
             serially = True
-            self.msg = self.msg.replace('{size}', f'{size}')
+            msg = self.msg.replace('{size}', f'{size}')

-        for token in self.msg.split(' '):
+        for token in msg.split(' '):
             if '|' in token:
                 self.begin.append(token.split('|')[0])
                 if not sampling and not serially and not offloaded:
@@ -650,8 +710,19 @@ class ParallelMapHandler:
         else:
             log2(f'{" ".join(self.begin)} with {self.workers} workers...')

-def parallelize(collection: list, workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True):
-    return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect)
+# parallelizers: dict[str, ParallelMapHandler] = {}
+# parallelizer_lock = threading.Lock()
+
+def parallelize(collection: list, workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True, name = None):
+    return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect, name = name)
+    # if not name:
+    #     return ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect)
+
+    # with parallelizer_lock:
+    #     if name not in parallelizers:
+    #         parallelizers[name] = ParallelMapHandler(collection, workers, samples = samples, msg = msg, collect = collect, name = name)
+
+    #     return parallelizers[name]

 class OffloadService:
     def __init__(self, handler: 'OffloadHandler'):
@@ -670,22 +741,24 @@ class OffloadService:
         return future

 class OffloadHandler(ParallelMapHandler):
-    def __init__(self, max_workers: int, msg: str = None):
-        super().__init__(None, max_workers, msg=msg, collect=False )
+    def __init__(self, max_workers: int, msg: str = None, name: str = None):
+        super().__init__(None, max_workers, msg=msg, collect=False, name=f'offload-{name}')

     def __enter__(self):
+        self.start_time = None
         self.calc_msgs()

-        if self.workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+        if self.workers > 1:
             self.start_time = time.time()

-            self.executor = ThreadPoolExecutor(max_workers=self.workers)
+            self.executor = self.pool(thread_name_prefix='offload')
+            # self.executor = ThreadPoolExecutor(max_workers=self.workers, thread_name_prefix='offload')
             self.executor.__enter__()

         return OffloadService(self)

     def __exit__(self, exc_type, exc_val, exc_tb):
-        if self.executor:
+        if not self.name and self.executor:
             self.executor.__exit__(exc_type, exc_val, exc_tb)

         if self.end:
@@ -693,38 +766,33 @@ class OffloadHandler(ParallelMapHandler):

         return False

-    def size(self):
-        if not self.collection:
-            return 0
-
-        return len(self.collection)
-
     def calc_msgs(self):
         if not self.msg:
             return

+        self.begin = []
+        self.end = []
         size = self.size()
-        # return

         offloaded = False
         serially = False
         sampling = False
         if size == 0:
             offloaded = True
-            self.msg = self.msg.replace('{size}', '1')
+            msg = self.msg.replace('{size}', '1')
         elif self.workers > 1 and size > 1 and self.samples == sys.maxsize:
-            self.msg = self.msg.replace('{size}', f'{size}')
+            msg = self.msg.replace('{size}', f'{size}')
         elif self.samples < sys.maxsize:
             sampling = True
-            if self.samples > size:
-                self.samples = size
-            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+            samples = self.samples
+            if samples > size:
+                samples = size
+            msg = self.msg.replace('{size}', f'{samples}/{size} sample')
         else:
             serially = True
-            self.msg = self.msg.replace('{size}', f'{size}')
-            # return
+            msg = self.msg.replace('{size}', f'{size}')

-        for token in self.msg.split(' '):
+        for token in msg.split(' '):
             if '|' in token:
                 self.begin.append(token.split('|')[0])
                 if not sampling and not serially and not offloaded:
@@ -741,5 +809,114 @@ class OffloadHandler(ParallelMapHandler):
         else:
             log2(f'{" ".join(self.begin)} with {self.workers} workers...')

-def offload(max_workers: int = 3, msg: str = None):
-    return OffloadHandler(max_workers, msg = msg)
+def offload(max_workers: int = 3, msg: str = None, name: str = None):
+    return OffloadHandler(max_workers, msg = msg, name = name)
+
+def kaqing_log_file_name(suffix = 'log'):
+    return f"{log_dir()}/{datetime.now().strftime('%d%H%M%S')}.{suffix}"
+
+def log_dir():
+    return creating_dir(ConfigHolder().config.get('log-dir', '/tmp/qing-db/q/logs'))
+
+def log_to_pods():
+    return ConfigHolder().config.get('job.log-to-pods', True)
+
+def pod_log_dir():
+    return ConfigHolder().config.get('pod-log-dir', '/tmp/q/logs')
+
+class LogFileHandler:
+    def __init__(self, suffix = 'log', condition=True):
+        self.suffix = suffix
+        self.condition = condition
+
+    def __enter__(self):
+        self.f = None
+        if self.condition:
+            self.f = open(kaqing_log_file_name(suffix=self.suffix), 'w')
+            self.f.__enter__()
+
+        return self.f
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.f:
+            self.f.__exit__(exc_type, exc_val, exc_tb)
+
+            if ConfigHolder().append_command_history:
+                ConfigHolder().append_command_history(f':cat {self.f.name}')
+
+        return False
+
+def kaqing_log_file(suffix = 'log', condition=True):
+    return LogFileHandler(suffix = suffix, condition=condition)
+
+class CommandLog:
+    log_file = None
+
+    def log(line: str, config: str = 'off'):
+        if config == 'file':
+            if not CommandLog.log_file:
+                try:
+                    CommandLog.log_file = open(kaqing_log_file_name(suffix='cmd.log'), 'w')
+                except:
+                    pass
+
+            try:
+                CommandLog.log_file.write(line + '\n')
+            except:
+                pass
+        elif config == 'on':
+            log2(line, text_color=Color.gray)
+
+    def close_log_file():
+        if CommandLog.log_file:
+            try:
+                CommandLog.log_file.close()
+            except:
+                pass

+            if ConfigHolder().append_command_history:
+                ConfigHolder().append_command_history(f':cat {CommandLog.log_file.name}')
+
+            CommandLog.log_file = None
+
+class ExecResult(ABC):
+    def exit_code(self) -> int:
+        pass
+
+    def cat_log_file_cmd(self) -> str:
+        pass
+
+_dirs_created = set()
+
+def creating_dir(dir):
+    if dir not in _dirs_created:
+        _dirs_created.add(dir)
+        if not os.path.exists(dir):
+            os.makedirs(dir, exist_ok=True)
+
+    return dir
+
+class LogFile(str):
+    def __init__(self, s: str):
+        super().__init__()
+
+    def __repr__(self):
+        return super().__repr__()
+
+    def to_command(self, cmd: str = ':tail'):
+        return f'{cmd} {self}'
+
+class PodLogFile(LogFile):
+    def __new__(cls, value, pod: str, size: str = None):
+        return super().__new__(cls, value)
+
+    def __init__(self, value, pod: str, size: str = None):
+        super().__init__(value)
+        self.pod = pod
+        self.size = size
+
+    def __repr__(self):
+        return super().__repr__()
+
+    def to_command(self, cmd: str = 'tail'):
+        return f'@{self.pod} {cmd} {self}'
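Taken together, the utils.py changes replace the static LogConfig flags with a ConfigHolder singleton, add color- and file-aware logging (log2(text_color=...), CommandLog, the debugs.* config keys), and introduce log-file helpers such as creating_dir(), kaqing_log_file() and PodLogFile. The snippet below is a hedged usage sketch of that new surface, using only definitions visible in this diff; StubConfig is a hypothetical stand-in for the ConfigReadable object that adam.config.Config normally installs at startup.

# Sketch only: exercises helpers added to adam/utils.py in 2.0.227.
# StubConfig is a hypothetical stand-in for the ConfigReadable object
# that adam.config.Config installs in ConfigHolder at startup.
from adam.utils import CommandLog, ConfigHolder, PodLogFile, creating_dir, debug, ing, log2

class StubConfig:
    def is_debug(self) -> bool:
        return False

    def get(self, key, default):
        return default

holder = ConfigHolder()        # singleton: every call returns the same instance
holder.config = StubConfig()   # normally wired up by Config

debug('loaded config')                  # no-op here; gray output once is_debug() returns True
log2('progress', text_color='gray')     # colored output via prompt_toolkit

# ing() gained job_log= and condition=: the "msg... OK" bracket can be
# suppressed entirely or appended to a job log file instead of stderr
with ing('restarting nodes'):
    pass                                # prints "restarting nodes... OK"

# CommandLog routes diagnostic lines to stderr ('on'), a *.cmd.log file
# ('file'), or nowhere ('off'), matching the new debugs.* config values
CommandLog.log('completion took 12ms', config='on')
CommandLog.close_log_file()             # no-op when no file was opened

# creating_dir() makes a directory once per process and returns it unchanged
logs_dir = creating_dir('/tmp/qing-db/q/logs')

# PodLogFile is a str subclass that remembers which pod a remote log lives on
# and can render the REPL command used to read it back
log_file = PodLogFile('/tmp/q/logs/01121530.log', pod='cassandra-0')
print(log_file.to_command())            # @cassandra-0 tail /tmp/q/logs/01121530.log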