kaqing 2.0.188__py3-none-any.whl → 2.0.200__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of kaqing might be problematic; see the registry's advisory page for more details.

Files changed (72)
  1. adam/batch.py +6 -6
  2. adam/commands/bash/bash.py +1 -1
  3. adam/commands/bash/utils_bash.py +1 -1
  4. adam/commands/cassandra/__init__.py +0 -0
  5. adam/commands/cassandra/download_cassandra_log.py +45 -0
  6. adam/commands/cassandra/nodetool.py +64 -0
  7. adam/commands/cassandra/nodetool_commands.py +120 -0
  8. adam/commands/cassandra/restart_cluster.py +47 -0
  9. adam/commands/cassandra/restart_node.py +51 -0
  10. adam/commands/cassandra/restart_nodes.py +47 -0
  11. adam/commands/cassandra/rollout.py +88 -0
  12. adam/commands/cat.py +2 -2
  13. adam/commands/cd.py +2 -2
  14. adam/commands/command.py +1 -1
  15. adam/commands/commands_utils.py +8 -13
  16. adam/commands/cql/alter_tables.py +66 -0
  17. adam/commands/cql/completions_c.py +1 -0
  18. adam/commands/cql/utils_cql.py +5 -5
  19. adam/commands/debug/__init__.py +0 -0
  20. adam/commands/debug/debug.py +22 -0
  21. adam/commands/debug/debug_completes.py +35 -0
  22. adam/commands/debug/debug_timings.py +35 -0
  23. adam/commands/devices/devices.py +1 -1
  24. adam/commands/download_cassandra_log.py +45 -0
  25. adam/commands/download_file.py +3 -3
  26. adam/commands/export/export_sessions.py +1 -1
  27. adam/commands/export/exporter.py +1 -1
  28. adam/commands/find_processes.py +2 -2
  29. adam/commands/generate_report.py +52 -0
  30. adam/commands/head.py +2 -2
  31. adam/commands/ls.py +2 -2
  32. adam/commands/medusa/medusa_restore.py +0 -16
  33. adam/commands/nodetool.py +1 -1
  34. adam/commands/os/__init__.py +0 -0
  35. adam/commands/os/cat.py +36 -0
  36. adam/commands/os/download_file.py +47 -0
  37. adam/commands/os/find_files.py +51 -0
  38. adam/commands/os/find_processes.py +76 -0
  39. adam/commands/os/head.py +36 -0
  40. adam/commands/os/shell.py +41 -0
  41. adam/commands/postgres/postgres_databases.py +2 -3
  42. adam/commands/preview_table.py +1 -1
  43. adam/commands/restart_cluster.py +47 -0
  44. adam/commands/restart_node.py +51 -0
  45. adam/commands/restart_nodes.py +47 -0
  46. adam/commands/show/show_cli_commands.py +1 -1
  47. adam/config.py +4 -6
  48. adam/embedded_params.py +1 -1
  49. adam/repl.py +5 -3
  50. adam/repl_commands.py +11 -6
  51. adam/repl_session.py +4 -3
  52. adam/repl_state.py +6 -0
  53. adam/sql/async_executor.py +44 -0
  54. adam/sql/lark_completer.py +6 -4
  55. adam/sql/qingl.lark +1076 -0
  56. adam/utils.py +95 -23
  57. adam/utils_k8s/app_clusters.py +1 -1
  58. adam/utils_k8s/app_pods.py +2 -3
  59. adam/utils_k8s/cassandra_clusters.py +7 -3
  60. adam/utils_k8s/cassandra_nodes.py +8 -5
  61. adam/utils_k8s/kube_context.py +1 -4
  62. adam/utils_k8s/pods.py +55 -1
  63. adam/utils_repl/repl_completer.py +4 -87
  64. adam/version.py +1 -1
  65. {kaqing-2.0.188.dist-info → kaqing-2.0.200.dist-info}/METADATA +1 -1
  66. {kaqing-2.0.188.dist-info → kaqing-2.0.200.dist-info}/RECORD +69 -45
  67. adam/commands/logs.py +0 -37
  68. adam/commands/report.py +0 -61
  69. adam/commands/restart.py +0 -60
  70. {kaqing-2.0.188.dist-info → kaqing-2.0.200.dist-info}/WHEEL +0 -0
  71. {kaqing-2.0.188.dist-info → kaqing-2.0.200.dist-info}/entry_points.txt +0 -0
  72. {kaqing-2.0.188.dist-info → kaqing-2.0.200.dist-info}/top_level.txt +0 -0
adam/utils.py CHANGED
@@ -19,7 +19,6 @@ import sys
19
19
  import time
20
20
  import click
21
21
  import yaml
22
-
23
22
  from prompt_toolkit.completion import Completer
24
23
 
25
24
  from . import __version__
@@ -28,11 +27,28 @@ T = TypeVar('T')
28
27
 
29
28
  log_state = threading.local()
30
29
 
31
- class LogConfig:
32
- is_debug = lambda: False
33
- is_debug_timing = lambda: False
34
- is_debug_complete = lambda: False
35
- is_display_help = True
30
+ class ConfigReadable:
31
+ def is_debug() -> bool:
32
+ pass
33
+
34
+ def get(self, key: str, default: T) -> T:
35
+ pass
36
+
37
+ class ConfigHolder:
38
+ # the singleton pattern
39
+ def __new__(cls, *args, **kwargs):
40
+ if not hasattr(cls, 'instance'): cls.instance = super(ConfigHolder, cls).__new__(cls)
41
+
42
+ return cls.instance
43
+
44
+ def __init__(self):
45
+ if not hasattr(self, 'config'):
46
+ # set by Config
47
+ self.config: 'ConfigReadable' = None
48
+ # only for testing
49
+ self.is_display_help = True
50
+ # set by ReplSession
51
+ self.append_command_history = lambda entry: None
36
52
 
37
53
  NO_SORT = 0
38
54
  SORT = 1
@@ -164,7 +180,8 @@ def deep_merge_dicts(dict1, dict2):
164
180
  elif key not in merged_dict or value:
165
181
  # Otherwise, overwrite or add the value from dict2
166
182
  if key in merged_dict and isinstance(merged_dict[key], Completer):
167
- print('SEAN completer found, ignoring', key, value)
183
+ pass
184
+ # print('SEAN completer found, ignoring', key, value)
168
185
  else:
169
186
  merged_dict[key] = value
170
187
  return merged_dict
@@ -204,7 +221,7 @@ def get_deep_keys(d, current_path=""):
204
221
  return keys
205
222
 
206
223
  def display_help(replace_arg = False):
207
- if not LogConfig.is_display_help:
224
+ if not ConfigHolder().is_display_help:
208
225
  return
209
226
 
210
227
  args = copy.copy(sys.argv)
@@ -293,15 +310,15 @@ def is_lambda(func):
293
310
  return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
294
311
 
295
312
  def debug(s = None):
296
- if LogConfig.is_debug():
313
+ if ConfigHolder().config.is_debug():
297
314
  log2(f'DEBUG {s}')
298
315
 
299
316
  def debug_complete(s = None):
300
- if LogConfig.is_debug_complete():
301
- log2(f'DEBUG {s}')
317
+ CommandLog.log(f'DEBUG {s}', config=ConfigHolder().config.get('debugs.complete', 'off'))
302
318
 
303
319
  def debug_trace():
304
- if LogConfig.is_debug():
320
+ if ConfigHolder().config.is_debug():
321
+ # if LogConfig.is_debug():
305
322
  log2(traceback.format_exc())
306
323
 
307
324
  def in_docker() -> bool:
@@ -329,7 +346,7 @@ class Ing:
329
346
 
330
347
  try:
331
348
  if not log_state.ing_cnt:
332
- if not self.suppress_log and not LogConfig.is_debug():
349
+ if not self.suppress_log and not ConfigHolder().config.is_debug():
333
350
  log2(f'{self.msg}...', nl=False)
334
351
 
335
352
  return None
@@ -339,7 +356,7 @@ class Ing:
339
356
  def __exit__(self, exc_type, exc_val, exc_tb):
340
357
  log_state.ing_cnt -= 1
341
358
  if not log_state.ing_cnt:
342
- if not self.suppress_log and not LogConfig.is_debug():
359
+ if not self.suppress_log and not ConfigHolder().config.is_debug():
343
360
  log2(' OK')
344
361
 
345
362
  return False
@@ -360,7 +377,7 @@ def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
360
377
  return r
361
378
 
362
379
  def loggable():
363
- return LogConfig.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
380
+ return ConfigHolder().config and ConfigHolder().config.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
364
381
 
365
382
  class TimingNode:
366
383
  def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
@@ -388,7 +405,7 @@ class LogTiming:
388
405
  self.s0 = s0
389
406
 
390
407
  def __enter__(self):
391
- if not LogConfig.is_debug_timing():
408
+ if (config := ConfigHolder().config.get('debugs.timings', 'off')) not in ['on', 'file']:
392
409
  return
393
410
 
394
411
  if not hasattr(log_state, 'timings'):
@@ -400,7 +417,7 @@ class LogTiming:
400
417
  self.s0 = time.time()
401
418
 
402
419
  def __exit__(self, exc_type, exc_val, exc_tb):
403
- if not LogConfig.is_debug_timing():
420
+ if (config := ConfigHolder().config.get('debugs.timings', 'off')) not in ['on', 'file']:
404
421
  return False
405
422
 
406
423
  child = log_state.timings
@@ -411,7 +428,9 @@ class LogTiming:
411
428
  log_state.timings = self.me
412
429
 
413
430
  if not self.me.depth:
414
- log2(self.me.tree())
431
+ # log timings finally
432
+ CommandLog.log(self.me.tree(), config)
433
+
415
434
  log_state.timings = TimingNode(0)
416
435
 
417
436
  return False
@@ -420,7 +439,7 @@ def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
420
439
  if not s0 and not body:
421
440
  return LogTiming(msg, s0=s0)
422
441
 
423
- if not LogConfig.is_debug_timing():
442
+ if not ConfigHolder().config.get('debugs.timings', False):
424
443
  if body:
425
444
  return body()
426
445
 
@@ -439,7 +458,6 @@ def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
439
458
  return r
440
459
 
441
460
  def timing_log_line(depth: int, msg: str, s0: time.time):
442
- # print('SEAN log timing', msg, threading.current_thread().name)
443
461
  elapsed = time.time() - s0
444
462
  offloaded = '-' if threading.current_thread().name.startswith('offload') or threading.current_thread().name.startswith('async') else '+'
445
463
  prefix = f'[{offloaded} timings] '
@@ -539,7 +557,7 @@ class LogTrace:
539
557
  elif self.err_msg is not False and self.err_msg:
540
558
  log2(self.err_msg)
541
559
 
542
- if self.err_msg is not False and LogConfig.is_debug():
560
+ if self.err_msg is not False and ConfigHolder().config.is_debug():
543
561
  traceback.print_exception(exc_type, exc_val, exc_tb, file=sys.stderr)
544
562
 
545
563
  # swallow exception
@@ -588,7 +606,7 @@ class ParallelMapHandler:
588
606
  self.samples = samples
589
607
  self.msg = msg
590
608
  if msg and msg.startswith('d`'):
591
- if LogConfig.is_debug():
609
+ if ConfigHolder().config.is_debug():
592
610
  self.msg = msg.replace('d`', '', 1)
593
611
  else:
594
612
  self.msg = None
@@ -756,4 +774,58 @@ class OffloadHandler(ParallelMapHandler):
756
774
  log2(f'{" ".join(self.begin)} with {self.workers} workers...')
757
775
 
758
776
  def offload(max_workers: int = 3, msg: str = None):
759
- return OffloadHandler(max_workers, msg = msg)
777
+ return OffloadHandler(max_workers, msg = msg)
778
+
779
+ def kaqing_log_file_name(suffix = 'log'):
780
+ return f"{ConfigHolder().config.get('log-prefix', '/tmp/qing')}-{datetime.now().strftime('%d%H%M%S')}.{suffix}"
781
+
782
+ class LogFileHandler:
783
+ def __init__(self, suffix = 'log'):
784
+ self.suffix = suffix
785
+
786
+ def __enter__(self):
787
+ self.f = open(kaqing_log_file_name(), 'w')
788
+ self.f.__enter__()
789
+
790
+ return self.f
791
+
792
+ def __exit__(self, exc_type, exc_val, exc_tb):
793
+ self.f.__exit__(exc_type, exc_val, exc_tb)
794
+
795
+ if ConfigHolder().append_command_history:
796
+ ConfigHolder().append_command_history(f':sh cat {self.f.name}')
797
+
798
+ return False
799
+
800
+ def kaqing_log_file(suffix = 'log'):
801
+ return LogFileHandler(suffix = suffix)
802
+
803
+ class CommandLog:
804
+ log_file = None
805
+
806
+ def log(line: str, config: str = 'off'):
807
+ if config == 'file':
808
+ if not CommandLog.log_file:
809
+ try:
810
+ CommandLog.log_file = open(kaqing_log_file_name(suffix='cmd.log'), 'w')
811
+ except:
812
+ pass
813
+
814
+ try:
815
+ CommandLog.log_file.write(line + '\n')
816
+ except:
817
+ pass
818
+ elif config == 'on':
819
+ log2(line)
820
+
821
+ def close_log_file():
822
+ if CommandLog.log_file:
823
+ try:
824
+ CommandLog.log_file.close()
825
+ except:
826
+ pass
827
+
828
+ if ConfigHolder().append_command_history:
829
+ ConfigHolder().append_command_history(f':sh cat {CommandLog.log_file.name}')
830
+
831
+ CommandLog.log_file = None
@@ -23,6 +23,6 @@ class AppClusters:
23
23
  if result.stdout:
24
24
  log(result.stdout)
25
25
  if result.stderr:
26
- log2(result.stderr, file=sys.stderr)
26
+ log2(result.stderr)
27
27
 
28
28
  return results
@@ -29,8 +29,7 @@ class AppPods:
29
29
  container = Config().get('app.container-name', 'c3-server')
30
30
  r = Pods.exec(pod_name, container, namespace, command, show_out = show_out, throw_err = throw_err, shell = shell, backgrounded = backgrounded)
31
31
 
32
- if r and Config().get('repl.history.push-cat-remote-log-file', True):
33
- if r.log_file and ReplSession().prompt_session:
34
- ReplSession().prompt_session.history.append_string(f'@{r.pod} cat {r.log_file}')
32
+ if r and r.log_file:
33
+ ReplSession().append_history(f'@{r.pod} cat {r.log_file}')
35
34
 
36
35
  return r
@@ -21,20 +21,24 @@ class CassandraClusters:
21
21
  on_any = False,
22
22
  shell = '/bin/sh',
23
23
  backgrounded = False,
24
- log_file = None) -> list[PodExecResult]:
24
+ log_file = None,
25
+ via_sh = True) -> list[PodExecResult]:
25
26
 
26
27
  pods = StatefulSets.pod_names(sts, namespace)
27
28
  samples = 1 if on_any else sys.maxsize
29
+ if via_sh and (backgrounded or command.endswith(' &')) and Config().get('repl.background-process.via-sh', True) and not log_file:
30
+ log_file = Pods.log_file(command)
31
+
28
32
  msg = 'd`Running|Ran ' + action + ' command onto {size} pods'
29
33
  with Pods.parallelize(pods, max_workers, samples, msg, action=action) as exec:
30
- results: list[PodExecResult] = exec.map(lambda pod: CassandraNodes.exec(pod, namespace, command, False, False, shell, backgrounded, log_file))
34
+ results: list[PodExecResult] = exec.map(lambda pod: CassandraNodes.exec(pod, namespace, command, False, False, shell, backgrounded, log_file, via_sh=via_sh))
31
35
  for result in results:
32
36
  if show_out and not Config().is_debug():
33
37
  log(result.command)
34
38
  if result.stdout:
35
39
  log(result.stdout)
36
40
  if result.stderr:
37
- log2(result.stderr, file=sys.stderr)
41
+ log2(result.stderr)
38
42
 
39
43
  return results
40
44
 
@@ -6,12 +6,15 @@ from adam.repl_session import ReplSession
6
6
 
7
7
  # utility collection on cassandra nodes; methods are all static
8
8
  class CassandraNodes:
9
- def exec(pod_name: str, namespace: str, command: str, show_out = True, throw_err = False, shell = '/bin/sh', backgrounded = False, log_file = None) -> PodExecResult:
10
- r = Pods.exec(pod_name, "cassandra", namespace, command, show_out = show_out, throw_err = throw_err, shell = shell, backgrounded = backgrounded, log_file=log_file)
9
+ def exec(pod_name: str, namespace: str, command: str, show_out = True, throw_err = False, shell = '/bin/sh', backgrounded = False, log_file = None, no_history = False, via_sh = True) -> PodExecResult:
10
+ r = Pods.exec(pod_name, "cassandra", namespace, command, show_out = show_out, throw_err = throw_err, shell = shell, backgrounded = backgrounded, log_file=log_file, via_sh=via_sh)
11
11
 
12
- if r and Config().get('repl.history.push-cat-remote-log-file', True):
13
- if r.log_file and ReplSession().prompt_session:
14
- ReplSession().prompt_session.history.append_string(f'@{r.pod} cat {r.log_file}')
12
+ if not no_history and r and r.log_file:
13
+ entry = f'@{r.pod} cat {r.log_file}'
14
+ if Config().get('repl.background-process.via-sh', True):
15
+ entry = f':sh cat {r.log_file}'
16
+
17
+ ReplSession().append_history(entry)
15
18
 
16
19
  return r
17
20
 
@@ -102,7 +102,4 @@ class KubeContext:
102
102
  return name if re.match(r"^(?!pg-).*-k8spg-.*$", name) else None
103
103
 
104
104
  def show_out(s: bool):
105
- return s or Config().is_debug()
106
-
107
- def show_parallelism():
108
- return Config().get('debugs.show-parallelism', False)
105
+ return s or Config().is_debug()
adam/utils_k8s/pods.py CHANGED
@@ -1,6 +1,8 @@
1
1
  from collections.abc import Callable
2
2
  from datetime import datetime
3
3
  import os
4
+ import re
5
+ import subprocess
4
6
  import sys
5
7
  import time
6
8
  from typing import TypeVar
@@ -9,6 +11,7 @@ from kubernetes.stream import stream
9
11
  from kubernetes.stream.ws_client import ERROR_CHANNEL, WSClient
10
12
 
11
13
  from adam.config import Config
14
+ from adam.repl_session import ReplSession
12
15
  from adam.utils_k8s.volumes import ConfigMapMount
13
16
  from adam.pod_exec_result import PodExecResult
14
17
  from adam.utils import GeneratorStream, ParallelMapHandler, log2, debug, log_exc
@@ -60,12 +63,34 @@ class Pods:
60
63
  backgrounded = False,
61
64
  log_file = None,
62
65
  interaction: Callable[[any, list[str]], any] = None,
63
- env_prefix: str = None):
66
+ env_prefix: str = None,
67
+ via_sh = True):
64
68
  if _TEST_POD_EXEC_OUTS:
65
69
  return _TEST_POD_EXEC_OUTS
66
70
 
67
71
  show_out = KubeContext.show_out(show_out)
68
72
 
73
+ if via_sh and (backgrounded or command.endswith(' &')) and Config().get('repl.background-process.via-sh', True):
74
+ command = command.strip(' &')
75
+
76
+ log_all_file = None
77
+ log_pod_file = None
78
+ if log_file:
79
+ log_pod_file = Pods.log_file_from_template(log_file, pod_name=pod_name)
80
+ if (a := Pods.log_file_from_template(log_file, pod_name='all')) != log_file:
81
+ log_all_file = a
82
+ else:
83
+ log_pod_file = Pods.log_file(command, pod_name=pod_name)
84
+
85
+ command = command.replace('"', '\\"')
86
+ cmd = f'nohup kubectl exec {pod_name} -c {container} -- {shell} -c "{command} &" > {log_pod_file} 2>&1 &'
87
+ if log_all_file:
88
+ cmd = f'{cmd} >> {log_all_file}'
89
+
90
+ result = subprocess.run(cmd, capture_output=True, text=True, shell=True)
91
+
92
+ return PodExecResult(result.stdout, result.stderr, cmd, None, pod=pod_name, log_file=log_pod_file)
93
+
69
94
  api = client.CoreV1Api()
70
95
 
71
96
  tty = True
@@ -141,6 +166,33 @@ class Pods:
141
166
 
142
167
  return PodExecResult("".join(stdout), "".join(stderr), k_command, error_output, pod=pod_name, log_file=log_file)
143
168
 
169
+ def log_file(command: str, pod_name: str = None, dt: datetime = None):
170
+ cmd_name = ''
171
+ if command.startswith('nodetool '):
172
+ command = command.strip(' &')
173
+ cmd_name = f".{'_'.join(command.split(' ')[5:])}"
174
+
175
+ pod_suffix = '{pod}'
176
+ if pod_name:
177
+ pod_suffix = pod_name
178
+ if groups := re.match(r'.*-(.*)', pod_name):
179
+ pod_suffix = f'-{groups[1]}'
180
+
181
+ if not dt:
182
+ dt = datetime.now()
183
+
184
+ return f'{log_prefix()}-{dt.strftime("%d%H%M%S")}{cmd_name}{pod_suffix}.log'
185
+
186
+ def log_file_from_template(log_file: str, pod_name: str):
187
+ pod_suffix = pod_name
188
+ if pod_name and (groups := re.match(r'.*-(.*)', pod_name)):
189
+ pod_suffix = f'-{groups[1]}'
190
+
191
+ if not pod_suffix.startswith('-'):
192
+ pod_suffix = f'-{pod_suffix}'
193
+
194
+ return log_file.replace('{pod}', pod_suffix)
195
+
144
196
  def read_file(pod_name: str, container: str, namespace: str, file_path: str):
145
197
  v1 = client.CoreV1Api()
146
198
 
@@ -182,6 +234,8 @@ class Pods:
182
234
  for item in GeneratorStream(bytes):
183
235
  f.write(item)
184
236
 
237
+ ReplSession().append_history(f':sh cat {to_path}')
238
+
185
239
  return to_path
186
240
 
187
241
  def get_container(namespace: str, pod_name: str, container_name: str):
@@ -1,14 +1,11 @@
1
- from concurrent.futures import ThreadPoolExecutor
2
1
  import copy
3
- import inspect
4
2
  import re
5
- import threading
6
3
  import traceback
7
4
  from typing import Iterable, TypeVar, cast
8
- from prompt_toolkit.completion import CompleteEvent, Completer, Completion, NestedCompleter, WordCompleter
5
+ from prompt_toolkit.completion import CompleteEvent, Completion, NestedCompleter, WordCompleter
9
6
  from prompt_toolkit.document import Document
10
7
 
11
- from adam.utils import debug, debug_complete, log2, log_timing
8
+ from adam.utils import debug_complete, log2
12
9
  from adam.utils_repl.appendable_completer import AppendableCompleter
13
10
 
14
11
  import nest_asyncio
@@ -37,6 +34,8 @@ def merge_completions(dict1, dict2):
37
34
  target[key] = merge_completions(target[key], value)
38
35
  elif isinstance(target[key], AppendableCompleter):
39
36
  cast(AppendableCompleter, target[key]).append_completions(key, value)
37
+ elif isinstance(target[key], NestedCompleter):
38
+ cast(NestedCompleter, target[key]).options = merge_completions(cast(NestedCompleter, target[key]).options, value)
40
39
  elif isinstance(value, AppendableCompleter):
41
40
  if isinstance(target[key], dict):
42
41
  cast(AppendableCompleter, value).append_completions(key, target[key])
@@ -88,85 +87,3 @@ class ReplCompleter(NestedCompleter):
88
87
  )
89
88
  for c in completer.get_completions(document, complete_event):
90
89
  yield c
91
-
92
- lock = threading.Lock()
93
- in_queue = set()
94
-
95
- def preload(action: callable, log_key: str = None):
96
- with lock:
97
- if not LazyNestedCompleter.loop:
98
- LazyNestedCompleter.loop = asyncio.new_event_loop()
99
- LazyNestedCompleter.async_exec = ThreadPoolExecutor(max_workers=6, thread_name_prefix='async')
100
- LazyNestedCompleter.loop.set_default_executor(LazyNestedCompleter.async_exec)
101
-
102
- # some lib does not handle asyncio loop properly, as sync exec submit does not work, use another async loop
103
- async def a():
104
- try:
105
- arg_needed = len(action.__code__.co_varnames)
106
-
107
- if log_key:
108
- with log_timing(log_key):
109
- r = action(None) if arg_needed else action()
110
- else:
111
- r = action(None) if arg_needed else action()
112
- if inspect.isawaitable(r):
113
- await r
114
-
115
- in_queue.remove(log_key)
116
- except Exception as e:
117
- log2('preloading error', e, inspect.getsourcelines(action)[0][0])
118
- traceback.print_exc()
119
-
120
- if log_key not in in_queue:
121
- in_queue.add(log_key)
122
- LazyNestedCompleter.async_exec.submit(lambda: LazyNestedCompleter.loop.run_until_complete(a()))
123
-
124
- class LazyNestedCompleter(NestedCompleter):
125
- loop: asyncio.AbstractEventLoop = None
126
- async_exec: ThreadPoolExecutor = None
127
-
128
- def __init__(self, name: str, options_lambda: callable, ignore_case: bool = True, auto: str = 'lazy') -> None:
129
- super().__init__(None, ignore_case)
130
- self.options_lambda = options_lambda
131
- if auto == 'lazy':
132
- preload(options_lambda, log_key=name)
133
-
134
- def __repr__(self) -> str:
135
- return "LazyNestedCompleter(%r, ignore_case=%r)" % (self.options, self.ignore_case)
136
-
137
- def get_completions(
138
- self, document: Document, complete_event: CompleteEvent
139
- ) -> Iterable[Completion]:
140
- if not self.options:
141
- self.options = self.options_lambda()
142
-
143
- # Split document.
144
- text = document.text_before_cursor.lstrip()
145
- stripped_len = len(document.text_before_cursor) - len(text)
146
-
147
- # If there is a space, check for the first term, and use a
148
- # subcompleter.
149
- if " " in text:
150
- first_term = text.split()[0]
151
- completer = self.options.get(first_term)
152
-
153
- # If we have a sub completer, use this for the completions.
154
- if completer is not None:
155
- remaining_text = text[len(first_term) :].lstrip()
156
- move_cursor = len(text) - len(remaining_text) + stripped_len
157
-
158
- new_document = Document(
159
- remaining_text,
160
- cursor_position=document.cursor_position - move_cursor,
161
- )
162
-
163
- for c in completer.get_completions(new_document, complete_event):
164
- yield c
165
-
166
- # No space in the input: behave exactly like `WordCompleter`.
167
- else:
168
- completer = WordCompleter(
169
- list(self.options.keys()), ignore_case=self.ignore_case
170
- )
171
- for c in completer.get_completions(document, complete_event):
172
- yield c
adam/version.py CHANGED
@@ -1,5 +1,5 @@
1
1
  #!/usr/bin/env python
2
2
  # -*- coding: utf-8 -*-
3
3
 
4
- __version__ = "2.0.188" #: the working version
4
+ __version__ = "2.0.200" #: the working version
5
5
  __release__ = "1.0.0" #: the release version
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: kaqing
3
- Version: 2.0.188
3
+ Version: 2.0.200
4
4
  Summary: UNKNOWN
5
5
  Home-page: UNKNOWN
6
6
  License: UNKNOWN