kaqing 2.0.188-py3-none-any.whl → 2.0.211-py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Potentially problematic release.
This version of kaqing might be problematic.
- adam/batch.py +7 -7
- adam/commands/app/utils_app.py +1 -1
- adam/commands/bash/bash.py +1 -1
- adam/commands/bash/utils_bash.py +1 -1
- adam/commands/cassandra/__init__.py +0 -0
- adam/commands/command.py +1 -1
- adam/commands/commands_utils.py +8 -13
- adam/commands/{alter_tables.py → cql/alter_tables.py} +1 -1
- adam/commands/cql/completions_c.py +1 -0
- adam/commands/cql/utils_cql.py +14 -13
- adam/commands/debug/__init__.py +0 -0
- adam/commands/debug/debug.py +22 -0
- adam/commands/debug/debug_completes.py +35 -0
- adam/commands/debug/debug_timings.py +35 -0
- adam/commands/devices/device.py +1 -1
- adam/commands/devices/devices.py +1 -1
- adam/commands/download_cassandra_log.py +45 -0
- adam/commands/export/export_databases.py +13 -8
- adam/commands/export/export_sessions.py +12 -11
- adam/commands/export/exporter.py +140 -53
- adam/commands/export/import_session.py +0 -4
- adam/commands/export/importer.py +11 -11
- adam/commands/export/importer_athena.py +15 -6
- adam/commands/export/importer_sqlite.py +19 -8
- adam/commands/export/utils_export.py +37 -15
- adam/commands/generate_report.py +52 -0
- adam/commands/medusa/medusa_restore.py +0 -16
- adam/commands/nodetool.py +1 -1
- adam/commands/os/__init__.py +0 -0
- adam/commands/postgres/postgres_databases.py +2 -3
- adam/commands/postgres/postgres_ls.py +1 -1
- adam/commands/postgres/utils_postgres.py +2 -1
- adam/commands/preview_table.py +1 -1
- adam/commands/restart_cluster.py +47 -0
- adam/commands/restart_node.py +51 -0
- adam/commands/restart_nodes.py +47 -0
- adam/commands/show/show_cassandra_status.py +3 -10
- adam/commands/show/show_cli_commands.py +1 -1
- adam/commands/show/show_processes.py +1 -1
- adam/commands/show/show_storage.py +2 -1
- adam/config.py +4 -6
- adam/embedded_params.py +1 -1
- adam/repl.py +5 -3
- adam/repl_commands.py +23 -17
- adam/repl_session.py +4 -3
- adam/repl_state.py +6 -0
- adam/sql/async_executor.py +44 -0
- adam/sql/lark_completer.py +6 -4
- adam/sql/qingl.lark +1076 -0
- adam/sso/cred_cache.py +2 -5
- adam/utils.py +206 -83
- adam/utils_k8s/app_clusters.py +11 -4
- adam/utils_k8s/app_pods.py +10 -5
- adam/utils_k8s/cassandra_clusters.py +8 -4
- adam/utils_k8s/cassandra_nodes.py +14 -5
- adam/utils_k8s/kube_context.py +1 -4
- adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +8 -2
- adam/utils_k8s/pods.py +83 -24
- adam/utils_local.py +78 -2
- adam/utils_repl/repl_completer.py +10 -89
- adam/utils_sqlite.py +3 -8
- adam/version.py +1 -1
- {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/METADATA +1 -1
- {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/RECORD +67 -65
- adam/commands/cat.py +0 -36
- adam/commands/cd.py +0 -41
- adam/commands/download_file.py +0 -47
- adam/commands/find_files.py +0 -51
- adam/commands/find_processes.py +0 -76
- adam/commands/head.py +0 -36
- adam/commands/logs.py +0 -37
- adam/commands/ls.py +0 -41
- adam/commands/report.py +0 -61
- adam/commands/restart.py +0 -60
- adam/commands/shell.py +0 -41
- {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/WHEEL +0 -0
- {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/entry_points.txt +0 -0
- {kaqing-2.0.188.dist-info → kaqing-2.0.211.dist-info}/top_level.txt +0 -0
adam/utils_k8s/cassandra_nodes.py CHANGED
@@ -1,17 +1,26 @@
 from adam.config import Config
 from adam.utils_k8s.pods import Pods
 from adam.utils_k8s.secrets import Secrets
-from adam.pod_exec_result import PodExecResult
+from adam.utils_k8s.pod_exec_result import PodExecResult
 from adam.repl_session import ReplSession
 
 # utility collection on cassandra nodes; methods are all static
 class CassandraNodes:
-    def exec(pod_name: str,
+    def exec(pod_name: str,
+             namespace: str,
+             command: str,
+             show_out = True,
+             throw_err = False,
+             shell = '/bin/sh',
+             backgrounded = False,
+             log_file = None,
+             history = True) -> PodExecResult:
         r = Pods.exec(pod_name, "cassandra", namespace, command, show_out = show_out, throw_err = throw_err, shell = shell, backgrounded = backgrounded, log_file=log_file)
 
-        if r and
-
-
+        if history and r and r.log_file:
+            entry = f':cat {r.log_file}'
+
+            ReplSession().append_history(entry)
 
         return r
 
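The rewritten CassandraNodes.exec now records a `:cat <log_file>` entry in the REPL history whenever the underlying exec produced a log file. Below is a minimal standalone sketch of that behavior; FakeExecResult and FakeHistory are simplified stand-ins for the package's PodExecResult and ReplSession, which are not reproduced here.

# Simplified stand-ins (assumptions, not the real adam classes) used to
# illustrate the history-recording step added to CassandraNodes.exec.
from dataclasses import dataclass

@dataclass
class FakeExecResult:
    stdout: str
    log_file: str | None = None

class FakeHistory:
    def __init__(self):
        self.entries: list[str] = []

    def append_history(self, entry: str):
        self.entries.append(entry)

def record_log_in_history(result: FakeExecResult, history: FakeHistory, record: bool = True) -> FakeExecResult:
    # mirrors the added lines: if history and r and r.log_file -> append ':cat <log_file>'
    if record and result and result.log_file:
        history.append_history(f':cat {result.log_file}')
    return result

history = FakeHistory()
record_log_in_history(FakeExecResult('done', '/tmp/qing/01121530-0.log'), history)
print(history.entries)  # [':cat /tmp/qing/01121530-0.log']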
adam/utils_k8s/kube_context.py CHANGED
@@ -102,7 +102,4 @@ class KubeContext:
         return name if re.match(r"^(?!pg-).*-k8spg-.*$", name) else None
 
     def show_out(s: bool):
-        return s or Config().is_debug()
-
-    def show_parallelism():
-        return Config().get('debugs.show-parallelism', False)
+        return s or Config().is_debug()
adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} RENAMED
@@ -1,8 +1,8 @@
 import yaml
 
-from adam.utils import log_exc
+from adam.utils import ExecResult, log_exc
 
-class PodExecResult:
+class PodExecResult(ExecResult):
     # {
     #     'metadata': {},
     #     'status': 'Failure',
@@ -34,6 +34,12 @@ class PodExecResult:
 
         return code
 
+    def cat_log_file_cmd(self):
+        if self.pod and self.log_file:
+            return f'@{self.pod} cat {self.log_file}'
+
+        return None
+
     def __str__(self):
         return f'{"OK" if self.exit_code() == 0 else self.exit_code()} {self.command}'
 
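The new cat_log_file_cmd helper turns a pod exec result into the REPL command that would show its log. The sketch below assumes a minimal ExecResult base class; the real adam.utils.ExecResult is not shown in this diff.

# Minimal sketch of the PodExecResult(ExecResult) change and the new
# cat_log_file_cmd helper. The ExecResult base here is an assumption.
class ExecResult:
    def exit_code(self) -> int:
        raise NotImplementedError

class PodExecResult(ExecResult):
    def __init__(self, stdout, stderr, command, error_output=None, pod=None, log_file=None):
        self.stdout, self.stderr, self.command = stdout, stderr, command
        self.error_output, self.pod, self.log_file = error_output, pod, log_file

    def exit_code(self) -> int:
        # simplified: no error output means success
        return 0 if not self.error_output else 1

    def cat_log_file_cmd(self):
        # mirrors the added method: build a '@<pod> cat <log_file>' REPL command
        if self.pod and self.log_file:
            return f'@{self.pod} cat {self.log_file}'
        return None

r = PodExecResult('', '', 'nodetool status', pod='cassandra-dc1-0', log_file='/tmp/qing/01121530-0.log')
print(r.cat_log_file_cmd())  # @cassandra-dc1-0 cat /tmp/qing/01121530-0.log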
adam/utils_k8s/pods.py CHANGED
@@ -1,6 +1,8 @@
 from collections.abc import Callable
 from datetime import datetime
 import os
+import re
+import subprocess
 import sys
 import time
 from typing import TypeVar
@@ -9,10 +11,11 @@ from kubernetes.stream import stream
 from kubernetes.stream.ws_client import ERROR_CHANNEL, WSClient
 
 from adam.config import Config
+from adam.repl_session import ReplSession
 from adam.utils_k8s.volumes import ConfigMapMount
-from adam.pod_exec_result import PodExecResult
-from adam.utils import GeneratorStream, ParallelMapHandler, log2, debug, log_exc
-from adam.utils_local import
+from adam.utils_k8s.pod_exec_result import PodExecResult
+from adam.utils import GeneratorStream, ParallelMapHandler, log2, debug, log_dir, log_exc
+from adam.utils_local import local_downloads_dir
 from .kube_context import KubeContext
 
 from websocket._core import WebSocket
@@ -66,6 +69,33 @@ class Pods:
 
         show_out = KubeContext.show_out(show_out)
 
+        if backgrounded or command.endswith(' &'):
+            command = command.strip(' &')
+
+            log_all_file = None
+            log_pod_file = None
+            if log_file:
+                log_pod_file = Pods.log_file_from_template(log_file, pod_name=pod_name)
+                if (a := Pods.log_file_from_template(log_file, pod_name='all')) != log_file:
+                    log_all_file = a
+            else:
+                log_pod_file = Pods.log_file(command, pod_name=pod_name)
+
+            if env_prefix:
+                command = f'{env_prefix} {command}'
+
+            command = command.replace('"', '\\"')
+            cmd = f'nohup kubectl exec {pod_name} -c {container} -- {shell} -c "{command} &" > {log_pod_file} 2>&1 &'
+            if log_all_file:
+                cmd = f'{cmd} >> {log_all_file}'
+
+            if show_out:
+                log2(cmd)
+
+            result = subprocess.run(cmd, capture_output=True, text=True, shell=True)
+
+            return PodExecResult(result.stdout, result.stderr, cmd, None, pod=pod_name, log_file=log_pod_file)
+
         api = client.CoreV1Api()
 
         tty = True
@@ -73,22 +103,22 @@
         if env_prefix:
             exec_command = [shell, '-c', f'{env_prefix} {command}']
 
-        if backgrounded or command.endswith(' &'):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # if backgrounded or command.endswith(' &'):
+        #     # should be false for starting a background process
+        #     tty = False
+
+        #     if Config().get('repl.background-process.auto-nohup', True):
+        #         command = command.strip(' &')
+        #         cmd_name = ''
+        #         if command.startswith('nodetool '):
+        #             cmd_name = f".{'_'.join(command.split(' ')[5:])}"
+
+        #         if not log_file:
+        #             log_file = f'{log_prefix()}-{datetime.now().strftime("%d%H%M%S")}{cmd_name}.log'
+        #         command = f"nohup {command} > {log_file} 2>&1 &"
+        #         if env_prefix:
+        #             command = f'{env_prefix} {command}'
+        #         exec_command = [shell, '-c', command]
 
         k_command = f'kubectl exec {pod_name} -c {container} -n {namespace} -- {shell} -c "{command}"'
         debug(k_command)
@@ -141,6 +171,36 @@ class Pods:
 
         return PodExecResult("".join(stdout), "".join(stderr), k_command, error_output, pod=pod_name, log_file=log_file)
 
+    def log_file(command: str, pod_name: str = None, dt: datetime = None):
+        cmd_name = ''
+        if command.startswith('nodetool '):
+            command = command.strip(' &')
+            cmd_name = f".{'_'.join(command.split(' ')[5:])}"
+
+        pod_suffix = '{pod}'
+        if pod_name:
+            pod_suffix = pod_name
+            if groups := re.match(r'.*-(.*)', pod_name):
+                pod_suffix = f'-{groups[1]}'
+
+        return f'{log_dir()}/{Pods.job_id()}{cmd_name}{pod_suffix}.log'
+
+    def job_id(dt: datetime = None):
+        if not dt:
+            dt = datetime.now()
+
+        return dt.strftime("%d%H%M%S")
+
+    def log_file_from_template(log_file: str, pod_name: str):
+        pod_suffix = pod_name
+        if pod_name and (groups := re.match(r'.*-(.*)', pod_name)):
+            pod_suffix = f'-{groups[1]}'
+
+        if not pod_suffix.startswith('-'):
+            pod_suffix = f'-{pod_suffix}'
+
+        return log_file.replace('{pod}', pod_suffix)
+
     def read_file(pod_name: str, container: str, namespace: str, file_path: str):
         v1 = client.CoreV1Api()
 
@@ -175,13 +235,15 @@
 
     def download_file(pod_name: str, container: str, namespace: str, from_path: str, to_path: str = None):
         if not to_path:
-            to_path = f'{
+            to_path = f'{local_downloads_dir()}/{os.path.basename(from_path)}'
 
         bytes = Pods.read_file(pod_name, container, namespace, from_path)
         with open(to_path, 'wb') as f:
            for item in GeneratorStream(bytes):
                f.write(item)
 
+        ReplSession().append_history(f':cat {to_path}')
+
        return to_path
 
     def get_container(namespace: str, pod_name: str, container_name: str):
@@ -292,7 +354,4 @@
         log2(' Timed Out')
 
     def completed(namespace: str, pod_name: str):
-        return Pods.get(namespace, pod_name).status.phase in ['Succeeded', 'Failed']
-
-    def log_prefix():
-        return Config().get('log-prefix', '/tmp/qing')
+        return Pods.get(namespace, pod_name).status.phase in ['Succeeded', 'Failed']
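The biggest change in Pods.exec is the new backgrounded path: instead of streaming through the Kubernetes API, the command is wrapped in a local `nohup kubectl exec ... &` invocation whose output goes to a per-pod log file named from job_id() and the pod's ordinal suffix. Below is a standalone sketch of that command construction; LOG_DIR stands in for adam.utils.log_dir() (an assumption) and nothing is actually executed.

# Standalone sketch of the backgrounded-exec command construction added to
# Pods.exec; the kubectl command string and log-file naming mirror the diff.
import re
from datetime import datetime

LOG_DIR = '/tmp/qing'  # assumption: placeholder for log_dir()

def job_id(dt: datetime | None = None) -> str:
    # day-hour-minute-second stamp used to group a run's log files
    return (dt or datetime.now()).strftime('%d%H%M%S')

def pod_suffix(pod_name: str) -> str:
    # keep only the trailing ordinal, e.g. 'cassandra-dc1-2' -> '-2'
    groups = re.match(r'.*-(.*)', pod_name)
    return f'-{groups[1]}' if groups else pod_name

def backgrounded_cmd(pod_name: str, container: str, shell: str, command: str) -> tuple[str, str]:
    command = command.strip(' &')
    log_pod_file = f'{LOG_DIR}/{job_id()}{pod_suffix(pod_name)}.log'
    escaped = command.replace('"', '\\"')
    cmd = (f'nohup kubectl exec {pod_name} -c {container} -- {shell} -c "{escaped} &" '
           f'> {log_pod_file} 2>&1 &')
    return cmd, log_pod_file

cmd, log_file = backgrounded_cmd('cassandra-dc1-0', 'cassandra', '/bin/sh', 'nodetool repair &')
print(cmd)
print(log_file)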
adam/utils_local.py CHANGED
@@ -1,4 +1,80 @@
+import os
+import subprocess
+import traceback
+
 from adam.config import Config
+from adam.utils import ExecResult, creating_dir, debug
+
+def local_qing_dir():
+    return creating_dir(Config().get('local-qing-dir', '/tmp/qing-db/q'))
+
+def local_downloads_dir():
+    return creating_dir(Config().get('local-downloads-dir', '/tmp/qing-db/q/downloads'))
+
+class LocalExecResult(ExecResult):
+    def __init__(self, stdout: str, stderr: str, command: str = None, code = 0, log_file: str = None):
+        self.stdout: str = stdout
+        self.stderr: str = stderr
+        self.command: str = command
+        self.code = code
+        self.pod = 'local'
+        self.log_file = log_file
+
+    def exit_code(self) -> int:
+        return self.code
+
+    def cat_log_file_cmd(self):
+        if self.log_file:
+            return f':cat {self.log_file}'
+
+        return None
+
+    def __str__(self):
+        return f'{"OK" if self.exit_code() == 0 else self.exit_code()} {self.command}'
+
+    def __audit_extra__(self):
+        return self.log_file if self.log_file else None
+
+def local_exec(cmd: list[str], shell=False, show_out=False):
+    stdout = ''
+    stderr = ''
+    returncode = 0
+
+    try:
+        if show_out:
+            debug(' '.join(cmd))
+
+        r = subprocess.run(cmd, capture_output=True, text=True, shell=shell)
+        stdout = r.stdout
+        stderr = r.stderr
+        returncode = r.returncode
+    except FileNotFoundError as e:
+        pass
+
+    return LocalExecResult(stdout, stderr, ' '.join(cmd), returncode)
+
+def find_local_files(pattern: str = f'{local_qing_dir()}/*', file_type: str = None, max_depth = 0, mmin: int = 0):
+    # find . -maxdepth 1 -type f -name '*'
+    log_files = []
+    try:
+        dir = os.path.dirname(pattern)
+        base = os.path.basename(pattern)
+        cmd = ['find', dir]
+        if file_type:
+            cmd += ['-type', file_type]
+        if max_depth:
+            cmd += ['-maxdepth', str(max_depth)]
+        if mmin:
+            cmd += ['-mmin', f'-{mmin}']
+        cmd += ['-name', base]
+
+        stdout = local_exec(cmd, show_out=Config().is_debug()).stdout
+
+        for line in stdout.split('\n'):
+            line = line.strip(' \r')
+            if line:
+                log_files.append(line)
+    except:
+        traceback.print_exc()
 
-
-    return Config().get('local-tmp-dir', '/tmp/qing-db')
+    return log_files
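find_local_files assembles a `find` command with optional type, depth, and modification-time filters and runs it through the new local_exec wrapper. The sketch below mirrors that assembly with a plain subprocess call; the LocalExecResult wrapper and Config lookups are omitted for brevity.

# Minimal sketch of find_local_files: build a `find` invocation with optional
# -type/-maxdepth/-mmin filters and collect the matching paths.
import os
import subprocess

def find_local_files(pattern: str, file_type: str | None = None,
                     max_depth: int = 0, mmin: int = 0) -> list[str]:
    cmd = ['find', os.path.dirname(pattern) or '.']
    if file_type:
        cmd += ['-type', file_type]          # e.g. 'f' for regular files
    if max_depth:
        cmd += ['-maxdepth', str(max_depth)]
    if mmin:
        cmd += ['-mmin', f'-{mmin}']         # modified within the last <mmin> minutes
    cmd += ['-name', os.path.basename(pattern)]

    out = subprocess.run(cmd, capture_output=True, text=True).stdout
    return [line.strip(' \r') for line in out.split('\n') if line.strip(' \r')]

print(find_local_files('/tmp/*.log', file_type='f', max_depth=1, mmin=60))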
adam/utils_repl/repl_completer.py CHANGED
@@ -1,14 +1,11 @@
-from concurrent.futures import ThreadPoolExecutor
 import copy
-import inspect
 import re
-import threading
 import traceback
 from typing import Iterable, TypeVar, cast
-from prompt_toolkit.completion import CompleteEvent,
+from prompt_toolkit.completion import CompleteEvent, Completion, NestedCompleter, WordCompleter
 from prompt_toolkit.document import Document
 
-from adam.utils import
+from adam.utils import debug_complete, log2
 from adam.utils_repl.appendable_completer import AppendableCompleter
 
 import nest_asyncio
@@ -37,6 +34,8 @@ def merge_completions(dict1, dict2):
            target[key] = merge_completions(target[key], value)
        elif isinstance(target[key], AppendableCompleter):
            cast(AppendableCompleter, target[key]).append_completions(key, value)
+       elif isinstance(target[key], NestedCompleter):
+           cast(NestedCompleter, target[key]).options = merge_completions(cast(NestedCompleter, target[key]).options, value)
    elif isinstance(value, AppendableCompleter):
        if isinstance(target[key], dict):
            cast(AppendableCompleter, value).append_completions(key, target[key])
@@ -77,8 +76,12 @@ class ReplCompleter(NestedCompleter):
                    cursor_position=document.cursor_position - move_cursor,
                )
 
-
-
+                try:
+                    # potential thread racing
+                    for c in completer.get_completions(new_document, complete_event):
+                        yield c
+                except:
+                    pass
 
        # No space in the input: behave exactly like `WordCompleter`.
        else:
@@ -88,85 +91,3 @@ class ReplCompleter(NestedCompleter):
            )
            for c in completer.get_completions(document, complete_event):
                yield c
-
-lock = threading.Lock()
-in_queue = set()
-
-def preload(action: callable, log_key: str = None):
-    with lock:
-        if not LazyNestedCompleter.loop:
-            LazyNestedCompleter.loop = asyncio.new_event_loop()
-            LazyNestedCompleter.async_exec = ThreadPoolExecutor(max_workers=6, thread_name_prefix='async')
-            LazyNestedCompleter.loop.set_default_executor(LazyNestedCompleter.async_exec)
-
-    # some lib does not handle asyncio loop properly, as sync exec submit does not work, use another async loop
-    async def a():
-        try:
-            arg_needed = len(action.__code__.co_varnames)
-
-            if log_key:
-                with log_timing(log_key):
-                    r = action(None) if arg_needed else action()
-            else:
-                r = action(None) if arg_needed else action()
-            if inspect.isawaitable(r):
-                await r
-
-            in_queue.remove(log_key)
-        except Exception as e:
-            log2('preloading error', e, inspect.getsourcelines(action)[0][0])
-            traceback.print_exc()
-
-    if log_key not in in_queue:
-        in_queue.add(log_key)
-        LazyNestedCompleter.async_exec.submit(lambda: LazyNestedCompleter.loop.run_until_complete(a()))
-
-class LazyNestedCompleter(NestedCompleter):
-    loop: asyncio.AbstractEventLoop = None
-    async_exec: ThreadPoolExecutor = None
-
-    def __init__(self, name: str, options_lambda: callable, ignore_case: bool = True, auto: str = 'lazy') -> None:
-        super().__init__(None, ignore_case)
-        self.options_lambda = options_lambda
-        if auto == 'lazy':
-            preload(options_lambda, log_key=name)
-
-    def __repr__(self) -> str:
-        return "LazyNestedCompleter(%r, ignore_case=%r)" % (self.options, self.ignore_case)
-
-    def get_completions(
-        self, document: Document, complete_event: CompleteEvent
-    ) -> Iterable[Completion]:
-        if not self.options:
-            self.options = self.options_lambda()
-
-        # Split document.
-        text = document.text_before_cursor.lstrip()
-        stripped_len = len(document.text_before_cursor) - len(text)
-
-        # If there is a space, check for the first term, and use a
-        # subcompleter.
-        if " " in text:
-            first_term = text.split()[0]
-            completer = self.options.get(first_term)
-
-            # If we have a sub completer, use this for the completions.
-            if completer is not None:
-                remaining_text = text[len(first_term) :].lstrip()
-                move_cursor = len(text) - len(remaining_text) + stripped_len
-
-                new_document = Document(
-                    remaining_text,
-                    cursor_position=document.cursor_position - move_cursor,
-                )
-
-                for c in completer.get_completions(new_document, complete_event):
-                    yield c
-
-        # No space in the input: behave exactly like `WordCompleter`.
-        else:
-            completer = WordCompleter(
-                list(self.options.keys()), ignore_case=self.ignore_case
-            )
-            for c in completer.get_completions(document, complete_event):
-                yield c
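merge_completions now handles the case where a key already maps to a prompt_toolkit NestedCompleter by merging the incoming sub-options into that completer's options dict. A simplified, self-contained version of the merge is sketched below; it only handles flat sub-dictionaries, unlike the real helper.

# Simplified sketch of the NestedCompleter branch added to merge_completions:
# when target[key] is already a NestedCompleter, fold the new sub-options into
# its .options instead of overwriting the completer.
from prompt_toolkit.completion import NestedCompleter

def merge_completions(target: dict, extra: dict) -> dict:
    for key, value in extra.items():
        if key not in target:
            target[key] = value
        elif isinstance(target[key], dict) and isinstance(value, dict):
            target[key] = merge_completions(target[key], value)
        elif isinstance(target[key], NestedCompleter) and isinstance(value, dict):
            # the new branch from the diff, in simplified form
            target[key].options = merge_completions(dict(target[key].options), value)
        else:
            target[key] = value
    return target

base = {'show': NestedCompleter.from_nested_dict({'status': None})}
extra = {'show': {'storage': None}, 'restart': {'node': None}}
merged = merge_completions(base, extra)
print(sorted(merged['show'].options))  # ['status', 'storage']
print(sorted(merged))                  # ['restart', 'show']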
adam/utils_sqlite.py CHANGED
@@ -7,7 +7,7 @@ import sqlite3
 import pandas
 
 from adam.config import Config
-from adam.utils import tabulize, log, wait_log
+from adam.utils import creating_dir, tabulize, log, wait_log
 
 class CursorHandler:
     def __init__(self, conn: sqlite3.Connection):
@@ -53,7 +53,7 @@ class SQLite:
         return CursorHandler(conn)
 
     def local_db_dir():
-        return Config().get('export.sqlite.local-db-dir', '/tmp/qing-db')
+        return creating_dir(Config().get('export.sqlite.local-db-dir', '/tmp/qing-db/q/export/db'))
 
     def keyspace(database: str):
         return '_'.join(database.replace(".db", "").split('_')[1:])
@@ -95,8 +95,6 @@ class SQLite:
         conn.close()
 
     def connect(database: str, keyspace: str = None):
-        os.makedirs(SQLite.local_db_dir(), exist_ok=True)
-
         if keyspace:
             return sqlite3.connect(f'{SQLite.local_db_dir()}/{database}_{keyspace}.db')
         else:
@@ -131,7 +129,4 @@ class SQLite:
         return len(lines), log_file
 
     def query(conn, sql: str) -> tuple[str, str, list]:
-        return pandas.read_sql_query(sql, conn)
-
-    def log_prefix():
-        return Config().get('export.log-prefix', '/tmp/qing')
+        return pandas.read_sql_query(sql, conn)
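SQLite.local_db_dir now resolves its configured path through creating_dir(...), so callers such as connect no longer create the directory themselves. A sketch of that pattern follows; creating_dir's body and the QING_SQLITE_DIR lookup are assumptions, since only the call site appears in this diff.

# Sketch of the creating_dir(...) pattern: resolve the configured directory and
# ensure it exists before connecting. creating_dir's implementation and the
# QING_SQLITE_DIR variable are assumptions; only the call pattern is from the diff.
import os
import sqlite3

def creating_dir(path: str) -> str:
    os.makedirs(path, exist_ok=True)
    return path

def local_db_dir() -> str:
    return creating_dir(os.environ.get('QING_SQLITE_DIR', '/tmp/qing-db/q/export/db'))

def connect(database: str, keyspace: str | None = None) -> sqlite3.Connection:
    # simplified naming; the real SQLite.connect builds '<database>_<keyspace>.db'
    name = f'{database}_{keyspace}.db' if keyspace else f'{database}.db'
    return sqlite3.connect(f'{local_db_dir()}/{name}')

conn = connect('export', 'my_keyspace')
print(conn)
conn.close()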
adam/version.py CHANGED