kaqing 2.0.115-py3-none-any.whl → 2.0.172-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adam/__init__.py +0 -2
- adam/app_session.py +8 -11
- adam/batch.py +3 -3
- adam/checks/check_utils.py +14 -46
- adam/checks/cpu.py +7 -1
- adam/checks/cpu_metrics.py +52 -0
- adam/checks/disk.py +2 -3
- adam/columns/columns.py +3 -1
- adam/columns/cpu.py +3 -1
- adam/columns/cpu_metrics.py +22 -0
- adam/columns/memory.py +3 -4
- adam/commands/__init__.py +18 -0
- adam/commands/alter_tables.py +43 -47
- adam/commands/audit/audit.py +24 -25
- adam/commands/audit/audit_repair_tables.py +14 -17
- adam/commands/audit/audit_run.py +15 -23
- adam/commands/audit/show_last10.py +10 -13
- adam/commands/audit/show_slow10.py +10 -13
- adam/commands/audit/show_top10.py +10 -14
- adam/commands/audit/utils_show_top10.py +2 -3
- adam/commands/bash/__init__.py +5 -0
- adam/commands/bash/bash.py +8 -96
- adam/commands/bash/utils_bash.py +16 -0
- adam/commands/cat.py +14 -19
- adam/commands/cd.py +12 -100
- adam/commands/check.py +20 -21
- adam/commands/cli_commands.py +2 -3
- adam/commands/code.py +20 -23
- adam/commands/command.py +123 -39
- adam/commands/commands_utils.py +8 -17
- adam/commands/cp.py +33 -39
- adam/commands/cql/cql_completions.py +28 -10
- adam/commands/cql/cqlsh.py +10 -30
- adam/commands/cql/utils_cql.py +343 -0
- adam/commands/deploy/code_start.py +7 -10
- adam/commands/deploy/code_stop.py +4 -21
- adam/commands/deploy/code_utils.py +3 -3
- adam/commands/deploy/deploy.py +4 -27
- adam/commands/deploy/deploy_frontend.py +14 -17
- adam/commands/deploy/deploy_pg_agent.py +2 -5
- adam/commands/deploy/deploy_pod.py +65 -73
- adam/commands/deploy/deploy_utils.py +14 -24
- adam/commands/deploy/undeploy.py +4 -27
- adam/commands/deploy/undeploy_frontend.py +4 -7
- adam/commands/deploy/undeploy_pg_agent.py +5 -7
- adam/commands/deploy/undeploy_pod.py +11 -12
- adam/commands/devices/__init__.py +0 -0
- adam/commands/devices/device.py +118 -0
- adam/commands/devices/device_app.py +173 -0
- adam/commands/devices/device_auit_log.py +49 -0
- adam/commands/devices/device_cass.py +185 -0
- adam/commands/devices/device_export.py +86 -0
- adam/commands/devices/device_postgres.py +144 -0
- adam/commands/devices/devices.py +25 -0
- adam/commands/exit.py +1 -4
- adam/commands/export/clean_up_all_export_sessions.py +37 -0
- adam/commands/export/clean_up_export_sessions.py +51 -0
- adam/commands/export/drop_export_database.py +55 -0
- adam/commands/export/drop_export_databases.py +43 -0
- adam/commands/export/export.py +19 -26
- adam/commands/export/export_databases.py +174 -0
- adam/commands/export/export_handlers.py +71 -0
- adam/commands/export/export_select.py +48 -22
- adam/commands/export/export_select_x.py +54 -0
- adam/commands/export/export_use.py +19 -23
- adam/commands/export/exporter.py +353 -0
- adam/commands/export/import_session.py +40 -0
- adam/commands/export/importer.py +67 -0
- adam/commands/export/importer_athena.py +77 -0
- adam/commands/export/importer_sqlite.py +39 -0
- adam/commands/export/show_column_counts.py +54 -0
- adam/commands/export/show_export_databases.py +36 -0
- adam/commands/export/show_export_session.py +48 -0
- adam/commands/export/show_export_sessions.py +44 -0
- adam/commands/export/utils_export.py +223 -162
- adam/commands/help.py +1 -1
- adam/commands/intermediate_command.py +49 -0
- adam/commands/issues.py +11 -43
- adam/commands/kubectl.py +3 -6
- adam/commands/login.py +22 -24
- adam/commands/logs.py +3 -6
- adam/commands/ls.py +11 -128
- adam/commands/medusa/medusa.py +4 -22
- adam/commands/medusa/medusa_backup.py +20 -24
- adam/commands/medusa/medusa_restore.py +29 -33
- adam/commands/medusa/medusa_show_backupjobs.py +14 -18
- adam/commands/medusa/medusa_show_restorejobs.py +11 -18
- adam/commands/nodetool.py +6 -15
- adam/commands/param_get.py +11 -12
- adam/commands/param_set.py +9 -10
- adam/commands/postgres/postgres.py +41 -34
- adam/commands/postgres/postgres_context.py +57 -24
- adam/commands/postgres/postgres_ls.py +4 -8
- adam/commands/postgres/postgres_preview.py +5 -9
- adam/commands/postgres/psql_completions.py +1 -1
- adam/commands/postgres/utils_postgres.py +66 -0
- adam/commands/preview_table.py +5 -44
- adam/commands/pwd.py +14 -47
- adam/commands/reaper/reaper.py +4 -27
- adam/commands/reaper/reaper_forward.py +48 -55
- adam/commands/reaper/reaper_forward_session.py +6 -0
- adam/commands/reaper/reaper_forward_stop.py +10 -16
- adam/commands/reaper/reaper_restart.py +7 -14
- adam/commands/reaper/reaper_run_abort.py +11 -30
- adam/commands/reaper/reaper_runs.py +42 -57
- adam/commands/reaper/reaper_runs_abort.py +29 -49
- adam/commands/reaper/reaper_schedule_activate.py +11 -30
- adam/commands/reaper/reaper_schedule_start.py +10 -29
- adam/commands/reaper/reaper_schedule_stop.py +10 -29
- adam/commands/reaper/reaper_schedules.py +4 -14
- adam/commands/reaper/reaper_status.py +8 -16
- adam/commands/reaper/utils_reaper.py +196 -0
- adam/commands/repair/repair.py +4 -22
- adam/commands/repair/repair_log.py +5 -11
- adam/commands/repair/repair_run.py +27 -34
- adam/commands/repair/repair_scan.py +32 -38
- adam/commands/repair/repair_stop.py +5 -11
- adam/commands/report.py +27 -29
- adam/commands/restart.py +25 -26
- adam/commands/rollout.py +19 -24
- adam/commands/shell.py +10 -4
- adam/commands/show/show.py +10 -25
- adam/commands/show/show_cassandra_repairs.py +35 -0
- adam/commands/show/show_cassandra_status.py +32 -43
- adam/commands/show/show_cassandra_version.py +5 -18
- adam/commands/show/show_commands.py +19 -24
- adam/commands/show/show_host.py +1 -1
- adam/commands/show/show_login.py +20 -27
- adam/commands/show/show_processes.py +15 -19
- adam/commands/show/show_storage.py +10 -20
- adam/commands/watch.py +26 -29
- adam/config.py +5 -14
- adam/embedded_params.py +1 -1
- adam/log.py +4 -4
- adam/pod_exec_result.py +3 -3
- adam/repl.py +40 -103
- adam/repl_commands.py +32 -16
- adam/repl_state.py +57 -28
- adam/sql/sql_completer.py +44 -28
- adam/sql/sql_state_machine.py +89 -28
- adam/sso/authn_ad.py +6 -8
- adam/sso/authn_okta.py +4 -6
- adam/sso/cred_cache.py +3 -5
- adam/sso/idp.py +9 -12
- adam/utils.py +435 -6
- adam/utils_athena.py +57 -37
- adam/utils_audits.py +12 -14
- adam/utils_issues.py +32 -0
- adam/utils_k8s/app_clusters.py +13 -18
- adam/utils_k8s/app_pods.py +2 -0
- adam/utils_k8s/cassandra_clusters.py +22 -19
- adam/utils_k8s/cassandra_nodes.py +2 -2
- adam/utils_k8s/custom_resources.py +16 -17
- adam/utils_k8s/ingresses.py +2 -2
- adam/utils_k8s/jobs.py +7 -11
- adam/utils_k8s/k8s.py +87 -0
- adam/utils_k8s/pods.py +40 -77
- adam/utils_k8s/secrets.py +4 -4
- adam/utils_k8s/service_accounts.py +5 -4
- adam/utils_k8s/services.py +2 -2
- adam/utils_k8s/statefulsets.py +1 -12
- adam/utils_net.py +4 -4
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +46 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +137 -0
- adam/version.py +1 -1
- {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/METADATA +1 -1
- kaqing-2.0.172.dist-info/RECORD +230 -0
- adam/commands/app.py +0 -67
- adam/commands/app_ping.py +0 -44
- adam/commands/cql/cql_utils.py +0 -204
- adam/commands/devices.py +0 -147
- adam/commands/export/export_on_x.py +0 -76
- adam/commands/export/export_rmdbs.py +0 -65
- adam/commands/postgres/postgres_utils.py +0 -31
- adam/commands/reaper/reaper_session.py +0 -159
- adam/commands/show/show_app_actions.py +0 -56
- adam/commands/show/show_app_id.py +0 -47
- adam/commands/show/show_app_queues.py +0 -45
- adam/commands/show/show_repairs.py +0 -47
- adam/utils_export.py +0 -42
- kaqing-2.0.115.dist-info/RECORD +0 -203
- {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/WHEEL +0 -0
- {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/entry_points.txt +0 -0
- {kaqing-2.0.115.dist-info → kaqing-2.0.172.dist-info}/top_level.txt +0 -0
adam/commands/export/exporter.py (new file):

```diff
@@ -0,0 +1,353 @@
+from datetime import datetime
+import functools
+import re
+import time
+
+from adam.commands.cql.utils_cql import cassandra_table_names, run_cql, table_spec
+from adam.commands.export.export_databases import ExportDatabases
+from adam.commands.export.importer import Importer
+from adam.commands.export.importer_athena import AthenaImporter
+from adam.commands.export.importer_sqlite import SqliteImporter
+from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, find_files
+from adam.config import Config
+from adam.pod_exec_result import PodExecResult
+from adam.repl_state import ReplState
+from adam.utils import debug, parallelize, log2, ing, log_exc
+from adam.utils_k8s.cassandra_nodes import CassandraNodes
+from adam.utils_k8s.pods import log_prefix
+from adam.utils_k8s.statefulsets import StatefulSets
+
+class Exporter:
+    def export_tables(args: list[str], state: ReplState, export_only: bool = False, max_workers = 0) -> tuple[list[str], ExportSpec]:
+        if export_only:
+            log2('export-only for testing')
+
+        spec: ExportSpec = None
+        with log_exc(True):
+            spec = Exporter.export_spec(' '.join(args), state)
+
+            statuses, spec = Exporter._export_tables(spec, state, max_workers=max_workers, export_state='init')
+            if not statuses:
+                return statuses, spec
+
+            return Exporter._export_tables(spec, state, export_only, max_workers, 'pending_export')
+
+        return [], None
+
+    def export_spec(spec_str: str, state: ReplState):
+        spec: ExportSpec = ExportSpec.parse_specs(spec_str)
+
+        session = state.export_session
+        if session:
+            if spec.importer:
+                importer_from_session = Importer.importer_from_session(session)
+                if spec.importer != importer_from_session:
+                    if spec.importer == 'csv':
+                        prefix = Importer.prefix_from_importer(spec.importer)
+                        session = f'{prefix}{session[1:]}'
+                    else:
+                        raise Exception(f"You're currently using {importer_from_session} export database. You cannot export tables with {spec.importer} type database.")
+            else:
+                spec.importer = Importer.importer_from_session(session)
+
+            if spec.importer == 'athena' and not AthenaImporter.ping():
+                raise Exception('Credentials for Athena is not present.')
+        else:
+            if not spec.importer:
+                spec.importer = Config().get('export.default-importer', 'sqlite')
+
+            prefix = Importer.prefix_from_importer(spec.importer)
+            session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
+
+            if spec.importer == 'athena' and not AthenaImporter.ping():
+                raise Exception('Credentials for Athena is not present.')
+
+            if spec.importer != 'csv':
+                state.export_session = session
+
+        spec.session = session
+
+        return spec
+
+    def import_session(args: list[str], state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
+        import_spec: ImportSpec = None
+        with log_exc(True):
+            import_spec = Exporter.import_spec(' '.join(args), state)
+            tables, status_in_whole = ExportTableStatus.from_session(state.sts, state.pod, state.namespace, import_spec.session)
+            if status_in_whole == 'done':
+                log2(f'The session has been completely done - no more csv files are found.')
+                return [], ExportSpec(None, None, importer=import_spec.importer, tables=[])
+
+            spec = ExportSpec(None, None, importer=import_spec.importer, tables=[ExportTableSpec.from_status(table) for table in tables], session=import_spec.session)
+
+            return Exporter._export_tables(spec, state, max_workers=max_workers, export_state = 'import')
+
+        return [], None
+
+    def import_spec(spec_str: str, state: ReplState):
+        spec: ImportSpec = ImportSpec.parse_specs(spec_str)
+
+        session = state.export_session
+        if session:
+            if spec.importer:
+                importer = Importer.importer_from_session(state.export_session)
+                if spec.importer != importer:
+                    raise Exception(f"You're currently using {importer} export database. You cannot import to {spec.importer} type database.")
+            else:
+                spec.importer = Importer.importer_from_session(state.export_session)
+                if not spec.importer:
+                    spec.importer = Config().get('export.default-importer', 'sqlite')
+
+            if spec.importer == 'athena' and not AthenaImporter.ping():
+                raise Exception('Credentials for Athena is not present.')
+        else:
+            if not spec.importer:
+                spec.importer = Importer.importer_from_session(spec.session)
+
+            if spec.importer == 'csv':
+                spec.importer = Config().get('export.default-importer', 'sqlite')
+
+            if spec.importer == 'athena' and not AthenaImporter.ping():
+                raise Exception('Credentials for Athena is not present.')
+
+            prefix = Importer.prefix_from_importer(spec.importer)
+            session = f'{prefix}{spec.session[1:]}'
+            state.export_session = session
+
+        return spec
+
+    def _export_tables(spec: ExportSpec, state: ReplState, export_only = False, max_workers = 0, export_state = None) -> tuple[list[str], ExportSpec]:
+        if not spec.keyspace:
+            spec.keyspace = f'{state.namespace}_db'
+
+        if not spec.tables:
+            spec.tables = [ExportTableSpec.parse(t) for t in cassandra_table_names(state, keyspace=spec.keyspace)]
+
+        if not max_workers:
+            max_workers = Config().action_workers(f'export.{spec.importer}', 8)
+
+        if export_state == 'init':
+            CassandraNodes.exec(state.pod, state.namespace, f'rm -rf {csv_dir()}/{spec.session}_*', show_out=Config().is_debug(), shell='bash')
+
+        action = f'[{spec.session}] Exporting|Exported'
+        if export_state == 'init':
+            action = f'[{spec.session}] Preparing|Prepared'
+        elif export_state == 'import':
+            action = f'[{spec.session}] Importing|Imported'
+
+        with parallelize(spec.tables, max_workers, msg=action + ' {size} Cassandra tables') as exec:
+            return exec.map(lambda table: Exporter.export_table(table, state, spec.session, spec.importer, export_only, len(spec.tables) > 1, consistency=spec.consistency, export_state=export_state)), spec
+
+    def export_table(spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, consistency: str = None, export_state=None):
+        s: str = None
+
+        table, target_table, columns = Exporter.resove_table_n_columns(spec, state, include_ks_in_target=False, importer=importer)
+
+        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+        create_db = not state.export_session
+
+        if export_state == 'init':
+            Exporter.create_table_log(spec, state, session, table, target_table)
+            return 'table_log_created'
+        else:
+            if export_state == 'pending_export':
+                Exporter.export_to_csv(spec, state, session, table, target_table, columns, multi_tables=multi_tables, consistency=consistency)
+
+            log_files: list[str] = find_files(state.pod, state.namespace, f'{log_file}*')
+            if not log_files:
+                return s
+
+            log_file = log_files[0]
+
+            status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
+            while status.status != 'done':
+                if status.status == 'export_in_pregress':
+                    debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
+                    time.sleep(1)
+                elif status.status == 'exported':
+                    log_file = Exporter.rename_to_pending_import(spec, state, session, target_table)
+                    if importer == 'csv' or export_only:
+                        return 'pending_import'
+                elif status.status == 'pending_import':
+                    log_file, session = Exporter.import_from_csv(spec, state, session, importer, table, target_table, columns, multi_tables=multi_tables, create_db=create_db)
+
+                status = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
+
+            return status.status
+
+    def create_table_log(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str):
+        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+
+        CassandraNodes.exec(state.pod, state.namespace, f'rm -f {log_file}* && touch {log_file}', show_out=Config().is_debug(), shell='bash')
+
+        return table
+
+    def export_to_csv(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str, columns: str, multi_tables = True, consistency: str = None):
+        db = f'{session}_{target_table}'
+
+        CassandraNodes.exec(state.pod, state.namespace, f'mkdir -p {csv_dir()}/{db}', show_out=Config().is_debug(), shell='bash')
+        csv_file = f'{csv_dir()}/{db}/{table}.csv'
+        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+
+        suppress_ing_log = Config().is_debug() or multi_tables
+        queries = []
+        if consistency:
+            queries.append(f'CONSISTENCY {consistency}')
+        queries.append(f"COPY {spec.keyspace}.{table}({columns}) TO '{csv_file}' WITH HEADER = TRUE")
+        r: PodExecResult = ing(
+            f'[{session}] Dumping table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
+            lambda: run_cql(state, ';'.join(queries), show_out=Config().is_debug(), background=True, log_file=log_file),
+            suppress_log=suppress_ing_log)
+
+        return log_file
+
+    def rename_to_pending_import(spec: ExportTableSpec, state: ReplState, session: str, target_table: str):
+        log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
+        to = f'{log_file}.pending_import'
+
+        CassandraNodes.exec(state.pod, state.namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+
+        return to
+
+    def import_from_csv(spec: ExportTableSpec, state: ReplState, session: str, importer: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        im = AthenaImporter() if importer == 'athena' else SqliteImporter()
+        return im.import_from_csv(state.pod, state.namespace, state.export_session, session if session else state.export_session, spec.keyspace, table, target_table, columns, multi_tables, create_db)
+
+    def clear_export_session_cache():
+        Exporter.find_export_sessions.cache_clear()
+        Exporter.export_session_names.cache_clear()
+
+    @functools.lru_cache()
+    def export_session_names(sts: str, pod: str, namespace: str, importer: str = None, export_state = None):
+        if not sts or not namespace:
+            return []
+
+        if not pod:
+            pod = StatefulSets.pod_names(sts, namespace)[0]
+
+        if not pod:
+            return []
+
+        return [session for session, state in Exporter.find_export_sessions(pod, namespace, importer).items() if not export_state or state == export_state]
+
+    @functools.lru_cache()
+    def find_export_sessions(pod: str, namespace: str, importer: str = None, limit = 100):
+        sessions: dict[str, str] = {}
+
+        prefix = Importer.prefix_from_importer(importer)
+
+        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{prefix}*_*.log*')
+
+        if not log_files:
+            return {}
+
+        for log_file in log_files[:limit]:
+            m = re.match(f'{log_prefix()}-(.*?)_.*\.log?(.*)', log_file)
+            if m:
+                s = m.group(1)
+                state = m.group(2) # '', '.pending_import', '.done'
+                if state:
+                    state = state.strip('.')
+                else:
+                    state = 'in_export'
+
+                if s not in sessions:
+                    sessions[s] = state
+                elif sessions[s] == 'done' and state != 'done':
+                    sessions[s] = state
+
+        return sessions
+
+    def clean_up_all_sessions(sts: str, pod: str, namespace: str):
+        if not sts or not namespace:
+            return False
+
+        if not pod:
+            pod = StatefulSets.pod_names(sts, namespace)[0]
+
+        CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/*', show_out=Config().is_debug(), shell='bash')
+        CassandraNodes.exec(pod, namespace, f'rm -rf {log_prefix()}-*.log*', show_out=Config().is_debug(), shell='bash')
+
+        return True
+
+    def clean_up_sessions(sts: str, pod: str, namespace: str, sessions: list[str], max_workers = 0):
+        if not sessions:
+            return []
+
+        if not max_workers:
+            max_workers = Config().action_workers('export', 8)
+
+        with parallelize(sessions, max_workers, msg='Cleaning|Cleaned up {size} export sessions') as exec:
+            cnt_tuples = exec.map(lambda session: Exporter.clean_up_session(sts, pod, namespace, session, True))
+            csv_cnt = 0
+            log_cnt = 0
+            for (csv, log) in cnt_tuples:
+                csv_cnt += csv
+                log_cnt += log
+
+            return csv_cnt, log_cnt
+
+    def clean_up_session(sts: str, pod: str, namespace: str, session: str, multi_tables = True):
+        if not sts or not namespace:
+            return 0, 0
+
+        if not pod:
+            pod = StatefulSets.pod_names(sts, namespace)[0]
+
+        if not pod:
+            return 0, 0
+
+        csv_cnt = 0
+        log_cnt = 0
+
+        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{session}_*.log*')
+
+        for log_file in log_files:
+            m = re.match(f'{log_prefix()}-{session}_(.*?)\.(.*?)\.log.*', log_file)
+            if m:
+                table = m.group(2)
+
+                CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/{session}_{table}', show_out=not multi_tables, shell='bash')
+                csv_cnt += 1
+
+            CassandraNodes.exec(pod, namespace, f'rm -rf {log_file}', show_out=not multi_tables, shell='bash')
+            log_cnt += 1
+
+        return csv_cnt, log_cnt
+
+    def resove_table_n_columns(spec: ExportTableSpec, state: ReplState, include_ks_in_target = False, importer = 'sqlite'):
+        table = spec.table
+        columns = spec.columns
+        if not columns:
+            columns = Config().get(f'export.{importer}.columns', f'<keys>')
+
+        keyspaced_table = f'{spec.keyspace}.{spec.table}'
+        if columns == '<keys>':
+            columns = ','.join(table_spec(state, keyspaced_table, on_any=True).keys())
+        elif columns == '<row-key>':
+            columns = table_spec(state, keyspaced_table, on_any=True).row_key()
+        elif columns == '*':
+            columns = ','.join([c.name for c in table_spec(state, keyspaced_table, on_any=True).columns])
+
+        if not columns:
+            log2(f'ERROR: Empty columns on {table}.')
+            return table, None, None
+
+        target_table = spec.target_table if spec.target_table else table
+        if not include_ks_in_target and '.' in target_table:
+            target_table = target_table.split('.')[-1]
+
+        return table, target_table, columns
+
+    def drop_databases(sts: str, pod: str, namespace: str, db: str = None):
+        importer = None
+        if db:
+            importer = Importer.importer_from_session(db)
+
+        sessions_done = Exporter.export_session_names(sts, pod, namespace, importer=importer, export_state='done')
+        sessions = ExportDatabases.sessions_from_dbs(ExportDatabases.drop_export_dbs(db))
+        if sessions_done and sessions:
+            intersects = list(set(sessions_done) & set(sessions))
+            with ing(f'Cleaning up {len(intersects)} completed sessions'):
+                Exporter.clean_up_sessions(sts, pod, namespace, list(intersects))
+                Exporter.clear_export_session_cache()
```
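A note on the lifecycle encoded above: Exporter tracks each table's progress through suffixes on a per-table status-log file on the Cassandra pod (no suffix while the COPY runs, .pending_import once the CSV is dumped, .done after import), and find_export_sessions parses those suffixes back into states. A minimal sketch of that suffix-to-state mapping, with `qing` standing in for whatever log_prefix() actually returns:

```python
# Sketch only: recover a table's state from its status-log filename, the way
# find_export_sessions does. The 'qing' log prefix and session id are hypothetical.
import re

def state_from_log_file(log_file: str) -> str:
    m = re.match(r'.*\.log\.?(.*)$', log_file)
    suffix = m.group(1) if m else ''
    return suffix or 'in_export'

assert state_from_log_file('qing-s50914_ns_db.users.log') == 'in_export'
assert state_from_log_file('qing-s50914_ns_db.users.log.pending_import') == 'pending_import'
assert state_from_log_file('qing-s50914_ns_db.users.log.done') == 'done'
```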
adam/commands/export/import_session.py (new file):

```diff
@@ -0,0 +1,40 @@
+from adam.commands.command import Command
+from adam.commands.export.export_handlers import export
+from adam.commands.export.exporter import Exporter
+from adam.repl_state import ReplState, RequiredState
+
+class ImportSession(Command):
+    COMMAND = 'import session'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ImportSession, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ImportSession.COMMAND
+
+    def required(self):
+        return RequiredState.CLUSTER_OR_POD
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            with export(state) as exporter:
+                return exporter.import_sesion(args)
+
+    def completion(self, state: ReplState):
+        # warm up cache
+        Exporter.export_session_names(state.sts, state.pod, state.namespace)
+        Exporter.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
+
+        return {}
+
+    def help(self, _: ReplState):
+        return f'{ImportSession.COMMAND} <export-session-name>\t import files in session to Athena or SQLite'
```
adam/commands/export/importer.py (new file):

```diff
@@ -0,0 +1,67 @@
+from abc import abstractmethod
+
+from adam.commands.export.utils_export import csv_dir
+from adam.config import Config
+from adam.utils import ing
+from adam.utils_k8s.cassandra_nodes import CassandraNodes
+from adam.utils_k8s.pods import log_prefix
+
+class Importer:
+    @abstractmethod
+    def prefix(self):
+        pass
+
+    @abstractmethod
+    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        pass
+
+    def move_to_done(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, target_table: str):
+        log_file = f'{log_prefix()}-{from_session}_{keyspace}.{target_table}.log.pending_import'
+
+        to = f'{log_prefix()}-{to_session}_{keyspace}.{target_table}.log.done'
+
+        CassandraNodes.exec(pod, namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+
+        return to, to_session
+
+    def prefix_adjusted_session(self, session: str):
+        if not session.startswith(self.prefix()):
+            return f'{self.prefix()}{session[1:]}'
+
+        return session
+
+    def remove_csv(self, pod: str, namespace: str, session: str, table: str, target_table: str, multi_tables = True):
+        with ing(f'[{session}] Cleaning up temporary files', suppress_log=multi_tables):
+            CassandraNodes.exec(pod, namespace, f'rm -rf {self.csv_file(session, table, target_table)}', show_out=Config().is_debug(), shell='bash')
+
+    def db(self, session: str, keyspace: str):
+        return f'{session}_{keyspace}'
+
+    def csv_file(self, session: str, table: str, target_table: str):
+        return f'{csv_dir()}/{session}_{target_table}/{table}.csv'
+
+    def prefix_from_importer(importer: str = ''):
+        if not importer:
+            return ''
+
+        prefix = 's'
+
+        if importer == 'athena':
+            prefix = 'e'
+        elif importer == 'csv':
+            prefix = 'c'
+
+        return prefix
+
+    def importer_from_session(session: str):
+        if not session:
+            return None
+
+        importer = 'csv'
+
+        if session.startswith('s'):
+            importer = 'sqlite'
+        elif session.startswith('e'):
+            importer = 'athena'
+
+        return importer
```
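The session-naming convention above does double duty: the first character of a session name selects the importer backend ('s' for SQLite, 'e' for Athena, 'c' for CSV-only), and the rest is a truncated timestamp (datetime.now().strftime('%Y%m%d%H%M%S')[3:] in Exporter.export_spec). A small round-trip sketch, with a hypothetical session id:

```python
# Mirrors Importer.prefix_from_importer / importer_from_session above;
# the session id itself is hypothetical.
def prefix_from_importer(importer: str = '') -> str:
    if not importer:
        return ''
    return {'athena': 'e', 'csv': 'c'}.get(importer, 's')

def importer_from_session(session: str) -> str:
    if not session:
        return None
    return {'s': 'sqlite', 'e': 'athena'}.get(session[0], 'csv')

session = 's50914120000'  # 's' + truncated timestamp
assert importer_from_session(session) == 'sqlite'
# Re-prefixing a session switches its backend while keeping the timestamp:
assert prefix_from_importer('athena') + session[1:] == 'e50914120000'
```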
adam/commands/export/importer_athena.py (new file):

```diff
@@ -0,0 +1,77 @@
+import boto3
+
+from adam.commands.export.importer import Importer
+from adam.commands.export.utils_export import GeneratorStream
+from adam.config import Config
+from adam.utils import debug, log2, ing
+from adam.utils_athena import Athena
+from adam.utils_k8s.pods import Pods
+
+class AthenaImporter(Importer):
+    def ping():
+        session = boto3.session.Session()
+        credentials = session.get_credentials()
+
+        return credentials is not None
+
+    def prefix(self):
+        return 'e'
+
+    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        csv_file = self.csv_file(from_session, table, target_table)
+        db = self.db(to_session, keyspace)
+
+        succeeded = False
+        try:
+            bucket = Config().get('export.bucket', 'c3.ops--qing')
+
+            with ing(f'[{to_session}] Uploading to S3', suppress_log=multi_tables):
+                bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
+
+                s3 = boto3.client('s3')
+                s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{db}/{keyspace}/{target_table}/{table}.csv')
+
+            msg: str = None
+            if create_db:
+                msg = f"[{to_session}] Creating database {db}"
+            else:
+                msg = f"[{to_session}] Creating table {target_table}"
+            with ing(msg, suppress_log=multi_tables):
+                query = f'CREATE DATABASE IF NOT EXISTS {db};'
+                debug(query)
+                Athena.query(query, 'default')
+
+                query = f'DROP TABLE IF EXISTS {target_table};'
+                debug(query)
+                Athena.query(query, db)
+
+                athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
+                query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
+                    f' {athena_columns})\n' + \
+                    "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
+                    'WITH SERDEPROPERTIES (\n' + \
+                    ' "separatorChar" = ",",\n' + \
+                    ' "quoteChar" = "\\"")\n' + \
+                    f"LOCATION 's3://{bucket}/export/{db}/{keyspace}/{target_table}'\n" + \
+                    'TBLPROPERTIES ("skip.header.line.count"="1");'
+                debug(query)
+                try:
+                    Athena.query(query, db)
+                except Exception as e:
+                    log2(f'*** Failed query:\n{query}')
+                    raise e
+
+            to, _ = self.move_to_done(pod, namespace, to_session, from_session, keyspace, target_table)
+
+            succeeded = True
+
+            return to, to_session
+        finally:
+            if succeeded:
+                self.remove_csv(pod, namespace, from_session, table, target_table, multi_tables)
+                Athena.clear_cache()
+
+                if not multi_tables:
+                    query = f'select * from {target_table} limit 10'
+                    log2(query)
+                    Athena.run_query(query, db)
```
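For reference, the DDL assembled above declares every CSV column as string and lays the table over the uploaded object with OpenCSVSerde. With hypothetical names (table users, columns id,name, database e50914_ns_db, keyspace ns_db, bucket my-bucket), it renders to:

```python
# Rendered example of the CREATE EXTERNAL TABLE built above; all names hypothetical.
print('''CREATE EXTERNAL TABLE IF NOT EXISTS users(
 id string, name string)
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
WITH SERDEPROPERTIES (
 "separatorChar" = ",",
 "quoteChar" = "\\"")
LOCATION 's3://my-bucket/export/e50914_ns_db/ns_db/users'
TBLPROPERTIES ("skip.header.line.count"="1");''')
```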
adam/commands/export/importer_sqlite.py (new file):

```diff
@@ -0,0 +1,39 @@
+import pandas
+
+from adam.commands.export.importer import Importer
+from adam.commands.export.utils_export import GeneratorStream
+from adam.utils import log2, ing
+from adam.utils_k8s.pods import Pods
+from adam.utils_sqlite import SQLite, sqlite
+
+class SqliteImporter(Importer):
+    def prefix(self):
+        return 's'
+
+    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        csv_file = self.csv_file(from_session, table, target_table)
+
+        succeeded = False
+        with sqlite(to_session, keyspace) as conn:
+            try:
+                with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
+                    bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
+                    df = pandas.read_csv(GeneratorStream(bytes))
+
+                    df.to_sql(target_table, conn, index=False, if_exists='replace')
+
+                to, _ = self.move_to_done(pod, namespace, to_session, from_session, keyspace, target_table)
+
+                succeeded = True
+
+                return to, to_session
+            finally:
+                if succeeded:
+                    self.remove_csv(pod, namespace, from_session, table, target_table, multi_tables)
+                    SQLite.clear_cache()
+
+                    if not multi_tables:
+                        with sqlite(to_session) as conn:
+                            query = f'select * from {keyspace}.{target_table} limit 10'
+                            log2(query)
+                            SQLite.run_query_with_conn(conn, query)
```
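The SQLite path is the simplest of the importers: stream the CSV off the pod, load it with pandas, and let DataFrame.to_sql create or replace the target table. A self-contained local equivalent (file and table names hypothetical):

```python
# Local sketch of the same import step, minus the pod streaming and session plumbing.
import sqlite3

import pandas

df = pandas.read_csv('users.csv')  # in the diff: GeneratorStream(Pods.read_file(...))
with sqlite3.connect('s50914120000_ns_db.db') as conn:
    df.to_sql('users', conn, index=False, if_exists='replace')
```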
adam/commands/export/show_column_counts.py (new file):

```diff
@@ -0,0 +1,54 @@
+from adam.commands.command import Command
+from adam.commands.export.export_databases import ExportDatabases
+from adam.config import Config
+from adam.repl_state import ReplState, RequiredState
+from adam.utils import log2
+
+class ShowColumnCounts(Command):
+    COMMAND = 'show column counts on'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ShowColumnCounts, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ShowColumnCounts.COMMAND
+
+    def required(self):
+        return RequiredState.EXPORT_DB
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            if not args:
+                if state.in_repl:
+                    log2('Use a SQL statement.')
+                else:
+                    log2('* SQL statement is missing.')
+
+                    Command.display_help()
+
+                return 'command-missing'
+
+            table = args[0]
+            query = Config().get(f'export.column_counts_query', 'select id, count(id) as columns from {table} group by id')
+            query = query.replace('{table}', table)
+            ExportDatabases.run_query(query, state.export_session)
+
+            return state
+
+    def completion(self, state: ReplState):
+        if not state.export_session:
+            return {}
+
+        return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)})
+
+    def help(self, _: ReplState):
+        return f'{ShowColumnCounts.COMMAND} <export-table-name>\t show column count per id'
```
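The query ShowColumnCounts runs is a plain-text template read from config key export.column_counts_query, with {table} substituted by str.replace; for a hypothetical table users the default template expands to:

```python
# Default template from the diff; 'users' is a hypothetical table name.
template = 'select id, count(id) as columns from {table} group by id'
print(template.replace('{table}', 'users'))
# -> select id, count(id) as columns from users group by id
```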
adam/commands/export/show_export_databases.py (new file):

```diff
@@ -0,0 +1,36 @@
+from adam.commands.command import Command
+from adam.commands.devices.device_export import DeviceExport
+from adam.repl_state import ReplState
+
+class ShowExportDatabases(Command):
+    COMMAND = 'show export databases'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ShowExportDatabases, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ShowExportDatabases.COMMAND
+
+    def required(self):
+        return [ReplState.C, ReplState.X]
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            DeviceExport().show_export_databases()
+
+            return state
+
+    def completion(self, state: ReplState):
+        return DeviceExport().ls_completion(ShowExportDatabases.COMMAND, state, default = super().completion(state))
+
+    def help(self, _: ReplState):
+        return f'{ShowExportDatabases.COMMAND}\t list export databases'
```