kaqing 2.0.14__py3-none-any.whl → 2.0.189__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
Potentially problematic release.
This version of kaqing might be problematic. Click here for more details.
- adam/__init__.py +0 -2
- adam/app_session.py +9 -12
- adam/apps.py +20 -6
- adam/batch.py +16 -6
- adam/checks/check_utils.py +19 -49
- adam/checks/compactionstats.py +1 -1
- adam/checks/cpu.py +9 -3
- adam/checks/cpu_metrics.py +52 -0
- adam/checks/disk.py +3 -4
- adam/checks/gossip.py +1 -1
- adam/checks/memory.py +3 -3
- adam/checks/status.py +1 -1
- adam/columns/columns.py +3 -1
- adam/columns/cpu.py +3 -1
- adam/columns/cpu_metrics.py +22 -0
- adam/columns/memory.py +3 -4
- adam/commands/__init__.py +24 -0
- adam/commands/alter_tables.py +66 -0
- adam/commands/app/app.py +38 -0
- adam/commands/{app_ping.py → app/app_ping.py} +8 -14
- adam/commands/app/show_app_actions.py +49 -0
- adam/commands/{show → app}/show_app_id.py +9 -12
- adam/commands/{show → app}/show_app_queues.py +8 -14
- adam/commands/app/utils_app.py +106 -0
- adam/commands/audit/__init__.py +0 -0
- adam/commands/audit/audit.py +67 -0
- adam/commands/audit/audit_repair_tables.py +72 -0
- adam/commands/audit/audit_run.py +50 -0
- adam/commands/audit/completions_l.py +15 -0
- adam/commands/audit/show_last10.py +36 -0
- adam/commands/audit/show_slow10.py +36 -0
- adam/commands/audit/show_top10.py +36 -0
- adam/commands/audit/utils_show_top10.py +71 -0
- adam/commands/bash/__init__.py +5 -0
- adam/commands/bash/bash.py +36 -0
- adam/commands/bash/bash_completer.py +93 -0
- adam/commands/bash/utils_bash.py +16 -0
- adam/commands/cat.py +36 -0
- adam/commands/cd.py +14 -88
- adam/commands/check.py +18 -21
- adam/commands/cli_commands.py +11 -7
- adam/commands/clipboard_copy.py +87 -0
- adam/commands/code.py +57 -0
- adam/commands/command.py +220 -19
- adam/commands/commands_utils.py +28 -31
- adam/commands/cql/__init__.py +0 -0
- adam/commands/cql/completions_c.py +28 -0
- adam/commands/{cqlsh.py → cql/cqlsh.py} +13 -32
- adam/commands/cql/utils_cql.py +305 -0
- adam/commands/deploy/code_start.py +7 -10
- adam/commands/deploy/code_stop.py +4 -21
- adam/commands/deploy/code_utils.py +5 -5
- adam/commands/deploy/deploy.py +4 -40
- adam/commands/deploy/deploy_frontend.py +15 -18
- adam/commands/deploy/deploy_pg_agent.py +4 -7
- adam/commands/deploy/deploy_pod.py +74 -77
- adam/commands/deploy/deploy_utils.py +16 -26
- adam/commands/deploy/undeploy.py +4 -40
- adam/commands/deploy/undeploy_frontend.py +5 -8
- adam/commands/deploy/undeploy_pg_agent.py +7 -8
- adam/commands/deploy/undeploy_pod.py +16 -17
- adam/commands/devices/__init__.py +0 -0
- adam/commands/devices/device.py +149 -0
- adam/commands/devices/device_app.py +163 -0
- adam/commands/devices/device_auit_log.py +49 -0
- adam/commands/devices/device_cass.py +179 -0
- adam/commands/devices/device_export.py +87 -0
- adam/commands/devices/device_postgres.py +160 -0
- adam/commands/devices/devices.py +25 -0
- adam/commands/download_file.py +47 -0
- adam/commands/exit.py +1 -4
- adam/commands/export/__init__.py +0 -0
- adam/commands/export/clean_up_all_export_sessions.py +37 -0
- adam/commands/export/clean_up_export_sessions.py +39 -0
- adam/commands/export/completions_x.py +11 -0
- adam/commands/export/download_export_session.py +40 -0
- adam/commands/export/drop_export_database.py +39 -0
- adam/commands/export/drop_export_databases.py +37 -0
- adam/commands/export/export.py +37 -0
- adam/commands/export/export_databases.py +246 -0
- adam/commands/export/export_select.py +34 -0
- adam/commands/export/export_sessions.py +209 -0
- adam/commands/export/export_use.py +49 -0
- adam/commands/export/export_x_select.py +48 -0
- adam/commands/export/exporter.py +332 -0
- adam/commands/export/import_files.py +44 -0
- adam/commands/export/import_session.py +44 -0
- adam/commands/export/importer.py +81 -0
- adam/commands/export/importer_athena.py +148 -0
- adam/commands/export/importer_sqlite.py +67 -0
- adam/commands/export/show_column_counts.py +45 -0
- adam/commands/export/show_export_databases.py +39 -0
- adam/commands/export/show_export_session.py +39 -0
- adam/commands/export/show_export_sessions.py +37 -0
- adam/commands/export/utils_export.py +344 -0
- adam/commands/find_files.py +51 -0
- adam/commands/find_processes.py +76 -0
- adam/commands/head.py +36 -0
- adam/commands/help.py +14 -9
- adam/commands/intermediate_command.py +52 -0
- adam/commands/issues.py +14 -40
- adam/commands/kubectl.py +38 -0
- adam/commands/login.py +26 -25
- adam/commands/logs.py +5 -7
- adam/commands/ls.py +11 -115
- adam/commands/medusa/medusa.py +4 -46
- adam/commands/medusa/medusa_backup.py +22 -29
- adam/commands/medusa/medusa_restore.py +51 -49
- adam/commands/medusa/medusa_show_backupjobs.py +20 -21
- adam/commands/medusa/medusa_show_restorejobs.py +16 -21
- adam/commands/medusa/utils_medusa.py +15 -0
- adam/commands/nodetool.py +8 -17
- adam/commands/param_get.py +11 -14
- adam/commands/param_set.py +9 -13
- adam/commands/postgres/completions_p.py +22 -0
- adam/commands/postgres/postgres.py +49 -73
- adam/commands/postgres/postgres_databases.py +270 -0
- adam/commands/postgres/postgres_ls.py +4 -8
- adam/commands/postgres/postgres_preview.py +5 -9
- adam/commands/postgres/utils_postgres.py +79 -0
- adam/commands/preview_table.py +10 -69
- adam/commands/pwd.py +14 -43
- adam/commands/reaper/reaper.py +6 -49
- adam/commands/reaper/reaper_forward.py +49 -56
- adam/commands/reaper/reaper_forward_session.py +6 -0
- adam/commands/reaper/reaper_forward_stop.py +10 -16
- adam/commands/reaper/reaper_restart.py +8 -15
- adam/commands/reaper/reaper_run_abort.py +8 -33
- adam/commands/reaper/reaper_runs.py +43 -58
- adam/commands/reaper/reaper_runs_abort.py +29 -49
- adam/commands/reaper/reaper_schedule_activate.py +14 -33
- adam/commands/reaper/reaper_schedule_start.py +9 -33
- adam/commands/reaper/reaper_schedule_stop.py +9 -33
- adam/commands/reaper/reaper_schedules.py +4 -14
- adam/commands/reaper/reaper_status.py +8 -16
- adam/commands/reaper/utils_reaper.py +203 -0
- adam/commands/repair/repair.py +4 -46
- adam/commands/repair/repair_log.py +6 -12
- adam/commands/repair/repair_run.py +29 -36
- adam/commands/repair/repair_scan.py +33 -41
- adam/commands/repair/repair_stop.py +6 -13
- adam/commands/report.py +25 -21
- adam/commands/restart.py +27 -28
- adam/commands/rollout.py +20 -25
- adam/commands/shell.py +12 -4
- adam/commands/show/show.py +15 -46
- adam/commands/show/show_adam.py +3 -3
- adam/commands/show/show_cassandra_repairs.py +37 -0
- adam/commands/show/show_cassandra_status.py +48 -52
- adam/commands/show/show_cassandra_version.py +5 -18
- adam/commands/show/show_cli_commands.py +56 -0
- adam/commands/show/show_host.py +33 -0
- adam/commands/show/show_login.py +23 -27
- adam/commands/show/show_params.py +2 -5
- adam/commands/show/show_processes.py +18 -21
- adam/commands/show/show_storage.py +11 -20
- adam/commands/watch.py +27 -30
- adam/config.py +8 -6
- adam/embedded_params.py +1 -1
- adam/log.py +4 -4
- adam/pod_exec_result.py +13 -5
- adam/repl.py +136 -120
- adam/repl_commands.py +66 -24
- adam/repl_session.py +8 -1
- adam/repl_state.py +343 -73
- adam/sql/__init__.py +0 -0
- adam/sql/lark_completer.py +284 -0
- adam/sql/lark_parser.py +604 -0
- adam/sql/sql_completer.py +118 -0
- adam/sql/sql_state_machine.py +630 -0
- adam/sql/term_completer.py +76 -0
- adam/sso/authn_ad.py +7 -9
- adam/sso/authn_okta.py +4 -6
- adam/sso/cred_cache.py +4 -6
- adam/sso/idp.py +10 -13
- adam/utils.py +539 -11
- adam/utils_athena.py +145 -0
- adam/utils_audits.py +102 -0
- adam/utils_issues.py +32 -0
- adam/utils_k8s/__init__.py +0 -0
- adam/utils_k8s/app_clusters.py +28 -0
- adam/utils_k8s/app_pods.py +36 -0
- adam/utils_k8s/cassandra_clusters.py +44 -0
- adam/{k8s_utils → utils_k8s}/cassandra_nodes.py +12 -5
- adam/{k8s_utils → utils_k8s}/custom_resources.py +16 -17
- adam/{k8s_utils → utils_k8s}/deployment.py +2 -2
- adam/{k8s_utils → utils_k8s}/ingresses.py +2 -2
- adam/{k8s_utils → utils_k8s}/jobs.py +7 -11
- adam/utils_k8s/k8s.py +96 -0
- adam/{k8s_utils → utils_k8s}/kube_context.py +3 -3
- adam/{k8s_utils → utils_k8s}/pods.py +132 -83
- adam/{k8s_utils → utils_k8s}/secrets.py +7 -3
- adam/{k8s_utils → utils_k8s}/service_accounts.py +5 -4
- adam/{k8s_utils → utils_k8s}/services.py +2 -2
- adam/{k8s_utils → utils_k8s}/statefulsets.py +9 -16
- adam/utils_local.py +4 -0
- adam/utils_net.py +24 -0
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/appendable_completer.py +6 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +172 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +137 -0
- adam/version.py +1 -1
- {kaqing-2.0.14.dist-info → kaqing-2.0.189.dist-info}/METADATA +1 -1
- kaqing-2.0.189.dist-info/RECORD +253 -0
- kaqing-2.0.189.dist-info/top_level.txt +2 -0
- teddy/__init__.py +0 -0
- teddy/lark_parser.py +436 -0
- teddy/lark_parser2.py +618 -0
- adam/commands/app.py +0 -67
- adam/commands/bash.py +0 -87
- adam/commands/cp.py +0 -95
- adam/commands/cql_utils.py +0 -53
- adam/commands/devices.py +0 -89
- adam/commands/postgres/postgres_session.py +0 -247
- adam/commands/reaper/reaper_session.py +0 -159
- adam/commands/show/show_app_actions.py +0 -53
- adam/commands/show/show_commands.py +0 -61
- adam/commands/show/show_repairs.py +0 -47
- adam/k8s_utils/cassandra_clusters.py +0 -48
- kaqing-2.0.14.dist-info/RECORD +0 -167
- kaqing-2.0.14.dist-info/top_level.txt +0 -1
- /adam/{k8s_utils → commands/app}/__init__.py +0 -0
- /adam/{k8s_utils → utils_k8s}/config_maps.py +0 -0
- /adam/{k8s_utils → utils_k8s}/volumes.py +0 -0
- {kaqing-2.0.14.dist-info → kaqing-2.0.189.dist-info}/WHEEL +0 -0
- {kaqing-2.0.14.dist-info → kaqing-2.0.189.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
|
|
5
|
+
from adam.commands.export.importer import Importer
|
|
6
|
+
from adam.commands.export.utils_export import ExportTableStatus, csv_dir, find_files
|
|
7
|
+
from adam.config import Config
|
|
8
|
+
from adam.repl_state import ReplState
|
|
9
|
+
from adam.utils import log2, tabulize, log, parallelize
|
|
10
|
+
from adam.utils_k8s.cassandra_nodes import CassandraNodes
|
|
11
|
+
from adam.utils_k8s.pods import Pods, log_prefix
|
|
12
|
+
from adam.utils_k8s.statefulsets import StatefulSets
|
|
13
|
+
from adam.utils_local import local_tmp_dir
|
|
14
|
+
|
|
15
|
+
class ExportSessions:
    """Discovers and manages export sessions by inspecting the per-table log
    files the exporter leaves on a Cassandra pod.

    Session discovery is memoized with lru_cache; call
    clear_export_session_cache() after any mutation so later lookups see
    fresh state.
    """

    def clear_export_session_cache():
        """Drop both discovery caches; call after creating/removing sessions."""
        ExportSessions.find_export_sessions.cache_clear()
        ExportSessions.export_session_names.cache_clear()

    @functools.lru_cache()
    def export_session_names(sts: str, pod: str, namespace: str, importer: str = None, export_state = None):
        """Return session names found on the pod, optionally filtered by
        importer prefix and by state ('in_export', 'pending_import', 'done').

        Returns [] when sts/namespace are missing or no pod can be resolved.
        """
        if not sts or not namespace:
            return []

        if not pod:
            # default to the first pod of the statefulset
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return []

        return [session
                for session, state in ExportSessions.find_export_sessions(pod, namespace, importer).items()
                if not export_state or state == export_state]

    @functools.lru_cache()
    def find_export_sessions(pod: str, namespace: str, importer: str = None, limit = 100):
        """Map session name -> state, derived from log-file name suffixes:
        no suffix => 'in_export'; '.pending_import' / '.done' otherwise."""
        sessions: dict[str, str] = {}

        prefix = Importer.prefix_from_importer(importer)

        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{prefix}*_*.log*')

        if not log_files:
            return {}

        for log_file in log_files[:limit]:
            # Raw f-string so '\.' is a literal regex escape (the non-raw
            # form triggers an invalid-escape warning). The original
            # '\.log?' also made the 'g' optional by accident; the files
            # always end in '.log', so match it exactly.
            m = re.match(rf'{log_prefix()}-(.*?)_.*\.log(.*)', log_file)
            if m:
                s = m.group(1)
                state = m.group(2)  # '', '.pending_import', '.done'
                state = state.strip('.') if state else 'in_export'

                # A session is 'done' only when every table log says done;
                # any non-done table downgrades a previously 'done' session.
                if s not in sessions:
                    sessions[s] = state
                elif sessions[s] == 'done' and state != 'done':
                    sessions[s] = state

        return sessions

    def clean_up_all_sessions(sts: str, pod: str, namespace: str):
        """Wipe every export csv directory and log file on the pod.

        Returns True when the removal commands were issued, False when
        sts/namespace are missing.
        """
        if not sts or not namespace:
            return False

        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/*', show_out=Config().is_debug(), shell='bash')
        CassandraNodes.exec(pod, namespace, f'rm -rf {log_prefix()}-*.log*', show_out=Config().is_debug(), shell='bash')

        return True

    def clean_up_sessions(sts: str, pod: str, namespace: str, sessions: list[str], max_workers = 0):
        """Remove csv dirs and log files for the given sessions in parallel.

        Returns (csv_count, log_count). Bug fix: the empty-sessions path
        used to return [], which broke callers (e.g.
        ExportSessionService.clean_up) that tuple-unpack the result.
        """
        if not sessions:
            return 0, 0

        if not max_workers:
            max_workers = Config().action_workers('export', 8)

        with parallelize(sessions,
                         max_workers,
                         msg='Cleaning|Cleaned up {size} export sessions') as exec:
            cnt_tuples = exec.map(lambda session: ExportSessions.clean_up_session(sts, pod, namespace, session, True))
            csv_cnt = 0
            log_cnt = 0
            for (csv, log) in cnt_tuples:
                csv_cnt += csv
                log_cnt += log

            return csv_cnt, log_cnt

    def clean_up_session(sts: str, pod: str, namespace: str, session: str, multi_tables = True):
        """Remove one session's csv directories and log files.

        Returns (csv_count, log_count); (0, 0) when sts/namespace/pod
        cannot be resolved.
        """
        if not sts or not namespace:
            return 0, 0

        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return 0, 0

        csv_cnt = 0
        log_cnt = 0

        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{session}_*.log*')

        for log_file in log_files:
            # raw f-string for the literal '\.' escapes; group(2) is the
            # target table name from '<session>_<keyspace>.<table>.log*'
            # NOTE(review): assumes *session* contains no regex
            # metacharacters — session names are timestamp-based.
            m = re.match(rf'{log_prefix()}-{session}_(.*?)\.(.*?)\.log.*', log_file)
            if m:
                table = m.group(2)

                CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/{session}_{table}', show_out=not multi_tables, shell='bash')
                csv_cnt += 1

            # always remove the log file, matched or not
            CassandraNodes.exec(pod, namespace, f'rm -rf {log_file}', show_out=not multi_tables, shell='bash')
            log_cnt += 1

        return csv_cnt, log_cnt

    def show_session(sts: str, pod: str, namespace: str, session: str):
        """Print per-table status for one session as a tab-separated table."""
        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return

        tables, _ = ExportTableStatus.from_session(sts, pod, namespace, session)
        log()
        tabulize(tables,
                 lambda t: f'{t.keyspace}\t{t.target_table}\t{"export_completed_pending_import" if t.status == "pending_import" else t.status}\t{t.csv_file}',
                 header='KEYSPACE\tTARGET_TABLE\tSTATUS\tCSV_FILES',
                 separator='\t')

    def download_session(sts: str, pod: str, namespace: str, session: str):
        """Download a session's csv files from the pod to the local tmp dir,
        in parallel."""
        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return

        tables, _ = ExportTableStatus.from_session(sts, pod, namespace, session)

        def download_csv(table):
            from_path: str = table.csv_file

            # mirror the remote csv path under the local tmp dir
            to_path = from_path.replace(csv_dir(), local_tmp_dir())
            os.makedirs(os.path.dirname(to_path), exist_ok=True)
            Pods.download_file(pod, 'cassandra', namespace, from_path, to_path)

            log2(f'[{session}] Downloaded to {to_path}.')

        with parallelize(tables,
                         workers=Config().get('download.workers', 8),
                         msg='Downloading|Downloaded {size} csv files') as exec:
            exec.map(download_csv)
156
|
+
class ExportSessionService:
    """Thin facade over ExportSessions, bound to a handler's ReplState."""

    def __init__(self, handler: 'ExportSessionHandler'):
        self.handler = handler

    def clean_up(self, sessions: list[str]):
        """Delete csv/log artifacts for *sessions*, report counts, refresh caches."""
        repl = self.handler.state

        csv_cnt, log_cnt = ExportSessions.clean_up_sessions(repl.sts, self.pod(), repl.namespace, sessions)

        log(f'Removed {csv_cnt} csv and {log_cnt} log files.')

        ExportSessions.clear_export_session_cache()

    def clean_up_all(self):
        """Wipe every export artifact on the pod; refresh caches on success."""
        repl = self.handler.state

        if ExportSessions.clean_up_all_sessions(repl.sts, self.pod(), repl.namespace):
            ExportSessions.clear_export_session_cache()

    def show_all_sessions(self):
        """Print every session (newest first) with its state."""
        repl = self.handler.state

        found = ExportSessions.find_export_sessions(self.pod(), repl.namespace)
        rows = sorted(found.items(), reverse=True)
        tabulize(rows,
                 lambda args: f'{args[0]}\t{args[1]}',
                 header='EXPORT_SESSION\tSTATUS',
                 separator='\t')

    def show_session(self, session: str):
        """Print per-table status for one session."""
        repl = self.handler.state
        ExportSessions.show_session(repl.sts, self.pod(), repl.namespace, session)

    def download_session(self, session: str):
        """Download a session's csv files to the local tmp dir."""
        repl = self.handler.state
        ExportSessions.download_session(repl.sts, self.pod(), repl.namespace, session)

    def pod(self):
        """Resolve the target pod: explicit state.pod, else first pod of the sts."""
        repl = self.handler.state
        return repl.pod or StatefulSets.pod_names(repl.sts, repl.namespace)[0]
|
|
197
|
+
|
|
198
|
+
class ExportSessionHandler:
    """Context manager that yields an ExportSessionService bound to *state*.

    Exceptions raised inside the `with` block are never suppressed
    (__exit__ returns False).
    """

    def __init__(self, state: ReplState = None):
        # REPL state (sts/pod/namespace) the service methods will read
        self.state = state

    def __enter__(self):
        return ExportSessionService(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
|
|
207
|
+
|
|
208
|
+
def export_session(state: ReplState = None):
    """Module-level convenience: return a context manager whose __enter__
    yields an ExportSessionService bound to *state*."""
    return ExportSessionHandler(state)
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
from adam.commands import validate_args
|
|
2
|
+
from adam.commands.command import Command
|
|
3
|
+
from adam.commands.export.export_databases import export_db
|
|
4
|
+
from adam.repl_state import ReplState
|
|
5
|
+
from adam.utils import log2
|
|
6
|
+
|
|
7
|
+
class ExportUse(Command):
    """REPL command 'use': select — or, with no argument, clear — the
    active export database (session)."""

    COMMAND = 'use'

    def __new__(cls, *args, **kwargs):
        # singleton: one shared instance per class
        if not hasattr(cls, 'instance'):
            cls.instance = super().__new__(cls)
        return cls.instance

    def __init__(self, successor: Command=None):
        super().__init__(successor)

    def command(self):
        return ExportUse.COMMAND

    def required(self):
        return [ReplState.C]

    def run(self, cmd: str, state: ReplState):
        args = self.args(cmd)
        if not args:
            # not our command; pass along the chain
            return super().run(cmd, state)

        with self.validate(args, state) as (args, state):
            with validate_args(args, state, at_least=0) as session:
                if not session:
                    # bare 'use' clears the current export database
                    state.export_session = None

                    log2('Export database is unset.')

                    return state

                state.export_session = session

                with export_db(state) as dbs:
                    dbs.show_database()

        return state

    def completion(self, state: ReplState):
        return {}

    def help(self, _: ReplState):
        return f'{ExportUse.COMMAND} <export-database-name>\t use export database'
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from adam.commands import extract_trailing_options, validate_args
|
|
2
|
+
from adam.commands.command import Command
|
|
3
|
+
from adam.commands.export.completions_x import completions_x
|
|
4
|
+
from adam.commands.export.export_databases import export_db
|
|
5
|
+
from adam.repl_state import ReplState, RequiredState
|
|
6
|
+
|
|
7
|
+
class ExportXSelect(Command):
    """REPL command 'xelect': run a SELECT query against the current
    export database; trailing '&' backgrounds the query."""

    COMMAND = 'xelect'

    def __new__(cls, *args, **kwargs):
        # singleton: one shared instance per class
        if not hasattr(cls, 'instance'):
            cls.instance = super().__new__(cls)
        return cls.instance

    def __init__(self, successor: Command=None):
        super().__init__(successor)

    def command(self):
        return ExportXSelect.COMMAND

    def required(self):
        return RequiredState.EXPORT_DB

    def run(self, cmd: str, state: ReplState):
        args = self.args(cmd)
        if not args:
            # not our command; pass along the chain
            return super().run(cmd, state)

        with self.validate(args, state) as (args, state):
            with extract_trailing_options(args, '&') as (args, backgrounded):
                with validate_args(args, state, name='SQL statement') as query:
                    with export_db(state) as dbs:
                        # re-attach the 'select' keyword consumed by the command name
                        dbs.sql(f'select {query}', backgrounded=backgrounded)

        return state

    def completion(self, state: ReplState):
        # completions apply only on the c: drive and once a session exists
        if state.device != ReplState.C or not state.export_session:
            return {}

        # add only xelect completions to c: drive from lark
        return {ExportXSelect.COMMAND: completions_x(state)[ExportXSelect.COMMAND]}

    def help(self, _: ReplState):
        return 'xelect...\t run queries on export database'
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
import time
|
|
3
|
+
|
|
4
|
+
from adam.commands.command import InvalidArgumentsException
|
|
5
|
+
from adam.commands.cql.utils_cql import cassandra_table_names, run_cql, table_spec
|
|
6
|
+
from adam.commands.export.export_databases import export_db
|
|
7
|
+
from adam.commands.export.export_sessions import ExportSessions
|
|
8
|
+
from adam.commands.export.importer import Importer
|
|
9
|
+
from adam.commands.export.importer_athena import AthenaImporter
|
|
10
|
+
from adam.commands.export.importer_sqlite import SqliteImporter
|
|
11
|
+
from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, find_files, state_with_pod
|
|
12
|
+
from adam.config import Config
|
|
13
|
+
from adam.pod_exec_result import PodExecResult
|
|
14
|
+
from adam.repl_state import ReplState
|
|
15
|
+
from adam.utils import debug, log, parallelize, log2, ing, log_exc
|
|
16
|
+
from adam.utils_k8s.cassandra_nodes import CassandraNodes
|
|
17
|
+
from adam.utils_k8s.pods import log_prefix
|
|
18
|
+
|
|
19
|
+
class Exporter:
|
|
20
|
+
def export_tables(args: list[str], state: ReplState, export_only: bool = False, max_workers = 0) -> tuple[list[str], ExportSpec]:
    """Export Cassandra tables described by *args* in two phases:
    'init' (create per-table log markers) then 'pending_export' (dump csv
    and import).

    Returns (per-table statuses, resolved spec); falls through to
    ([], None) when an exception was raised — NOTE(review): presumably
    log_exc(True) logs and suppresses it; confirm against adam.utils.
    """
    if export_only:
        log2('export-only for testing')

    spec: ExportSpec = None
    with log_exc(True):
        spec = Exporter.export_spec(' '.join(args), state)

        # phase 1: create empty per-table log files
        statuses, spec = Exporter._export_tables(spec, state, max_workers=max_workers, export_state='init')
        if not statuses:
            return statuses, spec

        # phase 2: dump csv and (unless export_only/csv) import
        return Exporter._export_tables(spec, state, export_only, max_workers, 'pending_export')

    # reached only when log_exc suppressed an exception above
    return [], None
|
|
35
|
+
|
|
36
|
+
def export_spec(spec_str: str, state: ReplState):
    """Parse *spec_str* into an ExportSpec and resolve its importer/session.

    With an active session on *state*, the spec must use the same importer
    type ('csv' may re-prefix the active session); otherwise a new session
    name is minted from the current timestamp. Raises when an Athena
    importer is requested but credentials are absent.
    """
    spec: ExportSpec = ExportSpec.parse_specs(spec_str)

    session = state.export_session
    if session:
        if spec.importer:
            importer_from_session = Importer.importer_from_session(session)
            if spec.importer != importer_from_session:
                if spec.importer == 'csv':
                    # csv re-uses the active session under the csv prefix
                    prefix = Importer.prefix_from_importer(spec.importer)
                    session = f'{prefix}{session[1:]}'
                else:
                    raise Exception(f"You're currently using {importer_from_session} export database. You cannot export tables with {spec.importer} type database.")
        else:
            # inherit the importer type encoded in the active session name
            spec.importer = Importer.importer_from_session(session)

        if spec.importer == 'athena' and not AthenaImporter.ping():
            raise Exception('Credentials for Athena is not present.')
    else:
        if not spec.importer:
            spec.importer = Config().get('export.default-importer', 'sqlite')

        # mint a session name: importer prefix + timestamp with the first
        # 3 digits of the year dropped to keep the name short
        prefix = Importer.prefix_from_importer(spec.importer)
        session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'

        if spec.importer == 'athena' and not AthenaImporter.ping():
            raise Exception('Credentials for Athena is not present.')

        if spec.importer != 'csv':
            # csv exports never become the active session
            state.export_session = session

    spec.session = session

    return spec
|
|
70
|
+
|
|
71
|
+
def import_session(spec_str: str, state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
    """Import an already-exported session's csv files into the export db.

    Short-circuits when the session is fully 'done' (no pending csv).
    Returns (per-table statuses, spec); ([], None) when an exception was
    raised and swallowed by log_exc — NOTE(review): confirm log_exc
    semantics against adam.utils.
    """
    import_spec: ImportSpec = None
    with log_exc(True):
        import_spec = Exporter.import_spec(spec_str, state)

        tables, status_in_whole = ExportTableStatus.from_session(state.sts, state.pod, state.namespace, import_spec.session)
        if status_in_whole == 'done':
            log2(f'The session has been completely done - no more csv files are found.')
            return [], ExportSpec(None, None, importer=import_spec.importer, tables=[])

        # rebuild a table-level spec from the session's recorded statuses
        spec = ExportSpec(None, None, importer=import_spec.importer, tables=[ExportTableSpec.from_status(table) for table in tables], session=import_spec.session)

        return Exporter._export_tables(spec, state, max_workers=max_workers, export_state = 'import')

    return [], None
|
|
85
|
+
|
|
86
|
+
def import_local_csv_files(spec_str: str, state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
    """Import local csv files into a target '<database>.<table>'.

    The target must be given via 'as <database-name>.<table-name>';
    otherwise InvalidArgumentsException is raised (and handled by
    log_exc). Returns (import results, spec); ([], None) on error.
    """
    spec: ImportSpec = None
    with log_exc(True):
        spec = Exporter.import_spec(spec_str, state, files=True)

        if not spec.table_name:
            log2(f"Use 'as <database-name>.<table-name>'.")
            raise InvalidArgumentsException()

        d_t = spec.table_name.split('.')
        if len(d_t) != 2:
            log2(f'Need <database-name>.<table-name> format for target table.')
            raise InvalidArgumentsException()

        database = d_t[0]
        table = d_t[1]
        # pick the importer backend; anything non-athena goes to sqlite
        im = AthenaImporter() if spec.importer == 'athena' else SqliteImporter()

        with parallelize(spec.files, max_workers, msg='Importing|Imported {size} csv files') as exec:
            return exec.map(lambda f: im.import_from_local_csv(state, database, table, f, len(spec.files) > 1, True)), spec

    return [], None
|
|
107
|
+
|
|
108
|
+
def import_spec(spec_str: str, state: ReplState, files = False):
    """Parse *spec_str* into an ImportSpec and resolve its importer/session.

    With an active session, the spec's importer (if any) must match the
    session's; without one, the importer falls back to the session prefix
    or the configured default, and a session name is minted or re-prefixed.
    Raises when Athena is selected but credentials are absent.
    """
    spec: ImportSpec = ImportSpec.parse_specs(spec_str, files=files)

    session = state.export_session
    if session:
        if spec.importer:
            importer = Importer.importer_from_session(state.export_session)
            if spec.importer != importer:
                raise Exception(f"You're currently using {importer} export database. You cannot import to {spec.importer} type database.")
        else:
            # inherit the importer from the active session name
            spec.importer = Importer.importer_from_session(state.export_session)
            if not spec.importer:
                spec.importer = Config().get('export.default-importer', 'sqlite')

        if spec.importer == 'athena' and not AthenaImporter.ping():
            raise Exception('Credentials for Athena is not present.')
    else:
        if not spec.importer:
            spec.importer = Importer.importer_from_session(spec.session)

            # csv is export-only; importing needs a real backend
            if not spec.importer or spec.importer == 'csv':
                spec.importer = Config().get('export.default-importer', 'sqlite')

        if spec.importer == 'athena' and not AthenaImporter.ping():
            raise Exception('Credentials for Athena is not present.')

        prefix = Importer.prefix_from_importer(spec.importer)
        if spec.session:
            # re-prefix the given session with the resolved importer's prefix
            spec.session = f'{prefix}{spec.session[1:]}'
        else:
            # mint a session name: prefix + shortened timestamp
            spec.session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'

        state.export_session = spec.session

    return spec
|
|
143
|
+
|
|
144
|
+
def _export_tables(spec: ExportSpec, state: ReplState, export_only = False, max_workers = 0, export_state = None) -> tuple[list[str], ExportSpec]:
    """Run one phase of the export for every table in *spec*, in parallel.

    *export_state* selects the phase: 'init' (create log markers, wipe old
    csv dirs), 'pending_export' (dump + import), or 'import'. Returns
    (per-table results, spec).
    """
    if not spec.keyspace:
        # default keyspace convention: '<namespace>_db'
        spec.keyspace = f'{state.namespace}_db'

    if not spec.tables:
        # no explicit tables: export every table in the keyspace
        spec.tables = [ExportTableSpec.parse(t) for t in cassandra_table_names(state, keyspace=spec.keyspace)]

    if not max_workers:
        max_workers = Config().action_workers(f'export.{spec.importer}', 8)

    if export_state == 'init':
        # start clean: remove any leftover csv dirs for this session
        CassandraNodes.exec(state.pod, state.namespace, f'rm -rf {csv_dir()}/{spec.session}_*', show_out=Config().is_debug(), shell='bash')

    # progress message varies by phase ('Doing|Done' template)
    action = f'[{spec.session}] Exporting|Exported'
    if export_state == 'init':
        action = f'[{spec.session}] Preparing|Prepared'
    elif export_state == 'import':
        action = f'[{spec.session}] Importing|Imported'

    with parallelize(spec.tables, max_workers, msg=action + ' {size} Cassandra tables') as exec:
        return exec.map(lambda table: Exporter.export_table(table, state, spec.session, spec.importer, export_only, len(spec.tables) > 1, consistency=spec.consistency, export_state=export_state)), spec
|
|
165
|
+
|
|
166
|
+
def export_table(spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, consistency: str = None, export_state=None):
    """Drive one table through the export state machine and return its
    final status string.

    'init' only creates the table's log marker; otherwise the method polls
    the table's log file, advancing through exported -> pending_import ->
    done (stopping at 'pending_import' for csv-only or export_only runs).
    """
    s: str = None

    # NOTE(review): 'resove_table_n_columns' is a typo preserved from the
    # method's definition elsewhere in this class.
    table, target_table, columns = Exporter.resove_table_n_columns(spec, state, include_ks_in_target=False, importer=importer)

    log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
    # no active session yet => the importer must create the database
    create_db = not state.export_session

    if export_state == 'init':
        Exporter.create_table_log(spec, state, session, table, target_table)
        return 'table_log_created'
    else:
        if export_state == 'pending_export':
            Exporter.export_to_csv(spec, state, session, table, target_table, columns, multi_tables=multi_tables, consistency=consistency)

        log_files: list[str] = find_files(state.pod, state.namespace, f'{log_file}*')
        if not log_files:
            # no log file at all: nothing to report (returns None)
            return s

        log_file = log_files[0]

        status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
        while status.status != 'done':
            # NOTE(review): 'export_in_pregress' / 'progess' typos must
            # match the status producer; do not "fix" them here alone.
            if status.status == 'export_in_pregress':
                debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
                time.sleep(1)
            elif status.status == 'exported':
                log_file = Exporter.rename_to_pending_import(spec, state, session, target_table)
                if importer == 'csv' or export_only:
                    # csv exports (and test runs) stop before importing
                    return 'pending_import'
            elif status.status == 'pending_import':
                log_file, session = Exporter.import_from_csv(spec, state, session, importer, table, target_table, columns, multi_tables=multi_tables, create_db=create_db)

            # re-read the (possibly renamed) log file for the next iteration
            status = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)

        return status.status
|
|
202
|
+
|
|
203
|
+
def create_table_log(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str):
    """Reset the per-table export log on the pod: remove any stale log files
    for this table and create a fresh empty one. Returns the source table name."""
    log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'

    reset_cmd = f'rm -f {log_file}* && touch {log_file}'
    CassandraNodes.exec(state.pod, state.namespace, reset_cmd, show_out=Config().is_debug(), shell='bash')

    return table
|
|
209
|
+
|
|
210
|
+
def export_to_csv(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str, columns: str, multi_tables = True, consistency: str = None):
    """Dump a table to CSV on the pod via cqlsh COPY, running in the background.

    Creates the session-scoped CSV directory, optionally prefixes a CONSISTENCY
    statement, and logs output to the per-table log file that export_table polls.
    Returns the log file path.
    """
    db = f'{session}_{target_table}'

    CassandraNodes.exec(state.pod, state.namespace, f'mkdir -p {csv_dir()}/{db}', show_out=Config().is_debug(), shell='bash')
    csv_file = f'{csv_dir()}/{db}/{table}.csv'
    log_file = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'

    # when exporting multiple tables (or in debug), avoid per-table spinner noise
    suppress_ing_log = Config().is_debug() or multi_tables
    queries = []
    if consistency:
        queries.append(f'CONSISTENCY {consistency}')
    queries.append(f"COPY {spec.keyspace}.{table}({columns}) TO '{csv_file}' WITH HEADER = TRUE")
    # result intentionally discarded: the COPY runs backgrounded and progress is
    # tracked through the log file, not the immediate exec result
    ing(
        f'[{session}] Dumping table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
        lambda: run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=log_file),
        suppress_log=suppress_ing_log)

    return log_file
|
|
228
|
+
|
|
229
|
+
def rename_to_pending_import(spec: ExportTableSpec, state: ReplState, session: str, target_table: str):
    """Mark a finished CSV dump as ready for import by renaming its log file
    to '<log>.pending_import'. Returns the new log file path."""
    source = f'{log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
    renamed = f'{source}.pending_import'

    CassandraNodes.exec(state.pod, state.namespace, f'mv {source} {renamed}', show_out=Config().is_debug(), shell='bash')

    return renamed
|
|
236
|
+
|
|
237
|
+
def import_from_csv(spec: ExportTableSpec, state: ReplState, session: str, importer: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
    """Load the exported CSV into the configured backend (Athena or SQLite)."""
    if importer == 'athena':
        backend = AthenaImporter()
    else:
        backend = SqliteImporter()
    # fall back to the session already bound to the REPL state when none given
    effective_session = session if session else state.export_session
    return backend.import_from_csv(state, effective_session, spec.keyspace, table, target_table, columns, multi_tables, create_db)
|
|
240
|
+
|
|
241
|
+
def resove_table_n_columns(spec: ExportTableSpec, state: ReplState, include_ks_in_target = False, importer = 'sqlite'):
    """Resolve the source table, target table name, and column list for an export.

    Columns default from config 'export.<importer>.columns'; the sentinels
    '<keys>', '<row-key>' and '*' are expanded from the live table spec.
    Returns (table, target_table, columns); (table, None, None) when no columns
    can be resolved.

    NOTE: the name's "resove" typo is kept — callers reference it.
    """
    table = spec.table
    columns = spec.columns
    if not columns:
        # plain literal — no interpolation needed
        columns = Config().get(f'export.{importer}.columns', '<keys>')

    keyspaced_table = f'{spec.keyspace}.{spec.table}'
    if columns == '<keys>':
        # all primary-key columns
        columns = ','.join(table_spec(state, keyspaced_table, on_any=True).keys())
    elif columns == '<row-key>':
        columns = table_spec(state, keyspaced_table, on_any=True).row_key()
    elif columns == '*':
        columns = ','.join([c.name for c in table_spec(state, keyspaced_table, on_any=True).columns])

    if not columns:
        log2(f'ERROR: Empty columns on {table}.')
        return table, None, None

    target_table = spec.target_table if spec.target_table else table
    if not include_ks_in_target and '.' in target_table:
        # strip the keyspace prefix from the target name
        target_table = target_table.split('.')[-1]

    return table, target_table, columns
|
|
264
|
+
|
|
265
|
+
class ExportService:
    """Facade over Exporter operations, bound to a ReplState via ExporterHandler."""

    def __init__(self, handler: 'ExporterHandler'):
        self.handler = handler

    def export(self, args: list[str], export_only=False):
        """Export tables described by args and show the resulting session/db.

        When the importer is 'csv', the newly created session id is NOT bound
        to the REPL state (original value restored in the finally block).
        """
        state = self.handler.state
        export_session = state.export_session
        spec: ExportSpec = None
        try:
            with state_with_pod(state) as state:
                # --export-only for testing only
                statuses, spec = Exporter.export_tables(args, state, export_only=export_only)
                if not statuses:
                    return state

                ExportSessions.clear_export_session_cache()

                if spec.importer == 'csv' or export_only:
                    ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
                else:
                    log()
                    with export_db(state) as dbs:
                        dbs.show_database()
        finally:
            # if exporting to csv, do not bind the new session id to repl state
            if spec and spec.importer == 'csv':
                state.export_session = export_session

        return state

    def import_session(self, spec_str: str):
        """Import a previously exported session into the export database."""
        state = self.handler.state

        tables, _ = Exporter.import_session(spec_str, state)
        self._refresh_and_show(state, tables)

        return state

    def import_files(self, spec_str: str):
        """Import local CSV files into the export database."""
        state = self.handler.state

        tables, _ = Exporter.import_local_csv_files(spec_str, state)
        self._refresh_and_show(state, tables)

        return state

    def _refresh_and_show(self, state, tables):
        # shared tail of the import commands: invalidate the session cache and
        # display the refreshed export database (only when anything was imported)
        if tables:
            ExportSessions.clear_export_session_cache()

            log()
            with export_db(state) as dbs:
                dbs.show_database()
|
|
320
|
+
|
|
321
|
+
class ExporterHandler:
    """Context-manager factory: `with ExporterHandler(state) as svc:` yields an
    ExportService wired back to this handler's REPL state."""

    def __init__(self, state: ReplState):
        # keep the REPL state for the service to read
        self.state = state

    def __enter__(self):
        # hand out a service bound to this handler
        return ExportService(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # never suppress exceptions raised inside the with-block
        return False
|
|
330
|
+
|
|
331
|
+
def export(state: ReplState):
    """Entry point for export operations: returns a handler intended for use
    as `with export(state) as exporter: ...`."""
    handler = ExporterHandler(state)
    return handler
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
from adam.commands import validate_args
|
|
2
|
+
from adam.commands.command import Command
|
|
3
|
+
from adam.commands.export.export_sessions import ExportSessions
|
|
4
|
+
from adam.commands.export.exporter import export
|
|
5
|
+
from adam.commands.export.utils_export import state_with_pod
|
|
6
|
+
from adam.repl_state import ReplState, RequiredState
|
|
7
|
+
|
|
8
|
+
class ImportCSVFiles(Command):
    """REPL command 'import files': load local CSV files into the export
    database (Athena or SQLite) for the current session."""

    COMMAND = 'import files'

    # the singleton pattern
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, 'instance'):
            # Python 3 zero-argument super() replaces super(ImportCSVFiles, cls)
            cls.instance = super().__new__(cls)

        return cls.instance

    def __init__(self, successor: Command = None):
        super().__init__(successor)

    def command(self):
        return ImportCSVFiles.COMMAND

    def required(self):
        # command needs a cluster or a pod selected in the REPL
        return RequiredState.CLUSTER_OR_POD

    def run(self, cmd: str, state: ReplState):
        # delegate to the successor when this command does not match
        if not(args := self.args(cmd)):
            return super().run(cmd, state)

        with self.validate(args, state) as (args, state):
            with validate_args(args, state, name='file') as spec:
                with state_with_pod(state) as state:
                    with export(state) as exporter:
                        return exporter.import_files(spec)

    def completion(self, state: ReplState):
        # no completions offered for file names
        return {}

    def help(self, _: ReplState):
        return f'{ImportCSVFiles.COMMAND} <file-names,...>\t import files in session to Athena or SQLite'
|