kaqing 2.0.174-py3-none-any.whl → 2.0.188-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- adam/app_session.py +2 -2
- adam/apps.py +18 -4
- adam/batch.py +1 -1
- adam/checks/check_utils.py +3 -1
- adam/commands/__init__.py +4 -2
- adam/commands/app/__init__.py +0 -0
- adam/commands/app/app.py +38 -0
- adam/commands/app/app_ping.py +38 -0
- adam/commands/app/show_app_actions.py +49 -0
- adam/commands/app/show_app_id.py +44 -0
- adam/commands/app/show_app_queues.py +38 -0
- adam/commands/app/utils_app.py +106 -0
- adam/commands/audit/audit.py +9 -27
- adam/commands/audit/audit_repair_tables.py +5 -7
- adam/commands/audit/audit_run.py +1 -1
- adam/commands/audit/completions_l.py +15 -0
- adam/commands/audit/show_last10.py +2 -14
- adam/commands/audit/show_slow10.py +2 -13
- adam/commands/audit/show_top10.py +2 -11
- adam/commands/audit/utils_show_top10.py +14 -1
- adam/commands/bash/bash.py +1 -1
- adam/commands/cat.py +3 -7
- adam/commands/check.py +2 -2
- adam/commands/cli_commands.py +6 -1
- adam/commands/{cp.py → clipboard_copy.py} +18 -12
- adam/commands/code.py +2 -2
- adam/commands/command.py +61 -11
- adam/commands/commands_utils.py +19 -12
- adam/commands/cql/completions_c.py +28 -0
- adam/commands/cql/cqlsh.py +3 -7
- adam/commands/cql/utils_cql.py +22 -58
- adam/commands/deploy/deploy_pg_agent.py +2 -2
- adam/commands/deploy/undeploy_pg_agent.py +2 -2
- adam/commands/devices/device.py +39 -8
- adam/commands/devices/device_app.py +18 -28
- adam/commands/devices/device_auit_log.py +3 -3
- adam/commands/devices/device_cass.py +16 -22
- adam/commands/devices/device_export.py +6 -3
- adam/commands/devices/device_postgres.py +79 -63
- adam/commands/download_file.py +47 -0
- adam/commands/export/clean_up_all_export_sessions.py +3 -3
- adam/commands/export/clean_up_export_sessions.py +5 -10
- adam/commands/export/completions_x.py +11 -0
- adam/commands/export/download_export_session.py +40 -0
- adam/commands/export/export.py +0 -16
- adam/commands/export/export_databases.py +26 -9
- adam/commands/export/export_select.py +9 -58
- adam/commands/export/export_sessions.py +90 -5
- adam/commands/export/export_use.py +13 -10
- adam/commands/export/export_x_select.py +48 -0
- adam/commands/export/exporter.py +60 -22
- adam/commands/export/import_files.py +44 -0
- adam/commands/export/import_session.py +8 -4
- adam/commands/export/importer.py +7 -0
- adam/commands/export/importer_athena.py +101 -34
- adam/commands/export/importer_sqlite.py +30 -5
- adam/commands/export/show_column_counts.py +11 -11
- adam/commands/export/show_export_databases.py +5 -3
- adam/commands/export/show_export_session.py +5 -6
- adam/commands/export/show_export_sessions.py +4 -11
- adam/commands/export/utils_export.py +42 -14
- adam/commands/find_files.py +51 -0
- adam/commands/find_processes.py +76 -0
- adam/commands/head.py +36 -0
- adam/commands/help.py +2 -2
- adam/commands/intermediate_command.py +6 -3
- adam/commands/ls.py +1 -1
- adam/commands/medusa/medusa_backup.py +12 -14
- adam/commands/medusa/medusa_restore.py +20 -15
- adam/commands/medusa/medusa_show_backupjobs.py +6 -4
- adam/commands/medusa/medusa_show_restorejobs.py +5 -3
- adam/commands/medusa/utils_medusa.py +15 -0
- adam/commands/nodetool.py +3 -8
- adam/commands/param_get.py +2 -3
- adam/commands/param_set.py +1 -1
- adam/commands/postgres/completions_p.py +22 -0
- adam/commands/postgres/postgres.py +14 -21
- adam/commands/postgres/postgres_databases.py +270 -0
- adam/commands/postgres/utils_postgres.py +29 -20
- adam/commands/preview_table.py +3 -1
- adam/commands/pwd.py +3 -3
- adam/commands/reaper/reaper_forward.py +2 -2
- adam/commands/reaper/reaper_runs.py +3 -3
- adam/commands/reaper/reaper_schedule_activate.py +6 -2
- adam/commands/reaper/reaper_schedule_start.py +1 -2
- adam/commands/reaper/reaper_schedule_stop.py +1 -2
- adam/commands/reaper/utils_reaper.py +13 -6
- adam/commands/repair/repair_scan.py +0 -2
- adam/commands/repair/repair_stop.py +0 -1
- adam/commands/shell.py +7 -5
- adam/commands/show/show.py +1 -1
- adam/commands/show/show_adam.py +3 -3
- adam/commands/show/show_cassandra_repairs.py +5 -3
- adam/commands/show/show_cassandra_status.py +27 -20
- adam/commands/show/{show_commands.py → show_cli_commands.py} +2 -2
- adam/commands/show/show_login.py +2 -2
- adam/commands/show/show_params.py +2 -5
- adam/commands/show/show_processes.py +15 -14
- adam/commands/show/show_storage.py +9 -8
- adam/config.py +1 -0
- adam/embedded_params.py +1 -1
- adam/repl.py +16 -9
- adam/repl_commands.py +16 -9
- adam/repl_session.py +8 -1
- adam/repl_state.py +33 -10
- adam/sql/lark_completer.py +284 -0
- adam/sql/lark_parser.py +604 -0
- adam/sql/sql_state_machine.py +8 -2
- adam/utils.py +116 -29
- adam/utils_athena.py +7 -8
- adam/utils_issues.py +2 -2
- adam/utils_k8s/app_clusters.py +2 -2
- adam/utils_k8s/app_pods.py +5 -2
- adam/utils_k8s/cassandra_clusters.py +11 -3
- adam/utils_k8s/cassandra_nodes.py +2 -2
- adam/utils_k8s/k8s.py +9 -0
- adam/utils_k8s/kube_context.py +2 -2
- adam/utils_k8s/pods.py +23 -5
- adam/utils_k8s/statefulsets.py +5 -2
- adam/utils_local.py +4 -0
- adam/utils_repl/appendable_completer.py +6 -0
- adam/utils_repl/repl_completer.py +128 -2
- adam/utils_sqlite.py +2 -2
- adam/version.py +1 -1
- {kaqing-2.0.174.dist-info → kaqing-2.0.188.dist-info}/METADATA +1 -1
- kaqing-2.0.188.dist-info/RECORD +253 -0
- kaqing-2.0.188.dist-info/top_level.txt +2 -0
- teddy/__init__.py +0 -0
- teddy/lark_parser.py +436 -0
- teddy/lark_parser2.py +618 -0
- adam/commands/cql/cql_completions.py +0 -32
- adam/commands/export/export_select_x.py +0 -54
- adam/commands/postgres/postgres_context.py +0 -272
- adam/commands/postgres/psql_completions.py +0 -10
- kaqing-2.0.174.dist-info/RECORD +0 -230
- kaqing-2.0.174.dist-info/top_level.txt +0 -1
- {kaqing-2.0.174.dist-info → kaqing-2.0.188.dist-info}/WHEEL +0 -0
- {kaqing-2.0.174.dist-info → kaqing-2.0.188.dist-info}/entry_points.txt +0 -0
adam/commands/export/export_databases.py
CHANGED

@@ -1,4 +1,5 @@
 from collections.abc import Callable
+from datetime import datetime
 import os
 import boto3
 
@@ -7,7 +8,7 @@ from adam.commands.export.importer import Importer
 from adam.config import Config
 from adam.repl_session import ReplSession
 from adam.repl_state import ReplState
-from adam.utils import debug,
+from adam.utils import debug, log_timing, tabulize, log2, ing, log_exc
 from adam.utils_athena import Athena
 from adam.utils_sqlite import SQLite
 
@@ -87,21 +88,21 @@ class ExportDatabases:
 
         return dbs
 
-    def
-        if not
+    def show_database(database: str):
+        if not database:
            return
 
        ExportDatabases.clear_cache()
 
        keyspaces = {}
-        for table in ExportDatabases.table_names(
+        for table in ExportDatabases.table_names(database):
            keyspace = table.split('.')[0]
            if keyspace in keyspaces:
                keyspaces[keyspace] += 1
            else:
                keyspaces[keyspace] = 1
 
-
+        tabulize(keyspaces.items(), lambda a: f'{a[0]},{a[1]}', header='SCHEMA,# of TABLES', separator=',')
 
    def database_names():
        return ExportDatabases.copy_database_names() + ExportDatabases.export_database_names()
@@ -110,7 +111,8 @@ class ExportDatabases:
         return list({n.split('_')[0] for n in SQLite.database_names()})
 
    def export_database_names():
-
+        with log_timing('ExportDatabases.Athena.database_names'):
+            return list({n.split('_')[0] for n in Athena.database_names(LIKE)})
 
    def database_names_with_keyspace_cnt(importer: str = None):
        r = {}
@@ -184,17 +186,26 @@ class ExportDatabases:
 
    def show_databases(importer: str = None):
        lines = [f'{k}\t{v}' for k, v in ExportDatabases.database_names_with_keyspace_cnt(importer).items()]
-
+        tabulize(lines, header='NAME\tKEYSPACES', separator='\t')
 
 class ExportDatabaseService:
    def __init__(self, handler: 'ExportDatabaseHandler'):
        self.handler = handler
 
-    def sql(self, query: str, database: str = None):
+    def sql(self, query: str, database: str = None, backgrounded = False):
        if not database:
            database = self.handler.state.export_session
 
-
+        def output(out: str):
+            log_prefix = Config().get('export.log-prefix', '/tmp/qing')
+            log_file = f'{log_prefix}-{datetime.now().strftime("%d%H%M%S")}-export.log'
+
+            with open(log_file, 'w') as f:
+                f.write(out)
+
+            return log_file
+
+        ExportDatabases.run_query(query, database, output = output if backgrounded else None, show_query = not backgrounded)
 
    def drop(self, database: str):
        state = self.handler.state
@@ -215,6 +226,12 @@ class ExportDatabaseService:
    def show_databases(self, importer: str = None):
        ExportDatabases.show_databases(importer)
 
+    def show_database(self, database: str = None):
+        if not database:
+            database = self.handler.state.export_session
+
+        ExportDatabases.show_database(database)
+
 class ExportDatabaseHandler:
    def __init__(self, state: ReplState = None):
        self.state = state
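
Note: a minimal usage sketch (not part of the diff) of the reworked ExportDatabaseService.sql path shown above; with backgrounded=True the query output is written to a /tmp/qing-<timestamp>-export.log file instead of the console. `state` is assumed to be a ReplState whose export_session is already set.

# Hedged sketch based on the hunks above; assumes `state.export_session` is set.
from adam.commands.export.export_databases import export_db

def run_backgrounded(state, query: str):
    with export_db(state) as dbs:
        # when backgrounded, run_query's output is redirected to a log file
        dbs.sql(query, backgrounded=True)
        dbs.show_database()  # prints the SCHEMA / # of TABLES summary
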
adam/commands/export/export_select.py
CHANGED

@@ -1,15 +1,10 @@
-from
-from adam.commands import
-from adam.commands.command import Command, InvalidArgumentsException
-from adam.commands.export.export_databases import ExportDatabases
-from adam.config import Config
+from adam.commands.command import Command
+from adam.commands.export.completions_x import completions_x
 from adam.repl_state import ReplState, RequiredState
-from adam.sql.sql_completer import SqlCompleter, SqlVariant
-from adam.utils import log2
-from adam.utils_athena import Athena
 
+# No action body, only for a help entry and auto-completion
 class ExportSelect(Command):
-    COMMAND = '
+    COMMAND = 'select_on_x'
 
     # the singleton pattern
     def __new__(cls, *args, **kwargs):
@@ -26,58 +21,14 @@ class ExportSelect(Command):
     def required(self):
         return RequiredState.EXPORT_DB
 
-    def run(self, cmd: str, state: ReplState):
-        if not(args := self.args(cmd)):
-            return super().run(cmd, state)
-
-        with self.validate(args, state) as (args, state):
-            with extract_trailing_options(args, '&') as (args, backgrounded):
-                if not state.export_session:
-                    if state.in_repl:
-                        if state.device == ReplState.C:
-                            log2("Select an export database first with 'use' command.")
-                        else:
-                            log2('cd to an export database first.')
-                    else:
-                        log2('* export database is missing.')
-
-                    Command.display_help()
-
-                    raise InvalidArgumentsException()
-
-                with validate_args(args, state, name='SQL statement') as query:
-                    def output(out: str):
-                        log_prefix = Config().get('export.log-prefix', '/tmp/qing')
-                        log_file = f'{log_prefix}-{datetime.now().strftime("%d%H%M%S")}-sqlite.log'
-
-                        with open(log_file, 'w') as f:
-                            f.write(out)
-
-                        return log_file
-
-                    ExportDatabases.run_query(f'select {query}', database=state.export_session, output=output if backgrounded else None)
-
-        return state
-
     def completion(self, state: ReplState):
-        if
+        if state.device != ReplState.X:
            return {}
 
-
-
-        # warm up the caches first time when x: drive is accessed
-        ExportDatabases.table_names(db)
-        Athena.column_names(database=db, function='export')
-        Athena.column_names(partition_cols_only=True, database=db, function='export')
+        if state.export_session:
+            return completions_x(state)
 
-        return {
-            lambda: ExportDatabases.table_names(db),
-            dml='select',
-            expandables={
-                'columns':lambda table: Athena.column_names(database=db, function='export'),
-            },
-            variant=SqlVariant.ATHENA
-        )}
+        return {}
 
     def help(self, _: ReplState):
-        return f'
+        return f'<sql-select-statements>\t run queries on export database'
adam/commands/export/export_sessions.py
CHANGED

@@ -1,13 +1,16 @@
 import functools
+import os
 import re
 
 from adam.commands.export.importer import Importer
 from adam.commands.export.utils_export import ExportTableStatus, csv_dir, find_files
 from adam.config import Config
-from adam.
+from adam.repl_state import ReplState
+from adam.utils import log2, tabulize, log, parallelize
 from adam.utils_k8s.cassandra_nodes import CassandraNodes
-from adam.utils_k8s.pods import log_prefix
+from adam.utils_k8s.pods import Pods, log_prefix
 from adam.utils_k8s.statefulsets import StatefulSets
+from adam.utils_local import local_tmp_dir
 
 class ExportSessions:
     def clear_export_session_cache():
@@ -74,7 +77,9 @@ class ExportSessions:
         if not max_workers:
             max_workers = Config().action_workers('export', 8)
 
-        with parallelize(sessions,
+        with parallelize(sessions,
+                         max_workers,
+                         msg='Cleaning|Cleaned up {size} export sessions') as exec:
             cnt_tuples = exec.map(lambda session: ExportSessions.clean_up_session(sts, pod, namespace, session, True))
             csv_cnt = 0
             log_cnt = 0
@@ -112,7 +117,7 @@ class ExportSessions:
 
         return csv_cnt, log_cnt
 
-    def
+    def show_session(sts: str, pod: str, namespace: str, session: str):
         if not pod:
             pod = StatefulSets.pod_names(sts, namespace)[0]
 
@@ -121,4 +126,84 @@ class ExportSessions:
 
         tables, _ = ExportTableStatus.from_session(sts, pod, namespace, session)
         log()
-
+        tabulize(tables,
+                 lambda t: f'{t.keyspace}\t{t.target_table}\t{"export_completed_pending_import" if t.status == "pending_import" else t.status}\t{t.csv_file}',
+                 header='KEYSPACE\tTARGET_TABLE\tSTATUS\tCSV_FILES',
+                 separator='\t')
+
+    def download_session(sts: str, pod: str, namespace: str, session: str):
+        if not pod:
+            pod = StatefulSets.pod_names(sts, namespace)[0]
+
+        if not pod:
+            return
+
+        tables, _ = ExportTableStatus.from_session(sts, pod, namespace, session)
+        def download_csv(table):
+            from_path: str = table.csv_file
+
+            to_path = from_path.replace(csv_dir(), local_tmp_dir())
+            os.makedirs(os.path.dirname(to_path), exist_ok=True)
+            Pods.download_file(pod, 'cassandra', namespace, from_path, to_path)
+
+            log2(f'[{session}] Downloaded to {to_path}.')
+
+        with parallelize(tables,
+                         workers=Config().get('download.workers', 8),
+                         msg='Downloading|Downloaded {size} csv files') as exec:
+            exec.map(download_csv)
+
+class ExportSessionService:
+    def __init__(self, handler: 'ExportSessionHandler'):
+        self.handler = handler
+
+    def clean_up(self, sessions: list[str]):
+        state = self.handler.state
+
+        csv_cnt, log_cnt = ExportSessions.clean_up_sessions(state.sts, self.pod(), state.namespace, sessions)
+
+        log(f'Removed {csv_cnt} csv and {log_cnt} log files.')
+
+        ExportSessions.clear_export_session_cache()
+
+    def clean_up_all(self):
+        state = self.handler.state
+
+        if ExportSessions.clean_up_all_sessions(state.sts, self.pod(), state.namespace):
+            ExportSessions.clear_export_session_cache()
+
+    def show_all_sessions(self):
+        state = self.handler.state
+
+        sessions = sorted(ExportSessions.find_export_sessions(self.pod(), state.namespace).items(), reverse=True)
+        tabulize(sessions, lambda args: f'{args[0]}\t{args[1]}', header='EXPORT_SESSION\tSTATUS', separator='\t')
+
+    def show_session(self, session: str):
+        state = self.handler.state
+        ExportSessions.show_session(state.sts, self.pod(), state.namespace, session)
+
+    def download_session(self, session: str):
+        state = self.handler.state
+        ExportSessions.download_session(state.sts, self.pod(), state.namespace, session)
+
+    def pod(self):
+        state = self.handler.state
+
+        pod = state.pod
+        if not pod:
+            pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
+
+        return pod
+
+class ExportSessionHandler:
+    def __init__(self, state: ReplState = None):
+        self.state = state
+
+    def __enter__(self):
+        return ExportSessionService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return False
+
+def export_session(state: ReplState = None):
+    return ExportSessionHandler(state)
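
Note: a hedged sketch (not part of the diff) of driving the ExportSessionHandler/ExportSessionService flow added above; `state` is assumed to carry sts, pod and namespace, and the session id is hypothetical.

# Hedged sketch only; 's24120112' is a made-up session id for illustration.
from adam.commands.export.export_sessions import export_session

def download_one(state, session_id: str):
    with export_session(state) as sessions:
        sessions.show_all_sessions()           # EXPORT_SESSION / STATUS table
        sessions.download_session(session_id)  # copies the session's csv files to the local tmp dir

# download_one(state, 's24120112')
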
adam/commands/export/export_use.py
CHANGED

@@ -1,5 +1,6 @@
+from adam.commands import validate_args
 from adam.commands.command import Command
-from adam.commands.export.export_databases import
+from adam.commands.export.export_databases import export_db
 from adam.repl_state import ReplState
 from adam.utils import log2
 
@@ -19,28 +20,30 @@ class ExportUse(Command):
         return ExportUse.COMMAND
 
     def required(self):
-        return [ReplState.C
+        return [ReplState.C]
 
     def run(self, cmd: str, state: ReplState):
         if not(args := self.args(cmd)):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-
-
+            with validate_args(args, state, at_least=0) as session:
+                if not session:
+                    state.export_session = None
 
-
+                    log2('Export database is unset.')
 
-
+                    return state
 
-
+                state.export_session = session
 
-
+                with export_db(state) as dbs:
+                    dbs.show_database()
 
-
+                return state
 
     def completion(self, state: ReplState):
-        return
+        return {}
 
     def help(self, _: ReplState):
         return f'{ExportUse.COMMAND} <export-database-name>\t use export database'
adam/commands/export/export_x_select.py
ADDED

@@ -0,0 +1,48 @@
+from adam.commands import extract_trailing_options, validate_args
+from adam.commands.command import Command
+from adam.commands.export.completions_x import completions_x
+from adam.commands.export.export_databases import export_db
+from adam.repl_state import ReplState, RequiredState
+
+class ExportXSelect(Command):
+    COMMAND = 'xelect'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ExportXSelect, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ExportXSelect.COMMAND
+
+    def required(self):
+        return RequiredState.EXPORT_DB
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            with extract_trailing_options(args, '&') as (args, backgrounded):
+                with validate_args(args, state, name='SQL statement') as query:
+                    with export_db(state) as dbs:
+                        dbs.sql(f'select {query}', backgrounded=backgrounded)
+
+        return state
+
+    def completion(self, state: ReplState):
+        if state.device != ReplState.C:
+            return {}
+
+        if not state.export_session:
+            return {}
+
+        # add only xelect completions to c: drive from lark
+        return {ExportXSelect.COMMAND: completions_x(state)[ExportXSelect.COMMAND]}
+
+    def help(self, _: ReplState):
+        return f'xelect...\t run queries on export database'
adam/commands/export/exporter.py
CHANGED

@@ -1,10 +1,9 @@
 from datetime import datetime
 import time
 
-from adam.commands import
-from adam.commands.command import Command
+from adam.commands.command import InvalidArgumentsException
 from adam.commands.cql.utils_cql import cassandra_table_names, run_cql, table_spec
-from adam.commands.export.export_databases import
+from adam.commands.export.export_databases import export_db
 from adam.commands.export.export_sessions import ExportSessions
 from adam.commands.export.importer import Importer
 from adam.commands.export.importer_athena import AthenaImporter
@@ -69,10 +68,10 @@ class Exporter:
 
         return spec
 
-    def import_session(
+    def import_session(spec_str: str, state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
         import_spec: ImportSpec = None
         with log_exc(True):
-            import_spec = Exporter.import_spec(
+            import_spec = Exporter.import_spec(spec_str, state)
             tables, status_in_whole = ExportTableStatus.from_session(state.sts, state.pod, state.namespace, import_spec.session)
             if status_in_whole == 'done':
                 log2(f'The session has been completely done - no more csv files are found.')
@@ -84,8 +83,30 @@ class Exporter:
 
         return [], None
 
-    def
-        spec: ImportSpec =
+    def import_local_csv_files(spec_str: str, state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
+        spec: ImportSpec = None
+        with log_exc(True):
+            spec = Exporter.import_spec(spec_str, state, files=True)
+            if not spec.table_name:
+                log2(f"Use 'as <database-name>.<table-name>'.")
+                raise InvalidArgumentsException()
+
+            d_t = spec.table_name.split('.')
+            if len(d_t) != 2:
+                log2(f'Need <database-name>.<table-name> format for target table.')
+                raise InvalidArgumentsException()
+
+            database = d_t[0]
+            table = d_t[1]
+            im = AthenaImporter() if spec.importer == 'athena' else SqliteImporter()
+
+            with parallelize(spec.files, max_workers, msg='Importing|Imported {size} csv files') as exec:
+                return exec.map(lambda f: im.import_from_local_csv(state, database, table, f, len(spec.files) > 1, True)), spec
+
+        return [], None
+
+    def import_spec(spec_str: str, state: ReplState, files = False):
+        spec: ImportSpec = ImportSpec.parse_specs(spec_str, files=files)
 
         session = state.export_session
         if session:
@@ -104,15 +125,19 @@ class Exporter:
         if not spec.importer:
             spec.importer = Importer.importer_from_session(spec.session)
 
-        if spec.importer == 'csv':
+        if not spec.importer or spec.importer == 'csv':
             spec.importer = Config().get('export.default-importer', 'sqlite')
 
         if spec.importer == 'athena' and not AthenaImporter.ping():
             raise Exception('Credentials for Athena is not present.')
 
         prefix = Importer.prefix_from_importer(spec.importer)
-
-
+        if spec.session:
+            spec.session = f'{prefix}{spec.session[1:]}'
+        else:
+            spec.session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
+
+        state.export_session = spec.session
 
         return spec
 
@@ -196,7 +221,7 @@ class Exporter:
         queries.append(f"COPY {spec.keyspace}.{table}({columns}) TO '{csv_file}' WITH HEADER = TRUE")
         r: PodExecResult = ing(
             f'[{session}] Dumping table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
-            lambda: run_cql(state, ';'.join(queries), show_out=Config().is_debug(),
+            lambda: run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=log_file),
             suppress_log=suppress_ing_log)
 
         return log_file
@@ -255,10 +280,11 @@ class ExportService:
             ExportSessions.clear_export_session_cache()
 
             if spec.importer == 'csv' or export_only:
-                ExportSessions.
+                ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
             else:
                 log()
-
+                with export_db(state) as dbs:
+                    dbs.show_database()
         finally:
             # if exporting to csv, do not bind the new session id to repl state
             if spec and spec.importer == 'csv':
@@ -266,19 +292,31 @@ class ExportService:
 
         return state
 
-    def
+    def import_session(self, spec_str: str):
         state = self.handler.state
 
-
-
-
-        if tables:
-            ExportSessions.clear_export_session_cache()
+        tables, _ = Exporter.import_session(spec_str, state)
+        if tables:
+            ExportSessions.clear_export_session_cache()
 
-
-
+        log()
+        with export_db(state) as dbs:
+            dbs.show_database()
 
-
+        return state
+
+    def import_files(self, spec_str: str):
+        state = self.handler.state
+
+        tables, _ = Exporter.import_local_csv_files(spec_str, state)
+        if tables:
+            ExportSessions.clear_export_session_cache()
+
+        log()
+        with export_db(state) as dbs:
+            dbs.show_database()
+
+        return state
 
 class ExporterHandler:
     def __init__(self, state: ReplState):
adam/commands/export/import_files.py
ADDED

@@ -0,0 +1,44 @@
+from adam.commands import validate_args
+from adam.commands.command import Command
+from adam.commands.export.export_sessions import ExportSessions
+from adam.commands.export.exporter import export
+from adam.commands.export.utils_export import state_with_pod
+from adam.repl_state import ReplState, RequiredState
+
+class ImportCSVFiles(Command):
+    COMMAND = 'import files'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ImportCSVFiles, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ImportCSVFiles.COMMAND
+
+    def required(self):
+        return RequiredState.CLUSTER_OR_POD
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            with validate_args(args, state, name='file') as spec:
+                with state_with_pod(state) as state:
+                    with export(state) as exporter:
+                        return exporter.import_files(spec)
+
+    def completion(self, state: ReplState):
+        # warm up cache
+        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
+        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
+
+        return {}
+
+    def help(self, _: ReplState):
+        return f'{ImportCSVFiles.COMMAND} <file-names,...>\t import files in session to Athena or SQLite'
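
Note: a hedged sketch (not part of the diff) of the new 'import files' flow wired up above; the spec-string format ('<file> as <database>.<table>') is inferred from the error messages in Exporter.import_local_csv_files and may differ in detail.

# Hedged sketch only; 'mydb.mytable' and the spec grammar are assumptions.
from adam.commands.export.exporter import export

def import_local_csv(state, csv_path: str, target: str = 'mydb.mytable'):
    with export(state) as exporter:
        # delegates to Exporter.import_local_csv_files via ExportService.import_files
        return exporter.import_files(f'{csv_path} as {target}')
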
adam/commands/export/import_session.py
CHANGED

@@ -1,6 +1,8 @@
+from adam.commands import validate_args
 from adam.commands.command import Command
 from adam.commands.export.export_sessions import ExportSessions
 from adam.commands.export.exporter import export
+from adam.commands.export.utils_export import state_with_pod
 from adam.repl_state import ReplState, RequiredState
 
 class ImportSession(Command):
@@ -26,13 +28,15 @@ class ImportSession(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            with
-
+            with validate_args(args, state, name='export session') as spec:
+                with state_with_pod(state) as state:
+                    with export(state) as exporter:
+                        return exporter.import_session(spec)
 
     def completion(self, state: ReplState):
         # warm up cache
-        ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
-        ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
+        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
+        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
 
         return {}
 
adam/commands/export/importer.py
CHANGED

@@ -16,6 +16,13 @@ class Importer:
     def import_from_csv(self, state: ReplState, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
         pass
 
+    @abstractmethod
+    def import_from_local_csv(self, state: ReplState,
+                              keyspace: str, table: str, target_table: str, columns: str,
+                              csv_file: str,
+                              multi_tables = True, create_db = False):
+        pass
+
     def move_to_done(self, state: ReplState, from_session: str, keyspace: str, target_table: str):
         pod = state.pod
         namespace = state.namespace