kaqing 2.0.110__py3-none-any.whl → 2.0.184__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of kaqing might be problematic. Click here for more details.
- adam/__init__.py +0 -2
- adam/app_session.py +9 -12
- adam/apps.py +18 -4
- adam/batch.py +5 -5
- adam/checks/check_utils.py +16 -46
- adam/checks/cpu.py +7 -1
- adam/checks/cpu_metrics.py +52 -0
- adam/checks/disk.py +2 -3
- adam/columns/columns.py +3 -1
- adam/columns/cpu.py +3 -1
- adam/columns/cpu_metrics.py +22 -0
- adam/columns/memory.py +3 -4
- adam/commands/__init__.py +24 -0
- adam/commands/alter_tables.py +33 -48
- adam/commands/app/__init__.py +0 -0
- adam/commands/app/app.py +38 -0
- adam/commands/{app_ping.py → app/app_ping.py} +7 -13
- adam/commands/app/show_app_actions.py +49 -0
- adam/commands/{show → app}/show_app_id.py +8 -11
- adam/commands/{show → app}/show_app_queues.py +7 -14
- adam/commands/app/utils_app.py +98 -0
- adam/commands/audit/audit.py +27 -31
- adam/commands/audit/audit_repair_tables.py +14 -18
- adam/commands/audit/audit_run.py +16 -23
- adam/commands/audit/show_last10.py +4 -17
- adam/commands/audit/show_slow10.py +4 -17
- adam/commands/audit/show_top10.py +4 -16
- adam/commands/audit/utils_show_top10.py +15 -3
- adam/commands/bash/__init__.py +5 -0
- adam/commands/bash/bash.py +36 -0
- adam/commands/bash/bash_completer.py +93 -0
- adam/commands/bash/utils_bash.py +16 -0
- adam/commands/cat.py +36 -0
- adam/commands/cd.py +11 -95
- adam/commands/check.py +15 -24
- adam/commands/cli_commands.py +2 -3
- adam/commands/clipboard_copy.py +86 -0
- adam/commands/code.py +57 -0
- adam/commands/command.py +198 -40
- adam/commands/commands_utils.py +12 -27
- adam/commands/cql/cql_completions.py +27 -10
- adam/commands/cql/cqlsh.py +12 -30
- adam/commands/cql/utils_cql.py +297 -0
- adam/commands/deploy/code_start.py +7 -10
- adam/commands/deploy/code_stop.py +4 -21
- adam/commands/deploy/code_utils.py +3 -3
- adam/commands/deploy/deploy.py +4 -27
- adam/commands/deploy/deploy_frontend.py +14 -17
- adam/commands/deploy/deploy_pg_agent.py +3 -6
- adam/commands/deploy/deploy_pod.py +65 -73
- adam/commands/deploy/deploy_utils.py +14 -24
- adam/commands/deploy/undeploy.py +4 -27
- adam/commands/deploy/undeploy_frontend.py +4 -7
- adam/commands/deploy/undeploy_pg_agent.py +6 -8
- adam/commands/deploy/undeploy_pod.py +11 -12
- adam/commands/devices/__init__.py +0 -0
- adam/commands/devices/device.py +123 -0
- adam/commands/devices/device_app.py +163 -0
- adam/commands/devices/device_auit_log.py +49 -0
- adam/commands/devices/device_cass.py +179 -0
- adam/commands/devices/device_export.py +84 -0
- adam/commands/devices/device_postgres.py +150 -0
- adam/commands/devices/devices.py +25 -0
- adam/commands/download_file.py +47 -0
- adam/commands/exit.py +1 -4
- adam/commands/export/__init__.py +0 -0
- adam/commands/export/clean_up_all_export_sessions.py +37 -0
- adam/commands/export/clean_up_export_sessions.py +39 -0
- adam/commands/export/download_export_session.py +39 -0
- adam/commands/export/drop_export_database.py +39 -0
- adam/commands/export/drop_export_databases.py +37 -0
- adam/commands/export/export.py +53 -0
- adam/commands/export/export_databases.py +245 -0
- adam/commands/export/export_select.py +59 -0
- adam/commands/export/export_select_x.py +54 -0
- adam/commands/export/export_sessions.py +209 -0
- adam/commands/export/export_use.py +49 -0
- adam/commands/export/exporter.py +332 -0
- adam/commands/export/import_files.py +44 -0
- adam/commands/export/import_session.py +44 -0
- adam/commands/export/importer.py +81 -0
- adam/commands/export/importer_athena.py +177 -0
- adam/commands/export/importer_sqlite.py +67 -0
- adam/commands/export/show_column_counts.py +45 -0
- adam/commands/export/show_export_databases.py +38 -0
- adam/commands/export/show_export_session.py +39 -0
- adam/commands/export/show_export_sessions.py +37 -0
- adam/commands/export/utils_export.py +343 -0
- adam/commands/find_files.py +51 -0
- adam/commands/find_processes.py +76 -0
- adam/commands/head.py +36 -0
- adam/commands/help.py +5 -3
- adam/commands/intermediate_command.py +49 -0
- adam/commands/issues.py +11 -43
- adam/commands/kubectl.py +38 -0
- adam/commands/login.py +22 -24
- adam/commands/logs.py +3 -6
- adam/commands/ls.py +11 -116
- adam/commands/medusa/medusa.py +4 -22
- adam/commands/medusa/medusa_backup.py +20 -27
- adam/commands/medusa/medusa_restore.py +38 -37
- adam/commands/medusa/medusa_show_backupjobs.py +16 -18
- adam/commands/medusa/medusa_show_restorejobs.py +13 -18
- adam/commands/nodetool.py +11 -17
- adam/commands/param_get.py +11 -14
- adam/commands/param_set.py +8 -12
- adam/commands/postgres/postgres.py +45 -46
- adam/commands/postgres/postgres_databases.py +269 -0
- adam/commands/postgres/postgres_ls.py +4 -8
- adam/commands/postgres/postgres_preview.py +5 -9
- adam/commands/postgres/psql_completions.py +4 -3
- adam/commands/postgres/utils_postgres.py +70 -0
- adam/commands/preview_table.py +8 -44
- adam/commands/pwd.py +14 -46
- adam/commands/reaper/reaper.py +4 -27
- adam/commands/reaper/reaper_forward.py +49 -56
- adam/commands/reaper/reaper_forward_session.py +6 -0
- adam/commands/reaper/reaper_forward_stop.py +10 -16
- adam/commands/reaper/reaper_restart.py +7 -14
- adam/commands/reaper/reaper_run_abort.py +8 -33
- adam/commands/reaper/reaper_runs.py +43 -58
- adam/commands/reaper/reaper_runs_abort.py +29 -49
- adam/commands/reaper/reaper_schedule_activate.py +9 -32
- adam/commands/reaper/reaper_schedule_start.py +9 -32
- adam/commands/reaper/reaper_schedule_stop.py +9 -32
- adam/commands/reaper/reaper_schedules.py +4 -14
- adam/commands/reaper/reaper_status.py +8 -16
- adam/commands/reaper/utils_reaper.py +194 -0
- adam/commands/repair/repair.py +4 -22
- adam/commands/repair/repair_log.py +5 -11
- adam/commands/repair/repair_run.py +27 -34
- adam/commands/repair/repair_scan.py +32 -38
- adam/commands/repair/repair_stop.py +5 -11
- adam/commands/report.py +27 -29
- adam/commands/restart.py +25 -26
- adam/commands/rollout.py +19 -24
- adam/commands/shell.py +12 -4
- adam/commands/show/show.py +10 -25
- adam/commands/show/show_adam.py +3 -3
- adam/commands/show/show_cassandra_repairs.py +35 -0
- adam/commands/show/show_cassandra_status.py +33 -51
- adam/commands/show/show_cassandra_version.py +5 -18
- adam/commands/show/show_commands.py +20 -25
- adam/commands/show/show_host.py +1 -1
- adam/commands/show/show_login.py +20 -27
- adam/commands/show/show_params.py +2 -5
- adam/commands/show/show_processes.py +15 -19
- adam/commands/show/show_storage.py +10 -20
- adam/commands/watch.py +26 -29
- adam/config.py +5 -14
- adam/embedded_params.py +1 -1
- adam/log.py +4 -4
- adam/pod_exec_result.py +6 -3
- adam/repl.py +69 -115
- adam/repl_commands.py +52 -19
- adam/repl_state.py +161 -40
- adam/sql/sql_completer.py +52 -27
- adam/sql/sql_state_machine.py +131 -19
- adam/sso/authn_ad.py +6 -8
- adam/sso/authn_okta.py +4 -6
- adam/sso/cred_cache.py +3 -5
- adam/sso/idp.py +9 -12
- adam/utils.py +511 -9
- adam/utils_athena.py +145 -0
- adam/utils_audits.py +12 -103
- adam/utils_issues.py +32 -0
- adam/utils_k8s/app_clusters.py +28 -0
- adam/utils_k8s/app_pods.py +36 -0
- adam/utils_k8s/cassandra_clusters.py +30 -19
- adam/utils_k8s/cassandra_nodes.py +3 -3
- adam/utils_k8s/custom_resources.py +16 -17
- adam/utils_k8s/ingresses.py +2 -2
- adam/utils_k8s/jobs.py +7 -11
- adam/utils_k8s/k8s.py +87 -0
- adam/utils_k8s/kube_context.py +2 -2
- adam/utils_k8s/pods.py +89 -78
- adam/utils_k8s/secrets.py +4 -4
- adam/utils_k8s/service_accounts.py +5 -4
- adam/utils_k8s/services.py +2 -2
- adam/utils_k8s/statefulsets.py +1 -12
- adam/utils_local.py +4 -0
- adam/utils_net.py +4 -4
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +46 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +137 -0
- adam/version.py +1 -1
- {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/METADATA +1 -1
- kaqing-2.0.184.dist-info/RECORD +244 -0
- adam/commands/app.py +0 -67
- adam/commands/bash.py +0 -150
- adam/commands/cp.py +0 -95
- adam/commands/cql/cql_utils.py +0 -112
- adam/commands/devices.py +0 -118
- adam/commands/postgres/postgres_context.py +0 -239
- adam/commands/postgres/postgres_utils.py +0 -31
- adam/commands/reaper/reaper_session.py +0 -159
- adam/commands/show/show_app_actions.py +0 -56
- adam/commands/show/show_repairs.py +0 -47
- kaqing-2.0.110.dist-info/RECORD +0 -187
- {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/WHEEL +0 -0
- {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/entry_points.txt +0 -0
- {kaqing-2.0.110.dist-info → kaqing-2.0.184.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
from adam.commands.command import Command
|
|
2
|
+
from adam.commands.export.export_databases import export_db
|
|
3
|
+
from adam.repl_state import ReplState
|
|
4
|
+
|
|
5
|
+
class DropExportDatabases(Command):
    """REPL command that removes every export database (SQLite and Athena)."""

    COMMAND = 'drop all export databases'

    # Singleton: all instantiations share one instance.
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, 'instance'):
            cls.instance = super(DropExportDatabases, cls).__new__(cls)
        return cls.instance

    def __init__(self, successor: Command = None):
        super().__init__(successor)

    def command(self):
        return DropExportDatabases.COMMAND

    def required(self):
        # Available from both the Cassandra (c:) and export (x:) drives.
        return [ReplState.C, ReplState.X]

    def run(self, cmd: str, state: ReplState):
        args = self.args(cmd)
        if not args:
            # Not our command — delegate along the chain of responsibility.
            return super().run(cmd, state)

        with self.validate(args, state) as (args, state):
            with export_db(state) as dbs:
                dbs.drop_all()

        return state

    def completion(self, _: ReplState):
        # No arguments to complete.
        return {}

    def help(self, _: ReplState):
        return f'{DropExportDatabases.COMMAND}\t drop all export databases'
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from adam.commands import extract_options
|
|
2
|
+
from adam.commands.command import Command
|
|
3
|
+
from adam.commands.cql.utils_cql import cassandra_keyspaces, cassandra_table_names
|
|
4
|
+
from adam.commands.export.export_databases import ExportDatabases
|
|
5
|
+
from adam.commands.export.exporter import export
|
|
6
|
+
from adam.repl_state import ReplState, RequiredState
|
|
7
|
+
from adam.sql.sql_completer import SqlCompleter, SqlVariant
|
|
8
|
+
from adam.utils import log
|
|
9
|
+
from adam.utils_k8s.statefulsets import StatefulSets
|
|
10
|
+
|
|
11
|
+
class ExportTables(Command):
    """REPL `export` command: export Cassandra tables to SQLite, Athena or CSV."""

    COMMAND = 'export'

    # Singleton: all instantiations share one instance.
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, 'instance'):
            cls.instance = super(ExportTables, cls).__new__(cls)
        return cls.instance

    def __init__(self, successor: Command = None):
        super().__init__(successor)

    def command(self):
        return ExportTables.COMMAND

    def required(self):
        return RequiredState.CLUSTER_OR_POD

    def run(self, cmd: str, state: ReplState):
        args = self.args(cmd)
        if not args:
            # Not our command — delegate along the chain of responsibility.
            return super().run(cmd, state)

        with self.validate(args, state) as (args, state):
            with extract_options(args, '--export-only') as (args, export_only):
                with export(state) as exporter:
                    return exporter.export(args, export_only=export_only)

    def completion(self, state: ReplState):
        def make_completer():
            # CQL-flavored completer over the cluster's tables/keyspaces.
            return SqlCompleter(lambda: cassandra_table_names(state), expandables={
                'dml': 'export',
                'columns': lambda table: ['id', '*'],
                'keyspaces': lambda: cassandra_keyspaces(state),
                'export-dbs': lambda: ExportDatabases.database_names(),
            }, variant=SqlVariant.CQL)

        if not super().completion(state):
            return {}

        pods = StatefulSets.pod_names(state.sts, state.namespace)
        return {f'@{p}': {ExportTables.COMMAND: make_completer()} for p in pods}

    def help(self, _: ReplState):
        return f'{ExportTables.COMMAND} [* [in KEYSPACE]] | [TABLE] [as target-name] [with consistency <level>]\t export tables to Sqlite, Athena or CSV file'
|
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
from collections.abc import Callable
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
import os
|
|
4
|
+
import boto3
|
|
5
|
+
|
|
6
|
+
from adam.commands.export.export_sessions import ExportSessions
|
|
7
|
+
from adam.commands.export.importer import Importer
|
|
8
|
+
from adam.config import Config
|
|
9
|
+
from adam.repl_session import ReplSession
|
|
10
|
+
from adam.repl_state import ReplState
|
|
11
|
+
from adam.utils import debug, tabulize, log2, ing, log_exc
|
|
12
|
+
from adam.utils_athena import Athena
|
|
13
|
+
from adam.utils_sqlite import SQLite
|
|
14
|
+
|
|
15
|
+
LIKE = 'e%_%'
|
|
16
|
+
|
|
17
|
+
class ExportDatabases:
    """Static helpers for managing export databases.

    A database name encodes its backend: names starting with 's' are local
    SQLite files, names starting with 'e' are Athena databases. The prefix
    before the first '_' identifies the export session.
    """

    def run_query(query: str, database: str, show_query=False, output: Callable[[str], str] = None) -> int:
        """Run a query on the backend implied by `database`; return the row count.

        When the backend produced a log file, push a `:sh cat <file>` entry
        onto the REPL history so the user can inspect it quickly.
        """
        cnt: int = 0

        if show_query:
            log2(query)

        log_file = None
        # Dispatch on the name prefix: 's*' -> SQLite, otherwise Athena.
        if database.startswith('s'):
            cnt0, log_file = SQLite.run_query(query, database=database, output=output)
            cnt += cnt0
        else:
            cnt0, log_file = Athena.run_query(query, database=database, output=output)
            cnt += cnt0

        if Config().get('repl.history.push-cat-log-file', True):
            if log_file and ReplSession().prompt_session:
                ReplSession().prompt_session.history.append_string(f':sh cat {log_file}')

        return cnt

    def sessions_from_dbs(dbs: list[str]):
        """Collapse database names to their unique session prefixes."""
        sessions = set()

        for db in dbs:
            sessions.add(db.split('_')[0])

        return list(sessions)

    def drop_export_dbs(db: str = None):
        """Drop export databases (all of them when `db` is None); return the dropped names."""
        dbs: list[str] = []

        if not db or db.startswith('s'):
            dbs.extend(ExportDatabases.drop_sqlite_dbs(db))
        if not db or db.startswith('e'):
            dbs.extend(ExportDatabases.drop_athena_dbs(db))

        return dbs

    def drop_sqlite_dbs(db: str = None):
        """Delete local SQLite database files; return their names."""
        dbs = SQLite.database_names(db)
        if dbs:
            with ing(f'Dropping {len(dbs)} SQLite databases'):
                with log_exc():
                    for db in dbs:
                        file_path = f'{SQLite.local_db_dir()}/{db}'
                        try:
                            os.remove(file_path)
                        except OSError:
                            # Best-effort: the file may already be gone.
                            pass

        return dbs

    def drop_athena_dbs(db: str = None):
        """Drop Athena databases and delete the backing s3 export folder."""
        dbs = Athena.database_names(f'{db}_%' if db else LIKE)
        if dbs:
            with ing(f'Dropping {len(dbs)} Athena databases'):
                for db in dbs:
                    query = f'DROP DATABASE {db} CASCADE'
                    debug(query)
                    Athena.query(query)

        # NOTE(review): s3 cleanup runs even when no Athena database matched,
        # so a stale export folder is still removed — confirm intended.
        with ing(f'Deleting s3 folder: export'):
            with log_exc():
                if not db:
                    db = ''

                s3 = boto3.resource('s3')
                bucket = s3.Bucket(Config().get('export.bucket', 'c3.ops--qing'))
                bucket.objects.filter(Prefix=f'export/{db}').delete()

        return dbs

    def show_database(database: str):
        """Print per-keyspace table counts for one export database."""
        if not database:
            return

        ExportDatabases.clear_cache()

        keyspaces = {}
        for table in ExportDatabases.table_names(database):
            keyspace = table.split('.')[0]
            if keyspace in keyspaces:
                keyspaces[keyspace] += 1
            else:
                keyspaces[keyspace] = 1

        tabulize(keyspaces.items(), lambda a: f'{a[0]},{a[1]}', header='SCHEMA,# of TABLES', separator=',')

    def database_names():
        """All known export database session names (SQLite + Athena)."""
        return ExportDatabases.copy_database_names() + ExportDatabases.export_database_names()

    def copy_database_names():
        return list({n.split('_')[0] for n in SQLite.database_names()})

    def export_database_names():
        return list({n.split('_')[0] for n in Athena.database_names(LIKE)})

    def database_names_with_keyspace_cnt(importer: str = None):
        """Map session name -> keyspace count, optionally restricted to one importer."""
        r = {}

        names = []
        if not importer:
            names = SQLite.database_names() + Athena.database_names(LIKE)
        elif importer == 'athena':
            names = Athena.database_names(LIKE)
        else:
            names = SQLite.database_names()

        for n in names:
            tokens = n.split('_')
            name = tokens[0]
            keyspace = None
            if len(tokens) > 1:
                keyspace = tokens[1].replace('.db', '')

            # 'root' databases are bookkeeping, not exported keyspaces.
            if keyspace == 'root':
                continue

            if name in r:
                r[name] += 1
            else:
                r[name] = 1

        return r

    def table_names(session: str):
        """Fully-qualified `keyspace.table` names across the session's databases."""
        tables = []

        for sdb in ExportDatabases._session_database_names(session):
            if sdb.startswith('s'):
                for table in SQLite.table_names(database=sdb):
                    tables.append(f'{SQLite.keyspace(sdb)}.{table}')
            else:
                for table in Athena.table_names(database=sdb, function='export'):
                    tables.append(f'{sdb}.{table}')

        return tables

    def _session_database_names(db: str):
        """Per-keyspace database names belonging to `db`'s session."""
        eprefix = db
        if '_' in db:
            eprefix = db.split('_')[0]

        if db.startswith('s'):
            return SQLite.database_names(prefix=f'{eprefix}_')
        else:
            return Athena.database_names(like=f'{eprefix}_%')

    def drop_databases(sts: str, pod: str, namespace: str, database: str = None):
        """Drop export databases and clean up the completed sessions behind them."""
        importer = None
        if database:
            importer = Importer.importer_from_session(database)

        sessions_done = ExportSessions.export_session_names(sts, pod, namespace, importer=importer, export_state='done')
        sessions = ExportDatabases.sessions_from_dbs(ExportDatabases.drop_export_dbs(database))
        if sessions_done and sessions:
            # Only sessions that are both dropped here and fully exported.
            intersects = list(set(sessions_done) & set(sessions))
            with ing(f'Cleaning up {len(intersects)} completed sessions'):
                ExportSessions.clean_up_sessions(sts, pod, namespace, list(intersects))
                ExportSessions.clear_export_session_cache()

    def clear_cache(database: str = None):
        """Invalidate backend name caches for the given (or every) backend."""
        if not database or database.startswith('s'):
            SQLite.clear_cache()
        if not database or database.startswith('e'):
            Athena.clear_cache()

    def show_databases(importer: str = None):
        """Print a NAME/KEYSPACES table of export databases."""
        lines = [f'{k}\t{v}' for k, v in ExportDatabases.database_names_with_keyspace_cnt(importer).items()]
        tabulize(lines, header='NAME\tKEYSPACES', separator='\t')
|
|
189
|
+
|
|
190
|
+
class ExportDatabaseService:
    """Context-managed facade over ExportDatabases, bound to the handler's REPL state."""

    def __init__(self, handler: 'ExportDatabaseHandler'):
        self.handler = handler

    def sql(self, query: str, database: str = None, backgrounded = False):
        """Run a SQL query; when backgrounded, capture output to a timestamped log file."""
        database = database or self.handler.state.export_session

        def output(out: str):
            # Persist the query output and return the log file path.
            prefix = Config().get('export.log-prefix', '/tmp/qing')
            path = f'{prefix}-{datetime.now().strftime("%d%H%M%S")}-export.log'
            with open(path, 'w') as f:
                f.write(out)
            return path

        ExportDatabases.run_query(query, database,
                                  output=output if backgrounded else None,
                                  show_query=not backgrounded)

    def drop(self, database: str):
        """Drop one export database and detach it from the REPL state if current."""
        st = self.handler.state

        ExportDatabases.drop_databases(st.sts, st.pod, st.namespace, database)
        ExportDatabases.clear_cache(database)
        if st.export_session == database:
            st.export_session = None

    def drop_all(self):
        """Drop every export database and clear the current session."""
        st = self.handler.state

        ExportDatabases.drop_databases(st.sts, st.pod, st.namespace)
        ExportDatabases.clear_cache()

        st.export_session = None

    def show_databases(self, importer: str = None):
        ExportDatabases.show_databases(importer)

    def show_database(self, database: str = None):
        # Default to the session currently selected in the REPL.
        ExportDatabases.show_database(database or self.handler.state.export_session)
|
|
233
|
+
|
|
234
|
+
class ExportDatabaseHandler:
    """Context manager that yields an ExportDatabaseService for a REPL state."""

    def __init__(self, state: ReplState = None):
        self.state = state

    def __enter__(self):
        return ExportDatabaseService(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Never suppress exceptions raised inside the `with` body.
        return False
|
|
243
|
+
|
|
244
|
+
def export_db(state: ReplState = None):
    """Entry point: `with export_db(state) as dbs:` yields an ExportDatabaseService."""
    return ExportDatabaseHandler(state)
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from adam.commands import extract_trailing_options, validate_args
|
|
2
|
+
from adam.commands.command import Command
|
|
3
|
+
from adam.commands.export.export_databases import ExportDatabases, export_db
|
|
4
|
+
from adam.repl_state import ReplState, RequiredState
|
|
5
|
+
from adam.sql.sql_completer import SqlCompleter, SqlVariant
|
|
6
|
+
from adam.utils_athena import Athena
|
|
7
|
+
|
|
8
|
+
class ExportSelect(Command):
    """REPL `.select` command: run SQL statements against the current export database."""

    COMMAND = '.select'

    # Singleton: all instantiations share one instance.
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, 'instance'):
            cls.instance = super(ExportSelect, cls).__new__(cls)
        return cls.instance

    def __init__(self, successor: Command = None):
        super().__init__(successor)

    def command(self):
        return ExportSelect.COMMAND

    def required(self):
        return RequiredState.EXPORT_DB

    def run(self, cmd: str, state: ReplState):
        args = self.args(cmd)
        if not args:
            # Not our command — delegate along the chain of responsibility.
            return super().run(cmd, state)

        with self.validate(args, state) as (args, state):
            # A trailing '&' backgrounds the query.
            with extract_trailing_options(args, '&') as (args, backgrounded):
                with validate_args(args, state, name='SQL statement') as query:
                    with export_db(state) as dbs:
                        dbs.sql(f'select {query}', backgrounded=backgrounded)

        return state

    def completion(self, state: ReplState):
        if not state.export_session:
            return {}

        db = state.export_session

        # Warm up the caches the first time the x: drive is accessed.
        ExportDatabases.table_names(db)
        Athena.column_names(database=db, function='export')
        Athena.column_names(partition_cols_only=True, database=db, function='export')

        completer = SqlCompleter(
            lambda: ExportDatabases.table_names(db),
            dml='select',
            expandables={
                'columns': lambda table: Athena.column_names(database=db, function='export'),
            },
            variant=SqlVariant.ATHENA
        )
        return {ExportSelect.COMMAND: completer}

    def help(self, _: ReplState):
        return f'.<sql-select-statements>\t run queries on export database'
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
from adam.commands.command import Command
|
|
2
|
+
from adam.commands.export.export_databases import ExportDatabases
|
|
3
|
+
from adam.repl_state import ReplState, RequiredState
|
|
4
|
+
from adam.sql.sql_completer import SqlCompleter, SqlVariant
|
|
5
|
+
from adam.utils_athena import Athena
|
|
6
|
+
|
|
7
|
+
# No action body, only for a help entry and auto-completion
|
|
8
|
+
class ExportSelectX(Command):
    """Help/auto-completion stub for SQL verbs on the x: drive — no run() action."""

    COMMAND = 'select_on_x'

    # Singleton: all instantiations share one instance.
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, 'instance'):
            cls.instance = super(ExportSelectX, cls).__new__(cls)
        return cls.instance

    def __init__(self, successor: Command = None):
        super().__init__(successor)

    def command(self):
        return ExportSelectX.COMMAND

    def required(self):
        return RequiredState.EXPORT_DB

    def completion(self, state: ReplState):
        def completer(dml):
            # One Athena-flavored SqlCompleter per DML verb; all share config.
            return SqlCompleter(
                lambda: ExportDatabases.table_names(state.export_session),
                dml=dml,
                expandables={
                    'export-dbs': lambda: ExportDatabases.database_names(),
                    'columns': lambda _: Athena.column_names(database=state.export_session, function='export'),
                },
                variant=SqlVariant.ATHENA
            )

        completions = {}

        if state.device == ReplState.X:
            completions = {'drop': completer('drop')}

        if state.export_session:
            completions |= {dml: completer(dml) for dml in ['select', 'preview']}

        return completions

    def help(self, _: ReplState):
        return f'<sql-select-statements>\t run queries on export database'
|
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
|
|
5
|
+
from adam.commands.export.importer import Importer
|
|
6
|
+
from adam.commands.export.utils_export import ExportTableStatus, csv_dir, find_files
|
|
7
|
+
from adam.config import Config
|
|
8
|
+
from adam.repl_state import ReplState
|
|
9
|
+
from adam.utils import log2, tabulize, log, parallelize
|
|
10
|
+
from adam.utils_k8s.cassandra_nodes import CassandraNodes
|
|
11
|
+
from adam.utils_k8s.pods import Pods, log_prefix
|
|
12
|
+
from adam.utils_k8s.statefulsets import StatefulSets
|
|
13
|
+
from adam.utils_local import local_tmp_dir
|
|
14
|
+
|
|
15
|
+
class ExportSessions:
    """Helpers for locating and cleaning up export sessions on Cassandra pods.

    A session's progress is tracked through log-file name suffixes:
    no suffix (still exporting), '.pending_import', '.done'.
    """

    def clear_export_session_cache():
        """Invalidate both lru caches after anything changes on the pod."""
        ExportSessions.find_export_sessions.cache_clear()
        ExportSessions.export_session_names.cache_clear()

    @functools.lru_cache()
    def export_session_names(sts: str, pod: str, namespace: str, importer: str = None, export_state = None):
        """Session names on the pod, optionally filtered by export state."""
        if not sts or not namespace:
            return []

        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return []

        return [session for session, state in ExportSessions.find_export_sessions(pod, namespace, importer).items() if not export_state or state == export_state]

    @functools.lru_cache()
    def find_export_sessions(pod: str, namespace: str, importer: str = None, limit = 100):
        """Map session name -> state ('in_export' | 'pending_import' | 'done')."""
        sessions: dict[str, str] = {}

        prefix = Importer.prefix_from_importer(importer)

        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{prefix}*_*.log*')

        if not log_files:
            return {}

        for log_file in log_files[:limit]:
            # NOTE(review): '\.log?' makes the 'g' optional; presumably '\.log'
            # was intended — confirm before tightening the pattern.
            m = re.match(rf'{log_prefix()}-(.*?)_.*\.log?(.*)', log_file)
            if m:
                s = m.group(1)
                state = m.group(2)  # '', '.pending_import', '.done'
                if state:
                    state = state.strip('.')
                else:
                    state = 'in_export'

                if s not in sessions:
                    sessions[s] = state
                elif sessions[s] == 'done' and state != 'done':
                    # A session is 'done' only if every one of its logs is done.
                    sessions[s] = state

        return sessions

    def clean_up_all_sessions(sts: str, pod: str, namespace: str):
        """Wipe every session's csv folder and log files; True when attempted."""
        if not sts or not namespace:
            return False

        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/*', show_out=Config().is_debug(), shell='bash')
        CassandraNodes.exec(pod, namespace, f'rm -rf {log_prefix()}-*.log*', show_out=Config().is_debug(), shell='bash')

        return True

    def clean_up_sessions(sts: str, pod: str, namespace: str, sessions: list[str], max_workers = 0):
        """Clean up many sessions in parallel; returns (csv_count, log_count).

        NOTE(review): returns [] (not (0, 0)) when `sessions` is empty — callers
        that unpack the result must not pass an empty list; confirm intent.
        """
        if not sessions:
            return []

        if not max_workers:
            max_workers = Config().action_workers('export', 8)

        with parallelize(sessions,
                         max_workers,
                         msg='Cleaning|Cleaned up {size} export sessions') as pool:
            cnt_tuples = pool.map(lambda session: ExportSessions.clean_up_session(sts, pod, namespace, session, True))
            csv_cnt = 0
            log_cnt = 0
            # Avoid shadowing the imported log() / stdlib csv names.
            for (csv_n, log_n) in cnt_tuples:
                csv_cnt += csv_n
                log_cnt += log_n

        return csv_cnt, log_cnt

    def clean_up_session(sts: str, pod: str, namespace: str, session: str, multi_tables = True):
        """Remove one session's csv folders and log files; returns (csv_count, log_count)."""
        if not sts or not namespace:
            return 0, 0

        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return 0, 0

        csv_cnt = 0
        log_cnt = 0

        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{session}_*.log*')

        for log_file in log_files:
            m = re.match(rf'{log_prefix()}-{session}_(.*?)\.(.*?)\.log.*', log_file)
            if m:
                table = m.group(2)

                CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/{session}_{table}', show_out=not multi_tables, shell='bash')
                csv_cnt += 1

                CassandraNodes.exec(pod, namespace, f'rm -rf {log_file}', show_out=not multi_tables, shell='bash')
                log_cnt += 1

        return csv_cnt, log_cnt

    def show_session(sts: str, pod: str, namespace: str, session: str):
        """Print a table of the session's per-table export status."""
        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return

        tables, _ = ExportTableStatus.from_session(sts, pod, namespace, session)
        log()
        tabulize(tables,
                 lambda t: f'{t.keyspace}\t{t.target_table}\t{"export_completed_pending_import" if t.status == "pending_import" else t.status}\t{t.csv_file}',
                 header='KEYSPACE\tTARGET_TABLE\tSTATUS\tCSV_FILES',
                 separator='\t')

    def download_session(sts: str, pod: str, namespace: str, session: str):
        """Download the session's csv files from the pod to the local tmp dir."""
        if not pod:
            pod = StatefulSets.pod_names(sts, namespace)[0]

        if not pod:
            return

        tables, _ = ExportTableStatus.from_session(sts, pod, namespace, session)

        def download_csv(table):
            from_path: str = table.csv_file

            # Mirror the pod-side path under the local tmp directory.
            to_path = from_path.replace(csv_dir(), local_tmp_dir())
            os.makedirs(os.path.dirname(to_path), exist_ok=True)
            Pods.download_file(pod, 'cassandra', namespace, from_path, to_path)

            log2(f'[{session}] Downloaded to {to_path}.')

        with parallelize(tables,
                         workers=Config().get('download.workers', 8),
                         msg='Downloading|Downloaded {size} csv files') as pool:
            pool.map(download_csv)
|
|
155
|
+
|
|
156
|
+
class ExportSessionService:
    """Context-managed facade over ExportSessions, bound to the handler's REPL state."""

    def __init__(self, handler: 'ExportSessionHandler'):
        self.handler = handler

    def clean_up(self, sessions: list[str]):
        """Clean up the given sessions and report how much was removed."""
        st = self.handler.state

        csv_cnt, log_cnt = ExportSessions.clean_up_sessions(st.sts, self.pod(), st.namespace, sessions)

        log(f'Removed {csv_cnt} csv and {log_cnt} log files.')

        ExportSessions.clear_export_session_cache()

    def clean_up_all(self):
        """Wipe every session on the pod, invalidating caches on success."""
        st = self.handler.state

        if ExportSessions.clean_up_all_sessions(st.sts, self.pod(), st.namespace):
            ExportSessions.clear_export_session_cache()

    def show_all_sessions(self):
        """Print all sessions, newest first, with their states."""
        st = self.handler.state

        rows = sorted(ExportSessions.find_export_sessions(self.pod(), st.namespace).items(), reverse=True)
        tabulize(rows, lambda args: f'{args[0]}\t{args[1]}', header='EXPORT_SESSION\tSTATUS', separator='\t')

    def show_session(self, session: str):
        st = self.handler.state
        ExportSessions.show_session(st.sts, self.pod(), st.namespace, session)

    def download_session(self, session: str):
        st = self.handler.state
        ExportSessions.download_session(st.sts, self.pod(), st.namespace, session)

    def pod(self):
        """Resolve the pod: explicit selection first, else the statefulset's first pod."""
        st = self.handler.state
        return st.pod or StatefulSets.pod_names(st.sts, st.namespace)[0]
|
|
197
|
+
|
|
198
|
+
class ExportSessionHandler:
    """Context manager that yields an ExportSessionService for a REPL state."""

    def __init__(self, state: ReplState = None):
        self.state = state

    def __enter__(self):
        return ExportSessionService(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Never suppress exceptions raised inside the `with` body.
        return False
|
|
207
|
+
|
|
208
|
+
def export_session(state: ReplState = None):
    """Entry point: `with export_session(state) as s:` yields an ExportSessionService."""
    return ExportSessionHandler(state)
|