kaqing 1.98.15__py3-none-any.whl → 2.0.145__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release: this version of kaqing might be problematic.
- adam/app_session.py +1 -1
- adam/apps.py +2 -2
- adam/batch.py +30 -31
- adam/checks/check_utils.py +4 -4
- adam/checks/compactionstats.py +1 -1
- adam/checks/cpu.py +2 -2
- adam/checks/disk.py +1 -1
- adam/checks/gossip.py +1 -1
- adam/checks/memory.py +3 -3
- adam/checks/status.py +1 -1
- adam/commands/alter_tables.py +81 -0
- adam/commands/app.py +3 -3
- adam/commands/app_ping.py +2 -2
- adam/commands/audit/audit.py +86 -0
- adam/commands/audit/audit_repair_tables.py +77 -0
- adam/commands/audit/audit_run.py +58 -0
- adam/commands/audit/show_last10.py +51 -0
- adam/commands/audit/show_slow10.py +50 -0
- adam/commands/audit/show_top10.py +48 -0
- adam/commands/audit/utils_show_top10.py +59 -0
- adam/commands/bash/bash.py +133 -0
- adam/commands/bash/bash_completer.py +93 -0
- adam/commands/cat.py +56 -0
- adam/commands/cd.py +12 -82
- adam/commands/check.py +6 -0
- adam/commands/cli_commands.py +3 -3
- adam/commands/code.py +60 -0
- adam/commands/command.py +48 -12
- adam/commands/commands_utils.py +4 -5
- adam/commands/cql/cql_completions.py +28 -0
- adam/commands/cql/cql_utils.py +209 -0
- adam/commands/{cqlsh.py → cql/cqlsh.py} +15 -10
- adam/commands/deploy/__init__.py +0 -0
- adam/commands/{frontend → deploy}/code_start.py +1 -1
- adam/commands/{frontend → deploy}/code_stop.py +1 -1
- adam/commands/{frontend → deploy}/code_utils.py +2 -2
- adam/commands/deploy/deploy.py +48 -0
- adam/commands/deploy/deploy_frontend.py +52 -0
- adam/commands/deploy/deploy_pg_agent.py +38 -0
- adam/commands/deploy/deploy_pod.py +110 -0
- adam/commands/deploy/deploy_utils.py +29 -0
- adam/commands/deploy/undeploy.py +48 -0
- adam/commands/deploy/undeploy_frontend.py +41 -0
- adam/commands/deploy/undeploy_pg_agent.py +42 -0
- adam/commands/deploy/undeploy_pod.py +51 -0
- adam/commands/devices/__init__.py +0 -0
- adam/commands/devices/device.py +27 -0
- adam/commands/devices/device_app.py +146 -0
- adam/commands/devices/device_auit_log.py +43 -0
- adam/commands/devices/device_cass.py +145 -0
- adam/commands/devices/device_export.py +86 -0
- adam/commands/devices/device_postgres.py +109 -0
- adam/commands/devices/devices.py +25 -0
- adam/commands/export/__init__.py +0 -0
- adam/commands/export/clean_up_export_session.py +53 -0
- adam/commands/{frontend/teardown_frontend.py → export/clean_up_export_sessions.py} +9 -11
- adam/commands/export/drop_export_database.py +58 -0
- adam/commands/export/drop_export_databases.py +46 -0
- adam/commands/export/export.py +83 -0
- adam/commands/export/export_databases.py +170 -0
- adam/commands/export/export_select.py +85 -0
- adam/commands/export/export_select_x.py +54 -0
- adam/commands/export/export_use.py +55 -0
- adam/commands/export/exporter.py +364 -0
- adam/commands/export/import_session.py +68 -0
- adam/commands/export/importer.py +67 -0
- adam/commands/export/importer_athena.py +80 -0
- adam/commands/export/importer_sqlite.py +47 -0
- adam/commands/export/show_column_counts.py +63 -0
- adam/commands/export/show_export_databases.py +39 -0
- adam/commands/export/show_export_session.py +51 -0
- adam/commands/export/show_export_sessions.py +47 -0
- adam/commands/export/utils_export.py +291 -0
- adam/commands/help.py +12 -7
- adam/commands/issues.py +6 -0
- adam/commands/kubectl.py +41 -0
- adam/commands/login.py +9 -5
- adam/commands/logs.py +2 -1
- adam/commands/ls.py +4 -107
- adam/commands/medusa/medusa.py +2 -26
- adam/commands/medusa/medusa_backup.py +2 -2
- adam/commands/medusa/medusa_restore.py +3 -4
- adam/commands/medusa/medusa_show_backupjobs.py +4 -3
- adam/commands/medusa/medusa_show_restorejobs.py +3 -3
- adam/commands/nodetool.py +9 -4
- adam/commands/param_set.py +1 -1
- adam/commands/postgres/postgres.py +42 -43
- adam/commands/postgres/postgres_context.py +248 -0
- adam/commands/postgres/postgres_preview.py +0 -1
- adam/commands/postgres/postgres_utils.py +31 -0
- adam/commands/postgres/psql_completions.py +10 -0
- adam/commands/preview_table.py +18 -40
- adam/commands/pwd.py +2 -28
- adam/commands/reaper/reaper.py +4 -24
- adam/commands/reaper/reaper_restart.py +1 -1
- adam/commands/reaper/reaper_session.py +2 -2
- adam/commands/repair/repair.py +3 -27
- adam/commands/repair/repair_log.py +1 -1
- adam/commands/repair/repair_run.py +2 -2
- adam/commands/repair/repair_scan.py +2 -7
- adam/commands/repair/repair_stop.py +1 -1
- adam/commands/report.py +6 -0
- adam/commands/restart.py +2 -2
- adam/commands/rollout.py +1 -1
- adam/commands/shell.py +33 -0
- adam/commands/show/show.py +11 -26
- adam/commands/show/show_app_actions.py +3 -0
- adam/commands/show/show_app_id.py +1 -1
- adam/commands/show/show_app_queues.py +3 -2
- adam/commands/show/show_cassandra_status.py +3 -3
- adam/commands/show/show_cassandra_version.py +3 -3
- adam/commands/show/show_commands.py +4 -1
- adam/commands/show/show_host.py +33 -0
- adam/commands/show/show_login.py +3 -0
- adam/commands/show/show_processes.py +1 -1
- adam/commands/show/show_repairs.py +2 -2
- adam/commands/show/show_storage.py +1 -1
- adam/commands/watch.py +1 -1
- adam/config.py +16 -3
- adam/embedded_params.py +1 -1
- adam/pod_exec_result.py +10 -2
- adam/repl.py +132 -117
- adam/repl_commands.py +62 -18
- adam/repl_state.py +276 -55
- adam/sql/__init__.py +0 -0
- adam/sql/sql_completer.py +120 -0
- adam/sql/sql_state_machine.py +617 -0
- adam/sql/term_completer.py +76 -0
- adam/sso/authenticator.py +1 -1
- adam/sso/authn_ad.py +36 -56
- adam/sso/authn_okta.py +6 -32
- adam/sso/cred_cache.py +1 -1
- adam/sso/idp.py +74 -9
- adam/sso/idp_login.py +2 -2
- adam/sso/idp_session.py +10 -7
- adam/utils.py +85 -4
- adam/utils_athena.py +145 -0
- adam/utils_audits.py +102 -0
- adam/utils_k8s/__init__.py +0 -0
- adam/utils_k8s/app_clusters.py +33 -0
- adam/utils_k8s/app_pods.py +31 -0
- adam/{k8s_utils → utils_k8s}/cassandra_clusters.py +6 -21
- adam/{k8s_utils → utils_k8s}/cassandra_nodes.py +12 -5
- adam/utils_k8s/config_maps.py +34 -0
- adam/utils_k8s/deployment.py +56 -0
- adam/{k8s_utils → utils_k8s}/jobs.py +1 -1
- adam/{k8s_utils → utils_k8s}/kube_context.py +1 -1
- adam/utils_k8s/pods.py +342 -0
- adam/{k8s_utils → utils_k8s}/secrets.py +4 -0
- adam/utils_k8s/service_accounts.py +169 -0
- adam/{k8s_utils → utils_k8s}/statefulsets.py +5 -4
- adam/{k8s_utils → utils_k8s}/volumes.py +9 -0
- adam/utils_net.py +24 -0
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +46 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +101 -0
- adam/version.py +1 -1
- {kaqing-1.98.15.dist-info → kaqing-2.0.145.dist-info}/METADATA +1 -1
- kaqing-2.0.145.dist-info/RECORD +227 -0
- adam/commands/bash.py +0 -87
- adam/commands/cql_utils.py +0 -53
- adam/commands/devices.py +0 -89
- adam/commands/frontend/setup.py +0 -60
- adam/commands/frontend/setup_frontend.py +0 -58
- adam/commands/frontend/teardown.py +0 -61
- adam/commands/postgres/postgres_session.py +0 -225
- adam/commands/user_entry.py +0 -77
- adam/k8s_utils/pods.py +0 -211
- kaqing-1.98.15.dist-info/RECORD +0 -160
- /adam/commands/{frontend → audit}/__init__.py +0 -0
- /adam/{k8s_utils → commands/bash}/__init__.py +0 -0
- /adam/{medusa_show_restorejobs.py → commands/cql/__init__.py} +0 -0
- /adam/{k8s_utils → utils_k8s}/custom_resources.py +0 -0
- /adam/{k8s_utils → utils_k8s}/ingresses.py +0 -0
- /adam/{k8s_utils → utils_k8s}/services.py +0 -0
- {kaqing-1.98.15.dist-info → kaqing-2.0.145.dist-info}/WHEEL +0 -0
- {kaqing-1.98.15.dist-info → kaqing-2.0.145.dist-info}/entry_points.txt +0 -0
- {kaqing-1.98.15.dist-info → kaqing-2.0.145.dist-info}/top_level.txt +0 -0

@@ -0,0 +1,67 @@
+from abc import abstractmethod
+
+from adam.commands.export.utils_export import csv_dir
+from adam.config import Config
+from adam.utils import ing
+from adam.utils_k8s.cassandra_nodes import CassandraNodes
+from adam.utils_k8s.pods import log_prefix
+
+class Importer:
+    @abstractmethod
+    def prefix(self):
+        pass
+
+    @abstractmethod
+    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        pass
+
+    def move_to_done(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, target_table: str):
+        log_file = f'{log_prefix()}-{from_session}_{keyspace}.{target_table}.log.pending_import'
+
+        to = f'{log_prefix()}-{to_session}_{keyspace}.{target_table}.log.done'
+
+        CassandraNodes.exec(pod, namespace, f'mv {log_file} {to}', show_out=Config().is_debug(), shell='bash')
+
+        return to, to_session
+
+    def prefix_adjusted_session(self, session: str):
+        if not session.startswith(self.prefix()):
+            return f'{self.prefix()}{session[1:]}'
+
+        return session
+
+    def remove_csv(self, pod: str, namespace: str, session: str, table: str, target_table: str, multi_tables = True):
+        with ing(f'[{session}] Cleaning up temporary files', suppress_log=multi_tables):
+            CassandraNodes.exec(pod, namespace, f'rm -rf {self.csv_file(session, table, target_table)}', show_out=Config().is_debug(), shell='bash')
+
+    def db(self, session: str, keyspace: str):
+        return f'{session}_{keyspace}'
+
+    def csv_file(self, session: str, table: str, target_table: str):
+        return f'{csv_dir()}/{session}_{target_table}/{table}.csv'
+
+def prefix_from_importer(importer: str = ''):
+    if not importer:
+        return ''
+
+    prefix = 's'
+
+    if importer == 'athena':
+        prefix = 'e'
+    elif importer == 'csv':
+        prefix = 'c'
+
+    return prefix
+
+def importer_from_session(session: str):
+    if not session:
+        return None
+
+    importer = 'csv'
+
+    if session.startswith('s'):
+        importer = 'sqlite'
+    elif session.startswith('e'):
+        importer = 'athena'
+
+    return importer
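
The +67 hunk above is the new Importer base class (evidently adam/commands/export/importer.py from the file list). It fixes the session-prefix convention used throughout the export feature: 'e' marks Athena sessions, 's' SQLite sessions, and 'c' plain CSV sessions. A minimal sketch of how the two module-level helpers map back and forth, assuming the wheel is installed so the module imports as packaged; the session names are hypothetical:

    # Hypothetical usage sketch; assumes the package layout shown in the file list above.
    from adam.commands.export.importer import importer_from_session, prefix_from_importer

    # prefix_from_importer maps an importer name to the one-letter session prefix,
    # and importer_from_session recovers the importer from a session name.
    assert prefix_from_importer('athena') == 'e'
    assert prefix_from_importer('sqlite') == 's'
    assert prefix_from_importer('') == ''

    assert importer_from_session('e20240101') == 'athena'
    assert importer_from_session('s20240101') == 'sqlite'
    assert importer_from_session('c20240101') == 'csv'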

@@ -0,0 +1,80 @@
+import boto3
+
+from adam.commands.export.importer import Importer
+from adam.commands.export.utils_export import GeneratorStream
+from adam.config import Config
+from adam.utils import log2, ing
+from adam.utils_athena import Athena
+from adam.utils_k8s.pods import Pods
+
+class AthenaImporter(Importer):
+    def ping():
+        session = boto3.session.Session()
+        credentials = session.get_credentials()
+
+        return credentials is not None
+
+    def prefix(self):
+        return 'e'
+
+    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        csv_file = self.csv_file(from_session, table, target_table)
+        db = self.db(to_session, keyspace)
+
+        succeeded = False
+        try:
+            bucket = Config().get('export.bucket', 'c3.ops--qing')
+
+            with ing(f'[{to_session}] Uploading to S3', suppress_log=multi_tables):
+                bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
+
+                s3 = boto3.client('s3')
+                s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{db}/{keyspace}/{target_table}/{table}.csv')
+
+            msg: str = None
+            if create_db:
+                msg = f"[{to_session}] Creating database {db}"
+            else:
+                msg = f"[{to_session}] Creating table {target_table}"
+            with ing(msg, suppress_log=multi_tables):
+                query = f'CREATE DATABASE IF NOT EXISTS {db};'
+                if Config().is_debug():
+                    log2(query)
+                Athena.query(query, 'default')
+
+                query = f'DROP TABLE IF EXISTS {target_table};'
+                if Config().is_debug():
+                    log2(query)
+                Athena.query(query, db)
+
+                athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
+                query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
+                        f' {athena_columns})\n' + \
+                        "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
+                        'WITH SERDEPROPERTIES (\n' + \
+                        ' "separatorChar" = ",",\n' + \
+                        ' "quoteChar" = "\\"")\n' + \
+                        f"LOCATION 's3://{bucket}/export/{db}/{keyspace}/{target_table}'\n" + \
+                        'TBLPROPERTIES ("skip.header.line.count"="1");'
+                if Config().is_debug():
+                    log2(query)
+                try:
+                    Athena.query(query, db)
+                except Exception as e:
+                    log2(f'*** Failed query:\n{query}')
+                    raise e
+
+            to, _ = self.move_to_done(pod, namespace, to_session, from_session, keyspace, target_table)
+
+            succeeded = True
+
+            return to, to_session
+        finally:
+            if succeeded:
+                self.remove_csv(pod, namespace, from_session, table, target_table, multi_tables)
+                Athena.clear_cache()
+
+                if not multi_tables:
+                    query = f'select * from {target_table} limit 10'
+                    log2(query)
+                    Athena.run_query(query, db)
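
AthenaImporter (the +80 hunk, apparently adam/commands/export/importer_athena.py) streams the exported CSV into S3 and then registers an OpenCSVSerde external table over that prefix, typing every column as string. The sketch below only replays the DDL-building string logic from import_from_csv with hypothetical bucket, session, and column values, to show the statement Athena ends up receiving; it does not touch AWS:

    # Illustration of the generated DDL; values are hypothetical stand-ins for the
    # config- and session-derived ones used by import_from_csv.
    bucket, db, keyspace, target_table = 'c3.ops--qing', 'e1_ks1', 'ks1', 'users'
    columns = 'id,name,email'

    athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
    query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
            f' {athena_columns})\n' + \
            "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
            'WITH SERDEPROPERTIES (\n' + \
            ' "separatorChar" = ",",\n' + \
            ' "quoteChar" = "\\"")\n' + \
            f"LOCATION 's3://{bucket}/export/{db}/{keyspace}/{target_table}'\n" + \
            'TBLPROPERTIES ("skip.header.line.count"="1");'
    print(query)
    # CREATE EXTERNAL TABLE IF NOT EXISTS users(
    #  id string, name string, email string)
    # ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
    # ... LOCATION 's3://c3.ops--qing/export/e1_ks1/ks1/users' ...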

@@ -0,0 +1,47 @@
+import os
+import sqlite3
+import pandas
+
+from adam.commands.export.importer import Importer
+from adam.commands.export.utils_export import GeneratorStream
+from adam.utils import log2, ing
+from adam.utils_k8s.pods import Pods
+from adam.utils_sqlite import SQLite
+
+class SqliteImporter(Importer):
+    def prefix(self):
+        return 's'
+
+    def import_from_csv(self, pod: str, namespace: str, to_session: str, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
+        csv_file = self.csv_file(from_session, table, target_table)
+        db = self.db(to_session, keyspace)
+
+        succeeded = False
+        conn = None
+        try:
+            os.makedirs(SQLite.local_db_dir(), exist_ok=True)
+            conn = sqlite3.connect(f'{SQLite.local_db_dir()}/{db}.db')
+
+            with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
+                bytes = Pods.read_file(pod, 'cassandra', namespace, csv_file)
+                df = pandas.read_csv(GeneratorStream(bytes))
+
+                df.to_sql(target_table, conn, index=False, if_exists='replace')
+
+            to, _ = self.move_to_done(pod, namespace, to_session, from_session, keyspace, target_table)
+
+            succeeded = True
+
+            return to, to_session
+        finally:
+            if succeeded:
+                self.remove_csv(pod, namespace, from_session, table, target_table, multi_tables)
+                SQLite.clear_cache()
+
+                if not multi_tables:
+                    query = f'select * from {target_table} limit 10'
+                    log2(query)
+                    SQLite.run_query(query, conn_passed=conn)
+
+            if conn:
+                conn.close()
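
SqliteImporter (the +47 hunk, apparently adam/commands/export/importer_sqlite.py) writes the CSV into a local SQLite database named <to_session>_<keyspace>.db under SQLite.local_db_dir(), one table per target table. A hedged sketch of inspecting such a database afterwards, assuming adam.utils_sqlite (added in this release but not shown here) exposes local_db_dir() as used above; the session, keyspace, and table names are hypothetical:

    # Hypothetical post-import inspection of the local SQLite database.
    import sqlite3
    import pandas

    from adam.utils_sqlite import SQLite

    conn = sqlite3.connect(f'{SQLite.local_db_dir()}/s1_ks1.db')   # session 's1', keyspace 'ks1'
    try:
        print(pandas.read_sql_query('select * from users limit 10', conn))  # 'users' is a made-up table
    finally:
        conn.close()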

@@ -0,0 +1,63 @@
+from adam.commands.command import Command
+from adam.commands.export.export_databases import ExportDatabases
+from adam.config import Config
+from adam.repl_state import ReplState, RequiredState
+from adam.utils import log2
+from adam.utils_athena import Athena
+from adam.utils_sqlite import SQLite
+
+class ShowColumnCounts(Command):
+    COMMAND = 'show column counts on'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ShowColumnCounts, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ShowColumnCounts.COMMAND
+
+    def required(self):
+        return RequiredState.EXPORT_DB
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        state, args = self.apply_state(args, state)
+        if not self.validate_state(state):
+            return state
+
+        if not args:
+            if state.in_repl:
+                log2('Use a SQL statement.')
+            else:
+                log2('* SQL statement is missing.')
+
+                Command.display_help()
+
+            return 'command-missing'
+
+        copy_or_export = 'copy'
+        if state.export_session.startswith('e'):
+            copy_or_export = 'export'
+
+        table = args[0]
+        query = Config().get(f'{copy_or_export}.column_counts_query', 'select id, count(id) as columns from {table} group by id')
+        query = query.replace('{table}', table)
+        ExportDatabases.run_query(query, state.export_session)
+
+        return state
+
+    def completion(self, state: ReplState):
+        if not state.export_session:
+            return {}
+
+        return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)})
+
+    def help(self, _: ReplState):
+        return f'{ShowColumnCounts.COMMAND} <export-table-name>\t show column count per id'
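
ShowColumnCounts has the same shape as the other new REPL commands: a singleton that takes an optional successor and, when the input does not start with its COMMAND string, defers to the base class via super().run(cmd, state). The Command base class is not shown in this diff (adam/commands/command.py changed +48 -12), so the wiring below is only a hypothetical illustration of that apparent chain-of-responsibility pattern; the real chain is presumably assembled elsewhere in the package, e.g. in adam/repl_commands.py. Note also that the column-count query is a config template ('copy.column_counts_query' or 'export.column_counts_query', chosen by the session prefix) with a literal {table} placeholder filled in via str.replace.

    # Hypothetical wiring; successor handling lives in the unshown Command base class.
    from adam.commands.export.show_column_counts import ShowColumnCounts
    from adam.commands.export.show_export_sessions import ShowExportSessions

    # Each command handles inputs matching its COMMAND prefix and is expected to
    # hand anything else to its successor via super().run(cmd, state).
    chain = ShowColumnCounts(successor=ShowExportSessions())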

@@ -0,0 +1,39 @@
+from adam.commands.command import Command
+from adam.commands.devices.device_export import DeviceExport
+from adam.repl_state import ReplState
+
+class ShowExportDatabases(Command):
+    COMMAND = 'show export databases'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ShowExportDatabases, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ShowExportDatabases.COMMAND
+
+    def required(self):
+        return [ReplState.C, ReplState.X]
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        state, args = self.apply_state(args, state)
+        if not self.validate_state(state):
+            return state
+
+        DeviceExport().show_export_databases()
+
+        return state
+
+    def completion(self, state: ReplState):
+        return DeviceExport().ls_completion(ShowExportDatabases.COMMAND, state, default = super().completion(state))
+
+    def help(self, _: ReplState):
+        return f'{ShowExportDatabases.COMMAND}\t list export databases'

@@ -0,0 +1,51 @@
+from adam.commands.command import Command
+from adam.commands.export.export_databases import ExportDatabases
+from adam.commands.export.exporter import Exporter
+from adam.repl_state import ReplState, RequiredState
+from adam.utils import log2
+
+class ShowExportSession(Command):
+    COMMAND = 'show export session'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ShowExportSession, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ShowExportSession.COMMAND
+
+    def required(self):
+        return RequiredState.CLUSTER_OR_POD
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        state, args = self.apply_state(args, state)
+        if not self.validate_state(state):
+            return state
+
+        if not args:
+            if state.in_repl:
+                log2('Specify export database name.')
+            else:
+                log2('* Database name is missing.')
+
+                Command.display_help()
+
+            return 'command-missing'
+
+        ExportDatabases.disply_export_session(state.sts, state.pod, state.namespace, args[0])
+
+        return state
+
+    def completion(self, state: ReplState):
+        return super().completion(state, {session: None for session in Exporter.export_session_names(state.sts, state.pod, state.namespace)})
+
+    def help(self, _: ReplState):
+        return f'{ShowExportSession.COMMAND} <export-session-name>\t show export session'

@@ -0,0 +1,47 @@
+from adam.commands.command import Command
+from adam.commands.export.exporter import Exporter
+from adam.repl_state import ReplState, RequiredState
+from adam.utils import lines_to_tabular, log
+from adam.utils_k8s.statefulsets import StatefulSets
+
+class ShowExportSessions(Command):
+    COMMAND = 'show export sessions'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(ShowExportSessions, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return ShowExportSessions.COMMAND
+
+    def required(self):
+        return RequiredState.CLUSTER_OR_POD
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        state, args = self.apply_state(args, state)
+        if not self.validate_state(state):
+            return state
+
+        pod = state.pod
+        if not pod:
+            pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
+
+        sessions: dict[str, str] = Exporter.find_export_sessions(pod, state.namespace)
+        log(lines_to_tabular([f'{session}\t{export_state}' for session, export_state in sorted(sessions.items(), reverse=True)],
+                             header='EXPORT_SESSION\tSTATUS', separator='\t'))
+
+        return state
+
+    def completion(self, state: ReplState):
+        return super().completion(state)
+
+    def help(self, _: ReplState):
+        return f'{ShowExportSessions.COMMAND}\t list export sessions'
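
ShowExportSessions renders one row per session through lines_to_tabular from adam.utils, which this diff touches (+85 -4) but does not show. The stand-in below is purely hypothetical and only illustrates the intended EXPORT_SESSION/STATUS listing; the packaged helper may format differently:

    # Hypothetical stand-in for the tabular rendering; not the packaged lines_to_tabular.
    def tabularize(lines: list[str], header: str, separator: str = '\t') -> str:
        rows = [header.split(separator)] + [line.split(separator) for line in lines]
        widths = [max(len(row[i]) for row in rows) for i in range(len(rows[0]))]
        return '\n'.join('  '.join(cell.ljust(w) for cell, w in zip(row, widths)) for row in rows)

    print(tabularize(['e20240101\tdone', 's20240102\texported'], header='EXPORT_SESSION\tSTATUS'))
    # EXPORT_SESSION  STATUS
    # e20240101       done
    # s20240102       exported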

@@ -0,0 +1,291 @@
+import io
+import re
+
+from adam.config import Config
+from adam.pod_exec_result import PodExecResult
+from adam.utils_k8s.cassandra_nodes import CassandraNodes
+from adam.utils_k8s.pods import log_prefix
+
+class ImportSpec:
+    def __init__(self, session: str, importer: str):
+        self.session = session
+        self.importer = importer
+
+    def parse_specs(specs_str: str):
+        session: str = None
+        importer: str = None
+
+        if specs_str:
+            importer, session = ImportSpec._extract_importer(specs_str.strip(' '))
+
+        return ImportSpec(session, importer)
+
+    def _extract_importer(spec_str: str) -> tuple[str, str]:
+        importer = None
+        rest = spec_str
+
+        p = re.compile(r"(.*?)to\s+(.*)", re.IGNORECASE)
+        match = p.match(spec_str)
+        if match:
+            rest = match.group(1).strip(' ')
+            importer = match.group(2).strip(' ')
+
+        return importer, rest
+
+class ExportSpec(ImportSpec):
+    def __init__(self, keyspace: str, consistency: str, importer: str, tables: list['ExportTableSpec'], session: str = None):
+        super().__init__(None, importer)
+
+        self.keyspace = keyspace
+        self.consistency = consistency
+        self.tables = tables
+        self.session = session
+
+    def __str__(self):
+        return f'keyspace: {self.keyspace}, consistency: {self.consistency}, importer: {self.importer}, tables: {",".join([t.table for t in self.tables])}, session: {self.session}'
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportSpec):
+            return NotImplemented
+
+        return self.keyspace == other.keyspace and self.tables == other.tables and self.consistency == other.consistency and self.importer == other.importer and self.session == other.session
+
+    def parse_specs(specs_str: str):
+        keyspace: str = None
+        consistency: str = None
+        importer: str = None
+        specs: list[ExportTableSpec] = None
+
+        if specs_str:
+            importer, specs_str = ExportSpec._extract_importer(specs_str.strip(' '))
+            keyspace, specs_str = ExportSpec._extract_keyspace(specs_str)
+            consistency, specs = ExportSpec._extract_consisteny(specs_str)
+
+        return ExportSpec(keyspace, consistency, importer, specs)
+
+    def _extract_keyspace(spec_str: str) -> tuple[str, str]:
+        keyspace = None
+        rest = spec_str
+
+        p = re.compile(r"\s*\*\s+in\s+(\S+)(.*)", re.IGNORECASE)
+        match = p.match(spec_str)
+        if match:
+            keyspace = match.group(1).strip(' ')
+            rest = match.group(2).strip(' ')
+        elif spec_str.startswith('*'):
+            keyspace = '*'
+            rest = spec_str[1:].strip(' ')
+
+        return keyspace, rest
+
+    def _extract_consisteny(spec_str: str) -> tuple[str, list['ExportTableSpec']]:
+        consistency = None
+
+        p = re.compile(r"(.*?)with\s+consistency\s+(.*)", re.IGNORECASE)
+        match = p.match(spec_str)
+        if match:
+            spec_str = match.group(1).strip(' ')
+            consistency = match.group(2)
+
+        if spec_str:
+            p = r",\s*(?![^()]*\))"
+            specs = re.split(p, spec_str)
+
+            return consistency, [ExportTableSpec.parse(spec) for spec in specs]
+
+        return consistency, None
+
+class ExportTableSpec:
+    def __init__(self, keyspace: str, table: str, columns: str = None, target_table: str = None):
+        self.keyspace = keyspace
+        self.table = table
+        self.columns = columns
+        self.target_table = target_table
+
+    def __str__(self):
+        return f'{self.keyspace}.{self.table}({self.columns}) AS {self.target_table}'
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportTableSpec):
+            return NotImplemented
+
+        return self.keyspace == other.keyspace and self.table == other.table and self.columns == other.columns and self.target_table == other.target_table
+
+    def from_status(status: 'ExportTableStatus'):
+        return ExportTableSpec(status.keyspace, status.table, target_table=status.target_table)
+
+    def parse(spec_str: str) -> 'ExportTableSpec':
+        target = None
+
+        p = re.compile(r"(.*?)\s+as\s+(.*)", re.IGNORECASE)
+        match = p.match(spec_str)
+        if match:
+            spec_str = match.group(1)
+            target = match.group(2)
+
+        keyspace = None
+        table = spec_str
+        columns = None
+
+        p = re.compile('(.*?)\.(.*?)\((.*)\)')
+        match = p.match(spec_str)
+        if match:
+            keyspace = match.group(1)
+            table = match.group(2)
+            columns = match.group(3)
+        else:
+            p = re.compile('(.*?)\.(.*)')
+            match = p.match(spec_str)
+            if match:
+                keyspace = match.group(1)
+                table = match.group(2)
+
+        return ExportTableSpec(keyspace, table, columns, target)
+
+    def __eq__(self, other):
+        if isinstance(other, ExportTableSpec):
+            return self.keyspace == other.keyspace and self.table == other.table and self.columns == other.columns and self.target_table == other.target_table
+
+        return False
+
+    def __str__(self):
+        return f'{self.keyspace}.{self.table}({self.columns}) as {self.target_table}'
+
+class ExportTableStatus:
+    def __init__(self, keyspace: str, target_table: str, status: str, table: str = None):
+        self.keyspace = keyspace
+        self.target_table = target_table
+        self.status = status
+        self.table = table
+
+    def __str__(self):
+        return f'{self.keyspace}.{self.table} as {self.target_table} = {self.status}'
+
+    def __eq__(self, other):
+        if isinstance(other, ExportTableStatus):
+            return self.keyspace == other.keyspace and self.table == other.table and self.status == other.status and self.target_table == other.target_table
+
+        return False
+
+    def from_session(sts: str, pod: str, namespace: str, export_session: str):
+        statuses: list[ExportTableStatus] = []
+
+        status_in_whole = 'done'
+        log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{export_session}_*.log*')
+
+        for log_file in log_files:
+            status: ExportTableStatus = ExportTableStatus.from_log_file(pod, namespace, export_session, log_file)
+            statuses.append(status)
+
+            if status.status != 'done':
+                status_in_whole = status.status
+
+        return statuses, status_in_whole
+
+    def from_log_file(pod: str, namespace: str, copy_session: str, log_file: str):
+        def get_csv_files_n_table(target_table: str):
+            db = f'{copy_session}_{target_table}'
+            csv_file = f'{csv_dir()}/{db}/*.csv'
+            csv_files: list[str] = find_files(pod, namespace, csv_file)
+            if csv_files:
+                table = target_table
+                m = re.match(f'{csv_dir()}/{db}/(.*).csv', csv_files[0])
+                if m:
+                    table = m.group(1)
+                return csv_files, table
+
+            return csv_files, target_table
+
+        m = re.match(f'{log_prefix()}-{copy_session}_(.*?)\.(.*?)\.log(.*)', log_file)
+        if m:
+            keyspace = m.group(1)
+            target_table = m.group(2)
+            state = m.group(3)
+            if state == '.pending_import':
+                _, table = get_csv_files_n_table(target_table)
+                return ExportTableStatus(keyspace, target_table, 'pending_import', table)
+            elif state == '.done':
+                return ExportTableStatus(keyspace, target_table, 'done', target_table)
+
+            # 4 rows exported to 1 files in 0 day, 0 hour, 0 minute, and 1.335 seconds.
+            pattern = 'rows exported to'
+            r: PodExecResult = CassandraNodes.exec(pod, namespace, f"grep '{pattern}' {log_file}", show_out=Config().is_debug(), shell='bash')
+            if r.exit_code() == 0:
+                csv_files, table = get_csv_files_n_table(target_table)
+                if csv_files:
+                    return ExportTableStatus(keyspace, target_table, 'exported', table)
+                else:
+                    return ExportTableStatus(keyspace, target_table, 'imported', target_table)
+            else:
+                return ExportTableStatus(keyspace, target_table, 'export_in_pregress')
+
+        return ExportTableStatus(None, None, 'unknown')
+
+def csv_dir():
+    return Config().get('export.csv_dir', '/c3/cassandra/tmp')
+
+def find_files(pod: str, namespace: str, pattern: str, mmin: int = 0):
+    if mmin:
+        r = CassandraNodes.exec(pod, namespace, f'find {pattern} -mmin -{mmin}', show_out=Config().is_debug(), shell='bash')
+    else:
+        r = CassandraNodes.exec(pod, namespace, f'find {pattern}', show_out=Config().is_debug(), shell='bash')
+
+    log_files = []
+    for line in r.stdout.split('\n'):
+        line = line.strip(' \r')
+        if line:
+            log_files.append(line)
+
+    return log_files
+
+class GeneratorStream(io.RawIOBase):
+    def __init__(self, generator):
+        self._generator = generator
+        self._buffer = b''  # Buffer to store leftover bytes from generator yields
+
+    def readable(self):
+        return True
+
+    def _read_from_generator(self):
+        try:
+            chunk = next(self._generator)
+            if isinstance(chunk, str):
+                chunk = chunk.encode('utf-8')  # Encode if generator yields strings
+            self._buffer += chunk
+        except StopIteration:
+            pass  # Generator exhausted
+
+    def readinto(self, b):
+        # Fill the buffer if necessary
+        while len(self._buffer) < len(b):
+            old_buffer_len = len(self._buffer)
+            self._read_from_generator()
+            if len(self._buffer) == old_buffer_len:  # Generator exhausted and buffer empty
+                break
+
+        bytes_to_read = min(len(b), len(self._buffer))
+        b[:bytes_to_read] = self._buffer[:bytes_to_read]
+        self._buffer = self._buffer[bytes_to_read:]
+        return bytes_to_read
+
+    def read(self, size=-1):
+        if size == -1:  # Read all remaining data
+            while True:
+                old_buffer_len = len(self._buffer)
+                self._read_from_generator()
+                if len(self._buffer) == old_buffer_len:
+                    break
+            data = self._buffer
+            self._buffer = b''
+            return data
+        else:
+            # Ensure enough data in buffer
+            while len(self._buffer) < size:
+                old_buffer_len = len(self._buffer)
+                self._read_from_generator()
+                if len(self._buffer) == old_buffer_len:
+                    break
+
+            data = self._buffer[:size]
+            self._buffer = self._buffer[size:]
+            return data
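
The +291 hunk above (apparently adam/commands/export/utils_export.py) carries the small spec grammar behind the export commands, roughly '<keyspace>.<table>(<columns>) as <target> ... with consistency <level> ... to <importer>', plus the GeneratorStream adapter that turns a chunk generator into a file-like object for boto3.upload_fileobj and pandas.read_csv. A hedged sketch exercising both, assuming the module imports as packaged; the spec string and chunks are made up, and the expected values simply follow the regexes above:

    # Exploratory sketch of the spec grammar and the GeneratorStream adapter.
    from adam.commands.export.utils_export import ExportSpec, GeneratorStream

    spec = ExportSpec.parse_specs('ks1.users(id,name) as users_x with consistency LOCAL_QUORUM to athena')

    assert spec.importer == 'athena'            # "... to <importer>"
    assert spec.consistency == 'LOCAL_QUORUM'   # "... with consistency <level>"
    assert spec.keyspace is None                # the "* in <keyspace>" form was not used
    t = spec.tables[0]
    assert (t.keyspace, t.table, t.columns, t.target_table) == ('ks1', 'users', 'id,name', 'users_x')

    # GeneratorStream adapts a chunk generator (such as the remote reader behind
    # Pods.read_file) into a readable stream; str chunks are encoded to UTF-8.
    stream = GeneratorStream(iter(['id,name\n', '1,alice\n']))
    assert stream.read() == b'id,name\n1,alice\n'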