kaqing 2.0.171__py3-none-any.whl → 2.0.204__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adam/app_session.py +5 -10
- adam/apps.py +18 -4
- adam/batch.py +7 -7
- adam/checks/check_utils.py +3 -1
- adam/checks/disk.py +2 -3
- adam/columns/memory.py +3 -4
- adam/commands/__init__.py +15 -6
- adam/commands/alter_tables.py +26 -41
- adam/commands/app/__init__.py +0 -0
- adam/commands/{app_cmd.py → app/app.py} +2 -2
- adam/commands/{show → app}/show_app_actions.py +7 -15
- adam/commands/{show → app}/show_app_queues.py +1 -4
- adam/{utils_app.py → commands/app/utils_app.py} +9 -1
- adam/commands/audit/audit.py +9 -26
- adam/commands/audit/audit_repair_tables.py +5 -7
- adam/commands/audit/audit_run.py +1 -1
- adam/commands/audit/completions_l.py +15 -0
- adam/commands/audit/show_last10.py +2 -14
- adam/commands/audit/show_slow10.py +2 -13
- adam/commands/audit/show_top10.py +2 -11
- adam/commands/audit/utils_show_top10.py +15 -3
- adam/commands/bash/bash.py +1 -1
- adam/commands/bash/utils_bash.py +1 -1
- adam/commands/cassandra/__init__.py +0 -0
- adam/commands/cassandra/download_cassandra_log.py +45 -0
- adam/commands/cassandra/nodetool.py +64 -0
- adam/commands/cassandra/nodetool_commands.py +120 -0
- adam/commands/cassandra/restart_cluster.py +47 -0
- adam/commands/cassandra/restart_node.py +51 -0
- adam/commands/cassandra/restart_nodes.py +47 -0
- adam/commands/cassandra/rollout.py +88 -0
- adam/commands/cat.py +5 -19
- adam/commands/cd.py +7 -9
- adam/commands/check.py +10 -18
- adam/commands/cli_commands.py +6 -1
- adam/commands/{cp.py → clipboard_copy.py} +34 -36
- adam/commands/code.py +2 -2
- adam/commands/command.py +139 -22
- adam/commands/commands_utils.py +14 -12
- adam/commands/cql/alter_tables.py +66 -0
- adam/commands/cql/completions_c.py +29 -0
- adam/commands/cql/cqlsh.py +3 -7
- adam/commands/cql/utils_cql.py +23 -61
- adam/commands/debug/__init__.py +0 -0
- adam/commands/debug/debug.py +22 -0
- adam/commands/debug/debug_completes.py +35 -0
- adam/commands/debug/debug_timings.py +35 -0
- adam/commands/deploy/deploy_pg_agent.py +2 -2
- adam/commands/deploy/deploy_pod.py +2 -4
- adam/commands/deploy/undeploy_pg_agent.py +2 -2
- adam/commands/devices/device.py +40 -9
- adam/commands/devices/device_app.py +19 -29
- adam/commands/devices/device_auit_log.py +3 -3
- adam/commands/devices/device_cass.py +17 -23
- adam/commands/devices/device_export.py +12 -11
- adam/commands/devices/device_postgres.py +79 -63
- adam/commands/devices/devices.py +1 -1
- adam/commands/download_cassandra_log.py +45 -0
- adam/commands/download_file.py +47 -0
- adam/commands/export/clean_up_all_export_sessions.py +3 -3
- adam/commands/export/clean_up_export_sessions.py +7 -19
- adam/commands/export/completions_x.py +11 -0
- adam/commands/export/download_export_session.py +40 -0
- adam/commands/export/drop_export_database.py +6 -22
- adam/commands/export/drop_export_databases.py +3 -9
- adam/commands/export/export.py +1 -17
- adam/commands/export/export_databases.py +109 -32
- adam/commands/export/export_select.py +8 -55
- adam/commands/export/export_sessions.py +211 -0
- adam/commands/export/export_use.py +13 -16
- adam/commands/export/export_x_select.py +48 -0
- adam/commands/export/exporter.py +176 -167
- adam/commands/export/import_files.py +44 -0
- adam/commands/export/import_session.py +10 -6
- adam/commands/export/importer.py +24 -9
- adam/commands/export/importer_athena.py +114 -44
- adam/commands/export/importer_sqlite.py +45 -23
- adam/commands/export/show_column_counts.py +11 -20
- adam/commands/export/show_export_databases.py +5 -2
- adam/commands/export/show_export_session.py +6 -15
- adam/commands/export/show_export_sessions.py +4 -11
- adam/commands/export/utils_export.py +79 -27
- adam/commands/find_files.py +51 -0
- adam/commands/find_processes.py +76 -0
- adam/commands/generate_report.py +52 -0
- adam/commands/head.py +36 -0
- adam/commands/help.py +2 -2
- adam/commands/intermediate_command.py +6 -3
- adam/commands/login.py +3 -6
- adam/commands/ls.py +2 -2
- adam/commands/medusa/medusa_backup.py +13 -16
- adam/commands/medusa/medusa_restore.py +26 -37
- adam/commands/medusa/medusa_show_backupjobs.py +7 -7
- adam/commands/medusa/medusa_show_restorejobs.py +6 -6
- adam/commands/medusa/utils_medusa.py +15 -0
- adam/commands/nodetool.py +3 -8
- adam/commands/os/__init__.py +0 -0
- adam/commands/os/cat.py +36 -0
- adam/commands/os/download_file.py +47 -0
- adam/commands/os/find_files.py +51 -0
- adam/commands/os/find_processes.py +76 -0
- adam/commands/os/head.py +36 -0
- adam/commands/os/shell.py +41 -0
- adam/commands/param_get.py +10 -12
- adam/commands/param_set.py +7 -10
- adam/commands/postgres/completions_p.py +22 -0
- adam/commands/postgres/postgres.py +25 -40
- adam/commands/postgres/postgres_databases.py +269 -0
- adam/commands/postgres/utils_postgres.py +33 -20
- adam/commands/preview_table.py +4 -2
- adam/commands/pwd.py +4 -6
- adam/commands/reaper/reaper_forward.py +2 -2
- adam/commands/reaper/reaper_run_abort.py +4 -10
- adam/commands/reaper/reaper_runs.py +3 -3
- adam/commands/reaper/reaper_schedule_activate.py +12 -12
- adam/commands/reaper/reaper_schedule_start.py +7 -12
- adam/commands/reaper/reaper_schedule_stop.py +7 -12
- adam/commands/reaper/utils_reaper.py +13 -6
- adam/commands/repair/repair_log.py +1 -4
- adam/commands/repair/repair_run.py +3 -8
- adam/commands/repair/repair_scan.py +1 -6
- adam/commands/repair/repair_stop.py +1 -5
- adam/commands/restart_cluster.py +47 -0
- adam/commands/restart_node.py +51 -0
- adam/commands/restart_nodes.py +47 -0
- adam/commands/shell.py +9 -2
- adam/commands/show/show.py +4 -4
- adam/commands/show/show_adam.py +3 -3
- adam/commands/show/show_cassandra_repairs.py +5 -6
- adam/commands/show/show_cassandra_status.py +29 -29
- adam/commands/show/show_cassandra_version.py +1 -4
- adam/commands/show/{show_commands.py → show_cli_commands.py} +3 -6
- adam/commands/show/show_login.py +3 -9
- adam/commands/show/show_params.py +2 -5
- adam/commands/show/show_processes.py +15 -16
- adam/commands/show/show_storage.py +9 -8
- adam/config.py +4 -5
- adam/embedded_params.py +1 -1
- adam/log.py +4 -4
- adam/repl.py +26 -18
- adam/repl_commands.py +32 -20
- adam/repl_session.py +9 -1
- adam/repl_state.py +39 -10
- adam/sql/async_executor.py +44 -0
- adam/sql/lark_completer.py +286 -0
- adam/sql/lark_parser.py +604 -0
- adam/sql/qingl.lark +1076 -0
- adam/sql/sql_completer.py +4 -6
- adam/sql/sql_state_machine.py +25 -13
- adam/sso/authn_ad.py +2 -5
- adam/sso/authn_okta.py +2 -4
- adam/sso/cred_cache.py +2 -5
- adam/sso/idp.py +8 -11
- adam/utils.py +299 -105
- adam/utils_athena.py +18 -18
- adam/utils_audits.py +3 -7
- adam/utils_issues.py +2 -2
- adam/utils_k8s/app_clusters.py +4 -4
- adam/utils_k8s/app_pods.py +8 -6
- adam/utils_k8s/cassandra_clusters.py +16 -5
- adam/utils_k8s/cassandra_nodes.py +7 -6
- adam/utils_k8s/custom_resources.py +11 -17
- adam/utils_k8s/jobs.py +7 -11
- adam/utils_k8s/k8s.py +14 -5
- adam/utils_k8s/kube_context.py +3 -6
- adam/{pod_exec_result.py → utils_k8s/pod_exec_result.py} +4 -4
- adam/utils_k8s/pods.py +98 -36
- adam/utils_k8s/statefulsets.py +5 -2
- adam/utils_local.py +42 -0
- adam/utils_repl/appendable_completer.py +6 -0
- adam/utils_repl/repl_completer.py +45 -2
- adam/utils_repl/state_machine.py +3 -3
- adam/utils_sqlite.py +58 -30
- adam/version.py +1 -1
- {kaqing-2.0.171.dist-info → kaqing-2.0.204.dist-info}/METADATA +1 -1
- kaqing-2.0.204.dist-info/RECORD +277 -0
- kaqing-2.0.204.dist-info/top_level.txt +2 -0
- teddy/__init__.py +0 -0
- teddy/lark_parser.py +436 -0
- teddy/lark_parser2.py +618 -0
- adam/commands/cql/cql_completions.py +0 -33
- adam/commands/export/export_handlers.py +0 -71
- adam/commands/export/export_select_x.py +0 -54
- adam/commands/logs.py +0 -37
- adam/commands/postgres/postgres_context.py +0 -274
- adam/commands/postgres/psql_completions.py +0 -10
- adam/commands/report.py +0 -61
- adam/commands/restart.py +0 -60
- kaqing-2.0.171.dist-info/RECORD +0 -236
- kaqing-2.0.171.dist-info/top_level.txt +0 -1
- /adam/commands/{app_ping.py → app/app_ping.py} +0 -0
- /adam/commands/{show → app}/show_app_id.py +0 -0
- {kaqing-2.0.171.dist-info → kaqing-2.0.204.dist-info}/WHEEL +0 -0
- {kaqing-2.0.171.dist-info → kaqing-2.0.204.dist-info}/entry_points.txt +0 -0
adam/commands/export/exporter.py
CHANGED
|
@@ -1,22 +1,20 @@
|
|
|
1
|
+
import copy
|
|
1
2
|
from datetime import datetime
|
|
2
|
-
import functools
|
|
3
|
-
import re
|
|
4
3
|
import time
|
|
5
4
|
import traceback
|
|
6
5
|
|
|
6
|
+
from adam.commands.command import InvalidArgumentsException
|
|
7
7
|
from adam.commands.cql.utils_cql import cassandra_table_names, run_cql, table_spec
|
|
8
|
-
from adam.commands.export.export_databases import
|
|
8
|
+
from adam.commands.export.export_databases import export_db
|
|
9
|
+
from adam.commands.export.export_sessions import ExportSessions
|
|
9
10
|
from adam.commands.export.importer import Importer
|
|
10
11
|
from adam.commands.export.importer_athena import AthenaImporter
|
|
11
12
|
from adam.commands.export.importer_sqlite import SqliteImporter
|
|
12
|
-
from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, find_files
|
|
13
|
+
from adam.commands.export.utils_export import ExportSpec, ExportTableStatus, ExportTableSpec, ImportSpec, csv_dir, export_log_prefix, find_files, os_system_exec, state_with_pod
|
|
13
14
|
from adam.config import Config
|
|
14
|
-
from adam.pod_exec_result import PodExecResult
|
|
15
15
|
from adam.repl_state import ReplState
|
|
16
|
-
from adam.utils import parallelize, log2, ing
|
|
16
|
+
from adam.utils import debug, log, offload, parallelize, log2, ing, log_exc
|
|
17
17
|
from adam.utils_k8s.cassandra_nodes import CassandraNodes
|
|
18
|
-
from adam.utils_k8s.pods import log_prefix
|
|
19
|
-
from adam.utils_k8s.statefulsets import StatefulSets
|
|
20
18
|
|
|
21
19
|
class Exporter:
|
|
22
20
|
def export_tables(args: list[str], state: ReplState, export_only: bool = False, max_workers = 0) -> tuple[list[str], ExportSpec]:
|
|
@@ -24,7 +22,7 @@ class Exporter:
|
|
|
24
22
|
log2('export-only for testing')
|
|
25
23
|
|
|
26
24
|
spec: ExportSpec = None
|
|
27
|
-
|
|
25
|
+
with log_exc(True):
|
|
28
26
|
spec = Exporter.export_spec(' '.join(args), state)
|
|
29
27
|
|
|
30
28
|
statuses, spec = Exporter._export_tables(spec, state, max_workers=max_workers, export_state='init')
|
|
@@ -32,8 +30,6 @@ class Exporter:
|
|
|
32
30
|
return statuses, spec
|
|
33
31
|
|
|
34
32
|
return Exporter._export_tables(spec, state, export_only, max_workers, 'pending_export')
|
|
35
|
-
except Exception as e:
|
|
36
|
-
log2(e)
|
|
37
33
|
|
|
38
34
|
return [], None
|
|
39
35
|
|
|
@@ -52,12 +48,19 @@ class Exporter:
|
|
|
52
48
|
raise Exception(f"You're currently using {importer_from_session} export database. You cannot export tables with {spec.importer} type database.")
|
|
53
49
|
else:
|
|
54
50
|
spec.importer = Importer.importer_from_session(session)
|
|
51
|
+
|
|
52
|
+
if spec.importer == 'athena' and not AthenaImporter.ping():
|
|
53
|
+
raise Exception('Credentials for Athena is not present.')
|
|
55
54
|
else:
|
|
56
55
|
if not spec.importer:
|
|
57
56
|
spec.importer = Config().get('export.default-importer', 'sqlite')
|
|
58
57
|
|
|
59
58
|
prefix = Importer.prefix_from_importer(spec.importer)
|
|
60
59
|
session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
|
|
60
|
+
|
|
61
|
+
if spec.importer == 'athena' and not AthenaImporter.ping():
|
|
62
|
+
raise Exception('Credentials for Athena is not present.')
|
|
63
|
+
|
|
61
64
|
if spec.importer != 'csv':
|
|
62
65
|
state.export_session = session
|
|
63
66
|
|
|
@@ -65,10 +68,10 @@ class Exporter:
|
|
|
65
68
|
|
|
66
69
|
return spec
|
|
67
70
|
|
|
68
|
-
def import_session(
|
|
71
|
+
def import_session(spec_str: str, state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
|
|
69
72
|
import_spec: ImportSpec = None
|
|
70
|
-
|
|
71
|
-
import_spec = Exporter.import_spec(
|
|
73
|
+
with log_exc(True):
|
|
74
|
+
import_spec = Exporter.import_spec(spec_str, state)
|
|
72
75
|
tables, status_in_whole = ExportTableStatus.from_session(state.sts, state.pod, state.namespace, import_spec.session)
|
|
73
76
|
if status_in_whole == 'done':
|
|
74
77
|
log2(f'The session has been completely done - no more csv files are found.')
|
|
@@ -77,16 +80,33 @@ class Exporter:
|
|
|
77
80
|
spec = ExportSpec(None, None, importer=import_spec.importer, tables=[ExportTableSpec.from_status(table) for table in tables], session=import_spec.session)
|
|
78
81
|
|
|
79
82
|
return Exporter._export_tables(spec, state, max_workers=max_workers, export_state = 'import')
|
|
80
|
-
except Exception as e:
|
|
81
|
-
if Config().is_debug():
|
|
82
|
-
traceback.print_exception(e)
|
|
83
|
-
else:
|
|
84
|
-
log2(e)
|
|
85
83
|
|
|
86
84
|
return [], None
|
|
87
85
|
|
|
88
|
-
def
|
|
89
|
-
spec: ImportSpec =
|
|
86
|
+
def import_local_csv_files(spec_str: str, state: ReplState, max_workers = 0) -> tuple[list[str], ExportSpec]:
|
|
87
|
+
spec: ImportSpec = None
|
|
88
|
+
with log_exc(True):
|
|
89
|
+
spec = Exporter.import_spec(spec_str, state, files=True)
|
|
90
|
+
if not spec.table_name:
|
|
91
|
+
log2(f"Use 'as <database-name>.<table-name>'.")
|
|
92
|
+
raise InvalidArgumentsException()
|
|
93
|
+
|
|
94
|
+
d_t = spec.table_name.split('.')
|
|
95
|
+
if len(d_t) != 2:
|
|
96
|
+
log2(f'Need <database-name>.<table-name> format for target table.')
|
|
97
|
+
raise InvalidArgumentsException()
|
|
98
|
+
|
|
99
|
+
database = d_t[0]
|
|
100
|
+
table = d_t[1]
|
|
101
|
+
im = AthenaImporter() if spec.importer == 'athena' else SqliteImporter()
|
|
102
|
+
|
|
103
|
+
with parallelize(spec.files, max_workers, msg='Importing|Imported {size} csv files') as exec:
|
|
104
|
+
return exec.map(lambda f: im.import_from_local_csv(state, database, table, f, len(spec.files) > 1, True)), spec
|
|
105
|
+
|
|
106
|
+
return [], None
|
|
107
|
+
|
|
108
|
+
def import_spec(spec_str: str, state: ReplState, files = False):
|
|
109
|
+
spec: ImportSpec = ImportSpec.parse_specs(spec_str, files=files)
|
|
90
110
|
|
|
91
111
|
session = state.export_session
|
|
92
112
|
if session:
|
|
@@ -98,19 +118,26 @@ class Exporter:
|
|
|
98
118
|
spec.importer = Importer.importer_from_session(state.export_session)
|
|
99
119
|
if not spec.importer:
|
|
100
120
|
spec.importer = Config().get('export.default-importer', 'sqlite')
|
|
121
|
+
|
|
122
|
+
if spec.importer == 'athena' and not AthenaImporter.ping():
|
|
123
|
+
raise Exception('Credentials for Athena is not present.')
|
|
101
124
|
else:
|
|
102
|
-
if spec.importer:
|
|
103
|
-
if not AthenaImporter.ping():
|
|
104
|
-
raise Exception('Credentials for Athena are not present.')
|
|
105
|
-
else:
|
|
125
|
+
if not spec.importer:
|
|
106
126
|
spec.importer = Importer.importer_from_session(spec.session)
|
|
107
127
|
|
|
108
|
-
if spec.importer == 'csv':
|
|
128
|
+
if not spec.importer or spec.importer == 'csv':
|
|
109
129
|
spec.importer = Config().get('export.default-importer', 'sqlite')
|
|
110
130
|
|
|
131
|
+
if spec.importer == 'athena' and not AthenaImporter.ping():
|
|
132
|
+
raise Exception('Credentials for Athena is not present.')
|
|
133
|
+
|
|
111
134
|
prefix = Importer.prefix_from_importer(spec.importer)
|
|
112
|
-
|
|
113
|
-
|
|
135
|
+
if spec.session:
|
|
136
|
+
spec.session = f'{prefix}{spec.session[1:]}'
|
|
137
|
+
else:
|
|
138
|
+
spec.session = f'{prefix}{datetime.now().strftime("%Y%m%d%H%M%S")[3:]}'
|
|
139
|
+
|
|
140
|
+
state.export_session = spec.session
|
|
114
141
|
|
|
115
142
|
return spec
|
|
116
143
|
|
|
@@ -127,21 +154,21 @@ class Exporter:
|
|
|
127
154
|
if export_state == 'init':
|
|
128
155
|
CassandraNodes.exec(state.pod, state.namespace, f'rm -rf {csv_dir()}/{spec.session}_*', show_out=Config().is_debug(), shell='bash')
|
|
129
156
|
|
|
130
|
-
action = f'[{spec.session}]
|
|
157
|
+
action = f'[{spec.session}] Triggering export of'
|
|
131
158
|
if export_state == 'init':
|
|
132
159
|
action = f'[{spec.session}] Preparing|Prepared'
|
|
133
160
|
elif export_state == 'import':
|
|
134
161
|
action = f'[{spec.session}] Importing|Imported'
|
|
135
162
|
|
|
136
|
-
with parallelize(spec.tables, max_workers, msg=action + ' {size} Cassandra tables') as exec:
|
|
137
|
-
return exec.map(lambda table: Exporter.export_table(table, state, spec.session, spec.importer, export_only, len(spec.tables) > 1, consistency=spec.consistency, export_state=export_state)), spec
|
|
163
|
+
with parallelize(spec.tables, max_workers, msg=action + ' {size} Cassandra tables', collect=export_state == 'init', name='exporter') as exec:
|
|
164
|
+
return exec.map(lambda table: Exporter.export_table(table, copy.copy(state), spec.session, spec.importer, export_only, len(spec.tables) > 1, consistency=spec.consistency, export_state=export_state)), spec
|
|
138
165
|
|
|
139
166
|
def export_table(spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, consistency: str = None, export_state=None):
|
|
140
167
|
s: str = None
|
|
141
168
|
|
|
142
169
|
table, target_table, columns = Exporter.resove_table_n_columns(spec, state, include_ks_in_target=False, importer=importer)
|
|
143
170
|
|
|
144
|
-
log_file = f'{
|
|
171
|
+
log_file = f'{export_log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
|
|
145
172
|
create_db = not state.export_session
|
|
146
173
|
|
|
147
174
|
if export_state == 'init':
|
|
@@ -158,26 +185,38 @@ class Exporter:
|
|
|
158
185
|
log_file = log_files[0]
|
|
159
186
|
|
|
160
187
|
status: ExportTableStatus = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
188
|
+
|
|
189
|
+
with offload(name='exporter') as exec:
|
|
190
|
+
exec.submit(lambda: Exporter.export_loop(ExportTableContext(spec, state, session, importer, export_only, multi_tables, table, target_table, columns, create_db, log_file, status)))
|
|
191
|
+
# Exporter.export_loop(ExportTableContext(spec, state, session, importer, export_only, multi_tables, table, target_table, columns, create_db, log_file, status))
|
|
192
|
+
|
|
193
|
+
return status.status
|
|
194
|
+
|
|
195
|
+
def export_loop(ctx: 'ExportTableContext'):
|
|
196
|
+
try:
|
|
197
|
+
while ctx.status.status != 'done':
|
|
198
|
+
if ctx.status.status == 'export_in_pregress':
|
|
199
|
+
debug('Exporting to CSV is still in progess, sleeping for 1 sec...')
|
|
165
200
|
time.sleep(1)
|
|
166
|
-
elif status.status == 'exported':
|
|
167
|
-
log_file = Exporter.rename_to_pending_import(spec, state, session, target_table)
|
|
168
|
-
|
|
201
|
+
elif ctx.status.status == 'exported':
|
|
202
|
+
ctx.log_file = Exporter.rename_to_pending_import(ctx.spec, ctx.state, ctx.session, ctx.target_table)
|
|
203
|
+
ExportSessions.clear_export_session_cache()
|
|
204
|
+
if ctx.importer == 'csv' or ctx.export_only:
|
|
169
205
|
return 'pending_import'
|
|
170
|
-
elif status.status == 'pending_import':
|
|
171
|
-
log_file, session = Exporter.import_from_csv(spec, state, session, importer, table, target_table, columns, multi_tables=multi_tables, create_db=create_db)
|
|
206
|
+
elif ctx.status.status == 'pending_import':
|
|
207
|
+
ctx.log_file, ctx.session = Exporter.import_from_csv(ctx.spec, ctx.state, ctx.session, ctx.importer, ctx.table, ctx.target_table, ctx.columns, multi_tables=ctx.multi_tables, create_db=ctx.create_db)
|
|
172
208
|
|
|
173
|
-
status = ExportTableStatus.from_log_file(state.pod, state.namespace, session, log_file)
|
|
209
|
+
ctx.status = ExportTableStatus.from_log_file(ctx.state.pod, ctx.state.namespace, ctx.session, ctx.log_file)
|
|
174
210
|
|
|
175
|
-
return status.status
|
|
211
|
+
return ctx.status.status
|
|
212
|
+
except:
|
|
213
|
+
traceback.print_exc()
|
|
176
214
|
|
|
177
215
|
def create_table_log(spec: ExportTableSpec, state: ReplState, session: str, table: str, target_table: str):
|
|
178
|
-
log_file = f'{
|
|
216
|
+
log_file = f'{export_log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
|
|
179
217
|
|
|
180
|
-
|
|
218
|
+
cmd = f'rm -f {log_file}* && touch {log_file}'
|
|
219
|
+
os_system_exec(cmd, show_out=Config().is_debug())
|
|
181
220
|
|
|
182
221
|
return table
|
|
183
222
|
|
|
@@ -186,133 +225,32 @@ class Exporter:
|
|
|
186
225
|
|
|
187
226
|
CassandraNodes.exec(state.pod, state.namespace, f'mkdir -p {csv_dir()}/{db}', show_out=Config().is_debug(), shell='bash')
|
|
188
227
|
csv_file = f'{csv_dir()}/{db}/{table}.csv'
|
|
189
|
-
log_file = f'{
|
|
228
|
+
log_file = f'{export_log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
|
|
190
229
|
|
|
191
230
|
suppress_ing_log = Config().is_debug() or multi_tables
|
|
192
231
|
queries = []
|
|
193
232
|
if consistency:
|
|
194
233
|
queries.append(f'CONSISTENCY {consistency}')
|
|
195
234
|
queries.append(f"COPY {spec.keyspace}.{table}({columns}) TO '{csv_file}' WITH HEADER = TRUE")
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
235
|
+
|
|
236
|
+
with ing(f'[{session}] Triggering dump of table {spec.keyspace}.{table}{f" with consistency {consistency}" if consistency else ""}',
|
|
237
|
+
suppress_log=suppress_ing_log):
|
|
238
|
+
run_cql(state, ';'.join(queries), show_out=Config().is_debug(), backgrounded=True, log_file=log_file)
|
|
200
239
|
|
|
201
240
|
return log_file
|
|
202
241
|
|
|
203
242
|
def rename_to_pending_import(spec: ExportTableSpec, state: ReplState, session: str, target_table: str):
|
|
204
|
-
log_file = f'{
|
|
243
|
+
log_file = f'{export_log_prefix()}-{session}_{spec.keyspace}.{target_table}.log'
|
|
205
244
|
to = f'{log_file}.pending_import'
|
|
206
245
|
|
|
207
|
-
|
|
246
|
+
cmd =f'mv {log_file} {to}'
|
|
247
|
+
os_system_exec(cmd, show_out=Config().is_debug())
|
|
208
248
|
|
|
209
249
|
return to
|
|
210
250
|
|
|
211
251
|
def import_from_csv(spec: ExportTableSpec, state: ReplState, session: str, importer: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
|
|
212
252
|
im = AthenaImporter() if importer == 'athena' else SqliteImporter()
|
|
213
|
-
return im.import_from_csv(state
|
|
214
|
-
|
|
215
|
-
def clear_export_session_cache():
|
|
216
|
-
Exporter.find_export_sessions.cache_clear()
|
|
217
|
-
Exporter.export_session_names.cache_clear()
|
|
218
|
-
|
|
219
|
-
@functools.lru_cache()
|
|
220
|
-
def export_session_names(sts: str, pod: str, namespace: str, importer: str = None, export_state = None):
|
|
221
|
-
if not sts or not namespace:
|
|
222
|
-
return []
|
|
223
|
-
|
|
224
|
-
if not pod:
|
|
225
|
-
pod = StatefulSets.pod_names(sts, namespace)[0]
|
|
226
|
-
|
|
227
|
-
if not pod:
|
|
228
|
-
return []
|
|
229
|
-
|
|
230
|
-
return [session for session, state in Exporter.find_export_sessions(pod, namespace, importer).items() if not export_state or state == export_state]
|
|
231
|
-
|
|
232
|
-
@functools.lru_cache()
|
|
233
|
-
def find_export_sessions(pod: str, namespace: str, importer: str = None, limit = 100):
|
|
234
|
-
sessions: dict[str, str] = {}
|
|
235
|
-
|
|
236
|
-
prefix = Importer.prefix_from_importer(importer)
|
|
237
|
-
|
|
238
|
-
log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{prefix}*_*.log*')
|
|
239
|
-
|
|
240
|
-
if not log_files:
|
|
241
|
-
return {}
|
|
242
|
-
|
|
243
|
-
for log_file in log_files[:limit]:
|
|
244
|
-
m = re.match(f'{log_prefix()}-(.*?)_.*\.log?(.*)', log_file)
|
|
245
|
-
if m:
|
|
246
|
-
s = m.group(1)
|
|
247
|
-
state = m.group(2) # '', '.pending_import', '.done'
|
|
248
|
-
if state:
|
|
249
|
-
state = state.strip('.')
|
|
250
|
-
else:
|
|
251
|
-
state = 'in_export'
|
|
252
|
-
|
|
253
|
-
if s not in sessions:
|
|
254
|
-
sessions[s] = state
|
|
255
|
-
elif sessions[s] == 'done' and state != 'done':
|
|
256
|
-
sessions[s] = state
|
|
257
|
-
|
|
258
|
-
return sessions
|
|
259
|
-
|
|
260
|
-
def clean_up_all_sessions(sts: str, pod: str, namespace: str):
|
|
261
|
-
if not sts or not namespace:
|
|
262
|
-
return False
|
|
263
|
-
|
|
264
|
-
if not pod:
|
|
265
|
-
pod = StatefulSets.pod_names(sts, namespace)[0]
|
|
266
|
-
|
|
267
|
-
CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/*', show_out=Config().is_debug(), shell='bash')
|
|
268
|
-
CassandraNodes.exec(pod, namespace, f'rm -rf {log_prefix()}-*.log*', show_out=Config().is_debug(), shell='bash')
|
|
269
|
-
|
|
270
|
-
return True
|
|
271
|
-
|
|
272
|
-
def clean_up_sessions(sts: str, pod: str, namespace: str, sessions: list[str], max_workers = 0):
|
|
273
|
-
if not sessions:
|
|
274
|
-
return []
|
|
275
|
-
|
|
276
|
-
if not max_workers:
|
|
277
|
-
max_workers = Config().action_workers('export', 8)
|
|
278
|
-
|
|
279
|
-
with parallelize(sessions, max_workers, msg='Cleaning|Cleaned up {size} export sessions') as exec:
|
|
280
|
-
cnt_tuples = exec.map(lambda session: Exporter.clean_up_session(sts, pod, namespace, session, True))
|
|
281
|
-
csv_cnt = 0
|
|
282
|
-
log_cnt = 0
|
|
283
|
-
for (csv, log) in cnt_tuples:
|
|
284
|
-
csv_cnt += csv
|
|
285
|
-
log_cnt += log
|
|
286
|
-
|
|
287
|
-
return csv_cnt, log_cnt
|
|
288
|
-
|
|
289
|
-
def clean_up_session(sts: str, pod: str, namespace: str, session: str, multi_tables = True):
|
|
290
|
-
if not sts or not namespace:
|
|
291
|
-
return 0, 0
|
|
292
|
-
|
|
293
|
-
if not pod:
|
|
294
|
-
pod = StatefulSets.pod_names(sts, namespace)[0]
|
|
295
|
-
|
|
296
|
-
if not pod:
|
|
297
|
-
return 0, 0
|
|
298
|
-
|
|
299
|
-
csv_cnt = 0
|
|
300
|
-
log_cnt = 0
|
|
301
|
-
|
|
302
|
-
log_files: list[str] = find_files(pod, namespace, f'{log_prefix()}-{session}_*.log*')
|
|
303
|
-
|
|
304
|
-
for log_file in log_files:
|
|
305
|
-
m = re.match(f'{log_prefix()}-{session}_(.*?)\.(.*?)\.log.*', log_file)
|
|
306
|
-
if m:
|
|
307
|
-
table = m.group(2)
|
|
308
|
-
|
|
309
|
-
CassandraNodes.exec(pod, namespace, f'rm -rf {csv_dir()}/{session}_{table}', show_out=not multi_tables, shell='bash')
|
|
310
|
-
csv_cnt += 1
|
|
311
|
-
|
|
312
|
-
CassandraNodes.exec(pod, namespace, f'rm -rf {log_file}', show_out=not multi_tables, shell='bash')
|
|
313
|
-
log_cnt += 1
|
|
314
|
-
|
|
315
|
-
return csv_cnt, log_cnt
|
|
253
|
+
return im.import_from_csv(state, session if session else state.export_session, spec.keyspace, table, target_table, columns, multi_tables, create_db)
|
|
316
254
|
|
|
317
255
|
def resove_table_n_columns(spec: ExportTableSpec, state: ReplState, include_ks_in_target = False, importer = 'sqlite'):
|
|
318
256
|
table = spec.table
|
|
@@ -338,15 +276,86 @@ class Exporter:
|
|
|
338
276
|
|
|
339
277
|
return table, target_table, columns
|
|
340
278
|
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
279
|
+
class ExportTableContext:
|
|
280
|
+
def __init__(self, spec: ExportTableSpec, state: ReplState, session: str, importer: str, export_only = False, multi_tables = True, table: str = None, target_table: str = None, columns: str = None, create_db = False, log_file: str = None, status: ExportTableStatus = None):
|
|
281
|
+
self.spec = spec
|
|
282
|
+
self.state = state
|
|
283
|
+
self.session = session
|
|
284
|
+
self.importer = importer
|
|
285
|
+
self.export_only = export_only
|
|
286
|
+
self.multi_tables = multi_tables
|
|
287
|
+
self.table = table
|
|
288
|
+
self.target_table = target_table
|
|
289
|
+
self.columns = columns
|
|
290
|
+
self.create_db = create_db
|
|
291
|
+
self.log_file = log_file
|
|
292
|
+
self.status = status
|
|
293
|
+
|
|
294
|
+
class ExportService:
|
|
295
|
+
def __init__(self, handler: 'ExporterHandler'):
|
|
296
|
+
self.handler = handler
|
|
297
|
+
|
|
298
|
+
def export(self, args: list[str], export_only=False):
|
|
299
|
+
state = self.handler.state
|
|
300
|
+
export_session = state.export_session
|
|
301
|
+
spec: ExportSpec = None
|
|
302
|
+
try:
|
|
303
|
+
with state_with_pod(state) as state:
|
|
304
|
+
# --export-only for testing only
|
|
305
|
+
statuses, spec = Exporter.export_tables(args, state, export_only=export_only)
|
|
306
|
+
if not statuses:
|
|
307
|
+
return state
|
|
308
|
+
|
|
309
|
+
ExportSessions.clear_export_session_cache()
|
|
310
|
+
|
|
311
|
+
if spec.importer == 'csv' or export_only:
|
|
312
|
+
ExportSessions.show_session(state.sts, state.pod, state.namespace, spec.session)
|
|
313
|
+
else:
|
|
314
|
+
log()
|
|
315
|
+
with export_db(state) as dbs:
|
|
316
|
+
dbs.show_database()
|
|
317
|
+
finally:
|
|
318
|
+
# if exporting to csv, do not bind the new session id to repl state
|
|
319
|
+
if spec and spec.importer == 'csv':
|
|
320
|
+
state.export_session = export_session
|
|
321
|
+
|
|
322
|
+
return state
|
|
323
|
+
|
|
324
|
+
def import_session(self, spec_str: str):
|
|
325
|
+
state = self.handler.state
|
|
326
|
+
|
|
327
|
+
tables, _ = Exporter.import_session(spec_str, state)
|
|
328
|
+
if tables:
|
|
329
|
+
ExportSessions.clear_export_session_cache()
|
|
330
|
+
|
|
331
|
+
log()
|
|
332
|
+
with export_db(state) as dbs:
|
|
333
|
+
dbs.show_database()
|
|
334
|
+
|
|
335
|
+
return state
|
|
336
|
+
|
|
337
|
+
def import_files(self, spec_str: str):
|
|
338
|
+
state = self.handler.state
|
|
339
|
+
|
|
340
|
+
tables, _ = Exporter.import_local_csv_files(spec_str, state)
|
|
341
|
+
if tables:
|
|
342
|
+
ExportSessions.clear_export_session_cache()
|
|
343
|
+
|
|
344
|
+
log()
|
|
345
|
+
with export_db(state) as dbs:
|
|
346
|
+
dbs.show_database()
|
|
347
|
+
|
|
348
|
+
return state
|
|
349
|
+
|
|
350
|
+
class ExporterHandler:
|
|
351
|
+
def __init__(self, state: ReplState):
|
|
352
|
+
self.state = state
|
|
353
|
+
|
|
354
|
+
def __enter__(self):
|
|
355
|
+
return ExportService(self)
|
|
356
|
+
|
|
357
|
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
358
|
+
return False
|
|
359
|
+
|
|
360
|
+
def export(state: ReplState):
|
|
361
|
+
return ExporterHandler(state)
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
from adam.commands import validate_args
|
|
2
|
+
from adam.commands.command import Command
|
|
3
|
+
from adam.commands.export.export_sessions import ExportSessions
|
|
4
|
+
from adam.commands.export.exporter import export
|
|
5
|
+
from adam.commands.export.utils_export import state_with_pod
|
|
6
|
+
from adam.repl_state import ReplState, RequiredState
|
|
7
|
+
|
|
8
|
+
class ImportCSVFiles(Command):
    """REPL command `import files`: load CSV files from an export session into Athena or SQLite."""

    COMMAND = 'import files'

    # Singleton: every construction returns the one cached instance.
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls, 'instance'):
            cls.instance = super().__new__(cls)
        return cls.instance

    def __init__(self, successor: Command = None):
        super().__init__(successor)

    def command(self):
        """The literal command string this handler matches."""
        return ImportCSVFiles.COMMAND

    def required(self):
        """A cluster or pod must be selected before files can be imported."""
        return RequiredState.CLUSTER_OR_POD

    def run(self, cmd: str, state: ReplState):
        """Parse the file list out of *cmd* and hand it to the exporter."""
        args = self.args(cmd)
        if not args:
            # Not our command — defer to the next handler in the chain.
            return super().run(cmd, state)

        # Validation, pod resolution and the export service are all context
        # managers; later managers consume names bound by earlier ones.
        with self.validate(args, state) as (args, state), \
                validate_args(args, state, name='file') as spec, \
                state_with_pod(state) as state, \
                export(state) as exporter:
            return exporter.import_files(spec)

    def completion(self, state: ReplState):
        """No completions offered; cache warm-up is currently disabled."""
        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
        # ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
        return {}

    def help(self, _: ReplState):
        return f'{ImportCSVFiles.COMMAND} <file-names,...>\t import files in session to Athena or SQLite'
|
|
@@ -1,6 +1,8 @@
|
|
|
1
|
+
from adam.commands import validate_args
|
|
1
2
|
from adam.commands.command import Command
|
|
2
|
-
from adam.commands.export.
|
|
3
|
-
from adam.commands.export.exporter import
|
|
3
|
+
from adam.commands.export.export_sessions import ExportSessions
|
|
4
|
+
from adam.commands.export.exporter import export
|
|
5
|
+
from adam.commands.export.utils_export import state_with_pod
|
|
4
6
|
from adam.repl_state import ReplState, RequiredState
|
|
5
7
|
|
|
6
8
|
class ImportSession(Command):
|
|
@@ -26,13 +28,15 @@ class ImportSession(Command):
|
|
|
26
28
|
return super().run(cmd, state)
|
|
27
29
|
|
|
28
30
|
with self.validate(args, state) as (args, state):
|
|
29
|
-
with
|
|
30
|
-
|
|
31
|
+
with validate_args(args, state, name='export session') as spec:
|
|
32
|
+
with state_with_pod(state) as state:
|
|
33
|
+
with export(state) as exporter:
|
|
34
|
+
return exporter.import_session(spec)
|
|
31
35
|
|
|
32
36
|
def completion(self, state: ReplState):
|
|
33
37
|
# warm up cache
|
|
34
|
-
|
|
35
|
-
|
|
38
|
+
# ExportSessions.export_session_names(state.sts, state.pod, state.namespace)
|
|
39
|
+
# ExportSessions.export_session_names(state.sts, state.pod, state.namespace, export_state='pending_import')
|
|
36
40
|
|
|
37
41
|
return {}
|
|
38
42
|
|
adam/commands/export/importer.py
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
from abc import abstractmethod
|
|
2
2
|
|
|
3
|
-
from adam.commands.export.utils_export import csv_dir
|
|
3
|
+
from adam.commands.export.utils_export import csv_dir, os_system_exec
|
|
4
4
|
from adam.config import Config
|
|
5
|
+
from adam.repl_state import ReplState
|
|
5
6
|
from adam.utils import ing
|
|
6
|
-
from adam.utils_k8s.cassandra_nodes import CassandraNodes
|
|
7
7
|
from adam.utils_k8s.pods import log_prefix
|
|
8
8
|
|
|
9
9
|
class Importer:
|
|
@@ -12,15 +12,26 @@ class Importer:
|
|
|
12
12
|
pass
|
|
13
13
|
|
|
14
14
|
    @abstractmethod
    def import_from_csv(self, state: ReplState, from_session: str, keyspace: str, table: str, target_table: str, columns: str, multi_tables = True, create_db = False):
        """Abstract hook: import the CSV exported by *from_session* for
        ``keyspace.table`` into *target_table*.

        NOTE(review): the semantics of ``columns``, ``multi_tables`` and
        ``create_db`` are inferred from their names — confirm against the
        concrete importer implementations (Athena/SQLite).
        """
        pass
|
|
17
17
|
|
|
18
|
-
|
|
18
|
+
    @abstractmethod
    def import_from_local_csv(self, state: ReplState,
                              keyspace: str, table: str, target_table: str, columns: str,
                              csv_file: str,
                              multi_tables = True, create_db = False):
        """Abstract hook: import a single local CSV file (*csv_file*) for
        ``keyspace.table`` into *target_table*.

        NOTE(review): flag semantics (``multi_tables``, ``create_db``) are
        inferred from their names — confirm in subclasses.
        """
        pass
|
|
24
|
+
|
|
25
|
+
def move_to_done(self, state: ReplState, from_session: str, keyspace: str, target_table: str):
|
|
26
|
+
pod = state.pod
|
|
27
|
+
namespace = state.namespace
|
|
28
|
+
to_session = state.export_session
|
|
19
29
|
log_file = f'{log_prefix()}-{from_session}_{keyspace}.{target_table}.log.pending_import'
|
|
20
30
|
|
|
21
31
|
to = f'{log_prefix()}-{to_session}_{keyspace}.{target_table}.log.done'
|
|
22
32
|
|
|
23
|
-
|
|
33
|
+
cmd = f'mv {log_file} {to}'
|
|
34
|
+
os_system_exec(cmd, show_out=Config().is_debug())
|
|
24
35
|
|
|
25
36
|
return to, to_session
|
|
26
37
|
|
|
@@ -30,9 +41,13 @@ class Importer:
|
|
|
30
41
|
|
|
31
42
|
return session
|
|
32
43
|
|
|
33
|
-
def remove_csv(self,
|
|
34
|
-
|
|
35
|
-
|
|
44
|
+
def remove_csv(self, state: ReplState, from_session: str, table: str, target_table: str, multi_tables = True):
|
|
45
|
+
pod = state.pod
|
|
46
|
+
namespace = state.namespace
|
|
47
|
+
|
|
48
|
+
with ing(f'[{from_session}] Cleaning up temporary files', suppress_log=multi_tables):
|
|
49
|
+
cmd = f'rm -rf {self.csv_file(from_session, table, target_table)}'
|
|
50
|
+
os_system_exec(cmd, show_out=Config().is_debug())
|
|
36
51
|
|
|
37
52
|
def db(self, session: str, keyspace: str):
|
|
38
53
|
return f'{session}_{keyspace}'
|
|
@@ -64,4 +79,4 @@ class Importer:
|
|
|
64
79
|
elif session.startswith('e'):
|
|
65
80
|
importer = 'athena'
|
|
66
81
|
|
|
67
|
-
return importer
|
|
82
|
+
return importer
|