kaqing 2.0.174__py3-none-any.whl → 2.0.186__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kaqing might be problematic.
- adam/app_session.py +2 -2
- adam/apps.py +18 -4
- adam/batch.py +1 -1
- adam/checks/check_utils.py +3 -1
- adam/commands/__init__.py +4 -2
- adam/commands/app/__init__.py +0 -0
- adam/commands/app/app.py +38 -0
- adam/commands/app/app_ping.py +38 -0
- adam/commands/app/show_app_actions.py +49 -0
- adam/commands/app/show_app_id.py +44 -0
- adam/commands/app/show_app_queues.py +38 -0
- adam/commands/app/utils_app.py +106 -0
- adam/commands/audit/audit.py +9 -27
- adam/commands/audit/audit_repair_tables.py +5 -7
- adam/commands/audit/audit_run.py +1 -1
- adam/commands/audit/completions_l.py +15 -0
- adam/commands/audit/show_last10.py +2 -14
- adam/commands/audit/show_slow10.py +2 -13
- adam/commands/audit/show_top10.py +2 -11
- adam/commands/audit/utils_show_top10.py +14 -1
- adam/commands/bash/bash.py +1 -1
- adam/commands/cat.py +3 -7
- adam/commands/check.py +2 -2
- adam/commands/cli_commands.py +6 -1
- adam/commands/{cp.py → clipboard_copy.py} +18 -12
- adam/commands/code.py +2 -2
- adam/commands/command.py +61 -11
- adam/commands/commands_utils.py +19 -12
- adam/commands/cql/completions_c.py +28 -0
- adam/commands/cql/cqlsh.py +3 -7
- adam/commands/cql/utils_cql.py +22 -58
- adam/commands/deploy/deploy_pg_agent.py +2 -2
- adam/commands/deploy/undeploy_pg_agent.py +2 -2
- adam/commands/devices/device.py +39 -8
- adam/commands/devices/device_app.py +18 -28
- adam/commands/devices/device_auit_log.py +3 -3
- adam/commands/devices/device_cass.py +16 -22
- adam/commands/devices/device_export.py +6 -3
- adam/commands/devices/device_postgres.py +79 -63
- adam/commands/download_file.py +47 -0
- adam/commands/export/clean_up_all_export_sessions.py +3 -3
- adam/commands/export/clean_up_export_sessions.py +5 -10
- adam/commands/export/completions_x.py +11 -0
- adam/commands/export/download_export_session.py +40 -0
- adam/commands/export/export.py +0 -16
- adam/commands/export/export_databases.py +26 -9
- adam/commands/export/export_select.py +9 -58
- adam/commands/export/export_sessions.py +90 -5
- adam/commands/export/export_use.py +13 -10
- adam/commands/export/export_x_select.py +48 -0
- adam/commands/export/exporter.py +60 -22
- adam/commands/export/import_files.py +44 -0
- adam/commands/export/import_session.py +8 -4
- adam/commands/export/importer.py +7 -0
- adam/commands/export/importer_athena.py +101 -34
- adam/commands/export/importer_sqlite.py +30 -5
- adam/commands/export/show_column_counts.py +11 -11
- adam/commands/export/show_export_databases.py +5 -3
- adam/commands/export/show_export_session.py +5 -6
- adam/commands/export/show_export_sessions.py +4 -11
- adam/commands/export/utils_export.py +42 -14
- adam/commands/find_files.py +51 -0
- adam/commands/find_processes.py +76 -0
- adam/commands/head.py +36 -0
- adam/commands/help.py +2 -2
- adam/commands/intermediate_command.py +6 -3
- adam/commands/ls.py +1 -1
- adam/commands/medusa/medusa_backup.py +12 -14
- adam/commands/medusa/medusa_restore.py +20 -15
- adam/commands/medusa/medusa_show_backupjobs.py +6 -4
- adam/commands/medusa/medusa_show_restorejobs.py +5 -3
- adam/commands/medusa/utils_medusa.py +15 -0
- adam/commands/nodetool.py +3 -8
- adam/commands/param_get.py +2 -3
- adam/commands/param_set.py +1 -1
- adam/commands/postgres/completions_p.py +22 -0
- adam/commands/postgres/postgres.py +14 -21
- adam/commands/postgres/postgres_databases.py +270 -0
- adam/commands/postgres/utils_postgres.py +29 -20
- adam/commands/preview_table.py +3 -1
- adam/commands/pwd.py +3 -3
- adam/commands/reaper/reaper_forward.py +2 -2
- adam/commands/reaper/reaper_runs.py +3 -3
- adam/commands/reaper/reaper_schedule_activate.py +6 -2
- adam/commands/reaper/reaper_schedule_start.py +1 -2
- adam/commands/reaper/reaper_schedule_stop.py +1 -2
- adam/commands/reaper/utils_reaper.py +13 -6
- adam/commands/repair/repair_scan.py +0 -2
- adam/commands/repair/repair_stop.py +0 -1
- adam/commands/shell.py +7 -5
- adam/commands/show/show.py +1 -1
- adam/commands/show/show_adam.py +3 -3
- adam/commands/show/show_cassandra_repairs.py +5 -3
- adam/commands/show/show_cassandra_status.py +27 -20
- adam/commands/show/{show_commands.py → show_cli_commands.py} +2 -2
- adam/commands/show/show_login.py +2 -2
- adam/commands/show/show_params.py +2 -5
- adam/commands/show/show_processes.py +15 -14
- adam/commands/show/show_storage.py +9 -8
- adam/config.py +1 -0
- adam/embedded_params.py +1 -1
- adam/repl.py +16 -9
- adam/repl_commands.py +16 -9
- adam/repl_session.py +8 -1
- adam/repl_state.py +33 -10
- adam/sql/lark_completer.py +280 -0
- adam/sql/lark_parser.py +604 -0
- adam/sql/sql_state_machine.py +8 -2
- adam/utils.py +116 -29
- adam/utils_athena.py +7 -8
- adam/utils_issues.py +2 -2
- adam/utils_k8s/app_clusters.py +2 -2
- adam/utils_k8s/app_pods.py +5 -2
- adam/utils_k8s/cassandra_clusters.py +11 -3
- adam/utils_k8s/cassandra_nodes.py +2 -2
- adam/utils_k8s/k8s.py +9 -0
- adam/utils_k8s/kube_context.py +2 -2
- adam/utils_k8s/pods.py +23 -5
- adam/utils_k8s/statefulsets.py +5 -2
- adam/utils_local.py +4 -0
- adam/utils_repl/appendable_completer.py +6 -0
- adam/utils_repl/repl_completer.py +128 -2
- adam/utils_sqlite.py +2 -2
- adam/version.py +1 -1
- {kaqing-2.0.174.dist-info → kaqing-2.0.186.dist-info}/METADATA +1 -1
- kaqing-2.0.186.dist-info/RECORD +250 -0
- adam/commands/cql/cql_completions.py +0 -32
- adam/commands/export/export_select_x.py +0 -54
- adam/commands/postgres/postgres_context.py +0 -272
- adam/commands/postgres/psql_completions.py +0 -10
- kaqing-2.0.174.dist-info/RECORD +0 -230
- {kaqing-2.0.174.dist-info → kaqing-2.0.186.dist-info}/WHEEL +0 -0
- {kaqing-2.0.174.dist-info → kaqing-2.0.186.dist-info}/entry_points.txt +0 -0
- {kaqing-2.0.174.dist-info → kaqing-2.0.186.dist-info}/top_level.txt +0 -0
adam/commands/export/importer_athena.py
CHANGED

@@ -1,11 +1,11 @@
+import os
 import boto3
 
 from adam.commands.export.export_databases import export_db
 from adam.commands.export.importer import Importer
-from adam.commands.export.utils_export import GeneratorStream
 from adam.config import Config
 from adam.repl_state import ReplState
-from adam.utils import debug, log2, ing
+from adam.utils import GeneratorStream, bytes_generator_from_file, debug, log2, ing
 from adam.utils_athena import Athena
 from adam.utils_k8s.pods import Pods
 
@@ -19,7 +19,9 @@ class AthenaImporter(Importer):
     def prefix(self):
         return 'e'
 
-    def import_from_csv(self, state: ReplState, from_session: str,
+    def import_from_csv(self, state: ReplState, from_session: str,
+                        keyspace: str, table: str, target_table: str, columns: str,
+                        multi_tables = True, create_db = False):
         csv_file = self.csv_file(from_session, table, target_table)
         pod = state.pod
         namespace = state.namespace
@@ -36,35 +38,7 @@ class AthenaImporter(Importer):
             s3 = boto3.client('s3')
             s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{database}/{keyspace}/{target_table}/{table}.csv')
 
-
-        if create_db:
-            msg = f"[{to_session}] Creating database {database}"
-        else:
-            msg = f"[{to_session}] Creating table {target_table}"
-        with ing(msg, suppress_log=multi_tables):
-            query = f'CREATE DATABASE IF NOT EXISTS {database};'
-            debug(query)
-            Athena.query(query, 'default')
-
-            query = f'DROP TABLE IF EXISTS {target_table};'
-            debug(query)
-            Athena.query(query, database)
-
-            athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
-            query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
-                f'  {athena_columns})\n' + \
-                "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
-                'WITH SERDEPROPERTIES (\n' + \
-                '  "separatorChar" = ",",\n' + \
-                '  "quoteChar" = "\\"")\n' + \
-                f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{target_table}'\n" + \
-                'TBLPROPERTIES ("skip.header.line.count"="1");'
-            debug(query)
-            try:
-                Athena.query(query, database)
-            except Exception as e:
-                log2(f'*** Failed query:\n{query}')
-                raise e
+        self.create_schema(to_session, bucket, database, keyspace, table, columns, multi_tables, create_db)
 
         to, _ = self.move_to_done(state, from_session, keyspace, target_table)
 
@@ -77,5 +51,98 @@ class AthenaImporter(Importer):
             Athena.clear_cache()
 
             if not multi_tables:
-                with export_db(state) as
-
+                with export_db(state) as dbs:
+                    dbs.sql(f'select * from {database}.{target_table} limit 10')
+
+    def import_from_local_csv(self, state: ReplState,
+                              keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
+        to_session = state.export_session
+        database = self.db(to_session, keyspace)
+
+        succeeded = False
+        try:
+            columns = None
+            with open(csv_file, 'r') as f:
+                columns = f.readline()
+
+            bucket = Config().get('export.bucket', 'c3.ops--qing')
+
+            with ing(f'[{to_session}] Uploading to S3', suppress_log=multi_tables):
+                bytes = bytes_generator_from_file(csv_file)
+
+                s3 = boto3.client('s3')
+                s3.upload_fileobj(GeneratorStream(bytes), bucket, f'export/{database}/{keyspace}/{table}/{os.path.basename(csv_file)}')
+
+            self.create_schema(to_session, bucket, database, keyspace, table, columns, multi_tables, create_db)
+            # msg: str = None
+            # if create_db:
+            #     msg = f"[{to_session}] Creating database {database}"
+            # else:
+            #     msg = f"[{to_session}] Creating table {target_table}"
+            # with ing(msg, suppress_log=multi_tables):
+            #     query = f'CREATE DATABASE IF NOT EXISTS {database};'
+            #     debug(query)
+            #     Athena.query(query, 'default')
+
+            #     query = f'DROP TABLE IF EXISTS {target_table};'
+            #     debug(query)
+            #     Athena.query(query, database)
+
+            #     athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
+            #     query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {target_table}(\n' + \
+            #         f'  {athena_columns})\n' + \
+            #         "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
+            #         'WITH SERDEPROPERTIES (\n' + \
+            #         '  "separatorChar" = ",",\n' + \
+            #         '  "quoteChar" = "\\"")\n' + \
+            #         f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{target_table}'\n" + \
+            #         'TBLPROPERTIES ("skip.header.line.count"="1");'
+            #     debug(query)
+            #     try:
+            #         Athena.query(query, database)
+            #     except Exception as e:
+            #         log2(f'*** Failed query:\n{query}')
+            #         raise e
+
+            succeeded = True
+
+            return csv_file, to_session
+        finally:
+            if succeeded:
+                Athena.clear_cache()
+
+                if not multi_tables:
+                    with export_db(state) as dbs:
+                        dbs.sql(f'select * from {database}.{table} limit 10')
+
+    def create_schema(self, to_session: str, bucket: str, database: str, keyspace: str, table: str, columns: list[str], multi_tables: bool, create_db = False):
+        msg: str = None
+        if create_db:
+            msg = f"[{to_session}] Creating database {database}"
+        else:
+            msg = f"[{to_session}] Creating table {table}"
+
+        with ing(msg, suppress_log=multi_tables):
+            query = f'CREATE DATABASE IF NOT EXISTS {database};'
+            debug(query)
+            Athena.query(query, 'default')
+
+            query = f'DROP TABLE IF EXISTS {table};'
+            debug(query)
+            Athena.query(query, database)
+
+            athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
+            query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {table}(\n' + \
+                f'  {athena_columns})\n' + \
+                "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
+                'WITH SERDEPROPERTIES (\n' + \
+                '  "separatorChar" = ",",\n' + \
+                '  "quoteChar" = "\\"")\n' + \
+                f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{table}'\n" + \
+                'TBLPROPERTIES ("skip.header.line.count"="1");'
+            debug(query)
+            try:
+                Athena.query(query, database)
+            except Exception as e:
+                log2(f'*** Failed query:\n{query}')
+                raise e
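Editor's note: the Athena DDL that used to be inlined in import_from_csv now lives in the new create_schema helper. As a rough, standalone sketch, the snippet below rebuilds the DDL string that helper assembles; the bucket, database, keyspace, table, and column values are made-up placeholders, not values from the package.

# Sketch only: the CREATE EXTERNAL TABLE statement create_schema builds for Athena,
# using invented placeholder values (real ones come from Config and the export session).
bucket = 'my-bucket'      # assumption: stand-in for Config 'export.bucket'
database = 'x_session1'   # assumption: stand-in for self.db(to_session, keyspace)
keyspace = 'ks1'          # assumption
table = 'users'           # assumption
columns = 'id,name'       # the CSV header line, as read by import_from_local_csv

# Every column is typed as string, matching the diff above.
athena_columns = ', '.join([f'{c} string' for c in columns.split(',')])
query = f'CREATE EXTERNAL TABLE IF NOT EXISTS {table}(\n' + \
        f'  {athena_columns})\n' + \
        "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'\n" + \
        'WITH SERDEPROPERTIES (\n' + \
        '  "separatorChar" = ",",\n' + \
        '  "quoteChar" = "\\"")\n' + \
        f"LOCATION 's3://{bucket}/export/{database}/{keyspace}/{table}'\n" + \
        'TBLPROPERTIES ("skip.header.line.count"="1");'
print(query)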
adam/commands/export/importer_sqlite.py
CHANGED

@@ -2,9 +2,8 @@ import pandas
 
 from adam.commands.export.export_databases import export_db
 from adam.commands.export.importer import Importer
-from adam.commands.export.utils_export import GeneratorStream
 from adam.repl_state import ReplState
-from adam.utils import
+from adam.utils import GeneratorStream, bytes_generator_from_file, ing
 from adam.utils_k8s.pods import Pods
 from adam.utils_sqlite import SQLite, sqlite
 
@@ -12,7 +11,9 @@ class SqliteImporter(Importer):
     def prefix(self):
         return 's'
 
-    def import_from_csv(self, state: ReplState, from_session: str,
+    def import_from_csv(self, state: ReplState, from_session: str,
+                        keyspace: str, table: str, target_table: str, columns: str,
+                        multi_tables = True, create_db = False):
         csv_file = self.csv_file(from_session, table, target_table)
         pod = state.pod
         namespace = state.namespace
@@ -38,5 +39,29 @@ class SqliteImporter(Importer):
             SQLite.clear_cache()
 
             if not multi_tables:
-                with export_db(state) as
-
+                with export_db(state) as dbs:
+                    dbs.sql(f'select * from {keyspace}.{target_table} limit 10')
+
+    def import_from_local_csv(self, state: ReplState,
+                              keyspace: str, table: str, csv_file: str, multi_tables = True, create_db = False):
+        to_session = state.export_session
+
+        succeeded = False
+        try:
+            with ing(f'[{to_session}] Uploading to Sqlite', suppress_log=multi_tables):
+                # create a connection to single keyspace
+                with sqlite(to_session, keyspace) as conn:
+                    bytes = bytes_generator_from_file(csv_file)
+                    df = pandas.read_csv(GeneratorStream(bytes))
+                    df.to_sql(table, conn, index=False, if_exists='replace')
+
+            succeeded = True
+
+            return csv_file, to_session
+        finally:
+            if succeeded:
+                SQLite.clear_cache()
+
+                if not multi_tables:
+                    with export_db(state) as dbs:
+                        dbs.sql(f'select * from {keyspace}.{table} limit 10')
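Editor's note: SqliteImporter.import_from_local_csv takes the same local-file path but loads the CSV through pandas into SQLite. Below is a minimal sketch of that flow with the in-repo sqlite() and GeneratorStream helpers swapped for stdlib stand-ins (an in-memory connection and StringIO) and a toy two-row CSV; it is not the package's own code.

# Sketch only: CSV -> pandas DataFrame -> SQLite table, mirroring the diff above.
import io
import sqlite3

import pandas

csv_text = 'id,name\n1,alice\n2,bob\n'           # stand-in for the uploaded CSV file
with sqlite3.connect(':memory:') as conn:         # stand-in for sqlite(to_session, keyspace)
    df = pandas.read_csv(io.StringIO(csv_text))   # GeneratorStream would stream the real file
    df.to_sql('users', conn, index=False, if_exists='replace')
    print(conn.execute('select * from users limit 10').fetchall())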
adam/commands/export/show_column_counts.py
CHANGED

@@ -1,9 +1,9 @@
-from adam.commands import validate_args
+from adam.commands import extract_trailing_options, validate_args
 from adam.commands.command import Command
-from adam.commands.
+from adam.commands.cql.utils_cql import cassandra_table_names
+from adam.commands.export.export_databases import ExportDatabases, export_db
 from adam.config import Config
 from adam.repl_state import ReplState, RequiredState
-from adam.utils import log2
 
 class ShowColumnCounts(Command):
     COMMAND = 'show column counts on'
@@ -28,18 +28,18 @@ class ShowColumnCounts(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            with
-
-
-
+            with extract_trailing_options(args, '&') as (args, backgrounded):
+                with validate_args(args, state, name='SQL statement') as table:
+                    with export_db(state) as dbs:
+                        query = Config().get(f'export.column_counts_query', 'select id, count(id) as columns from {table} group by id')
+                        query = query.replace('{table}', table)
+                        dbs.sql(query, state.export_session, backgrounded=backgrounded)
 
         return state
 
     def completion(self, state: ReplState):
-
-
-
-        return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)})
+        return super().completion(state, lambda: {t: None for t in ExportDatabases.table_names(state.export_session)}, auto_key='x.tables')
+        # return {}
 
     def help(self, _: ReplState):
         return f'{ShowColumnCounts.COMMAND} <export-table-name>\t show column count per id'
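Editor's note: the column-count query is template-driven; the literal in the diff is the fallback used when export.column_counts_query is not configured, and '{table}' is substituted with the command argument. A tiny sketch with a made-up table name:

# Sketch only: how the column-count query template is filled in.
query = 'select id, count(id) as columns from {table} group by id'  # default baked into the command
query = query.replace('{table}', 'users')                           # 'users' is a hypothetical table name
print(query)  # select id, count(id) as columns from users group by id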
adam/commands/export/show_export_databases.py
CHANGED

@@ -1,6 +1,6 @@
 from adam.commands.command import Command
 from adam.commands.devices.device_export import DeviceExport
-from adam.commands.export.export_databases import
+from adam.commands.export.export_databases import export_db
 from adam.repl_state import ReplState
 
 class ShowExportDatabases(Command):
@@ -26,12 +26,14 @@ class ShowExportDatabases(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-
+            with export_db(state) as dbs:
+                dbs.show_databases()
 
         return state
 
     def completion(self, state: ReplState):
-        return
+        return {}
+        # return DeviceExport().ls_completion(ShowExportDatabases.COMMAND, state, default = super().completion(state))
 
     def help(self, _: ReplState):
         return f'{ShowExportDatabases.COMMAND}\t list export databases'
adam/commands/export/show_export_session.py
CHANGED

@@ -1,9 +1,7 @@
 from adam.commands import validate_args
 from adam.commands.command import Command
-from adam.commands.export.
-from adam.commands.export.export_sessions import ExportSessions
+from adam.commands.export.export_sessions import ExportSessions, export_session
 from adam.repl_state import ReplState, RequiredState
-from adam.utils import log2
 
 class ShowExportSession(Command):
     COMMAND = 'show export session'
@@ -28,13 +26,14 @@ class ShowExportSession(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-            with validate_args(args, state, name='
-
+            with validate_args(args, state, name='export session') as session:
+                with export_session(state) as sessions:
+                    sessions.show_session(session)
 
         return state
 
     def completion(self, state: ReplState):
-        return
+        return {}
 
     def help(self, _: ReplState):
         return f'{ShowExportSession.COMMAND} <export-session-name>\t show export session'
adam/commands/export/show_export_sessions.py
CHANGED

@@ -1,8 +1,6 @@
 from adam.commands.command import Command
-from adam.commands.export.export_sessions import
+from adam.commands.export.export_sessions import export_session
 from adam.repl_state import ReplState, RequiredState
-from adam.utils import lines_to_tabular, log
-from adam.utils_k8s.statefulsets import StatefulSets
 
 class ShowExportSessions(Command):
     COMMAND = 'show export sessions'
@@ -27,18 +25,13 @@ class ShowExportSessions(Command):
             return super().run(cmd, state)
 
         with self.validate(args, state) as (args, state):
-
-
-            pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
-
-            sessions: dict[str, str] = ExportSessions.find_export_sessions(pod, state.namespace)
-            log(lines_to_tabular([f'{session}\t{export_state}' for session, export_state in sorted(sessions.items(), reverse=True)],
-                                 header='EXPORT_SESSION\tSTATUS', separator='\t'))
+            with export_session(state) as sessions:
+                sessions.show_all_sessions()
 
         return state
 
     def completion(self, state: ReplState):
-        return
+        return {}
 
     def help(self, _: ReplState):
         return f'{ShowExportSessions.COMMAND}\t list export sessions'
adam/commands/export/utils_export.py
CHANGED

@@ -9,18 +9,27 @@ from adam.utils_k8s.pods import log_prefix
 from adam.utils_k8s.statefulsets import StatefulSets
 
 class ImportSpec:
-    def __init__(self, session: str, importer: str):
+    def __init__(self, table_name: str, session: str = None, files: list[str] = None, importer: str = None):
+        self.table_name = table_name
         self.session = session
+        self.files = files
        self.importer = importer
 
-    def parse_specs(specs_str: str):
-
+    def parse_specs(specs_str: str, files = False):
+        session_or_files: str = None
         importer: str = None
+        table_name: str = None
 
         if specs_str:
-            importer,
+            importer, rest = ImportSpec._extract_importer(specs_str.strip(' '))
+
+            if rest:
+                table_name, session_or_files = ImportSpec._extract_table_name(rest)
 
-
+        if not files:
+            return ImportSpec(table_name, session=session_or_files, importer=importer)
+
+        return ImportSpec(table_name, files=[f.strip(' ') for f in session_or_files.split(',')], importer=importer)
 
     def _extract_importer(spec_str: str) -> tuple[str, str]:
         importer = None
@@ -34,12 +43,25 @@ class ImportSpec:
 
         return importer, rest
 
+    def _extract_table_name(spec_str: str) -> tuple[str, str]:
+        table_name = None
+        rest = spec_str
+
+        p = re.compile(r"(.*?)as\s+(.*)", re.IGNORECASE)
+        match = p.match(spec_str)
+        if match:
+            rest = match.group(1).strip(' ')
+            table_name = match.group(2).strip(' ')
+
+        return table_name, rest
+
 class ExportSpec(ImportSpec):
     def __init__(self, keyspace: str, consistency: str, importer: str, tables: list['ExportTableSpec'], session: str = None):
         super().__init__(None, importer)
 
         self.keyspace = keyspace
         self.consistency = consistency
+        self.importer = importer
         self.tables = tables
         self.session = session
 
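Editor's note: the new _extract_table_name splits an optional 'as <table>' suffix off an import spec with the regex shown above; parse_specs then treats the remainder as either a session name or a comma-separated file list, depending on the files flag. A self-contained sketch of just the regex step, on an invented spec string:

# Sketch only: splitting "... as <table>" the way _extract_table_name does.
import re

spec = 'part1.csv, part2.csv as my_table'        # invented example spec
p = re.compile(r"(.*?)as\s+(.*)", re.IGNORECASE)
m = p.match(spec)
rest, table_name = (m.group(1).strip(' '), m.group(2).strip(' ')) if m else (spec, None)
print(table_name)                                # my_table
print([f.strip(' ') for f in rest.split(',')])   # ['part1.csv', 'part2.csv'] -> the files list when files=True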
@@ -154,11 +176,12 @@ class ExportTableSpec:
         return f'{self.keyspace}.{self.table}({self.columns}) as {self.target_table}'
 
 class ExportTableStatus:
-    def __init__(self, keyspace: str, target_table: str, status: str, table: str = None):
+    def __init__(self, keyspace: str, target_table: str, status: str, table: str = None, csv_file: str = ''):
         self.keyspace = keyspace
         self.target_table = target_table
         self.status = status
         self.table = table
+        self.csv_file = csv_file
 
     def __str__(self):
         return f'{self.keyspace}.{self.table} as {self.target_table} = {self.status}'
@@ -169,7 +192,7 @@ class ExportTableStatus:
 
         return False
 
-    def from_session(sts: str, pod: str, namespace: str, export_session: str):
+    def from_session(sts: str, pod: str, namespace: str, export_session: str) -> tuple['ExportTableStatus', str]:
         statuses: list[ExportTableStatus] = []
 
         status_in_whole = 'done'
@@ -204,8 +227,8 @@ class ExportTableStatus:
             target_table = m.group(2)
             state = m.group(3)
             if state == '.pending_import':
-
-                return ExportTableStatus(keyspace, target_table, 'pending_import', table)
+                csv_files, table = get_csv_files_n_table(target_table)
+                return ExportTableStatus(keyspace, target_table, 'pending_import', table, csv_files[0] if csv_files else '')
             elif state == '.done':
                 return ExportTableStatus(keyspace, target_table, 'done', target_table)
 
@@ -215,7 +238,7 @@ class ExportTableStatus:
             if r.exit_code() == 0:
                 csv_files, table = get_csv_files_n_table(target_table)
                 if csv_files:
-                    return ExportTableStatus(keyspace, target_table, 'exported', table)
+                    return ExportTableStatus(keyspace, target_table, 'exported', table, csv_files[0])
                 else:
                     return ExportTableStatus(keyspace, target_table, 'imported', target_table)
             else:
@@ -293,16 +316,21 @@ class GeneratorStream(io.RawIOBase):
         return data
 
 class PodPushHandler:
-    def __init__(self, state: ReplState):
+    def __init__(self, state: ReplState, pod: str = None):
         self.state = state
         self.pushed = False
+        self.pod = pod
 
     def __enter__(self):
         state = self.state
 
         if not state.pod:
+            self.pushed = True
             state.push()
-
+
+            if not self.pod:
+                self.pod = StatefulSets.pod_names(state.sts, state.namespace)[0]
+            state.pod = self.pod
 
         return state
 
@@ -312,5 +340,5 @@ class PodPushHandler:
 
         return False
 
-def state_with_pod(state: ReplState):
-    return PodPushHandler(state)
+def state_with_pod(state: ReplState, pod: str = None):
+    return PodPushHandler(state, pod=pod)
adam/commands/find_files.py
ADDED

@@ -0,0 +1,51 @@
+import os
+
+from adam.commands.command import Command
+from adam.repl_state import ReplState
+from adam.utils import log2
+from adam.utils_local import local_tmp_dir
+
+class FindLocalFiles(Command):
+    COMMAND = 'find local'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(FindLocalFiles, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return FindLocalFiles.COMMAND
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            cmd = 'find'
+
+            if not args:
+                cmd = f'find {local_tmp_dir()}'
+            elif len(args) == 1:
+                cmd = f"find {local_tmp_dir()} -name '{args[0]}'"
+            else:
+                new_args = [f"'{arg}'" if '*' in arg else arg for arg in args]
+                cmd = 'find ' + ' '.join(new_args)
+
+            log2(cmd)
+            os.system(cmd)
+
+        return state
+
+    def completion(self, state: ReplState):
+        return super().completion(state, {
+            '*.csv': None,
+            '*.db': None,
+            '*': None
+        })
+
+    def help(self, _: ReplState):
+        return f'{FindLocalFiles.COMMAND} [linux-find-arguments]\t find files from local machine'
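Editor's note: FindLocalFiles simply shells out to find, defaulting to the local temp directory when given zero or one argument. A small sketch of the argument-to-command mapping, with a hypothetical stand-in for local_tmp_dir():

# Sketch only: how 'find local <args>' becomes a shell command.
def build_find(args, tmp_dir='/tmp/kaqing'):     # tmp_dir is a made-up stand-in for local_tmp_dir()
    if not args:
        return f'find {tmp_dir}'
    if len(args) == 1:
        return f"find {tmp_dir} -name '{args[0]}'"
    return 'find ' + ' '.join(f"'{a}'" if '*' in a else a for a in args)

print(build_find(['*.csv']))                     # find /tmp/kaqing -name '*.csv'
print(build_find(['/data', '-name', '*.db']))    # find /data -name '*.db'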
adam/commands/find_processes.py
ADDED

@@ -0,0 +1,76 @@
+from adam.commands import extract_options, validate_args
+from adam.commands.command import Command
+from adam.commands.devices.devices import Devices
+from adam.commands.export.utils_export import state_with_pod
+from adam.repl_state import ReplState, RequiredState
+from adam.utils import log2, tabulize
+
+class FindProcesses(Command):
+    COMMAND = 'find processes'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(FindProcesses, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return FindProcesses.COMMAND
+
+    def required(self):
+        return [RequiredState.CLUSTER_OR_POD, RequiredState.APP_APP, ReplState.P]
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            with extract_options(args, '-kill') as (args, kill):
+                with validate_args(args, state, name='words to look for'):
+                    arg = ' | '.join([f'grep {a}' for a in args])
+                    awk = "awk '{ print $1, $2, $8, $NF }'"
+                    rs = Devices.device(state).bash(state, state, f"ps -ef | grep -v grep | {arg} | {awk}".split(' '))
+
+                    lines: list[list[str]] = []
+                    for r in rs:
+                        for l in r.stdout.split('\n'):
+                            l = l.strip(' \t\r\n')
+                            if not l:
+                                continue
+
+                            tokens = [r.pod] + l.split(' ')
+                            lines.append(tokens)
+
+                    pids = []
+                    for l in lines:
+                        pids.append(f'{l[2]}@{l[0]}')
+
+                    tabulize(lines, lambda l: '\t'.join(l), header = 'POD\tUSER\tPID\tCMD\tLAST_ARG', separator='\t')
+                    log2()
+                    log2(f'PIDS with {",".join(args)}: {",".join(pids)}')
+
+                    if kill:
+                        log2()
+                        for pidp in pids:
+                            pid_n_pod = pidp.split('@')
+                            pid = pid_n_pod[0]
+                            if len(pid_n_pod) < 2:
+                                continue
+
+                            pod = pid_n_pod[1]
+
+                            log2(f'@{pod} bash kill -9 {pid}')
+
+                            with state_with_pod(state, pod) as state1:
+                                Devices.device(state).bash(state, state1, ['kill', '-9', pid])
+
+        return rs
+
+    def completion(self, state: ReplState):
+        return super().completion(state)
+
+    def help(self, _: ReplState):
+        return f'{FindProcesses.COMMAND} word... [-kill]\t find processes with words'
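Editor's note: FindProcesses builds one grep per search word and narrows the ps output with awk before running the pipeline on each pod via the device's bash(). A sketch of the command string it assembles, with invented search words:

# Sketch only: the shell pipeline FindProcesses constructs for its search words.
words = ['cassandra', 'repair']                       # invented example arguments
grep_chain = ' | '.join([f'grep {w}' for w in words])
awk = "awk '{ print $1, $2, $8, $NF }'"
print(f"ps -ef | grep -v grep | {grep_chain} | {awk}")
# ps -ef | grep -v grep | grep cassandra | grep repair | awk '{ print $1, $2, $8, $NF }'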
adam/commands/head.py
ADDED

@@ -0,0 +1,36 @@
+from adam.commands import validate_args
+from adam.commands.command import Command
+from adam.commands.devices.devices import Devices
+from adam.repl_state import ReplState, RequiredState
+
+class Head(Command):
+    COMMAND = 'head'
+
+    # the singleton pattern
+    def __new__(cls, *args, **kwargs):
+        if not hasattr(cls, 'instance'): cls.instance = super(Head, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self, successor: Command=None):
+        super().__init__(successor)
+
+    def command(self):
+        return Head.COMMAND
+
+    def required(self):
+        return [RequiredState.CLUSTER_OR_POD, RequiredState.APP_APP, ReplState.P]
+
+    def run(self, cmd: str, state: ReplState):
+        if not(args := self.args(cmd)):
+            return super().run(cmd, state)
+
+        with self.validate(args, state) as (args, state):
+            with validate_args(args, state, name='file'):
+                return Devices.device(state).bash(state, state, cmd.split(' '))
+
+    def completion(self, state: ReplState):
+        return super().completion(state, lambda: {f: None for f in Devices.device(state).files(state)}, pods=Devices.device(state).pods(state, '-'), auto='jit')
+
+    def help(self, _: ReplState):
+        return f'{Head.COMMAND} file [&]\t run head command on the pod'
adam/commands/help.py
CHANGED

@@ -1,7 +1,7 @@
 from adam.commands.command import Command
 from adam.repl_commands import ReplCommands
 from adam.repl_state import ReplState
-from adam.utils import
+from adam.utils import tabulize, log
 
 class Help(Command):
     COMMAND = 'help'
@@ -45,7 +45,7 @@ class Help(Command):
         lines.append('')
         lines.extend(section(ReplCommands.exit()))
 
-
+        tabulize(lines, separator='\t')
 
         return lines
 