kaqing 2.0.14__py3-none-any.whl → 2.0.145__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of kaqing might be problematic.
- adam/apps.py +2 -2
- adam/batch.py +13 -3
- adam/checks/check_utils.py +4 -4
- adam/checks/compactionstats.py +1 -1
- adam/checks/cpu.py +2 -2
- adam/checks/disk.py +1 -1
- adam/checks/gossip.py +1 -1
- adam/checks/memory.py +3 -3
- adam/checks/status.py +1 -1
- adam/commands/alter_tables.py +81 -0
- adam/commands/app.py +3 -3
- adam/commands/app_ping.py +2 -2
- adam/commands/audit/audit.py +86 -0
- adam/commands/audit/audit_repair_tables.py +77 -0
- adam/commands/audit/audit_run.py +58 -0
- adam/commands/audit/show_last10.py +51 -0
- adam/commands/audit/show_slow10.py +50 -0
- adam/commands/audit/show_top10.py +48 -0
- adam/commands/audit/utils_show_top10.py +59 -0
- adam/commands/bash/__init__.py +0 -0
- adam/commands/bash/bash.py +133 -0
- adam/commands/bash/bash_completer.py +93 -0
- adam/commands/cat.py +56 -0
- adam/commands/cd.py +12 -82
- adam/commands/check.py +6 -0
- adam/commands/cli_commands.py +3 -3
- adam/commands/code.py +60 -0
- adam/commands/command.py +48 -12
- adam/commands/commands_utils.py +4 -5
- adam/commands/cql/__init__.py +0 -0
- adam/commands/cql/cql_completions.py +28 -0
- adam/commands/cql/cql_utils.py +209 -0
- adam/commands/{cqlsh.py → cql/cqlsh.py} +15 -10
- adam/commands/deploy/code_utils.py +2 -2
- adam/commands/deploy/deploy.py +8 -21
- adam/commands/deploy/deploy_frontend.py +1 -1
- adam/commands/deploy/deploy_pg_agent.py +3 -3
- adam/commands/deploy/deploy_pod.py +28 -27
- adam/commands/deploy/deploy_utils.py +16 -26
- adam/commands/deploy/undeploy.py +8 -21
- adam/commands/deploy/undeploy_frontend.py +1 -1
- adam/commands/deploy/undeploy_pg_agent.py +5 -3
- adam/commands/deploy/undeploy_pod.py +12 -10
- adam/commands/devices/__init__.py +0 -0
- adam/commands/devices/device.py +27 -0
- adam/commands/devices/device_app.py +146 -0
- adam/commands/devices/device_auit_log.py +43 -0
- adam/commands/devices/device_cass.py +145 -0
- adam/commands/devices/device_export.py +86 -0
- adam/commands/devices/device_postgres.py +109 -0
- adam/commands/devices/devices.py +25 -0
- adam/commands/export/__init__.py +0 -0
- adam/commands/export/clean_up_export_session.py +53 -0
- adam/commands/export/clean_up_export_sessions.py +40 -0
- adam/commands/export/drop_export_database.py +58 -0
- adam/commands/export/drop_export_databases.py +46 -0
- adam/commands/export/export.py +83 -0
- adam/commands/export/export_databases.py +170 -0
- adam/commands/export/export_select.py +85 -0
- adam/commands/export/export_select_x.py +54 -0
- adam/commands/export/export_use.py +55 -0
- adam/commands/export/exporter.py +364 -0
- adam/commands/export/import_session.py +68 -0
- adam/commands/export/importer.py +67 -0
- adam/commands/export/importer_athena.py +80 -0
- adam/commands/export/importer_sqlite.py +47 -0
- adam/commands/export/show_column_counts.py +63 -0
- adam/commands/export/show_export_databases.py +39 -0
- adam/commands/export/show_export_session.py +51 -0
- adam/commands/export/show_export_sessions.py +47 -0
- adam/commands/export/utils_export.py +291 -0
- adam/commands/help.py +12 -7
- adam/commands/issues.py +6 -0
- adam/commands/kubectl.py +41 -0
- adam/commands/login.py +7 -4
- adam/commands/logs.py +2 -1
- adam/commands/ls.py +4 -107
- adam/commands/medusa/medusa.py +2 -26
- adam/commands/medusa/medusa_backup.py +2 -2
- adam/commands/medusa/medusa_restore.py +3 -4
- adam/commands/medusa/medusa_show_backupjobs.py +4 -3
- adam/commands/medusa/medusa_show_restorejobs.py +3 -3
- adam/commands/nodetool.py +9 -4
- adam/commands/param_set.py +1 -1
- adam/commands/postgres/postgres.py +42 -43
- adam/commands/postgres/{postgres_session.py → postgres_context.py} +43 -42
- adam/commands/postgres/postgres_utils.py +31 -0
- adam/commands/postgres/psql_completions.py +10 -0
- adam/commands/preview_table.py +18 -40
- adam/commands/pwd.py +2 -28
- adam/commands/reaper/reaper.py +4 -24
- adam/commands/reaper/reaper_restart.py +1 -1
- adam/commands/reaper/reaper_session.py +2 -2
- adam/commands/repair/repair.py +3 -27
- adam/commands/repair/repair_log.py +1 -1
- adam/commands/repair/repair_run.py +2 -2
- adam/commands/repair/repair_scan.py +1 -1
- adam/commands/repair/repair_stop.py +1 -1
- adam/commands/report.py +6 -0
- adam/commands/restart.py +2 -2
- adam/commands/rollout.py +1 -1
- adam/commands/show/show.py +11 -26
- adam/commands/show/show_app_actions.py +3 -0
- adam/commands/show/show_app_id.py +1 -1
- adam/commands/show/show_app_queues.py +3 -2
- adam/commands/show/show_cassandra_status.py +3 -3
- adam/commands/show/show_cassandra_version.py +3 -3
- adam/commands/show/show_host.py +33 -0
- adam/commands/show/show_login.py +3 -0
- adam/commands/show/show_processes.py +1 -1
- adam/commands/show/show_repairs.py +2 -2
- adam/commands/show/show_storage.py +1 -1
- adam/commands/watch.py +1 -1
- adam/config.py +16 -3
- adam/embedded_params.py +1 -1
- adam/pod_exec_result.py +10 -2
- adam/repl.py +127 -117
- adam/repl_commands.py +51 -16
- adam/repl_state.py +276 -55
- adam/sql/__init__.py +0 -0
- adam/sql/sql_completer.py +120 -0
- adam/sql/sql_state_machine.py +617 -0
- adam/sql/term_completer.py +76 -0
- adam/sso/authn_ad.py +1 -1
- adam/sso/cred_cache.py +1 -1
- adam/sso/idp.py +1 -1
- adam/utils.py +83 -2
- adam/utils_athena.py +145 -0
- adam/utils_audits.py +102 -0
- adam/utils_k8s/__init__.py +0 -0
- adam/utils_k8s/app_clusters.py +33 -0
- adam/utils_k8s/app_pods.py +31 -0
- adam/{k8s_utils → utils_k8s}/cassandra_clusters.py +6 -21
- adam/{k8s_utils → utils_k8s}/cassandra_nodes.py +12 -5
- adam/{k8s_utils → utils_k8s}/deployment.py +2 -2
- adam/{k8s_utils → utils_k8s}/kube_context.py +1 -1
- adam/{k8s_utils → utils_k8s}/pods.py +119 -26
- adam/{k8s_utils → utils_k8s}/secrets.py +4 -0
- adam/{k8s_utils → utils_k8s}/statefulsets.py +5 -4
- adam/utils_net.py +24 -0
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +46 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +101 -0
- adam/version.py +1 -1
- {kaqing-2.0.14.dist-info → kaqing-2.0.145.dist-info}/METADATA +1 -1
- kaqing-2.0.145.dist-info/RECORD +227 -0
- adam/commands/bash.py +0 -87
- adam/commands/cql_utils.py +0 -53
- adam/commands/devices.py +0 -89
- kaqing-2.0.14.dist-info/RECORD +0 -167
- /adam/{k8s_utils → commands/audit}/__init__.py +0 -0
- /adam/{k8s_utils → utils_k8s}/config_maps.py +0 -0
- /adam/{k8s_utils → utils_k8s}/custom_resources.py +0 -0
- /adam/{k8s_utils → utils_k8s}/ingresses.py +0 -0
- /adam/{k8s_utils → utils_k8s}/jobs.py +0 -0
- /adam/{k8s_utils → utils_k8s}/service_accounts.py +0 -0
- /adam/{k8s_utils → utils_k8s}/services.py +0 -0
- /adam/{k8s_utils → utils_k8s}/volumes.py +0 -0
- {kaqing-2.0.14.dist-info → kaqing-2.0.145.dist-info}/WHEEL +0 -0
- {kaqing-2.0.14.dist-info → kaqing-2.0.145.dist-info}/entry_points.txt +0 -0
- {kaqing-2.0.14.dist-info → kaqing-2.0.145.dist-info}/top_level.txt +0 -0
adam/sql/term_completer.py ADDED
@@ -0,0 +1,76 @@
+from typing import Callable, Iterable, List, Mapping, Optional, Pattern, Union
+
+from prompt_toolkit.completion import CompleteEvent, Completion, WordCompleter
+from prompt_toolkit.document import Document
+from prompt_toolkit.formatted_text import AnyFormattedText
+
+__all__ = [
+    "TermCompleter",
+]
+
+class TermCompleter(WordCompleter):
+    def __init__(
+        self,
+        words: Union[List[str], Callable[[], List[str]]],
+        ignore_case: bool = False,
+        display_dict: Optional[Mapping[str, AnyFormattedText]] = None,
+        meta_dict: Optional[Mapping[str, AnyFormattedText]] = None,
+        WORD: bool = False,
+        sentence: bool = False,
+        match_middle: bool = False,
+        pattern: Optional[Pattern[str]] = None,
+    ) -> None:
+        super().__init__(words, ignore_case, display_dict, meta_dict, WORD, sentence, match_middle, pattern)
+
+    def __str__(self):
+        return ','.join(self.words)
+
+    def get_completions(
+        self, document: Document, complete_event: CompleteEvent
+    ) -> Iterable[Completion]:
+        # Get list of words.
+        words = self.words
+        if callable(words):
+            words = words()
+
+        # Get word/text before cursor.
+        if self.sentence:
+            word_before_cursor = document.text_before_cursor
+        else:
+            word_before_cursor = document.get_word_before_cursor(
+                WORD=self.WORD, pattern=self.pattern
+            )
+
+        if self.ignore_case:
+            word_before_cursor = word_before_cursor.lower()
+
+        def word_matches(word: str) -> bool:
+            """True when the word before the cursor matches."""
+            if self.ignore_case:
+                word = word.lower()
+
+            if self.match_middle:
+                return word_before_cursor in word
+            else:
+                return word.startswith(word_before_cursor)
+
+        for a in words:
+            if word_before_cursor in ['(', ',', '=', 'in', "',"]:
+                display = self.display_dict.get(a, a)
+                display_meta = self.meta_dict.get(a, "")
+                yield Completion(
+                    a,
+                    0,
+                    display=display,
+                    display_meta=display_meta,
+                )
+
+            if word_matches(a):
+                display = self.display_dict.get(a, a)
+                display_meta = self.meta_dict.get(a, "")
+                yield Completion(
+                    a,
+                    -len(word_before_cursor),
+                    display=display,
+                    display_meta=display_meta,
+                )
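A minimal sketch of how this new completer might be wired into a prompt_toolkit session; the word list and metadata below are illustrative and not taken from the package:

    from prompt_toolkit import PromptSession

    from adam.sql.term_completer import TermCompleter

    # Illustrative SQL keywords; the real word lists come from the package's SQL state machine.
    completer = TermCompleter(
        words=['select', 'from', 'where', 'limit'],
        ignore_case=True,
        meta_dict={'select': 'start a query', 'from': 'pick a table'},
    )

    session = PromptSession(completer=completer)
    line = session.prompt('sql> ')  # completions are offered as the user types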
adam/sso/authn_ad.py
CHANGED
@@ -137,7 +137,7 @@ class AdAuthenticator(Authenticator):
         return []
 
     def parse_id_token(self, id_token: str) -> IdToken:
-        jwks_url = Config().get('idps.ad.jwks-uri', '
+        jwks_url = Config().get('idps.ad.jwks-uri', '')
         try:
             jwks_client = jwt.PyJWKClient(jwks_url, cache_jwk_set=True, lifespan=360)
             signing_key = jwks_client.get_signing_key_from_jwt(id_token)
adam/sso/cred_cache.py
CHANGED
adam/sso/idp.py
CHANGED
adam/utils.py
CHANGED
@@ -9,7 +9,8 @@ import os
 from pathlib import Path
 import random
 import string
-
+import threading
+from typing import Callable
 from dateutil import parser
 import subprocess
 import sys
@@ -19,6 +20,8 @@ import yaml
 
 from . import __version__
 
+is_debug_holder = [lambda: False]
+
 def to_tabular(lines: str, header: str = None, dashed_line = False):
     return lines_to_tabular(lines.split('\n'), header, dashed_line)
 
@@ -70,6 +73,9 @@ def epoch(timestamp_string: str):
     return parser.parse(timestamp_string).timestamp()
 
 def log(s = None):
+    if not loggable():
+        return
+
     # want to print empty line for False or empty collection
     if s == None:
         print()
@@ -77,6 +83,9 @@ def log(s = None):
     click.echo(s)
 
 def log2(s = None, nl = True):
+    if not loggable():
+        return
+
     if s:
         click.echo(s, err=True, nl=nl)
     else:
@@ -125,6 +134,19 @@ def deep_merge_dicts(dict1, dict2):
             merged_dict[key] = value
     return merged_dict
 
+def deep_sort_dict(d):
+    """
+    Recursively sorts a dictionary by its keys, and any nested lists by their elements.
+    """
+    if not isinstance(d, (dict, list)):
+        return d
+
+    if isinstance(d, dict):
+        return {k: deep_sort_dict(d[k]) for k in sorted(d)}
+
+    if isinstance(d, list):
+        return sorted([deep_sort_dict(item) for item in d])
+
 def get_deep_keys(d, current_path=""):
     """
     Recursively collects all combined keys (paths) from a deep dictionary.
@@ -228,4 +250,63 @@ def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out
     return path
 
 def idp_token_from_env():
-    return os.getenv('IDP_TOKEN')
+    return os.getenv('IDP_TOKEN')
+
+def is_lambda(func):
+    return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
+
+class Ing:
+    state = threading.local()
+
+    def __init__(self, msg: str, suppress_log=False):
+        self.msg = msg
+        self.suppress_log = suppress_log
+        self.nested = False
+
+    def __enter__(self):
+        if not hasattr(Ing.state, 'ing_cnt'):
+            Ing.state.ing_cnt = 0
+
+        try:
+            if not Ing.state.ing_cnt:
+                if not self.suppress_log and not is_debug_holder[0]():
+                    log2(f'{self.msg}...', nl=False)
+
+            return None
+        finally:
+            Ing.state.ing_cnt += 1
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        Ing.state.ing_cnt -= 1
+        if not Ing.state.ing_cnt:
+            if not self.suppress_log and not is_debug_holder[0]():
+                log2(' OK')
+
+        return False
+
+def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
+    if not body:
+        return Ing(msg, suppress_log=suppress_log)
+
+    r = None
+
+    if not hasattr(Ing.state, 'ing_cnt'):
+        Ing.state.ing_cnt = 0
+
+    if not Ing.state.ing_cnt:
+        if not suppress_log and not is_debug_holder[0]():
+            log2(f'{msg}...', nl=False)
+
+    Ing.state.ing_cnt += 1
+    try:
+        r = body()
+    finally:
+        Ing.state.ing_cnt -= 1
+        if not Ing.state.ing_cnt:
+            if not suppress_log and not is_debug_holder[0]():
+                log2(' OK')
+
+    return r
+
+def loggable():
+    return is_debug_holder[0]() or not hasattr(Ing.state, 'ing_cnt') or not Ing.state.ing_cnt
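The new Ing context manager and ing() helper print a single "message... OK" status line around a long-running step, and loggable() suppresses log()/log2() output while such a step is in progress unless debug mode is on. A minimal usage sketch; refresh_clusters is an illustrative placeholder for a slow call:

    from adam.utils import ing

    def refresh_clusters():
        ...  # illustrative slow operation

    # Function form: prints 'Refreshing clusters...' once, runs the body, then prints ' OK'.
    ing('Refreshing clusters', lambda: refresh_clusters())

    # Context-manager form: nested ing() calls do not repeat the message.
    with ing('Refreshing clusters'):
        refresh_clusters()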
adam/utils_athena.py
ADDED
@@ -0,0 +1,145 @@
+import functools
+import time
+import boto3
+import botocore
+
+from adam.config import Config
+from adam.utils import lines_to_tabular, log, log2
+
+# no state utility class
+class Athena:
+    @functools.lru_cache()
+    def database_names(like: str = None):
+        # this function is called only from export currently
+        Config().wait_log(f'Inspecting export database schema...')
+
+        query = f"SELECT schema_name FROM information_schema.schemata WHERE schema_name <> 'information_schema'"
+        if like:
+            query = f"{query} AND schema_name like '{like}'"
+
+        try:
+            state, reason, rs = Athena.query(query)
+            if rs:
+                names = []
+                for row in rs[1:]:
+                    row_data = [col.get('VarCharValue') if col else '' for col in row['Data']]
+                    names.append(row_data[0])
+
+                return names
+        except:
+            pass
+
+        return []
+
+    def clear_cache(cache: str = None):
+        if not cache or cache == 'databases':
+            Athena.database_names.cache_clear()
+        if not cache or cache == 'tables':
+            Athena.table_names.cache_clear()
+        if not cache or cache == 'columns':
+            Athena.column_names.cache_clear()
+
+    @functools.lru_cache()
+    def table_names(database: str = 'audit', function: str = 'audit'):
+        table_names = []
+        try:
+            region_name = Config().get(f'{function}.athena.region', 'us-west-2')
+            database_name = Config().get(f'{function}.athena.database', database)
+            catalog_name = Config().get(f'{function}.athena.catalog', 'AwsDataCatalog')
+
+            athena_client = boto3.client('athena', region_name=region_name)
+            paginator = athena_client.get_paginator('list_table_metadata')
+
+            for page in paginator.paginate(CatalogName=catalog_name, DatabaseName=database_name):
+                for table_metadata in page.get('TableMetadataList', []):
+                    table_names.append(table_metadata['Name'])
+        except botocore.exceptions.NoCredentialsError as e:
+            # aws credentials not found
+            if function == 'audit':
+                log2(f'Please configure AWS credentials to Audit Log Database.')
+        except:
+            pass
+
+        return table_names
+
+    @functools.lru_cache()
+    def column_names(tables: list[str] = [], database: str = None, function: str = 'audit', partition_cols_only = False):
+        try:
+            if not database:
+                database = Config().get(f'{function}.athena.database', 'audit')
+
+            if not tables:
+                tables = Config().get(f'{function}.athena.tables', 'audit').split(',')
+
+            table_names = "'" + "','".join([table.strip() for table in tables]) + "'"
+
+            query = f"select column_name from information_schema.columns where table_name in ({table_names}) and table_schema = '{database}'"
+            if partition_cols_only:
+                query = f"{query} and extra_info = 'partition key'"
+
+            _, _, rs = Athena.query(query)
+            if rs:
+                return [row['Data'][0].get('VarCharValue') for row in rs[1:]]
+        except:
+            # aws credentials not found
+            pass
+
+        return []
+
+    def run_query(sql: str, database: str = None):
+        state, reason, rs = Athena.query(sql, database)
+
+        if state == 'SUCCEEDED':
+            if rs:
+                column_info = rs[0]['Data']
+                columns = [col.get('VarCharValue') for col in column_info]
+                lines = []
+                for row in rs[1:]:
+                    row_data = [col.get('VarCharValue') if col else '' for col in row['Data']]
+                    lines.append('\t'.join(row_data))
+
+                log(lines_to_tabular(lines, header='\t'.join(columns), separator='\t'))
+
+                return len(lines)
+        else:
+            log2(f"Query failed or was cancelled. State: {state}")
+            log2(f"Reason: {reason}")
+
+        return 0
+
+    def query(sql: str, database: str = None, function: str = 'audit') -> tuple[str, str, list]:
+        region_name = Config().get(f'{function}.athena.region', 'us-west-2')
+        athena_client = boto3.client('athena', region_name=region_name)
+
+        if not database:
+            database = Config().get(f'{function}.athena.database', 'audit')
+
+        s3_output_location = Config().get(f'{function}.athena.output', f's3://s3.ops--{function}/ddl/results')
+
+        response = athena_client.start_query_execution(
+            QueryString=sql,
+            QueryExecutionContext={
+                'Database': database
+            },
+            ResultConfiguration={
+                'OutputLocation': s3_output_location
+            }
+        )
+
+        query_execution_id = response['QueryExecutionId']
+
+        while True:
+            query_status = athena_client.get_query_execution(QueryExecutionId=query_execution_id)
+            state = query_status['QueryExecution']['Status']['State']
+            if state in ['SUCCEEDED', 'FAILED', 'CANCELLED']:
+                break
+            time.sleep(1)
+
+        if state == 'SUCCEEDED':
+            results_response = athena_client.get_query_results(QueryExecutionId=query_execution_id)
+            if results_response['ResultSet']['Rows']:
+                return (state, None, results_response['ResultSet']['Rows'])
+
+            return (state, None, [])
+        else:
+            return (state, query_status['QueryExecution']['Status'].get('StateChangeReason'), [])
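A hedged sketch of how this Athena helper might be used, assuming AWS credentials and the audit.athena.* configuration keys are in place; the query text is illustrative:

    from adam.utils_athena import Athena

    # Prints a tab-aligned result table via log() and returns the number of data rows.
    rows = Athena.run_query("select c, count(*) as n from audit group by c", database='audit')

    # Name lookups are lru_cache'd; clear_cache() drops the cached entries.
    tables = Athena.table_names(database='audit')
    Athena.clear_cache('tables')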
adam/utils_audits.py
ADDED
@@ -0,0 +1,102 @@
+from datetime import datetime
+import getpass
+import time
+import requests
+
+from adam.config import Config
+from adam.utils import log2
+from adam.utils_net import get_my_host
+
+class AuditMeta:
+    def __init__(self, partitions_last_checked: float, cluster_last_checked: float):
+        self.partitions_last_checked = partitions_last_checked
+        self.cluster_last_checked = cluster_last_checked
+
+# no state utility class
+class Audits:
+    PARTITIONS_ADDED = 'partitions-added'
+    ADD_CLUSTERS = 'add-clusters'
+
+    def log(cmd: str, cluster = 'NA', drive: str = 'NA', duration: float = 0.0, audit_extra = None):
+        payload = {
+            'cluster': cluster if cluster else 'NA',
+            'ts': time.time(),
+            'host': get_my_host(),
+            'user': getpass.getuser(),
+            'line': cmd.replace('"', '""').replace('\n', ' '),
+            'drive': drive,
+            'duration': duration,
+            'audit_extra': audit_extra if audit_extra else '',
+        }
+        audit_endpoint = Config().get("audit.endpoint", "https://4psvtaxlcb.execute-api.us-west-2.amazonaws.com/prod/")
+        try:
+            response = requests.post(audit_endpoint, json=payload, timeout=Config().get("audit.timeout", 10))
+            if response.status_code in [200, 201]:
+                Config().debug(response.text)
+            else:
+                log2(f"Error: {response.status_code} {response.text}")
+        except requests.exceptions.Timeout as e:
+            log2(f"Timeout occurred: {e}")
+
+    def get_meta() -> AuditMeta:
+        checked_in = 0.0
+        cluster_last_checked = 0.0
+
+        state, _, rs = Audits.audit_query(f'select partitions_last_checked, clusters_last_checked from meta')
+        if state == 'SUCCEEDED':
+            if len(rs) > 1:
+                try:
+                    row = rs[1]['Data']
+                    checked_in = float(row[0]['VarCharValue'])
+                    cluster_last_checked = float(row[1]['VarCharValue'])
+                except:
+                    pass
+
+        return AuditMeta(checked_in, cluster_last_checked)
+
+    def put_meta(action: str, meta: AuditMeta, clusters: list[str] = None):
+        payload = {
+            'action': action,
+            'partitions-last-checked': meta.partitions_last_checked,
+            'clusters-last-checked': meta.cluster_last_checked
+        }
+        if clusters:
+            payload['clusters'] = clusters
+
+        audit_endpoint = Config().get("audit.endpoint", "https://4psvtaxlcb.execute-api.us-west-2.amazonaws.com/prod/")
+        try:
+            response = requests.post(audit_endpoint, json=payload, timeout=Config().get("audit.timeout", 10))
+            if response.status_code in [200, 201]:
+                Config().debug(response.text)
+            else:
+                log2(f"Error: {response.status_code} {response.text}")
+        except requests.exceptions.Timeout as e:
+            log2(f"Timeout occurred: {e}")
+
+    def find_new_clusters(cluster_last_checked: float) -> list[str]:
+        dt_object = datetime.fromtimestamp(cluster_last_checked)
+
+        # select distinct c2.name from cluster as c1 right outer join
+        # (select distinct c as name from audit where y = '1969' and m = '12' and d >= '31' or y = '1969' and m > '12' or y > '1969') as c2
+        # on c1.name = c2.name where c1.name is null
+        query = '\n '.join([
+            'select distinct c2.name from cluster as c1 right outer join',
+            f'(select distinct c as name from audit where {Audits.date_from(dt_object)}) as c2',
+            'on c1.name = c2.name where c1.name is null'])
+        log2(query)
+        state, _, rs = Audits.audit_query(query)
+        if state == 'SUCCEEDED':
+            if len(rs) > 1:
+                try:
+                    return [r['Data'][0]['VarCharValue'] for r in rs[1:]]
+                except:
+                    pass
+
+        return []
+
+    def date_from(dt_object: datetime):
+        y = dt_object.strftime("%Y")
+        m = dt_object.strftime("%m")
+        d = dt_object.strftime("%d")
+
+        return f"y = '{y}' and m = '{m}' and d >= '{d}' or y = '{y}' and m > '{m}' or y > '{y}'"
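A hedged sketch of how the audit logger appears to be invoked from the REPL layer; the command, cluster, and drive values below are illustrative, and the endpoint and timeout come from the audit.* config keys shown above:

    import time

    from adam.utils_audits import Audits

    start = time.time()
    # ... execute a REPL command here ...
    Audits.log('show cassandra status', cluster='cass-prod-1', drive='cass',
               duration=time.time() - start)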
adam/utils_k8s/__init__.py
File without changes
adam/utils_k8s/app_clusters.py ADDED
@@ -0,0 +1,33 @@
+from concurrent.futures import ThreadPoolExecutor
+import sys
+from typing import TypeVar
+
+from adam.utils_k8s.app_pods import AppPods
+from adam.pod_exec_result import PodExecResult
+from adam.utils import log2
+from adam.utils_k8s.pods import Pods
+from .kube_context import KubeContext
+
+T = TypeVar('T')
+
+# utility collection on app clusters; methods are all static
+class AppClusters:
+    def exec(pods: list[str], namespace: str, command: str, action: str = 'action',
+             max_workers=0, show_out=True, on_any = False, shell = '/bin/sh', background = False) -> list[PodExecResult]:
+        def body(executor: ThreadPoolExecutor, pod: str, namespace: str, show_out: bool):
+            if executor:
+                return executor.submit(AppPods.exec, pod, namespace, command, False, False, shell, background)
+
+            return AppPods.exec(pod, namespace, command, show_out=show_out, background=background)
+
+        def post(result, show_out: bool):
+            if KubeContext.show_out(show_out):
+                print(result.command)
+                if result.stdout:
+                    print(result.stdout)
+                if result.stderr:
+                    log2(result.stderr, file=sys.stderr)
+
+            return result
+
+        return Pods.on_pods(pods, namespace, body, post=post, action=action, max_workers=max_workers, show_out=show_out, on_any=on_any, background=background)
adam/utils_k8s/app_pods.py ADDED
@@ -0,0 +1,31 @@
+from typing import List
+from kubernetes import client
+
+from adam.config import Config
+from adam.utils_k8s.pods import Pods
+from adam.pod_exec_result import PodExecResult
+from adam.repl_session import ReplSession
+
+# utility collection on app pods; methods are all static
+class AppPods:
+    def pod_names(namespace: str, env: str, app: str):
+        return [pod.metadata.name for pod in AppPods.app_pods(namespace, env, app)]
+
+    def app_pods(namespace: str, env: str, app: str) -> List[client.V1Pod]:
+        v1 = client.CoreV1Api()
+
+        env_key = Config().get('app.env', 'c3__env-0')
+        app_key = Config().get('app.app', 'c3__app-0')
+        label_selector = f'applicationGroup=c3,{env_key}=0{env}0,{app_key}=0{app}0'
+
+        return v1.list_namespaced_pod(namespace, label_selector=label_selector).items
+
+    def exec(pod_name: str, namespace: str, command: str, show_out = True, throw_err = False, shell = '/bin/sh', background = False) -> PodExecResult:
+        container = Config().get('app.container-name', 'c3-server')
+        r = Pods.exec(pod_name, container, namespace, command, show_out = show_out, throw_err = throw_err, shell = shell, background = background)
+
+        if r and Config().get('repl.history.push-cat-remote-log-file', True):
+            if r.log_file and ReplSession().prompt_session:
+                ReplSession().prompt_session.history.append_string(f'@{r.pod} cat {r.log_file}')
+
+        return r
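A hedged sketch of listing application pods and running a command on one of them; the namespace, env, and app values are illustrative, and a loaded kubeconfig is assumed:

    from adam.utils_k8s.app_pods import AppPods

    names = AppPods.pod_names('c3-namespace', env='prod', app='server')  # illustrative values
    if names:
        result = AppPods.exec(names[0], 'c3-namespace', 'ps -ef | grep java', show_out=False)
        print(result.stdout)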
adam/{k8s_utils → utils_k8s}/cassandra_clusters.py CHANGED
@@ -1,25 +1,24 @@
-from collections.abc import Callable
 from concurrent.futures import ThreadPoolExecutor
 import sys
 from typing import TypeVar
 
-from adam.
+from adam.utils_k8s.cassandra_nodes import CassandraNodes
 from adam.pod_exec_result import PodExecResult
 from adam.utils import log2
 from .statefulsets import StatefulSets
-from .pods import Pods
 from .kube_context import KubeContext
 
 T = TypeVar('T')
 
 # utility collection on cassandra clusters; methods are all static
 class CassandraClusters:
-    def exec(statefulset: str, namespace: str, command: str, action: str = 'action',
+    def exec(statefulset: str, namespace: str, command: str, action: str = 'action',
+             max_workers=0, show_out=True, on_any = False, shell = '/bin/sh', background = False, log_file = None) -> list[PodExecResult]:
         def body(executor: ThreadPoolExecutor, pod: str, namespace: str, show_out: bool):
             if executor:
-                return executor.submit(CassandraNodes.exec, pod, namespace, command, False, False,)
+                return executor.submit(CassandraNodes.exec, pod, namespace, command, False, False, shell, background, log_file)
 
-            return CassandraNodes.exec(pod, namespace, command, show_out=show_out)
+            return CassandraNodes.exec(pod, namespace, command, show_out=show_out, background=background, log_file=log_file)
 
         def post(result, show_out: bool):
             if KubeContext.show_out(show_out):
@@ -31,18 +30,4 @@ class CassandraClusters:
 
             return result
 
-        return StatefulSets.on_cluster(statefulset, namespace, body, post=post, action=action, max_workers=max_workers, show_out=show_out)
-
-    def on_cluster(statefulset: str,
-                   namespace: str,
-                   body: Callable[[ThreadPoolExecutor, str, str, bool], T],
-                   post: Callable[[T], T] = None,
-                   action: str = 'action', max_workers=0, show_out=True) -> list[T]:
-        pods = StatefulSets.pod_names(statefulset, namespace)
-
-        return Pods.on_pods(pods, namespace, body, post=post, action=action, max_workers=max_workers, show_out=show_out)
-
-    def pod_names_by_host_id(ss: str, ns: str):
-        pods = StatefulSets.pods(ss, ns)
-
-        return {CassandraNodes.get_host_id(pod.metadata.name, ns): pod.metadata.name for pod in pods}
+        return StatefulSets.on_cluster(statefulset, namespace, body, post=post, action=action, max_workers=max_workers, show_out=show_out, on_any=on_any, background=background)
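A hedged sketch of fanning a shell command out to every node of a Cassandra statefulset through the extended exec() signature; the statefulset and namespace names are illustrative:

    from adam.utils_k8s.cassandra_clusters import CassandraClusters

    # Illustrative statefulset/namespace; results come back as PodExecResult objects.
    results = CassandraClusters.exec('cassandra-dc1', 'cass-operator', 'nodetool status',
                                     action='nodetool status', max_workers=4, show_out=False)
    for r in results:
        print(r.pod, bool(r.stderr))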
adam/{k8s_utils → utils_k8s}/cassandra_nodes.py CHANGED
@@ -1,18 +1,25 @@
 from adam.config import Config
-from adam.
-from adam.
+from adam.utils_k8s.pods import Pods
+from adam.utils_k8s.secrets import Secrets
 from adam.pod_exec_result import PodExecResult
+from adam.repl_session import ReplSession
 
 # utility collection on cassandra nodes; methods are all static
 class CassandraNodes:
-    def exec(pod_name: str, namespace: str, command: str, show_out = True, throw_err = False) -> PodExecResult:
-
+    def exec(pod_name: str, namespace: str, command: str, show_out = True, throw_err = False, shell = '/bin/sh', background = False, log_file = None) -> PodExecResult:
+        r = Pods.exec(pod_name, "cassandra", namespace, command, show_out = show_out, throw_err = throw_err, shell = shell, background = background, log_file=log_file)
+
+        if r and Config().get('repl.history.push-cat-remote-log-file', True):
+            if r.log_file and ReplSession().prompt_session:
+                ReplSession().prompt_session.history.append_string(f'@{r.pod} cat {r.log_file}')
+
+        return r
 
     def get_host_id(pod_name: str, ns: str):
         try:
             user, pw = Secrets.get_user_pass(pod_name, ns)
             command = f'echo "SELECT host_id FROM system.local; exit" | cqlsh --no-color -u {user} -p {pw}'
-            result: PodExecResult = CassandraNodes.exec(pod_name, ns, command, show_out=Config().
+            result: PodExecResult = CassandraNodes.exec(pod_name, ns, command, show_out=Config().is_debug())
             next = False
             for line in result.stdout.splitlines():
                 if next:
adam/{k8s_utils → utils_k8s}/deployment.py CHANGED
@@ -1,7 +1,7 @@
 from kubernetes import client
 
-from adam.
-from adam.
+from adam.utils_k8s.pods import Pods
+from adam.utils_k8s.volumes import ConfigMapMount
 
 # utility collection on deployments; methods are all static
 class Deployments: