kaqing-2.0.98-py3-none-any.whl → kaqing-2.0.171-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adam/__init__.py +0 -2
- adam/app_session.py +9 -7
- adam/batch.py +4 -18
- adam/checks/check_utils.py +14 -46
- adam/checks/cpu.py +7 -1
- adam/checks/cpu_metrics.py +52 -0
- adam/columns/columns.py +3 -1
- adam/columns/cpu.py +3 -1
- adam/columns/cpu_metrics.py +22 -0
- adam/commands/__init__.py +15 -0
- adam/commands/alter_tables.py +50 -61
- adam/commands/app_cmd.py +38 -0
- adam/commands/app_ping.py +8 -14
- adam/commands/audit/audit.py +43 -30
- adam/commands/audit/audit_repair_tables.py +26 -46
- adam/commands/audit/audit_run.py +50 -0
- adam/commands/audit/show_last10.py +48 -0
- adam/commands/audit/show_slow10.py +47 -0
- adam/commands/audit/show_top10.py +45 -0
- adam/commands/audit/utils_show_top10.py +59 -0
- adam/commands/bash/__init__.py +5 -0
- adam/commands/bash/bash.py +36 -0
- adam/commands/bash/bash_completer.py +93 -0
- adam/commands/bash/utils_bash.py +16 -0
- adam/commands/cat.py +50 -0
- adam/commands/cd.py +15 -91
- adam/commands/check.py +23 -18
- adam/commands/cli_commands.py +2 -3
- adam/commands/code.py +57 -0
- adam/commands/command.py +96 -40
- adam/commands/commands_utils.py +9 -19
- adam/commands/cp.py +33 -39
- adam/commands/cql/cql_completions.py +30 -8
- adam/commands/cql/cqlsh.py +12 -27
- adam/commands/cql/utils_cql.py +343 -0
- adam/commands/deploy/code_start.py +7 -10
- adam/commands/deploy/code_stop.py +4 -21
- adam/commands/deploy/code_utils.py +3 -3
- adam/commands/deploy/deploy.py +4 -21
- adam/commands/deploy/deploy_frontend.py +14 -17
- adam/commands/deploy/deploy_pg_agent.py +3 -6
- adam/commands/deploy/deploy_pod.py +67 -73
- adam/commands/deploy/deploy_utils.py +14 -24
- adam/commands/deploy/undeploy.py +4 -21
- adam/commands/deploy/undeploy_frontend.py +4 -7
- adam/commands/deploy/undeploy_pg_agent.py +6 -8
- adam/commands/deploy/undeploy_pod.py +11 -12
- adam/commands/devices/device.py +118 -0
- adam/commands/devices/device_app.py +173 -0
- adam/commands/devices/device_auit_log.py +49 -0
- adam/commands/devices/device_cass.py +185 -0
- adam/commands/devices/device_export.py +86 -0
- adam/commands/devices/device_postgres.py +144 -0
- adam/commands/devices/devices.py +25 -0
- adam/commands/exit.py +1 -4
- adam/commands/export/__init__.py +0 -0
- adam/commands/export/clean_up_all_export_sessions.py +37 -0
- adam/commands/export/clean_up_export_sessions.py +51 -0
- adam/commands/export/drop_export_database.py +55 -0
- adam/commands/export/drop_export_databases.py +43 -0
- adam/commands/export/export.py +53 -0
- adam/commands/export/export_databases.py +170 -0
- adam/commands/export/export_handlers.py +71 -0
- adam/commands/export/export_select.py +81 -0
- adam/commands/export/export_select_x.py +54 -0
- adam/commands/export/export_use.py +52 -0
- adam/commands/export/exporter.py +352 -0
- adam/commands/export/import_session.py +40 -0
- adam/commands/export/importer.py +67 -0
- adam/commands/export/importer_athena.py +80 -0
- adam/commands/export/importer_sqlite.py +47 -0
- adam/commands/export/show_column_counts.py +54 -0
- adam/commands/export/show_export_databases.py +36 -0
- adam/commands/export/show_export_session.py +48 -0
- adam/commands/export/show_export_sessions.py +44 -0
- adam/commands/export/utils_export.py +314 -0
- adam/commands/help.py +10 -6
- adam/commands/intermediate_command.py +49 -0
- adam/commands/issues.py +14 -40
- adam/commands/kubectl.py +38 -0
- adam/commands/login.py +28 -24
- adam/commands/logs.py +4 -6
- adam/commands/ls.py +11 -116
- adam/commands/medusa/medusa.py +4 -22
- adam/commands/medusa/medusa_backup.py +20 -24
- adam/commands/medusa/medusa_restore.py +30 -32
- adam/commands/medusa/medusa_show_backupjobs.py +16 -17
- adam/commands/medusa/medusa_show_restorejobs.py +12 -17
- adam/commands/nodetool.py +11 -17
- adam/commands/param_get.py +11 -12
- adam/commands/param_set.py +9 -10
- adam/commands/postgres/postgres.py +43 -36
- adam/commands/postgres/{postgres_session.py → postgres_context.py} +80 -46
- adam/commands/postgres/postgres_ls.py +4 -8
- adam/commands/postgres/postgres_preview.py +5 -9
- adam/commands/postgres/psql_completions.py +2 -2
- adam/commands/postgres/utils_postgres.py +66 -0
- adam/commands/preview_table.py +8 -61
- adam/commands/pwd.py +14 -44
- adam/commands/reaper/reaper.py +4 -24
- adam/commands/reaper/reaper_forward.py +48 -55
- adam/commands/reaper/reaper_forward_session.py +6 -0
- adam/commands/reaper/reaper_forward_stop.py +10 -16
- adam/commands/reaper/reaper_restart.py +7 -14
- adam/commands/reaper/reaper_run_abort.py +11 -30
- adam/commands/reaper/reaper_runs.py +42 -57
- adam/commands/reaper/reaper_runs_abort.py +29 -49
- adam/commands/reaper/reaper_schedule_activate.py +11 -30
- adam/commands/reaper/reaper_schedule_start.py +10 -29
- adam/commands/reaper/reaper_schedule_stop.py +10 -29
- adam/commands/reaper/reaper_schedules.py +4 -14
- adam/commands/reaper/reaper_status.py +8 -16
- adam/commands/reaper/utils_reaper.py +196 -0
- adam/commands/repair/repair.py +4 -22
- adam/commands/repair/repair_log.py +4 -7
- adam/commands/repair/repair_run.py +27 -29
- adam/commands/repair/repair_scan.py +31 -34
- adam/commands/repair/repair_stop.py +4 -7
- adam/commands/report.py +25 -21
- adam/commands/restart.py +25 -26
- adam/commands/rollout.py +19 -24
- adam/commands/shell.py +5 -4
- adam/commands/show/show.py +6 -19
- adam/commands/show/show_app_actions.py +26 -22
- adam/commands/show/show_app_id.py +8 -11
- adam/commands/show/show_app_queues.py +7 -10
- adam/commands/show/{show_repairs.py → show_cassandra_repairs.py} +8 -17
- adam/commands/show/show_cassandra_status.py +29 -33
- adam/commands/show/show_cassandra_version.py +4 -14
- adam/commands/show/show_commands.py +19 -21
- adam/commands/show/show_host.py +1 -1
- adam/commands/show/show_login.py +26 -24
- adam/commands/show/show_processes.py +16 -18
- adam/commands/show/show_storage.py +10 -20
- adam/commands/watch.py +26 -29
- adam/config.py +5 -14
- adam/embedded_params.py +1 -1
- adam/pod_exec_result.py +7 -1
- adam/repl.py +95 -131
- adam/repl_commands.py +48 -20
- adam/repl_state.py +270 -61
- adam/sql/sql_completer.py +105 -63
- adam/sql/sql_state_machine.py +618 -0
- adam/sql/term_completer.py +3 -0
- adam/sso/authn_ad.py +6 -5
- adam/sso/authn_okta.py +3 -3
- adam/sso/cred_cache.py +3 -2
- adam/sso/idp.py +3 -3
- adam/utils.py +439 -3
- adam/utils_app.py +98 -0
- adam/utils_athena.py +140 -87
- adam/utils_audits.py +106 -0
- adam/utils_issues.py +32 -0
- adam/utils_k8s/app_clusters.py +28 -0
- adam/utils_k8s/app_pods.py +33 -0
- adam/utils_k8s/cassandra_clusters.py +22 -20
- adam/utils_k8s/cassandra_nodes.py +4 -4
- adam/utils_k8s/custom_resources.py +5 -0
- adam/utils_k8s/ingresses.py +2 -2
- adam/utils_k8s/k8s.py +87 -0
- adam/utils_k8s/pods.py +77 -68
- adam/utils_k8s/secrets.py +4 -4
- adam/utils_k8s/service_accounts.py +5 -4
- adam/utils_k8s/services.py +2 -2
- adam/utils_k8s/statefulsets.py +1 -12
- adam/utils_net.py +4 -4
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +46 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +109 -0
- adam/version.py +1 -1
- {kaqing-2.0.98.dist-info → kaqing-2.0.171.dist-info}/METADATA +1 -1
- kaqing-2.0.171.dist-info/RECORD +236 -0
- adam/commands/app.py +0 -67
- adam/commands/bash.py +0 -92
- adam/commands/cql/cql_table_completer.py +0 -8
- adam/commands/cql/cql_utils.py +0 -115
- adam/commands/describe/describe.py +0 -47
- adam/commands/describe/describe_keyspace.py +0 -60
- adam/commands/describe/describe_keyspaces.py +0 -49
- adam/commands/describe/describe_schema.py +0 -49
- adam/commands/describe/describe_table.py +0 -60
- adam/commands/describe/describe_tables.py +0 -49
- adam/commands/devices.py +0 -118
- adam/commands/postgres/postgres_utils.py +0 -31
- adam/commands/postgres/psql_table_completer.py +0 -11
- adam/commands/reaper/reaper_session.py +0 -159
- adam/sql/state_machine.py +0 -460
- kaqing-2.0.98.dist-info/RECORD +0 -191
- /adam/commands/{describe → devices}/__init__.py +0 -0
- {kaqing-2.0.98.dist-info → kaqing-2.0.171.dist-info}/WHEEL +0 -0
- {kaqing-2.0.98.dist-info → kaqing-2.0.171.dist-info}/entry_points.txt +0 -0
- {kaqing-2.0.98.dist-info → kaqing-2.0.171.dist-info}/top_level.txt +0 -0
adam/sso/authn_ad.py CHANGED
@@ -8,6 +8,7 @@ from urllib.parse import urlparse, parse_qs
 from adam.log import Log
 from adam.sso.authenticator import Authenticator
 from adam.sso.id_token import IdToken
+from adam.utils import debug
 from .idp_login import IdpLogin
 from adam.config import Config
 
@@ -33,7 +34,7 @@ class AdAuthenticator(Authenticator):
 
         session = requests.Session()
         r = session.get(idp_uri)
-
+        debug(f'{r.status_code} {idp_uri}')
 
         config = self.validate_and_return_config(r)
 
@@ -52,7 +53,7 @@ class AdAuthenticator(Authenticator):
         r = session.post(login_uri, data=body, headers={
             'Content-Type': 'application/x-www-form-urlencoded'
         })
-
+        debug(f'{r.status_code} {login_uri}')
 
         config = self.validate_and_return_config(r)
 
@@ -69,7 +70,7 @@ class AdAuthenticator(Authenticator):
         r = session.post(kmsi_uri, data=body, headers={
             'Content-Type': 'application/x-www-form-urlencoded'
         })
-
+        debug(f'{r.status_code} {kmsi_uri}')
 
         if (config := self.extract_config_object(r.text)):
             if 'sErrorCode' in config and config['sErrorCode'] == '50058':
@@ -101,7 +102,7 @@ class AdAuthenticator(Authenticator):
 
     def validate_and_return_config(self, r: requests.Response):
         if r.status_code < 200 or r.status_code >= 300:
-
+            debug(r.text)
 
             return None
 
@@ -164,6 +165,6 @@ class AdAuthenticator(Authenticator):
                 exp=data['exp'] if 'exp' in data else 0
             )
         except:
-
+            debug(traceback.format_exc())
 
             return None
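The authenticators above (and the other sso modules below) now log each HTTP step through the new `debug()` helper, which is a no-op unless `LogConfig.is_debug()` returns true; both helpers are added in the adam/utils.py diff further down. A minimal sketch of the gating, with an illustrative URL and flag wiring that are not part of this diff:

```python
# Minimal sketch of how the debug() calls added above are gated.
# LogConfig and debug are the helpers added in adam/utils.py (later in
# this diff); the URL and the way the flag gets set are illustrative only.
from adam.utils import LogConfig, debug

debug('200 https://idp.example.com/authorize')  # silent: is_debug() defaults to a lambda returning False

LogConfig.is_debug = lambda: True               # enable debug logging for this process
debug('200 https://idp.example.com/authorize')  # printed to stderr as "DEBUG 200 https://..."
```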
adam/sso/authn_okta.py CHANGED
@@ -8,7 +8,7 @@ from adam.sso.id_token import IdToken
 
 from .idp_login import IdpLogin
 from adam.config import Config
-from adam.utils import log2
+from adam.utils import debug, log2
 
 class OktaException(Exception):
     pass
@@ -49,7 +49,7 @@ class OktaAuthenticator(Authenticator):
 
         session = requests.Session()
         response = session.post(authn_uri, headers=headers, data=json.dumps(payload))
-
+        debug(f'{response.status_code} {authn_uri}')
         auth_response = response.json()
 
         if 'sessionToken' not in auth_response:
@@ -59,7 +59,7 @@
 
         url = f'{idp_uri}&sessionToken={session_token}'
         r = session.get(url)
-
+        debug(f'{r.status_code} {url}')
 
         id_token = OktaAuthenticator().extract(r.text, r'.*name=\"id_token\" value=\"(.*?)\".*')
         if not id_token:
adam/sso/cred_cache.py CHANGED
@@ -4,6 +4,7 @@ import traceback
 from dotenv import load_dotenv
 
 from adam.config import Config
+from adam.utils import debug
 from adam.utils_k8s.kube_context import KubeContext
 
 class CredCache:
@@ -37,7 +38,7 @@
         try:
             file.truncate()
         except:
-
+            debug(traceback.format_exc())
 
         updated = []
         updated.append(f'IDP_USERNAME={username}')
@@ -56,4 +57,4 @@
         if password:
             self.overrides['IDP_PASSWORD'] = password
 
-
+        debug(f'Cached username: {username}, password: {password}, try load: {self.get_username()}')
adam/sso/idp.py CHANGED
@@ -15,7 +15,7 @@ from .cred_cache import CredCache
 from .idp_session import IdpSession
 from .idp_login import IdpLogin
 from adam.config import Config
-from adam.utils import
+from adam.utils import debug, log
 
 T = TypeVar('T')
 
@@ -57,7 +57,7 @@
         default_user: str = None
         if use_cached_creds:
             default_user = CredCache().get_username()
-
+            debug(f'User read from cache: {default_user}')
 
         if from_env := os.getenv('USERNAME'):
             default_user = from_env
@@ -137,7 +137,7 @@
 
             return IdpLogin(None, None, None, username)
         except:
-
+            debug(traceback.format_exc())
             pass
 
         return None
adam/utils.py CHANGED
@@ -1,3 +1,5 @@
+from abc import abstractmethod
+from concurrent.futures import Future, ThreadPoolExecutor
 from contextlib import redirect_stdout
 import copy
 import csv
@@ -9,6 +11,8 @@ import os
 from pathlib import Path
 import random
 import string
+import threading
+from typing import Callable, Iterator, TypeVar
 from dateutil import parser
 import subprocess
 import sys
@@ -18,6 +22,13 @@ import yaml
 
 from . import __version__
 
+log_state = threading.local()
+
+class LogConfig:
+    is_debug = lambda: False
+    is_debug_timing = lambda: False
+    is_display_help = True
+
 def to_tabular(lines: str, header: str = None, dashed_line = False):
     return lines_to_tabular(lines.split('\n'), header, dashed_line)
 
@@ -69,18 +80,28 @@ def epoch(timestamp_string: str):
     return parser.parse(timestamp_string).timestamp()
 
 def log(s = None):
+    if not loggable():
+        return False
+
     # want to print empty line for False or empty collection
     if s == None:
         print()
     else:
         click.echo(s)
 
+    return True
+
 def log2(s = None, nl = True):
+    if not loggable():
+        return False
+
     if s:
         click.echo(s, err=True, nl=nl)
     else:
         print(file=sys.stderr)
 
+    return True
+
 def elapsed_time(start_time: float):
     end_time = time.time()
     elapsed_time = end_time - start_time
@@ -95,8 +116,8 @@ def duration(start_time: float, end_time: float = None):
         end_time = time.time()
     d = convert_seconds(end_time - start_time)
     t = []
-    if d
-    t.append(f'{d
+    if d:
+    t.append(f'{d}h')
     if t or d[1]:
         t.append(f'{d[1]}m')
     t.append(f'{d[2]}s')
@@ -159,6 +180,9 @@ def get_deep_keys(d, current_path=""):
     return keys
 
 def display_help(replace_arg = False):
+    if not LogConfig.is_display_help:
+        return
+
     args = copy.copy(sys.argv)
     if replace_arg:
         args[len(args) - 1] = '--help'
@@ -203,6 +227,7 @@ def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
         with redirect_stdout(body) as f:
             dict_writer = csv.DictWriter(f, keys, delimiter=delimiter)
             dict_writer.writerows(flattened_data)
+
         return header.getvalue().strip('\r\n'), [l.strip('\r') for l in body.getvalue().split('\n')]
     else:
         return None
@@ -240,4 +265,415 @@ def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out
     return path
 
 def idp_token_from_env():
-    return os.getenv('IDP_TOKEN')
+    return os.getenv('IDP_TOKEN')
+
+def is_lambda(func):
+    return callable(func) and hasattr(func, '__name__') and func.__name__ == '<lambda>'
+
+def debug(s = None):
+    if LogConfig.is_debug():
+        log2(f'DEBUG {s}')
+
+class Ing:
+    def __init__(self, msg: str, suppress_log=False):
+        self.msg = msg
+        self.suppress_log = suppress_log
+
+    def __enter__(self):
+        if not hasattr(log_state, 'ing_cnt'):
+            log_state.ing_cnt = 0
+
+        try:
+            if not log_state.ing_cnt:
+                if not self.suppress_log and not LogConfig.is_debug():
+                    log2(f'{self.msg}...', nl=False)
+
+            return None
+        finally:
+            log_state.ing_cnt += 1
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        log_state.ing_cnt -= 1
+        if not log_state.ing_cnt:
+            if not self.suppress_log and not LogConfig.is_debug():
+                log2(' OK')
+
+        return False
+
+def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
+    if not body:
+        return Ing(msg, suppress_log=suppress_log)
+
+    r = None
+
+    t = Ing(msg, suppress_log=suppress_log)
+    t.__enter__()
+    try:
+        r = body()
+    finally:
+        t.__exit__(None, None, None)
+
+    return r
+
+def loggable():
+    return LogConfig.is_debug() or not hasattr(log_state, 'ing_cnt') or not log_state.ing_cnt
+
+class TimingNode:
+    def __init__(self, depth: int, s0: time.time = time.time(), line: str = None):
+        self.depth = depth
+        self.s0 = s0
+        self.line = line
+        self.children = []
+
+    def __str__(self):
+        return f'[{self.depth}: {self.line}, children={len(self.children)}]'
+
+    def tree(self):
+        lines = []
+        if self.line:
+            lines.append(self.line)
+
+        for child in self.children:
+            if child.line:
+                lines.append(child.tree())
+        return '\n'.join(lines)
+
+class LogTiming:
+    def __init__(self, msg: str, s0: time.time = None):
+        self.msg = msg
+        self.s0 = s0
+
+    def __enter__(self):
+        if not LogConfig.is_debug_timing():
+            return
+
+        if not hasattr(log_state, 'timings'):
+            log_state.timings = TimingNode(0)
+
+        self.me = log_state.timings
+        log_state.timings = TimingNode(self.me.depth+1)
+        if not self.s0:
+            self.s0 = time.time()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if not LogConfig.is_debug_timing():
+            return False
+
+        child = log_state.timings
+        log_state.timings.line = timing_log_line(self.me.depth, self.msg, self.s0)
+
+        if child and child.line:
+            self.me.children.append(child)
+        log_state.timings = self.me
+
+        if not self.me.depth:
+            log2(self.me.tree())
+            log_state.timings = TimingNode(0)
+
+        return False
+
+def log_timing(msg: str, body: Callable[[], None]=None, s0: time.time = None):
+    if not s0 and not body:
+        return LogTiming(msg, s0=s0)
+
+    if not LogConfig.is_debug_timing():
+        if body:
+            return body()
+
+        return
+
+    r = None
+
+    t = LogTiming(msg, s0=s0)
+    t.__enter__()
+    try:
+        if body:
+            r = body()
+    finally:
+        t.__exit__(None, None, None)
+
+    return r
+
+def timing_log_line(depth: int, msg: str, s0: time.time):
+    elapsed = time.time() - s0
+    prefix = '[timings] '
+    if depth:
+        if elapsed > 0.01:
+            prefix = (' ' * (depth-1)) + '* '
+        else:
+            prefix = ' ' * depth
+
+    return f'{prefix}{msg}: {elapsed:.2f} sec'
+
+class WaitLog:
+    wait_log_flag = False
+
+def wait_log(msg: str):
+    if not WaitLog.wait_log_flag:
+        log2(msg)
+        WaitLog.wait_log_flag = True
+
+def clear_wait_log_flag():
+    WaitLog.wait_log_flag = False
+
+T = TypeVar('T')
+
+class ParallelService:
+    def __init__(self, handler: 'ParallelMapHandler'):
+        self.handler = handler
+
+    def map(self, fn: Callable[..., T]) -> Iterator[T]:
+        executor = self.handler.executor
+        collection = self.handler.collection
+        collect = self.handler.collect
+        samples_cnt = self.handler.samples
+
+        iterator = None
+        if executor:
+            iterator = executor.map(fn, collection)
+        elif samples_cnt < sys.maxsize:
+            samples = []
+
+            for elem in collection:
+                if not samples_cnt:
+                    break
+
+                samples.append(fn(elem))
+                samples_cnt -= 1
+
+            iterator = iter(samples)
+        else:
+            iterator = map(fn, collection)
+
+        if collect:
+            return list(iterator)
+        else:
+            return iterator
+
+class ParallelMapHandler:
+    def __init__(self, collection: list, max_workers: int, samples: int = sys.maxsize, msg: str = None, collect = True):
+        self.collection = collection
+        self.max_workers = max_workers
+        self.executor = None
+        self.samples = samples
+        self.msg = msg
+        if msg and msg.startswith('d`'):
+            if LogConfig.is_debug():
+                self.msg = msg.replace('d`', '', 1)
+            else:
+                self.msg = None
+        self.collect = collect
+
+        self.begin = []
+        self.end = []
+        self.start_time = None
+
+    def __enter__(self):
+        self.calc_msgs()
+
+        if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+            self.start_time = time.time()
+
+            self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
+            self.executor.__enter__()
+
+        return ParallelService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.executor:
+            self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+        if self.end:
+            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+        return False
+
+    def size(self):
+        if not self.collection:
+            return 0
+
+        return len(self.collection)
+
+    def calc_msgs(self):
+        if not self.msg:
+            return
+
+        size = self.size()
+        # return
+
+        offloaded = False
+        serially = False
+        sampling = False
+        if size == 0:
+            offloaded = True
+            self.msg = self.msg.replace('{size}', '1')
+        elif self.max_workers > 1 and size > 1 and self.samples == sys.maxsize:
+            self.msg = self.msg.replace('{size}', f'{size}')
+        elif self.samples < sys.maxsize:
+            sampling = True
+            if self.samples > size:
+                self.samples = size
+            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+        else:
+            serially = True
+            self.msg = self.msg.replace('{size}', f'{size}')
+            # return
+
+        for token in self.msg.split(' '):
+            if '|' in token:
+                self.begin.append(token.split('|')[0])
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token.split('|')[1])
+            else:
+                self.begin.append(token)
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token)
+
+        if offloaded:
+            log2(f'{" ".join(self.begin)} offloaded...')
+        elif sampling or serially:
+            log2(f'{" ".join(self.begin)} serially...')
+        else:
+            log2(f'{" ".join(self.begin)} with {self.max_workers} workers...')
+
+class OffloadService:
+    def __init__(self, handler: 'OffloadHandler'):
+        self.handler = handler
+
+    def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
+        executor = self.handler.executor
+
+        if executor:
+            return executor.submit(fn, *args, **kwargs)
+        else:
+            future = Future()
+
+            future.set_result(fn(*args, **kwargs))
+
+            return future
+
+class OffloadHandler(ParallelMapHandler):
+    def __init__(self, max_workers: int, msg: str = None):
+        super().__init__(None, max_workers, msg=msg, collect=False )
+
+    def __enter__(self):
+        self.calc_msgs()
+
+        if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+            self.start_time = time.time()
+
+            self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
+            self.executor.__enter__()
+
+        return OffloadService(self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.executor:
+            self.executor.__exit__(exc_type, exc_val, exc_tb)
+
+        if self.end:
+            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')
+
+        return False
+
+    def size(self):
+        if not self.collection:
+            return 0
+
+        return len(self.collection)
+
+    def calc_msgs(self):
+        if not self.msg:
+            return
+
+        size = self.size()
+        # return
+
+        offloaded = False
+        serially = False
+        sampling = False
+        if size == 0:
+            offloaded = True
+            self.msg = self.msg.replace('{size}', '1')
+        elif self.max_workers > 1 and size > 1 and self.samples == sys.maxsize:
+            self.msg = self.msg.replace('{size}', f'{size}')
+        elif self.samples < sys.maxsize:
+            sampling = True
+            if self.samples > size:
+                self.samples = size
+            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
+        else:
+            serially = True
+            self.msg = self.msg.replace('{size}', f'{size}')
+            # return
+
+        for token in self.msg.split(' '):
+            if '|' in token:
+                self.begin.append(token.split('|')[0])
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token.split('|')[1])
+            else:
+                self.begin.append(token)
+                if not sampling and not serially and not offloaded:
+                    self.end.append(token)
+
+        if offloaded:
+            log2(f'{" ".join(self.begin)} offloaded...')
+        elif sampling or serially:
+            log2(f'{" ".join(self.begin)} serially...')
+        else:
+            log2(f'{" ".join(self.begin)} with {self.max_workers} workers...')
+
+# class ParallelMapHandler(ParallelHandler):
+#     def __enter__(self):
+#         self.calc_msgs()
+#
+#         if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
+#             self.start_time = time.time()
+#
+#             self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
+#             self.executor.__enter__()
+#
+#         if self.collection:
+#             return self.map
+#         else:
+#             return self.submit
+#
+#     def map(self, fn: Callable[..., T]) -> Iterator[T]:
+#         iterator = None
+#         if self.executor:
+#             iterator = self.executor.map(fn, self.collection)
+#         elif self.samples < sys.maxsize:
+#             samples = []
+#
+#             for elem in self.collection:
+#                 if not self.samples:
+#                     break
+#
+#                 samples.append(fn(elem))
+#                 self.samples -= 1
+#
+#             iterator = iter(samples)
+#         else:
+#             iterator = map(fn, self.collection)
+#
+#         if self.collect:
+#             return list(iterator)
+#         else:
+#             return iterator
+#
+#     def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
+#         if self.executor:
+#             return self.executor.submit(fn, *args, **kwargs)
+#         else:
+#             future = Future()
+#
+#             future.set_result(fn(*args, **kwargs))
+#
+#             return future
+
+def parallelize(collection: list, max_workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True):
+    return ParallelMapHandler(collection, max_workers, samples = samples, msg = msg, collect = collect)
+
+def offload(max_workers: int = 3, msg: str = None):
+    return OffloadHandler(max_workers, msg = msg)