kaqing 1.77.0__py3-none-any.whl → 2.0.171__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adam/__init__.py +1 -0
- adam/app_session.py +182 -0
- {walker → adam}/apps.py +8 -24
- {walker → adam}/batch.py +54 -97
- {walker → adam}/checks/check.py +3 -3
- {walker → adam}/checks/check_result.py +1 -1
- adam/checks/check_utils.py +65 -0
- {walker → adam}/checks/compactionstats.py +6 -6
- {walker → adam}/checks/cpu.py +14 -8
- adam/checks/cpu_metrics.py +52 -0
- {walker → adam}/checks/disk.py +6 -6
- {walker → adam}/checks/gossip.py +5 -5
- {walker → adam}/checks/memory.py +7 -7
- {walker → adam}/checks/status.py +5 -5
- {walker → adam}/cli.py +3 -3
- {walker → adam}/columns/column.py +1 -1
- adam/columns/columns.py +45 -0
- {walker → adam}/columns/compactions.py +5 -5
- {walker → adam}/columns/cpu.py +6 -4
- adam/columns/cpu_metrics.py +22 -0
- {walker → adam}/columns/dir_data.py +3 -3
- {walker → adam}/columns/dir_snapshots.py +3 -3
- {walker → adam}/columns/gossip.py +5 -5
- {walker → adam}/columns/host_id.py +3 -3
- {walker → adam}/columns/memory.py +3 -3
- {walker → adam}/columns/node_address.py +3 -3
- {walker → adam}/columns/node_load.py +3 -3
- {walker → adam}/columns/node_owns.py +3 -3
- {walker → adam}/columns/node_status.py +3 -3
- {walker → adam}/columns/node_tokens.py +3 -3
- {walker → adam}/columns/node_utils.py +2 -2
- {walker → adam}/columns/pod_name.py +2 -2
- {walker → adam}/columns/volume_cassandra.py +4 -4
- {walker → adam}/columns/volume_root.py +3 -3
- adam/commands/__init__.py +15 -0
- adam/commands/alter_tables.py +81 -0
- adam/commands/app_cmd.py +38 -0
- {walker → adam}/commands/app_ping.py +10 -16
- adam/commands/audit/audit.py +84 -0
- adam/commands/audit/audit_repair_tables.py +74 -0
- adam/commands/audit/audit_run.py +50 -0
- adam/commands/audit/show_last10.py +48 -0
- adam/commands/audit/show_slow10.py +47 -0
- adam/commands/audit/show_top10.py +45 -0
- adam/commands/audit/utils_show_top10.py +59 -0
- adam/commands/bash/__init__.py +5 -0
- adam/commands/bash/bash.py +36 -0
- adam/commands/bash/bash_completer.py +93 -0
- adam/commands/bash/utils_bash.py +16 -0
- adam/commands/cat.py +50 -0
- adam/commands/cd.py +43 -0
- adam/commands/check.py +73 -0
- {walker → adam}/commands/cli_commands.py +7 -8
- adam/commands/code.py +57 -0
- adam/commands/command.py +190 -0
- {walker → adam}/commands/command_helpers.py +1 -1
- {walker → adam}/commands/commands_utils.py +15 -25
- adam/commands/cp.py +89 -0
- adam/commands/cql/cql_completions.py +33 -0
- {walker/commands → adam/commands/cql}/cqlsh.py +20 -35
- adam/commands/cql/utils_cql.py +343 -0
- {walker/commands/frontend → adam/commands/deploy}/code_start.py +11 -14
- adam/commands/deploy/code_stop.py +40 -0
- {walker/commands/frontend → adam/commands/deploy}/code_utils.py +7 -9
- adam/commands/deploy/deploy.py +25 -0
- adam/commands/deploy/deploy_frontend.py +49 -0
- adam/commands/deploy/deploy_pg_agent.py +35 -0
- adam/commands/deploy/deploy_pod.py +108 -0
- adam/commands/deploy/deploy_utils.py +29 -0
- adam/commands/deploy/undeploy.py +25 -0
- adam/commands/deploy/undeploy_frontend.py +38 -0
- adam/commands/deploy/undeploy_pg_agent.py +39 -0
- adam/commands/deploy/undeploy_pod.py +48 -0
- adam/commands/devices/device.py +118 -0
- adam/commands/devices/device_app.py +173 -0
- adam/commands/devices/device_auit_log.py +49 -0
- adam/commands/devices/device_cass.py +185 -0
- adam/commands/devices/device_export.py +86 -0
- adam/commands/devices/device_postgres.py +144 -0
- adam/commands/devices/devices.py +25 -0
- {walker → adam}/commands/exit.py +3 -6
- adam/commands/export/clean_up_all_export_sessions.py +37 -0
- adam/commands/export/clean_up_export_sessions.py +51 -0
- adam/commands/export/drop_export_database.py +55 -0
- adam/commands/export/drop_export_databases.py +43 -0
- adam/commands/export/export.py +53 -0
- adam/commands/export/export_databases.py +170 -0
- adam/commands/export/export_handlers.py +71 -0
- adam/commands/export/export_select.py +81 -0
- adam/commands/export/export_select_x.py +54 -0
- adam/commands/export/export_use.py +52 -0
- adam/commands/export/exporter.py +352 -0
- adam/commands/export/import_session.py +40 -0
- adam/commands/export/importer.py +67 -0
- adam/commands/export/importer_athena.py +80 -0
- adam/commands/export/importer_sqlite.py +47 -0
- adam/commands/export/show_column_counts.py +54 -0
- adam/commands/export/show_export_databases.py +36 -0
- adam/commands/export/show_export_session.py +48 -0
- adam/commands/export/show_export_sessions.py +44 -0
- adam/commands/export/utils_export.py +314 -0
- {walker → adam}/commands/help.py +17 -12
- adam/commands/intermediate_command.py +49 -0
- adam/commands/issues.py +43 -0
- adam/commands/kubectl.py +38 -0
- adam/commands/login.py +70 -0
- {walker → adam}/commands/logs.py +8 -10
- adam/commands/ls.py +41 -0
- adam/commands/medusa/medusa.py +27 -0
- adam/commands/medusa/medusa_backup.py +57 -0
- adam/commands/medusa/medusa_restore.py +83 -0
- adam/commands/medusa/medusa_show_backupjobs.py +51 -0
- adam/commands/medusa/medusa_show_restorejobs.py +47 -0
- {walker → adam}/commands/nodetool.py +17 -21
- {walker → adam}/commands/param_get.py +15 -16
- adam/commands/param_set.py +43 -0
- adam/commands/postgres/postgres.py +104 -0
- adam/commands/postgres/postgres_context.py +274 -0
- {walker → adam}/commands/postgres/postgres_ls.py +7 -11
- {walker → adam}/commands/postgres/postgres_preview.py +8 -13
- adam/commands/postgres/psql_completions.py +10 -0
- adam/commands/postgres/utils_postgres.py +66 -0
- adam/commands/preview_table.py +37 -0
- adam/commands/pwd.py +47 -0
- adam/commands/reaper/reaper.py +35 -0
- adam/commands/reaper/reaper_forward.py +93 -0
- adam/commands/reaper/reaper_forward_session.py +6 -0
- {walker → adam}/commands/reaper/reaper_forward_stop.py +13 -19
- {walker → adam}/commands/reaper/reaper_restart.py +10 -17
- adam/commands/reaper/reaper_run_abort.py +46 -0
- adam/commands/reaper/reaper_runs.py +82 -0
- adam/commands/reaper/reaper_runs_abort.py +63 -0
- adam/commands/reaper/reaper_schedule_activate.py +45 -0
- adam/commands/reaper/reaper_schedule_start.py +45 -0
- adam/commands/reaper/reaper_schedule_stop.py +45 -0
- {walker → adam}/commands/reaper/reaper_schedules.py +6 -16
- {walker → adam}/commands/reaper/reaper_status.py +11 -19
- adam/commands/reaper/utils_reaper.py +196 -0
- adam/commands/repair/repair.py +26 -0
- {walker → adam}/commands/repair/repair_log.py +7 -10
- adam/commands/repair/repair_run.py +70 -0
- adam/commands/repair/repair_scan.py +71 -0
- {walker → adam}/commands/repair/repair_stop.py +8 -11
- adam/commands/report.py +61 -0
- adam/commands/restart.py +60 -0
- {walker → adam}/commands/rollout.py +25 -30
- adam/commands/shell.py +34 -0
- adam/commands/show/show.py +39 -0
- walker/commands/show/show_version.py → adam/commands/show/show_adam.py +14 -10
- adam/commands/show/show_app_actions.py +57 -0
- {walker → adam}/commands/show/show_app_id.py +12 -15
- {walker → adam}/commands/show/show_app_queues.py +9 -12
- adam/commands/show/show_cassandra_repairs.py +38 -0
- adam/commands/show/show_cassandra_status.py +124 -0
- {walker → adam}/commands/show/show_cassandra_version.py +6 -16
- adam/commands/show/show_commands.py +59 -0
- walker/commands/show/show_storage.py → adam/commands/show/show_host.py +11 -13
- adam/commands/show/show_login.py +62 -0
- {walker → adam}/commands/show/show_params.py +4 -4
- adam/commands/show/show_processes.py +51 -0
- adam/commands/show/show_storage.py +42 -0
- adam/commands/watch.py +82 -0
- {walker → adam}/config.py +10 -22
- {walker → adam}/embedded_apps.py +1 -1
- adam/embedded_params.py +2 -0
- adam/log.py +47 -0
- {walker → adam}/pod_exec_result.py +10 -2
- adam/repl.py +182 -0
- adam/repl_commands.py +124 -0
- adam/repl_state.py +458 -0
- adam/sql/__init__.py +0 -0
- adam/sql/sql_completer.py +120 -0
- adam/sql/sql_state_machine.py +618 -0
- adam/sql/term_completer.py +76 -0
- adam/sso/__init__.py +0 -0
- {walker → adam}/sso/authenticator.py +5 -1
- adam/sso/authn_ad.py +170 -0
- {walker → adam}/sso/authn_okta.py +39 -22
- adam/sso/cred_cache.py +60 -0
- adam/sso/id_token.py +23 -0
- adam/sso/idp.py +143 -0
- adam/sso/idp_login.py +50 -0
- adam/sso/idp_session.py +55 -0
- adam/sso/sso_config.py +63 -0
- adam/utils.py +679 -0
- adam/utils_app.py +98 -0
- adam/utils_athena.py +145 -0
- adam/utils_audits.py +106 -0
- adam/utils_issues.py +32 -0
- adam/utils_k8s/__init__.py +0 -0
- adam/utils_k8s/app_clusters.py +28 -0
- adam/utils_k8s/app_pods.py +33 -0
- adam/utils_k8s/cassandra_clusters.py +36 -0
- adam/utils_k8s/cassandra_nodes.py +33 -0
- adam/utils_k8s/config_maps.py +34 -0
- {walker/k8s_utils → adam/utils_k8s}/custom_resources.py +7 -2
- adam/utils_k8s/deployment.py +56 -0
- {walker/k8s_utils → adam/utils_k8s}/ingresses.py +3 -4
- {walker/k8s_utils → adam/utils_k8s}/jobs.py +3 -3
- adam/utils_k8s/k8s.py +87 -0
- {walker/k8s_utils → adam/utils_k8s}/kube_context.py +4 -4
- adam/utils_k8s/pods.py +290 -0
- {walker/k8s_utils → adam/utils_k8s}/secrets.py +8 -4
- adam/utils_k8s/service_accounts.py +170 -0
- {walker/k8s_utils → adam/utils_k8s}/services.py +3 -4
- {walker/k8s_utils → adam/utils_k8s}/statefulsets.py +6 -16
- {walker/k8s_utils → adam/utils_k8s}/volumes.py +10 -1
- adam/utils_net.py +24 -0
- adam/utils_repl/__init__.py +0 -0
- adam/utils_repl/automata_completer.py +48 -0
- adam/utils_repl/repl_completer.py +46 -0
- adam/utils_repl/state_machine.py +173 -0
- adam/utils_sqlite.py +109 -0
- adam/version.py +5 -0
- {kaqing-1.77.0.dist-info → kaqing-2.0.171.dist-info}/METADATA +1 -1
- kaqing-2.0.171.dist-info/RECORD +236 -0
- kaqing-2.0.171.dist-info/entry_points.txt +3 -0
- kaqing-2.0.171.dist-info/top_level.txt +1 -0
- kaqing-1.77.0.dist-info/RECORD +0 -159
- kaqing-1.77.0.dist-info/entry_points.txt +0 -3
- kaqing-1.77.0.dist-info/top_level.txt +0 -1
- walker/__init__.py +0 -3
- walker/app_session.py +0 -168
- walker/checks/check_utils.py +0 -97
- walker/columns/columns.py +0 -43
- walker/commands/add_user.py +0 -68
- walker/commands/app.py +0 -67
- walker/commands/bash.py +0 -87
- walker/commands/cd.py +0 -115
- walker/commands/check.py +0 -68
- walker/commands/command.py +0 -104
- walker/commands/cp.py +0 -95
- walker/commands/cql_utils.py +0 -53
- walker/commands/devices.py +0 -89
- walker/commands/frontend/code_stop.py +0 -57
- walker/commands/frontend/setup.py +0 -60
- walker/commands/frontend/setup_frontend.py +0 -58
- walker/commands/frontend/teardown.py +0 -61
- walker/commands/frontend/teardown_frontend.py +0 -42
- walker/commands/issues.py +0 -69
- walker/commands/login.py +0 -72
- walker/commands/ls.py +0 -145
- walker/commands/medusa/medusa.py +0 -69
- walker/commands/medusa/medusa_backup.py +0 -61
- walker/commands/medusa/medusa_restore.py +0 -86
- walker/commands/medusa/medusa_show_backupjobs.py +0 -52
- walker/commands/medusa/medusa_show_restorejobs.py +0 -52
- walker/commands/param_set.py +0 -44
- walker/commands/postgres/postgres.py +0 -113
- walker/commands/postgres/postgres_session.py +0 -225
- walker/commands/preview_table.py +0 -98
- walker/commands/processes.py +0 -53
- walker/commands/pwd.py +0 -64
- walker/commands/reaper/reaper.py +0 -78
- walker/commands/reaper/reaper_forward.py +0 -100
- walker/commands/reaper/reaper_run_abort.py +0 -65
- walker/commands/reaper/reaper_runs.py +0 -97
- walker/commands/reaper/reaper_runs_abort.py +0 -83
- walker/commands/reaper/reaper_schedule_activate.py +0 -64
- walker/commands/reaper/reaper_schedule_start.py +0 -64
- walker/commands/reaper/reaper_schedule_stop.py +0 -64
- walker/commands/reaper/reaper_session.py +0 -159
- walker/commands/repair/repair.py +0 -68
- walker/commands/repair/repair_run.py +0 -72
- walker/commands/repair/repair_scan.py +0 -79
- walker/commands/report.py +0 -57
- walker/commands/restart.py +0 -61
- walker/commands/show/show.py +0 -72
- walker/commands/show/show_app_actions.py +0 -53
- walker/commands/show/show_cassandra_status.py +0 -35
- walker/commands/show/show_commands.py +0 -58
- walker/commands/show/show_processes.py +0 -35
- walker/commands/show/show_repairs.py +0 -47
- walker/commands/status.py +0 -128
- walker/commands/storage.py +0 -52
- walker/commands/user_entry.py +0 -69
- walker/commands/watch.py +0 -85
- walker/embedded_params.py +0 -2
- walker/k8s_utils/cassandra_clusters.py +0 -48
- walker/k8s_utils/cassandra_nodes.py +0 -26
- walker/k8s_utils/pods.py +0 -211
- walker/repl.py +0 -165
- walker/repl_commands.py +0 -58
- walker/repl_state.py +0 -211
- walker/sso/authn_ad.py +0 -94
- walker/sso/idp.py +0 -150
- walker/sso/idp_login.py +0 -29
- walker/sso/sso_config.py +0 -45
- walker/utils.py +0 -194
- walker/version.py +0 -5
- {walker → adam}/checks/__init__.py +0 -0
- {walker → adam}/checks/check_context.py +0 -0
- {walker → adam}/checks/issue.py +0 -0
- {walker → adam}/cli_group.py +0 -0
- {walker → adam}/columns/__init__.py +0 -0
- {walker/commands → adam/commands/audit}/__init__.py +0 -0
- {walker/commands/frontend → adam/commands/cql}/__init__.py +0 -0
- {walker/commands/medusa → adam/commands/deploy}/__init__.py +0 -0
- {walker/commands/postgres → adam/commands/devices}/__init__.py +0 -0
- {walker/commands/reaper → adam/commands/export}/__init__.py +0 -0
- {walker/commands/repair → adam/commands/medusa}/__init__.py +0 -0
- {walker → adam}/commands/nodetool_commands.py +0 -0
- {walker/commands/show → adam/commands/postgres}/__init__.py +0 -0
- {walker/k8s_utils → adam/commands/reaper}/__init__.py +0 -0
- {walker/sso → adam/commands/repair}/__init__.py +0 -0
- /walker/medusa_show_restorejobs.py → /adam/commands/show/__init__.py +0 -0
- {walker → adam}/repl_session.py +0 -0
- {kaqing-1.77.0.dist-info → kaqing-2.0.171.dist-info}/WHEEL +0 -0
adam/utils.py
ADDED
|
@@ -0,0 +1,679 @@
|
|
|
1
|
+
from abc import abstractmethod
|
|
2
|
+
from concurrent.futures import Future, ThreadPoolExecutor
|
|
3
|
+
from contextlib import redirect_stdout
|
|
4
|
+
import copy
|
|
5
|
+
import csv
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
import importlib
|
|
8
|
+
import io
|
|
9
|
+
import json
|
|
10
|
+
import os
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
import random
|
|
13
|
+
import string
|
|
14
|
+
import threading
|
|
15
|
+
from typing import Callable, Iterator, TypeVar
|
|
16
|
+
from dateutil import parser
|
|
17
|
+
import subprocess
|
|
18
|
+
import sys
|
|
19
|
+
import time
|
|
20
|
+
import click
|
|
21
|
+
import yaml
|
|
22
|
+
|
|
23
|
+
from . import __version__
|
|
24
|
+
|
|
25
|
+
# Thread-local logging state (Ing nesting counter, LogTiming tree) so
# concurrent worker threads do not interleave each other's progress output.
log_state = threading.local()
|
|
26
|
+
|
|
27
|
+
class LogConfig:
    """Process-wide logging switches, patched by the CLI at startup."""

    # Callables (not plain booleans) so the CLI can install late-bound
    # flag lookups after argument parsing.
    is_debug = lambda: False
    is_debug_timing = lambda: False
    # When False, display_help() becomes a no-op (embedding callers opt out).
    is_display_help = True
|
|
31
|
+
|
|
32
|
+
def to_tabular(lines: str, header: str = None, dashed_line = False):
    """Align a newline-delimited blob of text into padded columns.

    Thin wrapper over lines_to_tabular() that performs the line splitting.
    """
    rows = lines.split('\n')
    return lines_to_tabular(rows, header, dashed_line)
|
|
34
|
+
|
|
35
|
+
def lines_to_tabular(lines: list[str], header: str = None, dashed_line = False, separator = ' '):
    """Left-justify `separator`-delimited rows into aligned columns.

    Args:
        lines: data rows, each a `separator`-delimited string.
        header: optional header row, included in width calculation.
        dashed_line: when True (and a header is given), draw a '-' rule
            under the header.
        separator: delimiter used to split each row into cells.

    Returns:
        A single newline-joined string of padded rows.
    """
    maxes = []  # per-column maximum cell width
    nls = []    # accumulated formatted output lines

    def format_line(line: str):
        # Pad each cell of `line` to its column width and append to nls.
        nl = []
        words = line.split(separator)
        for i, word in enumerate(words):
            nl.append(word.ljust(maxes[i], ' '))
        nls.append(' '.join(nl))

    all_lines = lines
    if header:
        all_lines = [header] + lines

    # First pass: compute column widths across header + data rows.
    for line in all_lines:
        words = line.split(separator)
        for i, word in enumerate(words):
            lw = len(word)
            if len(maxes) <= i:
                maxes.append(lw)
            elif maxes[i] < lw:
                maxes[i] = lw

    # Second pass: emit header (with optional dashed rule), then data rows.
    if header:
        format_line(header)
        if dashed_line:
            # NOTE(review): the rule width assumes 2 chars between columns,
            # while rows are joined with a single space — confirm intended.
            nls.append(''.ljust(sum(maxes) + (len(maxes) - 1) * 2, '-'))
    for line in lines:
        format_line(line)

    return '\n'.join(nls)
|
|
67
|
+
|
|
68
|
+
def convert_seconds(total_seconds_float):
    """Break a float second count into whole (hours, minutes, seconds)."""
    whole_seconds = int(total_seconds_float)  # drop the fractional part

    hours, remainder = divmod(whole_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)

    return hours, minutes, seconds
|
|
78
|
+
|
|
79
|
+
def epoch(timestamp_string: str):
    """Parse a human-readable timestamp into a Unix epoch float via dateutil."""
    return parser.parse(timestamp_string).timestamp()
|
|
81
|
+
|
|
82
|
+
def log(s = None):
    """Echo `s` to stdout unless an Ing spinner is suppressing output.

    Returns:
        True when the message was printed, False when suppressed.
    """
    if not loggable():
        return False

    # Print an empty line only for None; falsy values such as False or an
    # empty collection are still echoed as-is.  Using `is None` (idiomatic
    # identity check) instead of `== None`, which could be hijacked by a
    # custom __eq__.
    if s is None:
        print()
    else:
        click.echo(s)

    return True
|
|
93
|
+
|
|
94
|
+
def log2(s = None, nl = True):
    """Echo `s` to stderr (blank line when falsy).

    Returns:
        True when something was printed, False when output is suppressed.
    """
    if loggable():
        if s:
            click.echo(s, err=True, nl=nl)
        else:
            print(file=sys.stderr)
        return True

    return False
|
|
104
|
+
|
|
105
|
+
def elapsed_time(start_time: float):
    """Format the wall-clock time since `start_time` as HH:MM:SS."""
    delta = time.time() - start_time  # avoid shadowing the function name

    hh = int(delta // 3600)
    mm = int((delta % 3600) // 60)
    ss = int(delta % 60)

    return f"{hh:02}:{mm:02}:{ss:02}"
|
|
113
|
+
|
|
114
|
+
def duration(start_time: float, end_time: float = None):
    """Render the span between two epoch timestamps as e.g. '1h 2m 5s'.

    Leading zero-valued units are dropped; seconds are always shown.
    When end_time is omitted (or 0), the current time is used.
    """
    if not end_time:
        end_time = time.time()

    hours, minutes, seconds = convert_seconds(end_time - start_time)
    parts = []
    # Bug fix: the previous code tested the whole (h, m, s) tuple — always
    # truthy — and appended the tuple itself, yielding e.g. "(1, 2, 5)h".
    if hours:
        parts.append(f'{hours}h')
    if parts or minutes:
        parts.append(f'{minutes}m')
    parts.append(f'{seconds}s')

    return ' '.join(parts)
|
|
126
|
+
|
|
127
|
+
def strip(lines):
    """Trim spaces from every line and strip leading/trailing blank lines."""
    trimmed = (line.strip(' ') for line in lines.split('\n'))
    return '\n'.join(trimmed).strip('\n')
|
|
129
|
+
|
|
130
|
+
def deep_merge_dicts(dict1, dict2):
    """Return a new dict with dict2 recursively merged into dict1.

    Nested dicts present in both inputs are merged key-by-key.  A value
    from dict2 replaces an existing value only when it is truthy, while
    brand-new keys are always added (even with falsy values).  Neither
    input is modified.
    """
    merged = dict(dict1)

    for key, incoming in dict2.items():
        existing = merged.get(key)
        if key in merged and isinstance(existing, dict) and isinstance(incoming, dict):
            merged[key] = deep_merge_dicts(existing, incoming)
        elif key not in merged or incoming:
            merged[key] = incoming

    return merged
|
|
147
|
+
|
|
148
|
+
def deep_sort_dict(d):
    """Recursively sort dict keys and list elements; scalars pass through."""
    if isinstance(d, dict):
        return {key: deep_sort_dict(d[key]) for key in sorted(d)}

    if isinstance(d, list):
        return sorted(deep_sort_dict(item) for item in d)

    return d
|
|
160
|
+
|
|
161
|
+
def get_deep_keys(d, current_path=""):
    """Flatten a nested dict into dotted key paths.

    Args:
        d: the dictionary to traverse.
        current_path: dotted prefix accumulated by the recursion.

    Returns:
        list of strings, one per leaf (non-dict) value, e.g.
        "key1.subkey1.nestedkey".
    """
    paths = []
    for key, value in d.items():
        path = f"{current_path}.{key}" if current_path else str(key)
        if isinstance(value, dict):
            paths.extend(get_deep_keys(value, path))
        else:
            paths.append(path)
    return paths
|
|
181
|
+
|
|
182
|
+
def display_help(replace_arg = False):
    """Re-run the current CLI invocation with '--help' appended or substituted.

    Honors LogConfig.is_display_help so embedding callers can opt out.
    """
    if not LogConfig.is_display_help:
        return

    args = list(sys.argv)
    if replace_arg:
        args[-1] = '--help'
    else:
        args.append('--help')

    subprocess.run(args)
|
|
192
|
+
|
|
193
|
+
def random_alphanumeric(length):
    """Return a lowercase random alphanumeric string of `length` characters.

    Not cryptographically secure — uses `random`, not `secrets`.
    """
    alphabet = string.ascii_letters + string.digits
    token = ''.join(random.choice(alphabet) for _ in range(length))

    return token.lower()
|
|
198
|
+
|
|
199
|
+
def json_to_csv(json_data: list[dict[any, any]], delimiter: str = ','):
    """Flatten JSON records and render them as CSV.

    Nested dicts/lists are flattened into '_'-joined column names
    (e.g. {'a': {'b': 1}} -> column 'a_b').  The column set is taken from
    the first record only; later records with extra keys raise ValueError.

    Args:
        json_data: a list of records, or a single dict (wrapped in a list).
        delimiter: CSV field separator.

    Returns:
        (header_line, body_lines) — body_lines keeps a trailing '' entry
        from the final CRLF — or None when json_data is empty.
    """
    def flatten_json(y):
        # Depth-first flattening; list indices become path components.
        out = {}
        def flatten(x, name=''):
            if type(x) is dict:
                for a in x:
                    flatten(x[a], name + a + '_')
            elif type(x) is list:
                i = 0
                for a in x:
                    flatten(a, name + str(i) + '_')
                    i += 1
            else:
                out[name[:-1]] = x
        flatten(y)
        return out

    if isinstance(json_data, dict):
        json_data = [json_data]

    flattened_data = [flatten_json(record) for record in json_data]
    if not flattened_data:
        return None

    keys = flattened_data[0].keys()
    # Write straight into StringIO buffers.  The previous implementation
    # wrapped these writes in redirect_stdout for no effect: DictWriter was
    # already handed the buffer directly.
    header = io.StringIO()
    csv.DictWriter(header, keys, delimiter=delimiter).writeheader()
    body = io.StringIO()
    csv.DictWriter(body, keys, delimiter=delimiter).writerows(flattened_data)

    return header.getvalue().strip('\r\n'), [l.strip('\r') for l in body.getvalue().split('\n')]
|
|
234
|
+
|
|
235
|
+
def log_to_file(config: dict[any, any]):
    """Best-effort dump of a login config/response under /kaqing/logs.

    Never raises: any failure (unwritable path, unserializable payload) is
    deliberately swallowed so debug logging can never break the login flow.
    """
    try:
        base = f"/kaqing/logs"
        os.makedirs(base, exist_ok=True)

        # Timestamped filename, e.g. login.20240101-120000.txt.
        now = datetime.now()
        timestamp_str = now.strftime("%Y%m%d-%H%M%S")
        filename = f"{base}/login.{timestamp_str}.txt"
        with open(filename, 'w') as f:
            if isinstance(config, dict):
                try:
                    json.dump(config, f, indent=4)
                except:
                    # NOTE(review): f.write() on a dict raises TypeError,
                    # which the outer except swallows — the file may be left
                    # partially written in that case.
                    f.write(config)
            else:
                f.write(config)
    except:
        # Intentional best-effort: logging failures are silently ignored.
        pass
|
|
253
|
+
|
|
254
|
+
def copy_config_file(rel_path: str, module: str, suffix: str = '.yaml', show_out = True):
    """Materialize a default config file under ~/.kaqing if it does not exist.

    Imports `module`, calls its config() factory, and writes the result as
    YAML to ~/.kaqing/<rel_path>.  Returns the absolute path either way.

    Args:
        rel_path: path relative to ~/.kaqing.
        module: dotted module name exposing a config() callable.
        suffix: used only to shorten the filename shown in the notice.
        show_out: when True (and no IDP_TOKEN env override is present),
            log a notice that the default file was written.
    """
    dir = f'{Path.home()}/.kaqing'
    path = f'{dir}/{rel_path}'
    if not os.path.exists(path):
        # NOTE(review): only the top-level ~/.kaqing directory is created;
        # nested rel_path directories are not — confirm callers pass flat names.
        os.makedirs(dir, exist_ok=True)
        module = importlib.import_module(module)
        with open(path, 'w') as f:
            yaml.dump(module.config(), f, default_flow_style=False)
        if show_out and not idp_token_from_env():
            log2(f'Default {os.path.basename(path).split(suffix)[0] + suffix} has been written to {path}.')

    return path
|
|
266
|
+
|
|
267
|
+
def idp_token_from_env():
    """Return the IDP_TOKEN environment variable, or None when unset."""
    return os.environ.get('IDP_TOKEN')
|
|
269
|
+
|
|
270
|
+
def is_lambda(func):
    """Return True when `func` is an anonymous (lambda-defined) callable."""
    if not callable(func):
        return False
    return getattr(func, '__name__', None) == '<lambda>'
|
|
272
|
+
|
|
273
|
+
def debug(s = None):
    """Emit `s` to stderr with a DEBUG prefix when debug mode is active."""
    if not LogConfig.is_debug():
        return
    log2(f'DEBUG {s}')
|
|
276
|
+
|
|
277
|
+
class Ing:
    """Context manager that prints '<msg>...' and ' OK' around a slow block.

    Nesting is tracked via the thread-local ing_cnt so only the outermost
    block produces output; while any Ing is active, loggable() suppresses
    ordinary log()/log2() output on this thread.
    """

    def __init__(self, msg: str, suppress_log=False):
        self.msg = msg
        self.suppress_log = suppress_log

    def __enter__(self):
        # Lazily initialize the per-thread nesting counter.
        if not hasattr(log_state, 'ing_cnt'):
            log_state.ing_cnt = 0

        try:
            if not log_state.ing_cnt:
                # Outermost block: print the progress prefix without a newline
                # (debug mode prefers full log lines, so skip it there).
                if not self.suppress_log and not LogConfig.is_debug():
                    log2(f'{self.msg}...', nl=False)

            return None
        finally:
            # Increment after logging so loggable() does not suppress the
            # message we just printed.
            log_state.ing_cnt += 1

    def __exit__(self, exc_type, exc_val, exc_tb):
        log_state.ing_cnt -= 1
        if not log_state.ing_cnt:
            if not self.suppress_log and not LogConfig.is_debug():
                log2(' OK')

        # Never swallow exceptions raised inside the block.
        return False
|
|
302
|
+
|
|
303
|
+
def ing(msg: str, body: Callable[[], None]=None, suppress_log=False):
    """Run `body` inside an Ing progress context and return its result.

    When `body` is omitted, returns the Ing context manager itself for use
    in a `with` statement.
    """
    spinner = Ing(msg, suppress_log=suppress_log)
    if not body:
        return spinner

    with spinner:
        return body()
|
|
317
|
+
|
|
318
|
+
def loggable():
    """True when ordinary output may print: debug mode on, or no Ing active."""
    return LogConfig.is_debug() or not getattr(log_state, 'ing_cnt', 0)
|
|
320
|
+
|
|
321
|
+
class TimingNode:
    """One node in the per-thread timing tree printed by LogTiming."""

    def __init__(self, depth: int, s0: float = None, line: str = None):
        # Bug fix: the default used to be `s0: time.time = time.time()`,
        # which evaluated once at import time, freezing the start time for
        # every defaulted instance (and mis-used time.time as a type
        # annotation).  The start time is now captured per-instance.
        self.depth = depth
        self.s0 = time.time() if s0 is None else s0
        self.line = line       # formatted timing line, filled in on exit
        self.children = []     # nested TimingNode entries

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return f'[{self.depth}: {self.line}, children={len(self.children)}]'

    def tree(self):
        """Render this node's line followed by its children, one per line."""
        lines = []
        if self.line:
            lines.append(self.line)

        for child in self.children:
            if child.line:
                lines.append(child.tree())
        return '\n'.join(lines)
|
|
340
|
+
|
|
341
|
+
class LogTiming:
    """Context manager that records elapsed time into a thread-local tree.

    Scopes nest: __enter__ pushes a fresh TimingNode, __exit__ pops it,
    attaches it to the parent, and prints the whole tree once the outermost
    scope closes.  Everything is a no-op unless debug timing is enabled.
    """

    def __init__(self, msg: str, s0: float = None):
        self.msg = msg
        self.s0 = s0  # optional externally-captured start time

    def __enter__(self):
        if not LogConfig.is_debug_timing():
            return

        if not hasattr(log_state, 'timings'):
            # Root sentinel node for this thread.
            log_state.timings = TimingNode(0)

        # Remember the current node and descend one level.
        self.me = log_state.timings
        log_state.timings = TimingNode(self.me.depth+1)
        if not self.s0:
            self.s0 = time.time()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not LogConfig.is_debug_timing():
            return False

        # The node we pushed on entry; stamp it with the formatted line.
        child = log_state.timings
        log_state.timings.line = timing_log_line(self.me.depth, self.msg, self.s0)

        if child and child.line:
            self.me.children.append(child)
        log_state.timings = self.me

        if not self.me.depth:
            # Back at the root: print the collected tree and reset.
            log2(self.me.tree())
            log_state.timings = TimingNode(0)

        # Never swallow exceptions from the timed block.
        return False
|
|
374
|
+
|
|
375
|
+
def log_timing(msg: str, body: Callable[[], None]=None, s0: float = None):
    """Time `body` (or a `with` block) under LogTiming.

    Behavior:
        - No body and no s0: return a LogTiming context manager.
        - Debug timing disabled: run body directly (or return None).
        - Otherwise: run body inside LogTiming and return its result.
    """
    if not s0 and not body:
        return LogTiming(msg, s0=s0)

    if not LogConfig.is_debug_timing():
        if body:
            return body()

        return

    r = None

    # Manual enter/exit so the body's result can be captured and the timing
    # scope closed even when the body raises.
    t = LogTiming(msg, s0=s0)
    t.__enter__()
    try:
        if body:
            r = body()
    finally:
        t.__exit__(None, None, None)

    return r
|
|
396
|
+
|
|
397
|
+
def timing_log_line(depth: int, msg: str, s0: time.time):
    """Format one timing line; nested entries indent, slow ones get a star."""
    elapsed = time.time() - s0

    if not depth:
        prefix = '[timings] '
    elif elapsed > 0.01:
        # Flag entries that took noticeable time.
        prefix = (' ' * (depth-1)) + '* '
    else:
        prefix = ' ' * depth

    return f'{prefix}{msg}: {elapsed:.2f} sec'
|
|
407
|
+
|
|
408
|
+
class WaitLog:
    """Module-level latch so a wait message is printed at most once."""

    # Set by wait_log(), cleared by clear_wait_log_flag().
    wait_log_flag = False
|
|
410
|
+
|
|
411
|
+
def wait_log(msg: str):
    """Print `msg` once; repeated calls stay silent until the flag is cleared."""
    if WaitLog.wait_log_flag:
        return

    log2(msg)
    WaitLog.wait_log_flag = True
|
|
415
|
+
|
|
416
|
+
def clear_wait_log_flag():
    """Re-arm wait_log() so the next wait message prints again."""
    WaitLog.wait_log_flag = False
|
|
418
|
+
|
|
419
|
+
# Generic result type for ParallelService.map / OffloadService.submit.
T = TypeVar('T')
|
|
420
|
+
|
|
421
|
+
class ParallelService:
    """Facade over a ParallelMapHandler that maps a function over its items.

    Depending on handler configuration the mapping runs on a thread pool,
    serially over a limited sample, or serially over everything.
    """

    def __init__(self, handler: 'ParallelMapHandler'):
        self.handler = handler

    def map(self, fn: Callable[..., T]) -> Iterator[T]:
        """Apply `fn` to every element; returns a list when collecting."""
        h = self.handler

        if h.executor:
            # Pooled mode: fan out across the handler's thread pool.
            results = h.executor.map(fn, h.collection)
        elif h.samples < sys.maxsize:
            # Sampling mode: evaluate at most `samples` leading elements.
            taken = []
            budget = h.samples
            for item in h.collection:
                if not budget:
                    break

                taken.append(fn(item))
                budget -= 1

            results = iter(taken)
        else:
            # Plain serial mode.
            results = map(fn, h.collection)

        return list(results) if h.collect else results
|
|
452
|
+
|
|
453
|
+
class ParallelMapHandler:
    """Context manager choosing between pooled, serial, and sampled mapping.

    Usage::

        with ParallelMapHandler(items, workers, msg='Checking {size} pods|Checked') as svc:
            results = svc.map(fn)

    Message tokens may be 'begin|end' pairs: the part before '|' is logged
    on entry, the part after on exit (pooled mode only).  '{size}' expands
    to the element count, and a leading 'd`' marker makes the whole message
    debug-only.
    """

    def __init__(self, collection: list, max_workers: int, samples: int = sys.maxsize, msg: str = None, collect = True):
        self.collection = collection
        self.max_workers = max_workers
        self.executor = None    # created in __enter__ only for pooled mode
        self.samples = samples  # < sys.maxsize enables sampling mode
        self.msg = msg
        if msg and msg.startswith('d`'):
            # Debug-only message: keep it (minus the marker) only in debug mode.
            if LogConfig.is_debug():
                self.msg = msg.replace('d`', '', 1)
            else:
                self.msg = None
        self.collect = collect

        self.begin = []         # tokens logged before the work starts
        self.end = []           # tokens logged after pooled work completes
        self.start_time = None

    def __enter__(self):
        self.calc_msgs()

        # NOTE(review): `(not self.size() or self.size())` is always True;
        # the pool is created whenever max_workers > 1 and not sampling.
        if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
            self.start_time = time.time()

            self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
            self.executor.__enter__()

        return ParallelService(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.executor:
            # Waits for outstanding pool work before continuing.
            self.executor.__exit__(exc_type, exc_val, exc_tb)

        if self.end:
            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')

        return False

    def size(self):
        # Number of elements to process; 0 for a None/empty collection.
        if not self.collection:
            return 0

        return len(self.collection)

    def calc_msgs(self):
        """Split self.msg into begin/end token lists and log the begin line."""
        if not self.msg:
            return

        size = self.size()

        # Classify the execution mode purely for message wording; only the
        # pooled mode gets completion ("end") tokens.
        offloaded = False
        serially = False
        sampling = False
        if size == 0:
            offloaded = True
            self.msg = self.msg.replace('{size}', '1')
        elif self.max_workers > 1 and size > 1 and self.samples == sys.maxsize:
            self.msg = self.msg.replace('{size}', f'{size}')
        elif self.samples < sys.maxsize:
            sampling = True
            # Clamp the sample count to the collection size.
            if self.samples > size:
                self.samples = size
            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
        else:
            serially = True
            self.msg = self.msg.replace('{size}', f'{size}')

        for token in self.msg.split(' '):
            if '|' in token:
                self.begin.append(token.split('|')[0])
                if not sampling and not serially and not offloaded:
                    self.end.append(token.split('|')[1])
            else:
                self.begin.append(token)
                if not sampling and not serially and not offloaded:
                    self.end.append(token)

        if offloaded:
            log2(f'{" ".join(self.begin)} offloaded...')
        elif sampling or serially:
            log2(f'{" ".join(self.begin)} serially...')
        else:
            log2(f'{" ".join(self.begin)} with {self.max_workers} workers...')
|
|
538
|
+
|
|
539
|
+
class OffloadService:
    """Thin facade over an OffloadHandler that submits work to its executor.

    When the handler has no live executor, the callable is invoked
    synchronously and its result is wrapped in an already-completed
    Future, so callers can treat both modes uniformly.
    """

    def __init__(self, handler: 'OffloadHandler'):
        self.handler = handler

    def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
        """Run *fn* on the handler's pool when available, inline otherwise."""
        pool = self.handler.executor
        if pool:
            return pool.submit(fn, *args, **kwargs)

        # No pool: run inline but keep the Future-based contract.
        # NOTE(review): an exception from fn propagates here instead of
        # being captured via set_exception — matches the original behavior.
        done = Future()
        done.set_result(fn(*args, **kwargs))
        return done
class OffloadHandler(ParallelMapHandler):
    """Context manager for fire-and-forget task offloading.

    Unlike a ParallelMapHandler built over a collection, there is nothing
    to map: __enter__ returns an OffloadService whose submit() runs
    callables on a thread pool (or inline when the pool is disabled).

    NOTE(review): __exit__, size and calc_msgs below are byte-identical to
    the implementations visible earlier in this file (presumably on the
    base class); if so, they are redundant overrides and could be deleted
    in favor of inheritance — confirm against ParallelMapHandler.
    """

    def __init__(self, max_workers: int, msg: str = None):
        # No collection: offloading submits individual callables,
        # and there is no result list to collect.
        super().__init__(None, max_workers, msg=msg, collect=False)

    def __enter__(self):
        """Start the thread pool when parallel execution applies and
        return the OffloadService facade."""
        self.calc_msgs()

        # The original condition included '(not self.size() or self.size())',
        # which is always True (size() is a pure accessor) — removed.
        if self.max_workers > 1 and self.samples == sys.maxsize:
            self.start_time = time.time()

            self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
            self.executor.__enter__()

        return OffloadService(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Tear down the pool (if started) and log completion.

        Returns False so any in-flight exception propagates.
        """
        if self.executor:
            self.executor.__exit__(exc_type, exc_val, exc_tb)

        # self.end is only populated for the fully-parallel mode.
        if self.end:
            log2(f'{" ".join(self.end)} in {elapsed_time(self.start_time)}.')

        return False

    def size(self):
        """Number of items in the wrapped collection (always 0 here,
        since __init__ passes collection=None)."""
        if not self.collection:
            return 0

        return len(self.collection)

    def calc_msgs(self):
        """Resolve the '{size}' placeholder in self.msg, split it into
        begin/end word lists, and log the chosen execution mode."""
        if not self.msg:
            return

        size = self.size()

        offloaded = False
        serially = False
        sampling = False
        if size == 0:
            # Empty/absent collection: single offloaded submission.
            offloaded = True
            self.msg = self.msg.replace('{size}', '1')
        elif self.max_workers > 1 and size > 1 and self.samples == sys.maxsize:
            self.msg = self.msg.replace('{size}', f'{size}')
        elif self.samples < sys.maxsize:
            sampling = True
            if self.samples > size:
                self.samples = size
            self.msg = self.msg.replace('{size}', f'{self.samples}/{size} sample')
        else:
            serially = True
            self.msg = self.msg.replace('{size}', f'{size}')

        # 'begin|end' tokens split across the start and completion messages;
        # the completion list is only built for the fully-parallel mode.
        for token in self.msg.split(' '):
            if '|' in token:
                self.begin.append(token.split('|')[0])
                if not sampling and not serially and not offloaded:
                    self.end.append(token.split('|')[1])
            else:
                self.begin.append(token)
                if not sampling and not serially and not offloaded:
                    self.end.append(token)

        if offloaded:
            log2(f'{" ".join(self.begin)} offloaded...')
        elif sampling or serially:
            log2(f'{" ".join(self.begin)} serially...')
        else:
            log2(f'{" ".join(self.begin)} with {self.max_workers} workers...')
|
+
# class ParallelMapHandler(ParallelHandler):
|
|
628
|
+
# def __enter__(self):
|
|
629
|
+
# self.calc_msgs()
|
|
630
|
+
|
|
631
|
+
# if self.max_workers > 1 and (not self.size() or self.size()) and self.samples == sys.maxsize:
|
|
632
|
+
# self.start_time = time.time()
|
|
633
|
+
|
|
634
|
+
# self.executor = ThreadPoolExecutor(max_workers=self.max_workers)
|
|
635
|
+
# self.executor.__enter__()
|
|
636
|
+
|
|
637
|
+
# if self.collection:
|
|
638
|
+
# return self.map
|
|
639
|
+
# else:
|
|
640
|
+
# return self.submit
|
|
641
|
+
|
|
642
|
+
# def map(self, fn: Callable[..., T]) -> Iterator[T]:
|
|
643
|
+
# iterator = None
|
|
644
|
+
# if self.executor:
|
|
645
|
+
# iterator = self.executor.map(fn, self.collection)
|
|
646
|
+
# elif self.samples < sys.maxsize:
|
|
647
|
+
# samples = []
|
|
648
|
+
|
|
649
|
+
# for elem in self.collection:
|
|
650
|
+
# if not self.samples:
|
|
651
|
+
# break
|
|
652
|
+
|
|
653
|
+
# samples.append(fn(elem))
|
|
654
|
+
# self.samples -= 1
|
|
655
|
+
|
|
656
|
+
# iterator = iter(samples)
|
|
657
|
+
# else:
|
|
658
|
+
# iterator = map(fn, self.collection)
|
|
659
|
+
|
|
660
|
+
# if self.collect:
|
|
661
|
+
# return list(iterator)
|
|
662
|
+
# else:
|
|
663
|
+
# return iterator
|
|
664
|
+
|
|
665
|
+
# def submit(self, fn: Callable[..., T], /, *args, **kwargs) -> Future[T]:
|
|
666
|
+
# if self.executor:
|
|
667
|
+
# return self.executor.submit(fn, *args, **kwargs)
|
|
668
|
+
# else:
|
|
669
|
+
# future = Future()
|
|
670
|
+
|
|
671
|
+
# future.set_result(fn(*args, **kwargs))
|
|
672
|
+
|
|
673
|
+
# return future
|
|
674
|
+
|
|
675
|
+
def parallelize(collection: list, max_workers: int = 0, samples = sys.maxsize, msg: str = None, collect = True):
    """Build a ParallelMapHandler context manager over *collection*.

    msg may contain a '{size}' placeholder and 'begin|end' tokens; see
    calc_msgs for how they are rendered.
    """
    return ParallelMapHandler(
        collection,
        max_workers,
        samples=samples,
        msg=msg,
        collect=collect,
    )
|
+
def offload(max_workers: int = 3, msg: str = None):
    """Build an OffloadHandler context manager for submitting individual
    callables to a thread pool (defaults to 3 workers)."""
    return OffloadHandler(max_workers, msg=msg)