hcs-core 0.1.250__py3-none-any.whl → 0.1.316__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hcs_core/__init__.py +1 -0
- hcs_core/ctxp/__init__.py +12 -4
- hcs_core/ctxp/_init.py +94 -22
- hcs_core/ctxp/built_in_cmds/_ut.py +4 -3
- hcs_core/ctxp/built_in_cmds/context.py +16 -1
- hcs_core/ctxp/built_in_cmds/profile.py +30 -11
- hcs_core/ctxp/cli_options.py +34 -13
- hcs_core/ctxp/cli_processor.py +33 -20
- hcs_core/ctxp/cmd_util.py +87 -0
- hcs_core/ctxp/config.py +1 -1
- hcs_core/ctxp/context.py +82 -3
- hcs_core/ctxp/data_util.py +56 -20
- hcs_core/ctxp/dispatcher.py +82 -0
- hcs_core/ctxp/duration.py +65 -0
- hcs_core/ctxp/extension.py +7 -6
- hcs_core/ctxp/fn_util.py +57 -0
- hcs_core/ctxp/fstore.py +39 -22
- hcs_core/ctxp/jsondot.py +259 -78
- hcs_core/ctxp/logger.py +7 -6
- hcs_core/ctxp/profile.py +53 -21
- hcs_core/ctxp/profile_store.py +1 -0
- hcs_core/ctxp/recent.py +3 -3
- hcs_core/ctxp/state.py +4 -3
- hcs_core/ctxp/task_schd.py +168 -0
- hcs_core/ctxp/telemetry.py +145 -0
- hcs_core/ctxp/template_util.py +21 -0
- hcs_core/ctxp/timeutil.py +11 -0
- hcs_core/ctxp/util.py +194 -33
- hcs_core/ctxp/var_template.py +3 -4
- hcs_core/plan/__init__.py +11 -5
- hcs_core/plan/base_provider.py +1 -0
- hcs_core/plan/core.py +29 -26
- hcs_core/plan/dag.py +15 -12
- hcs_core/plan/helper.py +4 -2
- hcs_core/plan/kop.py +21 -8
- hcs_core/plan/provider/dev/dummy.py +3 -3
- hcs_core/sglib/auth.py +137 -95
- hcs_core/sglib/cli_options.py +20 -5
- hcs_core/sglib/client_util.py +230 -62
- hcs_core/sglib/csp.py +73 -6
- hcs_core/sglib/ez_client.py +139 -41
- hcs_core/sglib/hcs_client.py +3 -9
- hcs_core/sglib/init.py +17 -0
- hcs_core/sglib/login_support.py +22 -83
- hcs_core/sglib/payload_util.py +3 -1
- hcs_core/sglib/requtil.py +38 -0
- hcs_core/sglib/utils.py +107 -0
- hcs_core/util/check_license.py +0 -2
- hcs_core/util/duration.py +6 -3
- hcs_core/util/job_view.py +35 -15
- hcs_core/util/pki_util.py +48 -1
- hcs_core/util/query_util.py +54 -8
- hcs_core/util/scheduler.py +3 -3
- hcs_core/util/ssl_util.py +1 -1
- hcs_core/util/versions.py +15 -12
- hcs_core-0.1.316.dist-info/METADATA +54 -0
- hcs_core-0.1.316.dist-info/RECORD +69 -0
- {hcs_core-0.1.250.dist-info → hcs_core-0.1.316.dist-info}/WHEEL +1 -2
- hcs_core-0.1.250.dist-info/METADATA +0 -36
- hcs_core-0.1.250.dist-info/RECORD +0 -59
- hcs_core-0.1.250.dist-info/top_level.txt +0 -1
hcs_core/ctxp/task_schd.py
ADDED

@@ -0,0 +1,168 @@
+import logging
+import threading
+from copy import deepcopy
+from dataclasses import dataclass
+from time import sleep, time
+
+import schedule
+
+from . import duration
+
+log = logging.getLogger(__name__)
+
+
+class TaskRef:
+    def __init__(self, ref1, ref2=None):
+        self.ref1 = ref1
+        self.ref2 = ref2
+
+    def cancel(self):
+        if self.ref1:
+            fn = list(self.ref1.job_func.args)[0]
+            log.debug("Cancel job %s: %s" % (fn.__name__, self.ref1))
+            schedule.cancel_job(self.ref1)
+            self.ref1 = None
+        if self.ref2:
+            fn2 = list(self.ref2.job_func.args)[0]
+            log.debug("Cancel job %s: %s" % (fn2.__name__, self.ref2))
+            schedule.cancel_job(self.ref2)
+            self.ref2 = None
+
+
+@dataclass
+class JobStatistics:
+    last_start: int = 0
+    last_end: int = 0
+    history_total: int = 0
+    last_cycle_total: int = 0
+
+    def start(self):
+        self.last_start = int(time())
+        self.last_cycle_total += 1
+        self.history_total += 1
+
+    def end(self):
+        self.last_end = int(time())
+
+    def reset_cycle(self):
+        self.last_cycle_total = 0
+
+
+@dataclass
+class Statistics:
+    run_once_job: JobStatistics = JobStatistics()
+    recurring_job: JobStatistics = JobStatistics()
+    size: int = 0
+
+    def reset_cycle(self):
+        self.run_once_job.reset_cycle()
+        self.recurring_job.reset_cycle()
+
+
+_g_worker_thread: threading.Event = None
+_g_flag_stop_daemon: threading.Thread = None
+_g_flag_running: bool = True
+_g_statistics = Statistics()
+
+
+def _task_wrapper_repeat(fn_impl, kwargs):
+    _g_statistics.recurring_job.start()
+    fn_impl(**kwargs)
+    _g_statistics.recurring_job.end()
+
+
+def _task_wrapper_once(fn_impl, kwargs):
+    _g_statistics.run_once_job.start()
+    fn_impl(**kwargs)
+    _g_statistics.run_once_job.end()
+    return schedule.CancelJob
+
+
+def submit(fn_task: callable, initial_delay: str = None, repeat_interval: str = None, **kwargs):
+    if initial_delay:
+        initial_delay_seconds = duration.to_seconds(initial_delay)
+    else:
+        initial_delay_seconds = 1
+
+    if repeat_interval:
+        # Have an initial run
+        # TODO: this is not correct, but better than no lib-provided initial delay.
+        job1 = schedule.every(initial_delay_seconds).seconds.do(_task_wrapper_once, fn_task, kwargs)
+        log.debug(f"Register initial run {fn_task.__name__} {job1}")
+
+        # TODO: identify a way to use the scheduler with initial delay, with cancellation in mind.
+        seconds = duration.to_seconds(repeat_interval)
+        job2 = schedule.every(seconds).seconds.do(_task_wrapper_repeat, fn_task, kwargs)
+        log.debug(f"Register scheduled job {fn_task.__name__} at interval {repeat_interval} {job2}")
+    else:
+        job1 = schedule.every(initial_delay_seconds).seconds.do(_task_wrapper_once, fn_task, kwargs)
+        log.debug(f"Register one-shot job {fn_task.__name__} {job1}")
+        job2 = None
+    return TaskRef(job1, job2)
+
+
+def statistics(reset_cycle: bool = False):
+    _g_statistics.size = len(schedule.get_jobs())
+    ret = deepcopy(_g_statistics)
+    if reset_cycle:
+        _g_statistics.reset_cycle()
+    return ret
+
+
+def _daemon_worker():
+    log.info("task scheduler daemon thread start")
+    while not _g_flag_stop_daemon.is_set():
+        if _g_flag_running:
+            schedule.run_pending()
+        sleep(1)
+    log.info("task scheduler daemon thread exit")
+
+
+def start_daemon(paused: bool = False):
+    global _g_flag_stop_daemon
+    global _g_worker_thread
+    if _g_worker_thread:
+        raise Exception("Already started")
+
+    if paused:
+        global _g_flag_running
+        _g_flag_running = False
+
+    _g_flag_stop_daemon = threading.Event()
+    _g_worker_thread = threading.Thread(target=_daemon_worker, daemon=True, name="task-schd")
+    _g_worker_thread.start()
+
+
+def pause():
+    global _g_flag_running
+    if _g_flag_running:
+        _g_flag_running = False
+        log.info("task scheduler daemon paused")
+
+
+def resume():
+    global _g_flag_running
+    if not _g_flag_running:
+        _g_flag_running = True
+        log.info("task scheduler daemon resumed")
+
+
+def stop_daemon():
+    global _g_worker_thread
+    if _g_worker_thread:
+        _g_flag_stop_daemon.set()
+        _g_worker_thread.join()
+        _g_worker_thread = None
+
+
+if __name__ == "__main__":
+
+    def job1(a, b):
+        log.info(f"job1 {a} {b}")
+
+    # logutil.setup()
+    start_daemon()
+    j1 = submit(fn_task=job1, initial_delay="PT3S", repeat_interval="PT5S", a="aa", b="bb")
+    sleep(10)
+    stop_daemon()
+    log.info("exit")
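The new task_schd module wraps the schedule library with a background worker thread, pause/resume flags, and per-cycle job statistics. A minimal usage sketch based on the module's own __main__ block (the heartbeat function and its keyword argument are illustrative; the PT3S/PT5S durations are parsed by the sibling duration module):

from hcs_core.ctxp import task_schd

def heartbeat(name):
    print(f"heartbeat from {name}")

# Start the daemon worker thread, then register a recurring task:
# one initial run after ~3 seconds, repeating every 5 seconds afterwards.
task_schd.start_daemon()
ref = task_schd.submit(fn_task=heartbeat, initial_delay="PT3S", repeat_interval="PT5S", name="demo")

# Later: inspect counters, cancel the task, and stop the worker.
print(task_schd.statistics())
ref.cancel()
task_schd.stop_daemon()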
hcs_core/ctxp/telemetry.py
ADDED

@@ -0,0 +1,145 @@
+import json
+import logging
+import sys
+import time
+from datetime import datetime, timezone
+
+import click
+import httpx
+from yumako import env
+
+log = logging.getLogger(__name__)
+
+_record = None
+_enabled = None
+_app_name = ""
+
+
+def disable():
+    global _enabled
+    _enabled = False
+
+
+def _is_disabled():
+    global _enabled
+    if _enabled is None:
+        _enabled = env.bool("HCS_CLI_TELEMETRY", True)
+    return not _enabled
+
+
+def _get_version():
+    try:
+        from importlib.metadata import version
+
+        return version("hcs-cli")
+    except Exception as e:
+        log.debug(f"Failed to get hcs-cli version: {e}")
+        return "unknown"
+
+
+def _get_record():
+    global _record
+    if _record is None:
+        _record = {
+            "@timestamp": datetime.now(timezone.utc).isoformat(timespec="milliseconds"),
+            "app": _app_name,
+            "command": None,
+            "options": [],
+            "return": -1,
+            "error": None,
+            "time_ms": -1,
+            "version": _get_version(),
+            "env": {
+                "python_version": sys.version,
+                "platform": sys.platform,
+                "executable": sys.executable,
+            },
+        }
+    return _record
+
+
+def start(app_name: str = None):
+    if _is_disabled():
+        return
+
+    global _app_name
+    _app_name = app_name
+    _get_record()
+
+
+def update(cmd_path: str, params: dict):
+    if _is_disabled():
+        return
+
+    record = _get_record()
+    record["command"] = cmd_path
+    record["options"] = [k.replace("_", "-") for k, v in params.items() if v]
+
+
+def end(return_code: int = 0, error: Exception = None):
+    if _is_disabled():
+        return
+
+    record = _get_record()
+    if error:
+        if isinstance(error, click.exceptions.Exit):
+            return_code = error.exit_code
+        elif isinstance(error, SystemExit):
+            return_code = error.code
+        else:
+            record["error"] = str(error)
+            if return_code == 0:
+                return_code = 1
+    record["return"] = return_code
+    record["time_ms"] = int((time.time() - datetime.fromisoformat(record["@timestamp"]).timestamp()) * 1000)
+
+    _fix_missing_commands(record)
+    _injest(record)
+    return record
+
+
+def _fix_missing_commands(record):
+    if record["command"]:
+        return
+
+    args = sys.argv[1:]
+
+    # this does not work for all cases, but only as best effort.
+    options_started = False
+    options = record["options"]
+    command = [_app_name]
+    for arg in args:
+        if arg.startswith("-"):
+            options_started = True
+
+        if options_started:
+            if arg.startswith("--"):
+                options.append(arg[2:])
+            elif arg.startswith("-"):
+                options.append(arg[1:])
+            else:
+                # value. For privacy no logging.
+                continue
+        else:
+            command.append(arg)
+
+    record["command"] = " ".join(command)
+
+
+def _injest(doc):
+
+    # print('TELEMETRY end', json.dumps(doc, indent=4), flush=True)
+
+    try:
+        response = httpx.post(
+            "https://collie.omnissa.com/es/hcs-cli/_doc",
+            auth=("append_user", "public"),
+            headers={"Content-Type": "application/json"},
+            content=json.dumps(doc),
+            timeout=4,
+            verify=False,
+        )
+        response.raise_for_status()
+    except Exception as e:
+        log.debug(f"Telemetry ingestion failed: {e}", exc_info=True)
+        return
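The telemetry module collects a single usage record (command path, option names, return code, timing, Python environment) and posts it to the endpoint above; setting HCS_CLI_TELEMETRY to a false value or calling telemetry.disable() turns every call into a no-op. A rough sketch of the start/update/end lifecycle these functions suggest (the command string and run_command body are hypothetical; the real wiring lives in the CLI processor):

from hcs_core.ctxp import telemetry

telemetry.start(app_name="hcs")  # captures timestamp, CLI version, and environment
telemetry.update("hcs profile list", {"output": "json"})  # command path plus option names only
try:
    run_command()  # hypothetical command body
    telemetry.end(return_code=0)
except Exception as e:
    telemetry.end(return_code=1, error=e)  # stores str(e) and posts the record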
hcs_core/ctxp/template_util.py
ADDED

@@ -0,0 +1,21 @@
+import re
+
+
+def replace(text: str, mapping: dict, strict_on_unresolved_vars: bool = True, strict_on_unused_var: bool = False):
+    unused_vars = set(mapping.keys())
+    for k, v in mapping.items():
+        new_text = text.replace("{{" + k + "}}", str(v))
+        if new_text != text:
+            text = new_text
+            unused_vars.remove(k)
+
+    if strict_on_unresolved_vars:
+        unresolved_vars = re.findall(r"\{\{([^}]+)\}\}", text)
+        if unresolved_vars:
+            raise Exception(f"Strict mode: template variables unresolved: {unresolved_vars}")
+
+    if strict_on_unused_var:
+        if unused_vars:
+            raise Exception(f"Strict mode: var specified but not in template: {unused_vars}")
+
+    return text
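template_util.replace performs simple {{var}} substitution with optional strict checks for unresolved placeholders and unused variables. For example:

from hcs_core.ctxp import template_util

text = "Deploy {{name}} to {{region}}"
print(template_util.replace(text, {"name": "edge-1", "region": "us-west"}))
# -> Deploy edge-1 to us-west

# With strict_on_unresolved_vars (the default), leftover placeholders raise:
template_util.replace(text, {"name": "edge-1"})  # Exception: template variables unresolved: ['region']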
hcs_core/ctxp/timeutil.py
ADDED

@@ -0,0 +1,11 @@
+from datetime import datetime, timezone
+
+
+def iso_date_to_timestamp(datetime_string: str) -> int:
+    dt_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ")
+    return int(dt_object.replace(tzinfo=timezone.utc).timestamp() * 1000)
+
+
+def timestamp_to_iso_date(timestamp_ms: int) -> str:
+    dt = datetime.fromtimestamp(timestamp_ms / 1000, tz=timezone.utc)
+    return dt.isoformat(timespec="milliseconds").replace("+00:00", "Z")
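timeutil.py converts between UTC ISO-8601 timestamps (trailing Z, millisecond precision) and epoch milliseconds:

from hcs_core.ctxp import timeutil

ms = timeutil.iso_date_to_timestamp("2024-01-02T03:04:05.678Z")  # 1704164645678
print(timeutil.timestamp_to_iso_date(ms))  # 2024-01-02T03:04:05.678Z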
hcs_core/ctxp/util.py
CHANGED
@@ -13,17 +13,21 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
+import datetime
+import json
 import os
+import re
 import subprocess
-import json
-import yaml
-import types
 import sys
-import httpx
 import traceback
-import
+import types
 from typing import Any, Callable
+
+import click
+import httpx
 import questionary
+import yaml
+import yumako
 
 
 class CtxpException(Exception):
@@ -55,7 +59,8 @@ def validate_error_return(reason, return_code):
 def print_output(data: Any, args: dict, file=sys.stdout):
     output = args.get("output", "json")
     fields = args.get("field")
-
+    exclude_field = args.get("exclude_field")
+    ids = args.get("ids", False)
     first = args.get("first", False)
 
     if type(data) is str:
@@ -66,20 +71,25 @@ def print_output(data: Any, args: dict, file=sys.stdout):
     try:
         data = _convert_generator(data)
         if first and isinstance(data, list):
+            if len(data) == 0:
+                return
             data = data[0]
 
-        if
+        if ids:
             if fields:
-                raise CtxpException("--
+                raise CtxpException("--ids and --fields should not be used together.")
             data = _convert_to_id_only(data)
-
-
+        else:
+            if fields:
+                data = _filter_fields(data, fields)
+            if exclude_field:
+                data = _exclude_fields(data, exclude_field)
 
         if output is None or output == "json":
            text = json.dumps(data, default=vars, indent=4)
        elif output == "json-compact":
            text = json.dumps(data, default=vars)
-        elif output == "yaml":
+        elif output == "yaml" or output == "yml":
            from . import jsondot

            text = yaml.dump(jsondot.plain(data), sort_keys=False)
@@ -87,7 +97,18 @@ def print_output(data: Any, args: dict, file=sys.stdout):
            if isinstance(data, list):
                text = ""
                for i in data:
-
+                    t = type(i)
+                    if t is str:
+                        line = i
+                    elif isinstance(i, dict):
+                        if len(i) == 0:
+                            continue
+                        if len(i) == 1:
+                            line = str(next(iter(i.values())))
+                        else:
+                            line = json.dumps(i)
+                    else:
+                        line = json.dumps(i)
                    text += line + "\n"
            elif isinstance(data, dict):
                text = json.dumps(data, indent=4)
@@ -95,7 +116,7 @@ def print_output(data: Any, args: dict, file=sys.stdout):
                text = data
            else:
                text = json.dumps(data, indent=4)
-        elif output == "table":
+        elif output == "table" or output == "t":
            formatter = args["format"]
            text = formatter(data)
        else:
@@ -108,7 +129,7 @@ def print_output(data: Any, args: dict, file=sys.stdout):
 
 
 def print_error(error):
-    critical_errors = [KeyError, TypeError]
+    critical_errors = [KeyError, TypeError, AttributeError, ValueError, IndentationError, ImportError]
     for ex in critical_errors:
         if isinstance(error, ex):
             traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
@@ -155,7 +176,27 @@ def _filter_fields(obj: Any, fields: str):
     return _filter_obj(obj)
 
 
+def _exclude_fields(obj: Any, fields_exclude: str):
+    parts = fields_exclude.split(",")
+
+    def _filter_obj(o):
+        if not isinstance(o, dict):
+            return o
+        for k in list(o.keys()):
+            if k in parts:
+                del o[k]
+        return o
+
+    if isinstance(obj, list):
+        return list(map(_filter_obj, obj))
+    return _filter_obj(obj)
+
+
 def panic(reason: Any = None, code: int = 1):
+    if isinstance(reason, SystemExit):
+        os._exit(reason.code)
+    if isinstance(reason, click.exceptions.Exit):
+        os._exit(reason.exit_code)
     if isinstance(reason, Exception):
         text = error_details(reason)
     else:
@@ -182,7 +223,11 @@ def choose(prompt: str, items: list, fn_get_text: Callable = None, selected=None
         panic(prompt + " ERROR: no item available.")
 
     if fn_get_text is None:
-
+
+        def _default_fn_get_text(t):
+            return str(t)
+
+        fn_get_text = _default_fn_get_text
 
     if select_by_default and len(items) == 1:
         ret = items[0]
@@ -221,31 +266,44 @@ def input_array(prompt: str, default: list[str] = None):
     return ret
 
 
-def error_details(
-    if isinstance(
-
-
-
-    details = e.__class__.__name__
-    msg = str(e)
-    if msg:
-        details += ": " + msg
-    cause = e.__cause__
-    if cause and cause != e:
-        details += " | Caused by: " + error_details(cause)
+def error_details(ex):
+    if not isinstance(ex, Exception):
+        return str(ex)
+
+    collector = []
 
+    def _collect_details(e):
+        if isinstance(e, click.ClickException):
+            collector.append(str(e))
+            return
+
+        details = e.__class__.__name__
+        msg = str(e)
+        if msg:
+            details += ": " + msg
        if isinstance(e, httpx.HTTPStatusError):
            details += "\n" + e.response.text
-
-
-
+        collector.append(details)
+
+        cause = e.__cause__
+        if cause and cause != e:
+            _collect_details(cause)
+
+    _collect_details(ex)
+
+    # remove_consecutive_duplicates
+    result = [collector[0]]
+    for item in collector[1:]:
+        if item != result[-1]:
+            result.append(item)
+    return " | Caused by: ".join(result)
 
 
 def avoid_trace_for_ctrl_c():
     import sys
 
     def my_except_hook(exctype, value, traceback):
-        if exctype
+        if exctype is KeyboardInterrupt:
             print("Aborted (KeyboardInterrupt).", flush=True)
             sys.exit(1)
         else:
@@ -292,14 +350,117 @@ def flatten_dict(data, fields_mapping):
     return flattened_data
 
 
-def
+def strip_ansi(text):
+    # Regular expression to match ANSI escape sequences
+    ansi_escape = re.compile(r"\x1b\[[0-9;]*m")
+    return ansi_escape.sub("", text)
+
+
+def format_table(data: list, fields_mapping: dict, columns_to_sum: list = None):
     from tabulate import tabulate
 
     flattened_data = flatten_dict(data, fields_mapping)
     try:
         headers = list(fields_mapping.values())
         table = [[item.get(field) for field in headers] for item in flattened_data]
-
+
+        if columns_to_sum:
+            columns_to_sum_indices = {col: headers.index(col) for col in columns_to_sum if col in headers}
+            footer = [""] * len(headers)
+            footer[0] = "Total"
+            for col_name, col_index in columns_to_sum_indices.items():
+                total = 0
+                for row in table:
+                    v = row[col_index]
+                    if isinstance(v, str):
+                        v = strip_ansi(v)
+                        v = int(v)
+                    elif isinstance(v, int) or isinstance(v, float):
+                        pass
+                    elif v is None:
+                        continue
+                    else:
+                        raise Exception(f"Unexpected cell value type. Type={type(v)}, value={v}, col={col_name}")
+                    total += v
+                footer[col_index] = total
+            separator = ["-" * len(header) for header in headers]
+            table += [separator, footer]
+    except Exception:
         traceback.print_exc()
 
     return tabulate(table, headers=headers) + "\n"
+
+
+def colorize(data: dict, name: str, mapping: dict):
+    if os.environ.get("TERM_COLOR") == "0":
+        return
+
+    s = data.get(name)
+    if not s:
+        return
+
+    if isinstance(mapping, dict):
+        c = mapping.get(s)
+        if c:
+            if isinstance(c, str):
+                data[name] = click.style(s, fg=c)
+            elif callable(c):
+                color = c(data)
+                data[name] = click.style(s, fg=color)
+            else:
+                raise Exception(f"Unexpected color type: {type(c)} {c}")
+    elif callable(mapping):
+        c = mapping(s)
+        if c:
+            data[name] = click.style(s, fg=c)
+    else:
+        raise Exception(f"Unexpected mapping type: {type(mapping)} {mapping}")
+
+
+def default_table_formatter(data: Any, mapping: dict = None):
+    if not isinstance(data, list):
+        return data
+
+    def _restrict_readable_length(data: dict, name: str, length: int):
+        text = data.get(name)
+        if not text:
+            return
+        if len(text) > length:
+            data[name] = text[: length - 3] + "..."
+
+    field_mapping = {}
+    for d in data:
+        if "id" in d:
+            field_mapping["id"] = "Id"
+        if "name" in d:
+            field_mapping["name"] = "Name"
+        if "location" in d:
+            field_mapping["location"] = "Location"
+        if "type" in d:
+            field_mapping["type"] = "Type"
+        if "status" in d:
+            field_mapping["status"] = "Status"
+        if "createdAt" in d:
+            d["_createdStale"] = yumako.time.stale(d["createdAt"], datetime.timezone.utc)
+            field_mapping["_createdStale"] = "Created At"
+        if "updatedAt" in d:
+            d["_updatedStale"] = yumako.time.stale(d["updatedAt"], datetime.timezone.utc)
+            field_mapping["_updatedStale"] = "Updated At"
+
+        colorize(
+            d,
+            "status",
+            {
+                "READY": "green",
+                "SUCCESS": "green",
+                "ERROR": "red",
+            },
+        )
+        _restrict_readable_length(d, "name", 60)
+    if mapping:
+        for k, v in mapping.items():
+            if v is None:
+                field_mapping.pop(k, None)
+            else:
+                field_mapping[k] = v
+    return format_table(data, fields_mapping=field_mapping)
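The util.py changes extend output shaping in print_output (new ids and exclude_field handling, yml and t output aliases, empty-list handling for first) and add _exclude_fields, strip_ansi, colorize, column totals in format_table, and a default_table_formatter that builds a colorized table from common fields. A hedged sketch of how a command might drive these (the sample records are made up):

from hcs_core.ctxp import util

pools = [
    {"id": "p1", "name": "pool-east", "status": "READY", "internal": "x"},
    {"id": "p2", "name": "pool-west", "status": "ERROR", "internal": "y"},
]

# Drop a noisy field and print compact JSON.
util.print_output(pools, {"output": "json-compact", "exclude_field": "internal"})

# Print only the ids (default JSON output); cannot be combined with --field.
util.print_output(pools, {"ids": True})

# Render a table; default_table_formatter colorizes status (READY/SUCCESS green, ERROR red).
util.print_output(pools, {"output": "table", "format": util.default_table_formatter})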
hcs_core/ctxp/var_template.py
CHANGED
@@ -13,13 +13,12 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-import re
 import logging
+import re
 from typing import Any
-
+
+from . import context, jsondot, profile
 from .jsondot import dotdict
-from . import profile
-from . import context
 
 log = logging.getLogger(__name__)
 
hcs_core/plan/__init__.py
CHANGED
@@ -1,5 +1,11 @@
-from .
-from .
-from .
-from . import
-from .
+from . import context as context
+from .actions import actions as actions
+from .core import apply as apply
+from .core import clear as clear
+from .core import destroy as destroy
+from .core import get_deployment_data as get_deployment_data
+from .core import graph as graph
+from .core import resolve as resolve
+from .helper import PlanException as PlanException
+from .helper import PluginException as PluginException
+from .kop import attach_job_view as attach_job_view