hcs-cli 0.1.318__py3-none-any.whl → 0.1.319__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hcs_cli/__init__.py +1 -1
- hcs_cli/cmds/advisor/html_utils.py +30 -26
- hcs_cli/cmds/advisor/recommendation_engine.py +7 -10
- hcs_cli/cmds/daas/tenant/plan.py +1 -1
- hcs_cli/cmds/debug/start.py +0 -1
- hcs_cli/cmds/dev/fs/helper/credential_helper.py +2 -0
- hcs_cli/cmds/dev/fs/helper/k8s_util.py +0 -1
- hcs_cli/cmds/dev/fs/init.py +38 -5
- hcs_cli/cmds/dev/fs/profiler.py +0 -1
- hcs_cli/cmds/dev/fs/provided_files/akka.plan.yml +94 -250
- hcs_cli/cmds/dev/fs/provided_files/azsim.plan.yml +27 -34
- hcs_cli/cmds/dev/fs/provided_files/azure.plan.yml +294 -322
- hcs_cli/cmds/dev/fs/provided_files/mqtt-secret.yaml +188 -93
- hcs_cli/cmds/dev/fs/provided_files/mqtt-server-external.yaml +4 -5
- hcs_cli/cmds/dev/fs/provided_files/patch-mqtt-hostname.yml +3 -3
- hcs_cli/cmds/dev/fs/provided_files/patch-vernemq-ssl-depth.json +1 -1
- hcs_cli/cmds/dev/fs/tailor.py +7 -12
- hcs_cli/cmds/dev/mqtt.py +1 -2
- hcs_cli/cmds/dev/util/mqtt_helper.py +0 -1
- hcs_cli/cmds/hoc/search.py +39 -9
- hcs_cli/cmds/hst/clean.py +2 -1
- hcs_cli/cmds/inventory/assign.py +1 -3
- hcs_cli/cmds/inventory/deassign.py +1 -1
- hcs_cli/cmds/scm/plan.py +131 -3
- hcs_cli/cmds/task.py +2 -4
- hcs_cli/cmds/template/list_usage.py +2 -2
- hcs_cli/cmds/template/usage.py +20 -7
- hcs_cli/cmds/vm/list.py +0 -1
- hcs_cli/config/hcs-deployments.yaml +52 -52
- hcs_cli/main.py +0 -2
- hcs_cli/payload/akka.blueprint.yml +95 -243
- hcs_cli/payload/app/manual.json +19 -19
- hcs_cli/payload/edge/akka.json +6 -6
- hcs_cli/payload/edge/vsphere.json +6 -6
- hcs_cli/payload/hoc/lcm-capcalc.json.template +43 -0
- hcs_cli/payload/hoc/no-spare.json.template +1 -1
- hcs_cli/payload/inventory/assign.json +14 -16
- hcs_cli/payload/inventory/deassign.json +11 -11
- hcs_cli/payload/lcm/akka.json +31 -33
- hcs_cli/payload/lcm/azure-dummy-nt.json +64 -66
- hcs_cli/payload/lcm/azure-dummy.json +64 -66
- hcs_cli/payload/lcm/azure-real.json +13 -11
- hcs_cli/payload/lcm/edge-proxy.json +34 -36
- hcs_cli/payload/lcm/zero-dedicated.json +34 -36
- hcs_cli/payload/lcm/zero-delay-1m-per-vm.json +53 -69
- hcs_cli/payload/lcm/zero-fail-delete-template.json +43 -0
- hcs_cli/payload/lcm/zero-fail-destroy-onthread.json +38 -40
- hcs_cli/payload/lcm/zero-fail-destroy.json +38 -40
- hcs_cli/payload/lcm/zero-fail-prepare-onthread.json +38 -40
- hcs_cli/payload/lcm/zero-fail-prepare.json +38 -40
- hcs_cli/payload/lcm/zero-fail-vm-onthread.json +58 -74
- hcs_cli/payload/lcm/zero-fail-vm.json +58 -74
- hcs_cli/payload/lcm/zero-floating.json +34 -36
- hcs_cli/payload/lcm/zero-manual.json +33 -35
- hcs_cli/payload/lcm/zero-multisession.json +34 -36
- hcs_cli/payload/lcm/zero-nanw.json +31 -33
- hcs_cli/payload/lcm/zero-new-5k-delay.json +69 -78
- hcs_cli/payload/lcm/zero-new-5k.json +36 -38
- hcs_cli/payload/lcm/zero-new-snapshot.json +37 -39
- hcs_cli/payload/lcm/zero-new.json +37 -39
- hcs_cli/payload/lcm/zero-reuse-vm-id.json +33 -35
- hcs_cli/payload/lcm/zero-with-max-id-offset.json +32 -34
- hcs_cli/payload/lcm/zero.json +59 -73
- hcs_cli/payload/provider/ad-stes-vsphere.json +26 -26
- hcs_cli/payload/provider/akka.json +12 -12
- hcs_cli/payload/provider/azure.json +14 -14
- hcs_cli/payload/provider/edgeproxy.json +12 -12
- hcs_cli/payload/provider/vsphere.json +14 -14
- hcs_cli/payload/scm/starter.json +22 -23
- hcs_cli/payload/synt/core/p01-dummy-success.json +11 -15
- hcs_cli/payload/synt/core/p02-dummy-fail.json +12 -15
- hcs_cli/payload/synt/core/p03-dummy-exception.json +12 -15
- hcs_cli/payload/synt/core/p04-dummy-success-repeat.json +12 -15
- hcs_cli/payload/synt/core/p05-dummy-fail-repeat.json +13 -16
- hcs_cli/payload/synt/core/p06-dummy-exception-repeat.json +13 -16
- hcs_cli/payload/synt/core/p07-dummy-delay.json +12 -15
- hcs_cli/payload/synt/core/p08-dummy-property.json +12 -15
- hcs_cli/payload/synt/ext/p20-connect-success.json +12 -15
- hcs_cli/payload/synt/ext/p21-connect-fail.json +12 -15
- hcs_cli/payload/synt/ext/p30-ssl-success.json +12 -15
- hcs_cli/payload/synt/ext/p31-ssl-fail.json +13 -16
- hcs_cli/payload/synt/ext/p40-http-success.json +12 -15
- hcs_cli/payload/synt/ext/p41-http-fail.json +12 -15
- hcs_cli/payload/synt/ext/p42-http-status-code.json +14 -20
- hcs_cli/payload/synt/ext1/p10-ping-success.json +13 -16
- hcs_cli/payload/synt/ext1/p11-ping-fail.json +12 -15
- hcs_cli/payload/synt/ext1/p12-ping-success-repeat.json +14 -17
- hcs_cli/provider/hcs/cert.py +0 -1
- hcs_cli/provider/hcs/edge.py +1 -1
- hcs_cli/provider/hcs/uag.py +1 -1
- hcs_cli/service/hoc/diagnostic.py +0 -3
- hcs_cli/service/lcm/vm.py +0 -1
- hcs_cli/service/task.py +0 -1
- hcs_cli/support/debug_util.py +0 -1
- hcs_cli/support/plan_util.py +0 -1
- hcs_cli/support/predefined_payload.py +4 -1
- hcs_cli/support/template_util.py +0 -1
- hcs_cli/support/test_utils.py +2 -2
- hcs_cli/support/test_utils2.py +536 -0
- {hcs_cli-0.1.318.dist-info → hcs_cli-0.1.319.dist-info}/METADATA +24 -17
- {hcs_cli-0.1.318.dist-info → hcs_cli-0.1.319.dist-info}/RECORD +103 -100
- {hcs_cli-0.1.318.dist-info → hcs_cli-0.1.319.dist-info}/WHEEL +0 -0
- {hcs_cli-0.1.318.dist-info → hcs_cli-0.1.319.dist-info}/entry_points.txt +0 -0
hcs_cli/cmds/scm/plan.py
CHANGED
@@ -16,11 +16,13 @@ limitations under the License.
 import json
 import re
 import sys
+from datetime import datetime, timedelta
 from os import path
 
 import click
 import hcs_core.sglib.cli_options as cli
-
+import yumako
+from hcs_core.ctxp import data_util, recent, util
 from hcs_core.sglib.client_util import wait_for_res_status
 
 import hcs_cli.service.scm as scm
@@ -33,6 +35,98 @@ def plan():
     pass
 
 
+def _get_next_slot_name():
+    """
+    Get the current and next half-hour aligned slot names and time delta based on current UTC time.
+
+    Slots are half-hour aligned (HH:00 or HH:30).
+
+    Returns:
+        tuple: (current_slot_name, next_slot_name, timedelta_to_next_slot)
+               where slot names are in format 'weekday/HH:MM' (e.g., 'monday/18:00')
+               and timedelta_to_next_slot is a timedelta object
+    """
+    current_time_utc = datetime.utcnow()
+
+    # Weekday names mapping (0=Monday, 6=Sunday in Python's weekday())
+    weekday_names = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
+
+    # Get current half-hour aligned slot (round down)
+    current_minute = current_time_utc.minute
+    if current_minute < 30:
+        # Current slot is this hour:00
+        current_slot_time = current_time_utc.replace(minute=0, second=0, microsecond=0)
+    else:
+        # Current slot is this hour:30
+        current_slot_time = current_time_utc.replace(minute=30, second=0, microsecond=0)
+
+    current_weekday_name = weekday_names[current_slot_time.weekday()]
+    current_time_str = current_slot_time.strftime("%H:%M")
+    current_slot_name = f"{current_weekday_name}/{current_time_str}"
+
+    # Get next half-hour aligned slot
+    if current_minute < 30:
+        # Next slot is this hour:30
+        next_time = current_time_utc.replace(minute=30, second=0, microsecond=0)
+    else:
+        # Next slot is next hour:00
+        next_time = current_time_utc + timedelta(hours=1)
+        next_time = next_time.replace(minute=0, second=0, microsecond=0)
+
+    next_weekday_name = weekday_names[next_time.weekday()]
+    next_time_str = next_time.strftime("%H:%M")
+    next_slot_name = f"{next_weekday_name}/{next_time_str}"
+
+    timedelta_to_next = next_time - current_time_utc
+
+    return current_slot_name, next_slot_name, timedelta_to_next
+
+
+def _format_scm_plan_task_table(data):
+    fields_mapping = {
+        "location": "Location",
+        "_slot": "Slot",
+        "_timeCreatedStale": "Time Created",
+    }
+
+    current_slot_name, next_slot_name, timedelta_to_next = _get_next_slot_name()
+    time_str_to_next = yumako.time.display(timedelta_to_next.total_seconds())
+
+    for d in data:
+        d["_timeCreatedStale"] = yumako.time.stale(d["timeCreated"] / 1000)
+        meta = d["meta"]
+        input = d["input"]
+        my_slot_name = meta["scm_plan_day"] + "/" + meta["scm_plan_slot"]
+        d["_slot"] = my_slot_name
+
+        if d["worker"] == "com.vmware.horizon.sg.scm.task.CapacityOptimization":
+            d["_idealCapacity"] = input["idealCapacity"]
+            d["_forecastCapacity"] = input["forecastCapacity"]
+            d["_taskKey"] = d["key"]
+
+            if my_slot_name == next_slot_name:
+                d["_nextExecution"] = click.style(f"In {time_str_to_next}", fg="bright_blue")
+            else:
+                d["_nextExecution"] = ""
+            fields_mapping["_nextExecution"] = "Next"
+
+            if my_slot_name == current_slot_name:
+                # d['_lastExecutedStale'] = 'TODO'
+                # d['_lastExecutionStatus'] = 'TODO'
+                # fields_mapping['_lastExecutedStale'] = 'Executed'
+                # fields_mapping['_lastExecutionStatus'] = 'Status'
+                pass
+
+            fields_mapping["_idealCapacity"] = "Ideal"
+            fields_mapping["_forecastCapacity"] = "Forecast"
+            fields_mapping["_taskKey"] = "Task Key"
+        else:
+            # TODO other worker types
+            pass
+
+    return util.format_table(data, fields_mapping)
+
+
 @plan.command
 @click.option("--template", help="Filter plan by template.")
 @click.option("--task", help="Filter plan by task class name.")
@@ -99,7 +193,7 @@ def delete(org: str, name: str, confirm: bool, **kwargs):
 
 @plan.command
 @cli.org_id
-@cli.limit
+@cli.limit(default=336)
 @click.option("--day", required=False, help="Search by day-identifier. Example: Monday")
 @click.option("--time", required=False, help="Search by time. Example: 13:30")
 @click.option("--slot", required=False, help="Search by time slot. Example: Mon/13:30")
@@ -111,6 +205,7 @@ def delete(org: str, name: str, confirm: bool, **kwargs):
     help="Search by task state, as comma-separated values. E.g. 'init,running,success,error', or 'all'.",
 )
 @click.argument("name", required=False)
+@cli.formatter(_format_scm_plan_task_table)
 def tasks(org: str, limit: int, day: str, time: str, slot: str, name: str, state: str, **kwargs):
     """Get tasks of a named calendar plan."""
     org = cli.get_org_id(org)
@@ -134,6 +229,40 @@ def tasks(org: str, limit: int, day: str, time: str, slot: str, name: str, state
     ret = scm.plan.tasks(org_id=org, id=name, limit=limit, day=day, slot=time, states=state, **kwargs)
     if ret is None:
         return "", 1
+
+    # sort tasks by slot.
+    for t in ret:
+        meta = t["meta"]
+        d = meta["scm_plan_day"]
+        if d == "sunday":
+            d = "0"
+        elif d == "monday":
+            d = "1"
+        elif d == "tuesday":
+            d = "2"
+        elif d == "wednesday":
+            d = "3"
+        elif d == "thursday":
+            d = "4"
+        elif d == "friday":
+            d = "5"
+        elif d == "saturday":
+            d = "6"
+        else:
+            raise ValueError("Invalid day name in task meta: " + d)
+        t["_slot"] = d + "/" + meta["scm_plan_slot"]
+    ret = sorted(ret, key=lambda x: x["_slot"])
+    for t in ret:
+        del t["_slot"]
+
+    # identify the current task result
+    # current_slot_name, next_slot_name, timedelta_to_next = _get_next_slot_name()
+    # for t in ret:
+    #     meta = t['meta']
+    #     my_slot_name = meta['scm_plan_day'] + '/' + meta['scm_plan_slot']
+    #     if current_slot_name == my_slot_name:
+    #         break
+
     return ret
 
 
@@ -343,7 +472,6 @@ def run(org: str, name: str, slot: str, config: str, wait: str, **kwargs):
 
 
 def _wait_for_task(org_id: str, name: str, task_key: str, timeout: str):
-
    return wait_for_res_status(
        resource_name=name + "/" + task_key,
        fn_get=lambda: scm.plan.get_task(org_id, name, task_key),
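Note: the new _get_next_slot_name helper above feeds the "Next" column of the SCM plan task table. As a rough illustration of the half-hour slot naming it produces, here is a minimal standalone sketch that only mirrors the rounding logic shown in the diff (it is not the CLI code path; the function name and example timestamp are invented for illustration):

from datetime import datetime, timedelta

def next_slot(now: datetime) -> str:
    # Round up to the next half-hour boundary, as the diffed helper does.
    weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
    if now.minute < 30:
        nxt = now.replace(minute=30, second=0, microsecond=0)
    else:
        nxt = (now + timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
    return f"{weekdays[nxt.weekday()]}/{nxt.strftime('%H:%M')}"

# Example: 2024-01-01 was a Monday, so 18:10 UTC maps to the 'monday/18:30' slot.
print(next_slot(datetime(2024, 1, 1, 18, 10)))  # monday/18:30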
hcs_cli/cmds/task.py
CHANGED
@@ -13,8 +13,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-import json
-
 import click
 import hcs_core.sglib.cli_options as cli
 import yumako
@@ -29,9 +27,9 @@ def _format_task_table(data):
         schedule = d.get("schedule")
         if schedule:
             if schedule.get("intervalMs"):
-                recurring = f
+                recurring = f"Every {yumako.time.display(schedule['intervalMs'] / 1000)}"
             elif schedule.get("cronExpression"):
-                recurring = f
+                recurring = f"{schedule['cronExpression']}"
             else:
                 recurring = "<No>"
         else:
hcs_cli/cmds/template/list_usage.py
CHANGED
@@ -111,8 +111,8 @@ def list_usage(org: str, **kwargs):
             "addedVmHours": summary.get("addedVmHours"),
             "offloadVmHours": summary.get("offloadVmHours"),
             "reducedVmHours": summary.get("reducedVmHours"),
-            "historyVmUtilizationPercent": f
-            "predictionVmUtilizationPercent": f
+            "historyVmUtilizationPercent": f"{summary.get('historyVmUtilizationPercent', 0)}%",
+            "predictionVmUtilizationPercent": f"{summary.get('predictionVmUtilizationPercent', 0)}%",
         }
 
         ret2.append(item)
hcs_cli/cmds/template/usage.py
CHANGED
@@ -13,6 +13,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
+import json
 import os
 import tempfile
 import time
@@ -32,17 +33,28 @@ def _timestamp_to_date(timestamp: int):
 
 
 @click.command(hidden=True)
+@click.option(
+    "--local-plan-file",
+    type=str,
+    required=False,
+    help="Instead of reading from API, read from local file for the template usage data. Debug only.",
+)
 @click.argument("id", type=str, required=False)
 @cli.org_id
-def usage(id: str, org: str, **kwargs):
+def usage(id: str, org: str, local_plan_file: str = None, **kwargs):
     """Show usage visualization"""
 
     org_id = cli.get_org_id(org)
     id = recent.require("template", id)
 
-
-
-
+    if local_plan_file:
+        with open(local_plan_file, "r") as f:
+            plan_data = json.load(f)
+            usage = plan_data["meta"]
+    else:
+        usage = scm.template_usage(org_id, id)
+    if not usage:
+        return "No usage data found", 1
 
     x_axis = []
     consumed_capacity = []
@@ -61,8 +73,9 @@ def usage(id: str, org: str, **kwargs):
         x_axis.append(_timestamp_to_date(t))
         max_capacity = history["maxCapacity"][i]
         min_free = history["minFree"][i]
-        consumed_capacity.append(max_capacity - min_free)
-
+        # consumed_capacity.append(max_capacity - min_free)
+        consumed_capacity.append(history["poweredOnAssignedVms"][i])
+        spare_capacity.append(max_capacity - consumed_capacity[-1])
         no_spare_error.append(history["noSpare"][i])
 
     start_timestamp = prediction["startTimestamp"]
@@ -72,7 +85,7 @@ def usage(id: str, org: str, **kwargs):
         max_capacity = prediction["maxCapacity"][i]
         min_free = prediction["minFree"][i]
         ideal_capacity = prediction["idealCapacity"][i]
-        consumed_capacity_predicated.append(ideal_capacity
+        consumed_capacity_predicated.append(ideal_capacity)
         spare_capacity_predicated.append(min_free)
         optimized_capacity.append(ideal_capacity)
         no_spare_error_predicated.append(prediction["noSpare"][i])
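Note: the new --local-plan-file option is a debug-only path; per the diff it takes the usage data from the "meta" field of a local JSON file instead of calling the SCM API. A minimal sketch of that loading step, assuming a previously captured plan document (the helper name and file name are hypothetical; only the "meta" key comes from the diff):

import json

def load_usage_from_plan_file(path: str):
    # Debug helper mirroring the diffed branch: read a locally saved plan
    # document and return its "meta" payload as the usage data.
    with open(path, "r") as f:
        plan_data = json.load(f)
    return plan_data.get("meta")

# Hypothetical usage with a captured plan document saved as plan.json.
usage = load_usage_from_plan_file("plan.json")
if not usage:
    print("No usage data found")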
hcs_cli/config/hcs-deployments.yaml
CHANGED
@@ -1,16 +1,16 @@
 prod:
 - env: prod-global
   alias: prod
-  description: 
+  description:
   azure-reion: WestUS2
-  primary: 
+  primary:
   portal-url:
   - https://cloud-sg.horizon.omnissa.com
   - https://cloud.vmwarehorizon.com/
   hdc:
     url: https://cloud-sg.horizon.omnissa.com
     mqtt: cloud-sg-us-hdc-mqtt.horizon.omnissa.com
-  regions: 
+  regions:
   - name: EastUS2
     url: https://cloud-sg-us-r-eastus2.horizon.omnissa.com
     #mqtt: http://prod-sg-mqtt-eastus2.trafficmanager.net
@@ -25,16 +25,16 @@ prod:
     url: https://hv2-cloud-us-2.horizon.omnissa.com
 - env: prod-na-cp102
   alias: prod-us
-  description: 
+  description:
   azure-reion: WestUS2
-  primary: 
+  primary:
   portal-url:
   - https://cloud-sg.horizon.omnissa.com
   - https://cloud.vmwarehorizon.com/
   hdc:
     url: https://cloud-sg-us.horizon.omnissa.com
     mqtt: cloud-sg-us-hdc-mqtt.horizon.omnissa.com
-  regions: 
+  regions:
   - name: EastUS2
     url: https://cloud-sg-us-r-eastus2.horizon.omnissa.com
     mqtt: cloud-sg-us-r-eastus2-mqtt-a.horizon.omnissa.com
@@ -45,7 +45,7 @@ prod:
     url: https://hv2-cloud-us-2.horizon.omnissa.com
 - env: prod-centralus-cp102
   alias: prod-us-standby
-  description: 
+  description:
   azure-reion: CentralUS
   primary: false
   portal-url:
@@ -53,8 +53,8 @@ prod:
   - https://cloud.vmwarehorizon.com/
   hdc:
     url: https://cloud-sg-centralus.horizon.omnissa.com
-    mqtt: 
-  regions: 
+    mqtt:
+  regions:
   - name: EastUS2
     url: https://cloud-sg-us-r-eastus2.horizon.omnissa.com
     mqtt: cloud-sg-us-r-eastus2-mqtt-a.horizon.omnissa.com
@@ -65,16 +65,16 @@ prod:
     url: https://hv2-cloud-us-2.horizon.omnissa.com
 - env: prod-eu-cp102
   alias: prod-eu
-  description: 
+  description:
   azure-reion: NorthEurope
-  primary: 
+  primary:
   portal-url:
   - https://cloud-sg.horizon.omnissa.com
   - https://cloud.vmwarehorizon.com/
   hdc:
     url: https://cloud-sg-eu.horizon.omnissa.com
     mqtt: cloud-sg-eu-hdc-mqtt.horizon.omnissa.com
-  regions: 
+  regions:
   - name: NothEurope
     url: https://cloud-sg-eu-r-northeurope.horizon.omnissa.com
     mqtt: cloud-sg-eu-r-northeurope-mqtt-a.horizon.omnissa.com
@@ -88,8 +88,8 @@ prod:
     url: https://hv2-cloud-eu-2.horizon.omnissa.com
 - env: prod-westeu-cp102
   alias: prod-eu-standby
-  description: 
-  azure-reion: 
+  description:
+  azure-reion:
   primary: false
   portal-url:
   - https://cloud-sg.horizon.omnissa.com
@@ -97,7 +97,7 @@ prod:
   hdc:
     url: https://cloud-sg-westeu.horizon.omnissa.com
     mqtt: null
-  regions: 
+  regions:
   - name: NothEurope
     url: https://cloud-sg-eu-r-northeurope.horizon.omnissa.com
     mqtt: cloud-sg-eu-r-northeurope-mqtt-a.horizon.omnissa.com
@@ -111,16 +111,16 @@ prod:
     url: https://hv2-cloud-eu-2.horizon.omnissa.com
 - env: prod-jp-cp102
   alias: prod-jp
-  description: 
+  description:
   azure-reion: JapanEast
-  primary: 
+  primary:
   portal-url:
   - https://cloud-sg.horizon.omnissa.com
   - https://cloud.vmwarehorizon.com/
   hdc:
     url: https://cloud-sg-jp.horizon.omnissa.com
     mqtt: cloud-sg-jp-hdc-mqtt.horizon.omnissa.com
-  regions: 
+  regions:
   - name: JapanEast
     url: https://cloud-sg-jp-r-japaneast.horizon.omnissa.com
     mqtt: cloud-sg-jp-r-japaneast-mqtt-a.horizon.omnissa.com
@@ -134,8 +134,8 @@ prod:
     url: https://hv2-cloud-jp-2.horizon.omnissa.com
 - env: prod-westjp-cp102
   alias: prod-jp-standby
-  description: 
-  azure-reion: 
+  description:
+  azure-reion:
   primary: false
   portal-url:
   - https://cloud-sg.horizon.omnissa.com
@@ -143,7 +143,7 @@ prod:
   hdc:
     url: https://cloud-sg-westjp.horizon.omnissa.com
     mqtt: null
-  regions: 
+  regions:
   - name: JapanEast
     url: https://cloud-sg-jp-r-japaneast.horizon.omnissa.com
     mqtt: cloud-sg-jp-r-japaneast-mqtt-a.horizon.omnissa.com
@@ -158,55 +158,55 @@ prod:
 staging:
 - env: staging1-northcentralus-cp102
   alias: stg-us
-  description: 
+  description:
   azure-reion: North Central US
-  primary: 
-  portal-url: 
+  primary:
+  portal-url:
   - https://cloud.stg.vmwarehorizon.com/
   - https://staging1-cp102.horizon.omnissa.com/
   hdc:
     url: https://staging1-northcentralus-cp102.horizon.omnissa.com/
     mqtt: staging1-northcentralus-cp102-mqtt.horizon.omnissa.com
-  regions: 
+  regions:
   - name: staging1-northcentralus-cp102a
     url: https://staging1-northcentralus-cp102a.horizon.omnissa.com/
-    mqtt: 
+    mqtt:
   - name: staging1-northcentralus-cp102b
-    url: https://staging1-northcentralus-cp102b.horizon.omnissa.com/ 
+    url: https://staging1-northcentralus-cp102b.horizon.omnissa.com/
     mqtt:
   edgehub:
     url: https://test-hv2-cloud-us-2.horizon.omnissa.com
 - env: staging1-northeurope-cp102
   alias: stg-eu
-  description: 
+  description:
   azure-reion: North Europe
-  primary: 
+  primary:
   portal-url:
   - https://cloud.stg.vmwarehorizon.com/
   - https://staging1-cp102.horizon.omnissa.com/
   hdc:
     url: https://staging1-northeurope-cp102.horizon.omnissa.com/
     mqtt: staging1-northeurope-cp102-mqtt.horizon.omnissa.com
-  regions: 
+  regions:
   - name: staging1-northeurope-cp102a
     url: https://staging1-northeurope-cp102a.horizon.omnissa.com/
-    mqtt: 
+    mqtt:
   - name: staging1-northeurope-cp102b
     url: https://staging1-northeurope-cp102b.horizon.omnissa.com/
     mqtt:
   edgehub:
     url: https://test-hv2-cloud-eu-2.horizon.omnissa.com
 dev:
-- env: 
+- env: "dev1b-westus2-cp102"
   alias: bat
   description: BAT
-  azure-reion: 
-  primary: 
+  azure-reion: "West US 2"
+  primary:
   portal-url: []
   hdc:
     url: https://dev1b-westus2-cp102.azcp.horizon.omnissa.com/
     mqtt: dev1b-westus2-cp102-mqtt.azcp.horizon.omnissa.com
-  regions: 
+  regions:
   - name: westus2
     url: https://dev1b-westus2-cp102a.azcp.horizon.omnissa.com/
     mqtt:
@@ -215,13 +215,13 @@ dev:
 - env: dev1b-westus2-cp103
   alias: integration
   description: Integration/Nightly
-  azure-reion: 
-  primary: 
+  azure-reion: "West US 2"
+  primary:
   portal-url: []
   hdc:
     url: https://dev1b-westus2-cp103.azcp.horizon.omnissa.com/
     mqtt: dev1b-westus2-cp103-mqtt.azcp.horizon.omnissa.com
-  regions: 
+  regions:
   - name: westus2
     url: https://dev1b-westus2-cp103a.azcp.horizon.omnissa.com/
     mqtt: dev1b-westus2-cp103a-mqtt.azcp.horizon.omnissa.com
@@ -230,16 +230,16 @@ dev:
 - env: dev1b-westus2-cp104
   alias: chaos
   description: Chaos
-  azure-reion: 
-  primary: 
+  azure-reion: "West US 2"
+  primary:
   portal-url: []
   hdc:
     url: https://dev1b-westus2-cp104.azcp.horizon.omnissa.com/
     mqtt: null
   regions:
   - name: westus2
-    url: 
-    mqtt: 
+    url:
+    mqtt:
   edgehub:
     url: https://dev-westus2-cp101.azcp.horizon.omnissa.com
 - env: stress1b-westus2-cp102
@@ -247,14 +247,14 @@ dev:
   description: Stress
   azure-reion: West US 2
   primary:
-  portal-url: [] 
+  portal-url: []
   hdc:
     url: https://stress1b-westus2-cp102.azcp.horizon.omnissa.com/
     mqtt: null
-  regions: 
+  regions:
   - name: westus2-cp102a
     url: https://stress1b-westus2-cp102a.azcp.horizon.omnissa.com/
-    mqtt: 
+    mqtt:
   - name: westus2-cp102b
     url: https://stress1b-westus2-cp102b.azcp.horizon.omnissa.com/
     mqtt:
@@ -265,14 +265,14 @@ dev:
   description: Service Gateway master
   azure-reion: West US 2
   primary:
-  portal-url: [] 
+  portal-url: []
   hdc:
     url: https://horizonv2-sg.devframe.cp.horizon.omnissa.com/
-    mqtt: 
-  regions: 
+    mqtt:
+  regions:
   - name: westus2
     url: https://horizonv2-sg.devframe.cp.horizon.omnissa.com/
-    mqtt: 
+    mqtt:
   edgehub:
     url: https://horizonv2-em.devframe.cp.horizon.omnissa.com/
 - env: hm-master
@@ -280,13 +280,13 @@ dev:
   description: horizonv2-monitoring master stack
   azure-reion: West US 2
   primary:
-  portal-url: [] 
+  portal-url: []
   hdc:
     url: https://horizonv2-monitoring.devframe.cp.horizon.omnissa.com/
-  regions: 
+  regions:
   - name: westus2
     url: https://horizonv2-monitoring.devframe.cp.horizon.omnissa.com/
-    mqtt: 
+    mqtt:
   edgehub:
     url: https://horizonv2-em.devframe.cp.horizon.omnissa.com/
 
hcs_cli/main.py
CHANGED
@@ -15,7 +15,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-
 import logging
 import os
 import os.path as path
@@ -65,7 +64,6 @@ CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
 @click.option("--no-upgrade-check", is_flag=True, default=False, help="Check new version of HCS CLI.")
 @click.option("--no-telemetry", is_flag=True, default=False, help="Disable telemetry collection")
 def cli(profile: str, no_upgrade_check: bool, no_telemetry: bool, **kwargs):
-
     if not no_upgrade_check:
         _check_upgrade()
 
|