qairt-visualizer 0.8.0-py3-none-macosx_11_0_arm64.whl → 0.9.0-py3-none-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qairt_visualizer/__init__.py +6 -3
- qairt_visualizer/apis.py +46 -0
- qairt_visualizer/constants/node_constants.py +10 -0
- qairt_visualizer/core/launchers/electron_launcher_context.py +3 -18
- qairt_visualizer/core/launchers/node_launcher_context.py +88 -0
- qairt_visualizer/core/launchers/web_launcher_context.py +2 -2
- qairt_visualizer/core/parsers/dlc_parser/dlc_parser.py +203 -72
- qairt_visualizer/core/parsers/dlc_parser/get_htp_topology.py +40 -0
- qairt_visualizer/core/parsers/dlc_parser/get_source_topology.py +7 -19
- qairt_visualizer/core/parsers/dlc_parser/helpers/file_helpers.py +30 -0
- qairt_visualizer/core/parsers/dlc_parser/models/op_tensor_mappings.py +13 -8
- qairt_visualizer/core/parsers/dlc_parser/op_tensor_parser.py +1 -1
- qairt_visualizer/core/ui/dist/browser/assets/i18n/panels/en.json +1 -2
- qairt_visualizer/core/ui/dist/browser/base.js +3 -0
- qairt_visualizer/core/ui/dist/browser/browser.js +115 -61
- qairt_visualizer/core/ui/dist/browser/chunk-2C6RAPGY.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-2E6U2SJH.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-MCR4N53U.js → chunk-3UPRHIAT.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-42HWH57D.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-HXB7IAZI.js → chunk-4HGTXTCS.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-5D3ISGML.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-5MOGL4JU.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-6L7EKQZ2.js +19 -0
- qairt_visualizer/core/ui/dist/browser/chunk-6PPH52ML.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-6UK7KPII.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-447NKSFT.js → chunk-6Z7ZLSJK.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-7BBOKDAA.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-L3QJ7DR2.js → chunk-7EXIKJ2M.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-7R25LRWQ.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-7SF3SVTT.js +84325 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-HISUA6LZ.js → chunk-7T7WNXJF.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-BBPJUBWD.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-X25J6H7V.js → chunk-BOGKO7FP.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/{chunk-WFAVWONI.js → chunk-BZNLCEGN.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-D4HH7IFA.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-EILGPAXW.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-EMMQJKAL.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-G5ASLGBT.js +1200 -0
- qairt_visualizer/core/ui/dist/browser/chunk-HZMVY4NU.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-IO73KNLH.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-IOU6BESM.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-IXLZJSOP.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-J6NORJ4T.js +12922 -0
- qairt_visualizer/core/ui/dist/browser/chunk-K46FUO3G.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-KSPZKKBK.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-M4TVKCAJ.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-MCN3FOIX.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-OA7IE3RF.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-OASGMCWW.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-OPIHR5IK.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-OWYSLMHU.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-NPWGWH4M.js → chunk-PDSQXAAJ.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-PMCRUXSG.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-PMGI3RWX.js +1941 -0
- qairt_visualizer/core/ui/dist/browser/chunk-QJ7EFB4C.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-RNOMX2NS.js +9190 -0
- qairt_visualizer/core/ui/dist/browser/chunk-RV22Q7F3.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-S7YXY2U7.js +761 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-TKZG6FLW.js → chunk-SCZY565A.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-SPFKX6RC.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-TR2WZXKJ.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-TSN4CR4L.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-TYZXU7LT.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-UG5BONNW.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-WRMDGTCT.js → chunk-UMHD32SF.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-WK74WSTM.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-YCC7P6Q6.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-YIR3WVP5.js +1875 -0
- qairt_visualizer/core/ui/dist/browser/chunk-YKOSK2YU.js +1 -0
- qairt_visualizer/core/ui/dist/browser/{chunk-7PTY53DS.js → chunk-ZGO67G6S.js} +1 -1
- qairt_visualizer/core/ui/dist/browser/chunk-ZMAMZCOZ.js +1 -0
- qairt_visualizer/core/ui/dist/browser/chunk-ZWJUX2JC.js +121 -0
- qairt_visualizer/core/ui/dist/browser/diff-cli.js +112393 -0
- qairt_visualizer/core/ui/dist/browser/dlc.js +132 -18
- qairt_visualizer/core/ui/dist/browser/index.html +1 -1
- qairt_visualizer/core/ui/dist/browser/main-3GHPD3FF.js +1 -0
- qairt_visualizer/core/ui/dist/browser/node-file-stream.js +185 -0
- qairt_visualizer/core/ui/dist/browser/package.json +1 -0
- qairt_visualizer/core/ui/dist/qairt_visualizer.app/Contents/Frameworks/QAIRT Visualizer Helper (GPU).app/Contents/Info.plist +1 -1
- qairt_visualizer/core/ui/dist/qairt_visualizer.app/Contents/Frameworks/QAIRT Visualizer Helper (Plugin).app/Contents/Info.plist +1 -1
- qairt_visualizer/core/ui/dist/qairt_visualizer.app/Contents/Frameworks/QAIRT Visualizer Helper (Renderer).app/Contents/Info.plist +1 -1
- qairt_visualizer/core/ui/dist/qairt_visualizer.app/Contents/Frameworks/QAIRT Visualizer Helper.app/Contents/Info.plist +1 -1
- qairt_visualizer/core/ui/dist/qairt_visualizer.app/Contents/Info.plist +1 -1
- qairt_visualizer/core/ui/dist/qairt_visualizer.app/Contents/Resources/app.asar +0 -0
- qairt_visualizer/core/ui/helpers/os_helpers.py +53 -0
- qairt_visualizer/core/ui/helpers/path_helpers.py +39 -0
- qairt_visualizer/core/ui/helpers/post_install.py +65 -169
- qairt_visualizer/core/ui/ui_runner.py +77 -6
- qairt_visualizer/core/ui/visualizer_web_server.py +2 -2
- qairt_visualizer/core/visualizer_service.py +72 -1
- qairt_visualizer/helpers/{ui_helpers.py → path_helpers.py} +4 -4
- qairt_visualizer/helpers/tools/download_info_helpers.py +135 -0
- qairt_visualizer/helpers/tools/node_bundler.py +74 -0
- qairt_visualizer/helpers/tools/node_targets.py +119 -0
- qairt_visualizer/models/download_info.py +204 -0
- {qairt_visualizer-0.8.0.dist-info → qairt_visualizer-0.9.0.dist-info}/METADATA +1 -1
- {qairt_visualizer-0.8.0.dist-info → qairt_visualizer-0.9.0.dist-info}/RECORD +101 -47
- qairt_visualizer/core/ui/dist/browser/chunk-43LQFSHA.js +0 -1
- qairt_visualizer/core/ui/dist/browser/chunk-5MAR72QP.js +0 -1
- qairt_visualizer/core/ui/dist/browser/chunk-EDLT7DPU.js +0 -120
- qairt_visualizer/core/ui/dist/browser/chunk-H7TLLCSI.js +0 -1
- qairt_visualizer/core/ui/dist/browser/chunk-LIDZK7LW.js +0 -19
- qairt_visualizer/core/ui/dist/browser/main-4KT5O4ZZ.js +0 -1
- {qairt_visualizer-0.8.0.dist-info → qairt_visualizer-0.9.0.dist-info}/LICENSE.pdf +0 -0
- {qairt_visualizer-0.8.0.dist-info → qairt_visualizer-0.9.0.dist-info}/WHEEL +0 -0
- {qairt_visualizer-0.8.0.dist-info → qairt_visualizer-0.9.0.dist-info}/entry_points.txt +0 -0
- {qairt_visualizer-0.8.0.dist-info → qairt_visualizer-0.9.0.dist-info}/top_level.txt +0 -0
qairt_visualizer/__init__.py
CHANGED
@@ -8,13 +8,16 @@
 """qairt_visualizer module exports"""

 import importlib
+import os

-from qairt_visualizer.apis import view
+from qairt_visualizer.apis import diff, view
 from qairt_visualizer.core.ui.helpers import post_install
 from qairt_visualizer.core.visualizer_logging.helpers import set_log_level
 from qairt_visualizer.models.display_options import DisplayOptions

-
+skip_post_install_run = os.getenv("SKIP_AUTOMATIC_POST_INSTALL_RUN", "false").lower()
+if skip_post_install_run == "false":
+    post_install.run()

-__all__ = ["view", "set_log_level", "DisplayOptions"]
+__all__ = ["view", "diff", "set_log_level", "DisplayOptions"]
 __version__ = importlib.metadata.version("qairt_visualizer")
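
For reference, a minimal sketch of how a consumer could opt out of the automatic post-install step added above. The environment variable name comes from the diff; setting it before the first import is an assumption based on the import-time check in __init__.py.

import os

# Hypothetical opt-out: any value other than "false" (case-insensitive) skips post_install.run()
os.environ["SKIP_AUTOMATIC_POST_INSTALL_RUN"] = "true"

import qairt_visualizer  # post_install.run() is not called during import
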
qairt_visualizer/apis.py
CHANGED
@@ -15,6 +15,7 @@ from qairt_visualizer.core.communicators.base_communicator_context import BaseCo
 from qairt_visualizer.core.communicators.factory_communicator import get_communicator
 from qairt_visualizer.core.launchers.base_ui_launcher_context import BaseUILauncherContext
 from qairt_visualizer.core.launchers.factory_ui_launcher import get_launcher
+from qairt_visualizer.core.launchers.node_launcher_context import NodeLauncherContext
 from qairt_visualizer.core.visualizer_logging.logger_constants import api_logger
 from qairt_visualizer.core.visualizer_logging.logging_config import LoggingConfig
 from qairt_visualizer.core.visualizer_service import VisualizerService
@@ -45,6 +46,8 @@ def view(
         visualization window to display.
     :param options: Customizes the visualization window behavior.
     """
+
+    print("Opening QAIRT Visualizer...\n")
     LoggingConfig.setup_logging()

     options = options or DisplayOptions()
@@ -100,3 +103,46 @@ async def _view(
     except Exception as e:  # pylint: disable=broad-exception-caught
         api_logger.debug(e, exc_info=True)
         api_logger.error("An error occurred when attempting to view: %s", str(e))
+
+
+def diff(
+    reference_model_path: str,
+    comparison_model_path: str,
+    output_path: Optional[str] = None,
+    profile: bool = False,
+) -> str:
+    """
+    Compare two models and return a diff report path.
+
+    :param reference_model_path: Path to the reference model file
+    :param comparison_model_path: Path to the comparison model file
+    :param output_path: Optional path to save the JSON diff report
+    :param profile: Display diff profiling metrics
+    :return: Path to the diff report
+    """
+
+    print("Starting model diff...\n")
+    LoggingConfig.setup_logging()
+
+    api_logger.info("Comparing models: %s vs %s", reference_model_path, comparison_model_path)
+
+    try:
+        node_launcher = NodeLauncherContext()
+        visualizer_service = VisualizerService(
+            ui_launcher=None, communicator=None, node_launcher=node_launcher
+        )
+
+        report_path = visualizer_service.diff(
+            reference_model_path=reference_model_path,
+            comparison_model_path=comparison_model_path,
+            output_path=output_path,
+            profile=profile,
+        )
+
+        api_logger.info("Diff operation completed successfully")
+        return report_path
+
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        api_logger.debug(e, exc_info=True)
+        api_logger.error("An error occurred during diff operation: %s", str(e))
+        raise
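
For context, a minimal usage sketch of the new top-level diff API; the model paths below are placeholders, and the keyword names mirror the signature added above:

import qairt_visualizer

# Compare two model files and get back the path of the generated JSON diff report.
report_path = qairt_visualizer.diff(
    reference_model_path="reference_model.dlc",    # placeholder path
    comparison_model_path="comparison_model.dlc",  # placeholder path
    output_path=None,                              # optional: where to save the JSON diff report
    profile=False,                                 # set True to display diff profiling metrics
)
print(report_path)
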

qairt_visualizer/constants/node_constants.py
ADDED
@@ -0,0 +1,10 @@
+# ==============================================================================
+#
+# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries.
+# All Rights Reserved.
+# Confidential and Proprietary - Qualcomm Technologies, Inc.
+#
+# ==============================================================================
+"""Node.js related constants"""
+
+NODE_VERSION = "v20.12.2"

qairt_visualizer/core/launchers/electron_launcher_context.py
CHANGED
@@ -10,14 +10,13 @@
 import os
 import platform
 import subprocess
-import threading
 import time
 from pathlib import Path
 from typing import List, Optional, cast

 from qairt_visualizer.core.launchers.base_ui_launcher_context import BaseUILauncherContext
 from qairt_visualizer.core.launchers.models.process_attributes import ProcessAttributes
-from qairt_visualizer.helpers.
+from qairt_visualizer.helpers.path_helpers import find_path_to


 class ElectronLauncherContext(BaseUILauncherContext):
@@ -38,7 +37,7 @@ class ElectronLauncherContext(BaseUILauncherContext):
             app_extension = ".exe"
         elif caller_platform == "darwin":
             app_extension = ".app"
-        return
+        return find_path_to(f"dist/{self.application_name}{app_extension}")

     def is_same_process(self, process_attrs: ProcessAttributes, process_name: str) -> bool:
         return process_attrs.proc_name == process_name
@@ -106,7 +105,7 @@ class ElectronLauncherContext(BaseUILauncherContext):
             proc = subprocess.Popen(  # pylint: disable=consider-using-with
                 command_line_args,
                 stdout=subprocess.DEVNULL,
-                stderr=subprocess.
+                stderr=subprocess.DEVNULL,
                 start_new_session=True,
                 text=True,
                 bufsize=1,
@@ -115,20 +114,6 @@
             raise RuntimeError(f"Spawning QAIRT visualizer application failed: {e}") from e
         self.set_pid(proc.pid)

-        capture_seconds = 3.0
-        if proc.stderr:
-            start_time = time.time()
-
-            def _capture_initial_stderr():
-                while True:
-                    if proc.poll() is not None or time.time() - start_time > capture_seconds:
-                        break
-                    line = proc.stderr.readline().rstrip() if proc.stderr else None
-                    if line:
-                        self.logger.error("Error while starting QAIRT Visualizer: %s", line)
-
-            threading.Thread(target=_capture_initial_stderr, daemon=True).start()
-
     def _get_existing_zmq_port(self) -> Optional[int]:
         """Check for existing ZMQ port from temp file"""
         try:

qairt_visualizer/core/launchers/node_launcher_context.py
ADDED
@@ -0,0 +1,88 @@
+# ==============================================================================
+#
+# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries.
+# All Rights Reserved.
+# Confidential and Proprietary - Qualcomm Technologies, Inc.
+#
+# ==============================================================================
+"""Node Launcher Context"""
+
+import os
+import platform
+import shutil
+import subprocess
+from pathlib import Path
+from typing import List, Optional
+
+from qairt_visualizer.core.ui.helpers.os_helpers import is_linux, is_mac, is_windows
+from qairt_visualizer.core.visualizer_logging.logger_constants import api_logger
+from qairt_visualizer.helpers.path_helpers import find_path_to
+
+
+class NodeLauncherContext:
+    """
+    Context class for launching and managing Node.js subprocess executions.
+    """
+
+    def __init__(self, node_home: str | None = None):
+        self.logger = api_logger
+        self.node_home = node_home
+
+    def launch(
+        self,
+        script_path: str,
+        args: List[str],
+        cwd: Optional[str] = None,
+    ) -> subprocess.CompletedProcess:
+        """
+        Execute program script with Node.js.
+
+        :param script_path: Path to the program script
+        :param args: List of arguments to pass to the script
+        :param cwd: Optional working directory for the subprocess
+        :return: CompletedProcess object containing the result
+        """
+        script_path_obj = Path(script_path)
+        working_dir = cwd or str(script_path_obj.parent)
+        node_path = (
+            self._get_node_path(platform.system().lower()) if self.node_home is None else self.node_home
+        )
+        cmd = [node_path, str(script_path_obj)] + args
+
+        self.logger.debug("Working directory: %s", working_dir)
+        self.logger.debug("Executing node command: %s", " ".join(cmd))
+
+        try:
+            return subprocess.run(
+                cmd,
+                text=True,
+                encoding="utf-8",
+                check=True,
+                cwd=working_dir,
+            )
+
+        except (subprocess.CalledProcessError, FileNotFoundError) as e:
+            error_msg = e.stderr if isinstance(e, subprocess.CalledProcessError) else str(e)
+            self.logger.debug("Node.js script execution failed", exc_info=True)
+            raise RuntimeError(f"Failed to execute Node.js script: {error_msg}") from None
+
+    def _get_node_path(self, caller_platform: str) -> str:
+        path = ""
+        if is_windows(caller_platform):
+            path = "node.exe"
+        elif is_mac(caller_platform) or is_linux(caller_platform):
+            path = "bin/node"
+        else:
+            raise ValueError(
+                f"Unknown platform to run node, platform={caller_platform}. Contact support and attach logs."
+            )
+        try:
+            return find_path_to(f"resources/node/{path}", "qairt_visualizer")
+        except (ValueError, FileNotFoundError):
+            global_node_path = shutil.which("node")
+            if os.getenv("SKIP_POST_INSTALL", "false").lower() == "true" and global_node_path is not None:
+                self.logger.debug(
+                    "SKIP_POST_INSTALL=true, unable to locate node path. Using global node instance."
+                )
+                return global_node_path
+            raise
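
A hypothetical usage sketch of the new launcher; the script path and arguments are placeholders, not files shipped in the wheel:

from qairt_visualizer.core.launchers.node_launcher_context import NodeLauncherContext

launcher = NodeLauncherContext()  # node_home=None: the node binary is resolved at launch() time,
                                  # preferring the bundled resources/node, then the global node
                                  # only when SKIP_POST_INSTALL=true
result = launcher.launch(
    script_path="/tmp/example_script.js",  # placeholder Node.js script
    args=["--input", "model.dlc"],         # placeholder arguments
)
# subprocess.run(..., check=True) either returns a CompletedProcess or the launcher
# re-raises as RuntimeError, so reaching this line means the script exited with code 0.
print(result.returncode)
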

qairt_visualizer/core/launchers/web_launcher_context.py
CHANGED
@@ -18,7 +18,7 @@ from IPython.display import HTML, display

 from qairt_visualizer.core.launchers.base_ui_launcher_context import BaseUILauncherContext
 from qairt_visualizer.core.launchers.models.process_attributes import ProcessAttributes
-from qairt_visualizer.helpers.
+from qairt_visualizer.helpers.path_helpers import find_path_to


 class WebLauncherContext(BaseUILauncherContext):
@@ -33,7 +33,7 @@ class WebLauncherContext(BaseUILauncherContext):

     def launch(self):
         port = self.detect_port(5555)
-        server_path =
+        server_path = find_path_to(self.application_name)
         cmd = [
             sys.executable,
             str(server_path),

qairt_visualizer/core/parsers/dlc_parser/dlc_parser.py
CHANGED
@@ -7,7 +7,9 @@
 # ==============================================================================
 """DLC Parser class"""

+import json
 import os
+import re
 from collections import defaultdict
 from typing import Any, Dict, Literal, Optional, Set, Tuple

@@ -56,7 +58,7 @@ class DlcParser:
         if not os.path.splitext(dlc_file_path)[1] == ".dlc":
             raise ValueError(f"The specified file {dlc_file_path} is not a DLC file")

-    def
+    def extract_mappings(self) -> DlcOpTensorMappings:
         """
         Creates a JSON serializable map of DLC ops and tensors to source,
         and source model ops and tensors back to DLC
@@ -66,39 +68,94 @@
         DLC ops/tensors
         """

-
-
-
-
+        source_mappings: Optional[OpTensorMappings] = None
+        has_valid_sdk_trace = "source.topology0" in self.model_reader.get_record_names()
+        if has_valid_sdk_trace:
+            graph = self.get_ir_graph(self.model_reader)
+            if graph is not None:
+                op_source = graph.get_trace_info().get_op_trace_info()
+                tensor_source = graph.get_trace_info().get_tensor_trace_info()
+                source_mappings = self._extract_onnx_mappings(op_source, tensor_source)
+        htp_file_data = self.get_htp_file()
+        backend_mappings = self.extract_htp_mappings(htp_file_data) if htp_file_data else None
         return DlcOpTensorMappings(
             dlc_model_path=self.dlc_path,
-            source_mappings=
+            source_mappings=source_mappings,
             # Add backend parsing here for phase 2
-            backend_mappings=
+            backend_mappings=backend_mappings,
         )

     # pylint: disable=too-many-locals
-    def
-    """
-
-
-
-
-
+    def extract_htp_mappings(self, data) -> Optional[OpTensorMappings]:
+        """Extract mappings between backend operations/tensors and their parent DLC ops.
+
+        The backend graph JSON contains nodes keyed by hex identifiers (e.g. "0x0000103000000016")
+        with a nested qnn_op_name->1544 field that gives the originating high-level
+        DLC op name (e.g. "_layernorm_0", "Input").
+
+        During backend compilation, each DLC operation may be decomposed into multiple backend operations.
+        Note: Backend tensors map to DLC ops, not DLC tensors, as the backend graph structure
+        links tensors to their producing/consuming operations rather than to tensor names as HTP graphs
+        do not have tensor data.
+
+        :param data: JSON string containing the backend graph structure
+        :return: OpTensorMappings with backend_mappings populated, or None if no valid mappings found
         """
-        dlc_ops_to_source_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
-        source_ops_to_dlc_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
-        dlc_tensors_to_source_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
-        source_tensors_to_dlc_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}

-
-
-
-
+        try:
+            data = json.loads(data)
+        except json.JSONDecodeError as exc:
+            raise ValueError(f"File data {data} is not valid JSON: {exc}") from exc
+
+        graph = data.get("graph", {})
+        nodes: Dict[str, Any] = graph.get("nodes", {})
+
+        # Tracks cross-domain relationships that will be used to:
+        # 1. Build direct source↔target mappings
+        # 2. Derive same-graph mappings via transitive relationships
+        source_item_to_target_set: Dict[Tuple[str, str], Set[str]] = defaultdict(set)
+        target_item_to_source_set: Dict[Tuple[str, str], Set[str]] = defaultdict(set)
+
+        # Parse backend graph nodes to build mappings:
+        # - DLC op ↔ backend op
+        # - DLC op ↔ backend tensors (inputs/outputs)
+        for backend_op_id, node in nodes.items():
+            scalar_params = node.get("scalar_params", {})
+            qnn_name_entry = scalar_params.get("qnn_op_name", {})
+            htp_string_key = "1544"
+            qnn_op_name = None
+            if isinstance(qnn_name_entry, dict):
+                qnn_op_name = qnn_name_entry.get(htp_string_key)
+            if not qnn_op_name:
+                continue
+
+            source_item_to_target_set[(OPS_KEY, qnn_op_name)].add(f"{OPS_KEY}:{backend_op_id}")
+            target_item_to_source_set[(OPS_KEY, backend_op_id)].add(f"{OPS_KEY}:{qnn_op_name}")
+
+            input_names = node.get("input_names", []) or []
+            output_names = node.get("output_names", []) or []
+            referenced_tensors = set(input_names + output_names)
+            for tensor_id in referenced_tensors:
+                source_item_to_target_set[(OPS_KEY, qnn_op_name)].add(f"{TENSOR_KEY}:{tensor_id}")
+                target_item_to_source_set[(TENSOR_KEY, tensor_id)].add(f"{OPS_KEY}:{qnn_op_name}")

-
-        source_item_to_dlc_set: Dict[Tuple[str, str], Set[str]] = defaultdict(set)
+        return self._build_op_tensor_mappings(source_item_to_target_set, target_item_to_source_set)

+    # pylint: disable=too-many-locals
+    def _extract_onnx_mappings(self, dlc_op_map, dlc_tensor_map) -> Optional[OpTensorMappings]:
+        """
+        Takes op and tensor traces and returns mappings between ONNX (source) and DLC (target):
+        - ONNX op -> DLC op/tensor (source_ops_to_target_ops_and_tensors)
+        - DLC op -> ONNX op/tensor (target_ops_to_source_ops_and_tensors)
+        - ONNX tensor -> DLC op/tensor (source_tensors_to_target_ops_and_tensors)
+        - DLC tensor -> ONNX op/tensor (target_tensors_to_source_ops_and_tensors)
+        Plus same-domain mappings for ONNX (source) and DLC (target).
+        """
+        # Track relationships for building same-graph mappings
+        source_item_to_target_set: Dict[Tuple[str, str], Set[str]] = defaultdict(set)
+        target_item_to_source_set: Dict[Tuple[str, str], Set[str]] = defaultdict(set)
+
+        # Extract ONNX -> DLC mappings
         for dlc_type, dlc_items in zip([OPS_KEY, TENSOR_KEY], [dlc_op_map, dlc_tensor_map]):
             for dlc_item in dlc_items:
                 dlc_name = dlc_item.get_name()
@@ -109,28 +166,60 @@
                     OPS_KEY if source_item.get_type() == OP else TENSOR_KEY
                 )

-
-
-
-
-                else:
-                    self.update_mapping(
-                        dlc_tensors_to_source_ops_and_tensors, dlc_name, source_type, source_name
-                    )
-
-                dlc_type_literal: Any = dlc_type
-                if source_type == OPS_KEY:
-                    self.update_mapping(
-                        source_ops_to_dlc_ops_and_tensors, source_name, dlc_type_literal, dlc_name
-                    )
-                else:
-                    self.update_mapping(
-                        source_tensors_to_dlc_ops_and_tensors, source_name, dlc_type_literal, dlc_name
-                    )
-
-                dlc_item_to_source_set[(dlc_type, dlc_name)].add(f"{source_type}:{source_name}")
-                source_item_to_dlc_set[(source_type, source_name)].add(f"{dlc_type}:{dlc_name}")
+                target_item_to_source_set[(dlc_type, dlc_name)].add(f"{source_type}:{source_name}")
+                source_item_to_target_set[(source_type, source_name)].add(f"{dlc_type}:{dlc_name}")
+
+        return self._build_op_tensor_mappings(source_item_to_target_set, target_item_to_source_set)

+    def _build_op_tensor_mappings(
+        self,
+        source_item_to_target_set: Dict[Tuple[str, str], Set[str]],
+        target_item_to_source_set: Dict[Tuple[str, str], Set[str]],
+    ) -> Optional[OpTensorMappings]:
+        """
+        Build OpTensorMappings from the collected source-to-target and target-to-source relationships.
+
+        :param source_item_to_target_set: Map of (item_type, item_name) -> set of "type:name" strings
+        :param target_item_to_source_set: Map of (item_type, item_name) -> set of "type:name" strings
+        :return: OpTensorMappings object or None if no mappings found
+        """
+        source_ops_to_target_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+        target_ops_to_source_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+        source_tensors_to_target_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+        target_tensors_to_source_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+
+        source_ops_to_source_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+        source_tensors_to_source_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+        target_ops_to_target_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+        target_tensors_to_target_ops_and_tensors: Dict[str, Dict[str, Set[str]]] = {}
+
+        for (item_type, item_name), related_set in source_item_to_target_set.items():
+            for related_str in related_set:
+                related_type_str, related_name = related_str.split(":", 1)
+                related_type: Literal["ops", "tensors"] = related_type_str  # type: ignore
+                if item_type == OPS_KEY:
+                    self.update_mapping(
+                        source_ops_to_target_ops_and_tensors, item_name, related_type, related_name
+                    )
+                else:
+                    self.update_mapping(
+                        source_tensors_to_target_ops_and_tensors, item_name, related_type, related_name
+                    )
+
+        for (item_type, item_name), related_set in target_item_to_source_set.items():
+            for related_str in related_set:
+                related_type_str, related_name = related_str.split(":", 1)
+                related_type: Literal["ops", "tensors"] = related_type_str  # type: ignore
+                if item_type == OPS_KEY:
+                    self.update_mapping(
+                        target_ops_to_source_ops_and_tensors, item_name, related_type, related_name
+                    )
+                else:
+                    self.update_mapping(
+                        target_tensors_to_source_ops_and_tensors, item_name, related_type, related_name
+                    )
+
+        # Builds same-graph mappings
         def build_intra_relations(items_map, reverse_map, ops_map, tensors_map):
             for (item_type, item_name), related_set in items_map.items():
                 for related_str in related_set:
@@ -145,18 +234,17 @@
                         self.update_mapping(tensors_map, item_name, other_type, other_name)

         build_intra_relations(
-
-
-            dlc_ops_to_dlc_ops_and_tensors,
-            dlc_tensors_to_dlc_ops_and_tensors,
-        )
-
-        build_intra_relations(
-            source_item_to_dlc_set,
-            dlc_item_to_source_set,
+            source_item_to_target_set,
+            target_item_to_source_set,
             source_ops_to_source_ops_and_tensors,
             source_tensors_to_source_ops_and_tensors,
         )
+        build_intra_relations(
+            target_item_to_source_set,
+            source_item_to_target_set,
+            target_ops_to_target_ops_and_tensors,
+            target_tensors_to_target_ops_and_tensors,
+        )

         def convert_sets(d: Dict[str, Dict[str, Set[str]]]) -> Dict[str, MappingGroup]:
             return {
@@ -164,29 +252,38 @@
                 for k, v in d.items()
             }

-
-
-
-
-
-
-
-
+        # Check if any mappings exist
+        has_mappings = (
+            source_ops_to_target_ops_and_tensors
+            or source_tensors_to_target_ops_and_tensors
+            or target_ops_to_source_ops_and_tensors
+            or target_tensors_to_source_ops_and_tensors
+            or source_ops_to_source_ops_and_tensors
+            or source_tensors_to_source_ops_and_tensors
+            or target_ops_to_target_ops_and_tensors
+            or target_tensors_to_target_ops_and_tensors
+        )
+
+        if has_mappings:
+            return OpTensorMappings(
+                source_ops_to_target_ops_and_tensors=convert_sets(source_ops_to_target_ops_and_tensors),
+                target_ops_to_source_ops_and_tensors=convert_sets(target_ops_to_source_ops_and_tensors),
+                source_tensors_to_target_ops_and_tensors=convert_sets(
+                    source_tensors_to_target_ops_and_tensors
+                ),
+                target_tensors_to_source_ops_and_tensors=convert_sets(
+                    target_tensors_to_source_ops_and_tensors
+                ),
                 source_ops_to_source_ops_and_tensors=convert_sets(source_ops_to_source_ops_and_tensors),
                 source_tensors_to_source_ops_and_tensors=convert_sets(
                     source_tensors_to_source_ops_and_tensors
                 ),
+                target_ops_to_target_ops_and_tensors=convert_sets(target_ops_to_target_ops_and_tensors),
+                target_tensors_to_target_ops_and_tensors=convert_sets(
+                    target_tensors_to_target_ops_and_tensors
+                ),
             )
-
-            or len(source_ops_to_dlc_ops_and_tensors)
-            or len(dlc_tensors_to_source_ops_and_tensors)
-            or len(source_tensors_to_dlc_ops_and_tensors)
-            or len(dlc_ops_to_dlc_ops_and_tensors)
-            or len(dlc_tensors_to_dlc_ops_and_tensors)
-            or len(source_ops_to_source_ops_and_tensors)
-            or len(source_tensors_to_source_ops_and_tensors)
-            else None
-        )
+        return None

     def update_mapping(
         self,
@@ -207,6 +304,8 @@
         :return: An IR graph object
         """
         graph_names: set = model_reader.get_ir_graph_names()
+        if len(graph_names) == 0:
+            return None
         # We only support single IR graph, so there's no need to iterate
         # It also might make sense to make 'graph' a member variable in the future
         if len(graph_names) > 1:
@@ -219,6 +318,8 @@
         model with the --enable_framework_trace argument. Source topology will always be the
         first and only entry 0
         """
+        if not "source.topology0" in self.model_reader.get_record_names():
+            return ""
         # Eventually when we support multiple source types, we need to
         # get new enum values from the converter team to use for recordType
         topology = self.model_reader.extract_record(
@@ -230,6 +331,36 @@
         )
         return topology.get_bytes()

+    # Contains both HTP topology and mappings
+    def get_htp_file(self):
+        """
+        Gets the HTP file from the DLC, if available
+        :return: The HTP topology + mappings file
+        """
+        try:
+            record_names = self.model_reader.get_record_names()
+            htp_record_name = None
+            for record_name in record_names:
+                if re.match(r"^htp\.graph\.mapping\.", record_name):
+                    htp_record_name = record_name
+                    break
+
+            if htp_record_name is None:
+                return None
+
+            topology = self.model_reader.extract_record(
+                # There is no DlcRecordType for HTP, and it appears
+                # to be a bug in their code that this value isn't used at
+                # all but is still required
+                recordName=htp_record_name,
+                recordType=modeltools.DlcRecordType.SOURCE_TOPOLOGY,
+            )
+
+            return topology.get_bytes().decode("utf-8")
+        except Exception:  # pylint: disable=broad-exception-caught
+            # HTP mappings may not necessarily be in the DLC
+            return None
+
     def close_file(self):
         """
         Closes the model_reader file
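
For orientation, an illustrative sketch of the backend graph JSON shape that extract_htp_mappings() walks, inferred from the parsing logic above; all identifiers and values are made-up placeholders:

backend_graph = {
    "graph": {
        "nodes": {
            "0x0000103000000016": {  # backend op keyed by a hex identifier
                "scalar_params": {
                    # "1544" is the HTP string key the parser reads to recover the DLC op name
                    "qnn_op_name": {"1544": "_layernorm_0"},
                },
                "input_names": ["0x00001030000000aa"],   # backend tensor ids (placeholders)
                "output_names": ["0x00001030000000ab"],
            },
        },
    },
}
# extract_htp_mappings() receives this structure as a JSON string, e.g. json.dumps(backend_graph).
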

qairt_visualizer/core/parsers/dlc_parser/get_htp_topology.py
ADDED
@@ -0,0 +1,40 @@
+# ==============================================================================
+#
+# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries.
+# All Rights Reserved.
+# Confidential and Proprietary - Qualcomm Technologies, Inc.
+#
+# ==============================================================================
+"""Extract HTP Topology From DLC"""
+
+import sys
+
+from qairt_visualizer.core.parsers.dlc_parser.dlc_parser import DlcParser
+from qairt_visualizer.core.parsers.dlc_parser.helpers.exception_helpers import handle_exception
+from qairt_visualizer.core.parsers.dlc_parser.helpers.file_helpers import save_topology_to_tmp
+
+
+def main():
+    """
+    CLI entry point: Gets the DLC's topology, if available.
+    """
+    try:
+        if len(sys.argv) == 1:
+            raise ValueError("An argument for a DLC file path was not provided")
+        dlc_path = sys.argv[1]
+
+        reader = DlcParser(dlc_path)
+        htp_topology = reader.get_htp_file()
+
+        if htp_topology:
+            sys.stdout.write(save_topology_to_tmp(htp_topology, extension=".json"))
+            sys.stdout.flush()
+        else:
+            sys.stdout.write("")
+            sys.stdout.flush()
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        handle_exception(e)
+
+
+if __name__ == "__main__":
+    main()
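
A hypothetical consumer of this CLI entry point; running the module with python -m is an assumption based on the __main__ guard, and "model.dlc" is a placeholder path:

import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "qairt_visualizer.core.parsers.dlc_parser.get_htp_topology", "model.dlc"],
    capture_output=True,
    text=True,
    check=True,
)
# stdout carries the temp .json path when HTP mappings exist, otherwise it is empty.
htp_topology_path = result.stdout.strip() or None
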