ttnn-visualizer 0.42.0__py3-none-any.whl → 0.43.1__py3-none-any.whl
This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- ttnn_visualizer/__init__.py +0 -1
- ttnn_visualizer/app.py +15 -4
- ttnn_visualizer/csv_queries.py +82 -48
- ttnn_visualizer/decorators.py +38 -15
- ttnn_visualizer/exceptions.py +29 -1
- ttnn_visualizer/file_uploads.py +1 -0
- ttnn_visualizer/instances.py +42 -15
- ttnn_visualizer/models.py +12 -7
- ttnn_visualizer/remote_sqlite_setup.py +37 -30
- ttnn_visualizer/requirements.txt +1 -0
- ttnn_visualizer/serializers.py +1 -0
- ttnn_visualizer/settings.py +9 -5
- ttnn_visualizer/sftp_operations.py +144 -125
- ttnn_visualizer/sockets.py +9 -3
- ttnn_visualizer/static/assets/{allPaths-wwXsGKJ2.js → allPaths-CGmhlOs-.js} +1 -1
- ttnn_visualizer/static/assets/{allPathsLoader-BK9jqlVe.js → allPathsLoader-CH9za42_.js} +2 -2
- ttnn_visualizer/static/assets/index-B-fsa5Ru.js +1 -0
- ttnn_visualizer/static/assets/{index-C1rJBrMl.css → index-C-t6jBt9.css} +1 -1
- ttnn_visualizer/static/assets/{index-Ybr1HJxx.js → index-DEb3r1jy.js} +69 -69
- ttnn_visualizer/static/assets/index-DLOviMB1.js +1 -0
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-CauQGZHk.js → splitPathsBySizeLoader-CP-kodGu.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/__init__.py +0 -1
- ttnn_visualizer/tests/test_queries.py +0 -1
- ttnn_visualizer/tests/test_serializers.py +2 -2
- ttnn_visualizer/utils.py +7 -3
- ttnn_visualizer/views.py +250 -82
- {ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/METADATA +5 -1
- ttnn_visualizer-0.43.1.dist-info/RECORD +45 -0
- ttnn_visualizer/static/assets/index-BKzgFDAn.js +0 -1
- ttnn_visualizer/static/assets/index-BvSuWPlB.js +0 -1
- ttnn_visualizer-0.42.0.dist-info/RECORD +0 -45
- {ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/LICENSE +0 -0
- {ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/top_level.txt +0 -0
ttnn_visualizer/views.py
CHANGED
@@ -7,6 +7,7 @@ import json
 import logging
 import re
 import shutil
+import subprocess
 import time
 from http import HTTPStatus
 from pathlib import Path
@@ -23,12 +24,22 @@ from flask import (
     request,
 )

-from ttnn_visualizer.csv_queries import
-
+from ttnn_visualizer.csv_queries import (
+    DeviceLogProfilerQueries,
+    OpsPerformanceQueries,
+    OpsPerformanceReportQueries,
+    NPEQueries,
+)
 from ttnn_visualizer.decorators import with_instance, local_only
 from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import DataFormatError
 from ttnn_visualizer.exceptions import RemoteConnectionException
+from ttnn_visualizer.exceptions import (
+    SSHException,
+    AuthenticationException,
+    NoValidConnectionsError,
+    AuthenticationFailedException,
+)
 from ttnn_visualizer.file_uploads import (
     extract_folder_name_from_files,
     extract_npe_name,
@@ -36,7 +47,8 @@ from ttnn_visualizer.file_uploads import (
     validate_files,
 )
 from ttnn_visualizer.instances import (
-    get_instances,
+    get_instances,
+    update_instance,
 )
 from ttnn_visualizer.models import (
     RemoteReportFolder,
@@ -53,7 +65,8 @@ from ttnn_visualizer.serializers import (
     serialize_buffer_pages,
     serialize_operation_buffers,
     serialize_operations_buffers,
-    serialize_devices,
+    serialize_devices,
+    serialize_buffer,
 )
 from ttnn_visualizer.sftp_operations import (
     sync_remote_profiler_folders,
@@ -65,8 +78,6 @@ from ttnn_visualizer.sftp_operations import (
     sync_remote_performance_folders,
     get_cluster_desc,
 )
-from ttnn_visualizer.exceptions import SSHException, AuthenticationException, NoValidConnectionsError
-import subprocess
 from ttnn_visualizer.utils import (
     get_cluster_descriptor_path,
     read_last_synced_file,
@@ -85,23 +96,31 @@ def handle_ssh_subprocess_error(e: subprocess.CalledProcessError, remote_connect
     stderr = e.stderr.lower() if e.stderr else ""

     # Check for authentication failures
-    if any(
-
-
-
-
-
-
-
+    if any(
+        auth_err in stderr
+        for auth_err in [
+            "permission denied",
+            "authentication failed",
+            "publickey",
+            "password",
+            "host key verification failed",
+        ]
+    ):
+        raise AuthenticationException(
+            f"SSH authentication failed: {remote_connection.username}@{remote_connection.host}: Permission denied (publickey,password)"
+        )

     # Check for connection failures
-    elif any(
-
-
-
-
-
-
+    elif any(
+        conn_err in stderr
+        for conn_err in [
+            "connection refused",
+            "network is unreachable",
+            "no route to host",
+            "name or service not known",
+            "connection timed out",
+        ]
+    ):
         raise NoValidConnectionsError(f"SSH connection failed: {e.stderr}")

     # Check for general SSH protocol errors
@@ -115,42 +134,76 @@ def handle_ssh_subprocess_error(e: subprocess.CalledProcessError, remote_connect

 def test_ssh_connection(connection) -> bool:
     """Test SSH connection by running a simple command."""
-    ssh_cmd = ["ssh"]
+    ssh_cmd = ["ssh", "-o", "PasswordAuthentication=no"]

     # Handle non-standard SSH port
     if connection.port != 22:
         ssh_cmd.extend(["-p", str(connection.port)])

-    ssh_cmd.extend(
-        f"{connection.username}@{connection.host}",
-
-    ])
+    ssh_cmd.extend(
+        [f"{connection.username}@{connection.host}", "echo 'SSH connection test'"]
+    )

     try:
         result = subprocess.run(
-            ssh_cmd,
-            capture_output=True,
-            text=True,
-            check=True,
-            timeout=10
+            ssh_cmd, capture_output=True, text=True, check=True, timeout=10
         )
         return True
     except subprocess.CalledProcessError as e:
         if e.returncode == 255:  # SSH protocol errors
-
+            try:
+                handle_ssh_subprocess_error(e, connection)
+            except AuthenticationException:
+                # Convert to AuthenticationFailedException for proper HTTP 422 response
+                user_message = (
+                    "SSH authentication failed. This application requires SSH key-based authentication. "
+                    "Please ensure your SSH public key is added to the authorized_keys file on the remote server. "
+                    "Password authentication is not supported."
+                )
+                logger.info(
+                    f"SSH authentication failed for {connection.username}@{connection.host}: {user_message}"
+                )
+                raise AuthenticationFailedException(message=user_message)
+            except NoValidConnectionsError as ssh_err:
+                user_message = (
+                    f"Unable to establish SSH connection to {connection.host}. "
+                    "Please check the hostname, port, and network connectivity. "
+                    "Ensure SSH key-based authentication is properly configured."
+                )
+                logger.warning(
+                    f"SSH connection failed for {connection.username}@{connection.host}: {user_message}"
+                )
+                raise RemoteConnectionException(
+                    message=user_message, status=ConnectionTestStates.FAILED
+                )
+            except SSHException as ssh_err:
+                user_message = f"SSH connection error to {connection.host}: {str(ssh_err)}. Ensure SSH key-based authentication is properly configured."
+                logger.warning(
+                    f"SSH error for {connection.username}@{connection.host}: {user_message}"
+                )
+                raise RemoteConnectionException(
+                    message=user_message, status=ConnectionTestStates.FAILED
+                )
         else:
+            error_message = f"SSH connection test failed: {e.stderr}"
+            logger.error(
+                f"SSH test failed for {connection.username}@{connection.host}: {error_message}"
+            )
             raise RemoteConnectionException(
-                message=
-                status=ConnectionTestStates.FAILED
+                message=error_message, status=ConnectionTestStates.FAILED
             )
             return False
     except subprocess.TimeoutExpired:
+        timeout_message = "SSH connection test timed out"
+        logger.warning(
+            f"SSH timeout for {connection.username}@{connection.host}: {timeout_message}"
+        )
         raise RemoteConnectionException(
-            message=
-            status=ConnectionTestStates.FAILED
+            message=timeout_message, status=ConnectionTestStates.FAILED
         )
         return False

+
 logger = logging.getLogger(__name__)

 api = Blueprint("api", __name__)
@@ -466,7 +519,7 @@ def get_profiler_data_list(instance: Instance):
     # Doesn't handle remote at the moment
     # is_remote = True if instance.remote_connection else False
     # config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
-    config_key =
+    config_key = "LOCAL_DATA_DIRECTORY"
     data_directory = Path(current_app.config[config_key])

     # if is_remote:
@@ -483,18 +536,26 @@ def get_profiler_data_list(instance: Instance):
     if current_app.config["SERVER_MODE"]:
         session_instances = session.get("instances", [])
         instances = get_instances(session_instances)
-        db_paths = [
+        db_paths = [
+            instance.profiler_path for instance in instances if instance.profiler_path
+        ]
         db_directory_names = [str(Path(db_path).parent.name) for db_path in db_paths]
         session_paths = session.get("profiler_paths", [])
-        session_directory_names = [
+        session_directory_names = [
+            str(Path(session_path).parent.name) for session_path in session_paths
+        ]
         demo_directory_names = []
         demo_pattern = re.compile(r"^demo", re.IGNORECASE)
         for report in path.glob("*"):
             if demo_pattern.match(report.name):
                 demo_directory_names.append(report.name)
-        directory_names = list(
+        directory_names = list(
+            set(db_directory_names + session_directory_names + demo_directory_names)
+        )
     else:
-        directory_names = [
+        directory_names = [
+            directory.name for directory in path.iterdir() if directory.is_dir()
+        ]

     # Sort directory names by modified time (most recent first)
     def get_modified_time(dir_name):
@@ -539,25 +600,40 @@ def delete_profiler_report(profiler_name, instance: Instance):
     data_directory = Path(current_app.config[config_key])

     if not profiler_name:
-        return Response(
+        return Response(
+            status=HTTPStatus.BAD_REQUEST, response="Report name is required."
+        )

     if is_remote:
-        connection = RemoteConnection.model_validate(
-
+        connection = RemoteConnection.model_validate(
+            instance.remote_connection, strict=False
+        )
+        path = (
+            data_directory
+            / connection.host
+            / current_app.config["PROFILER_DIRECTORY_NAME"]
+        )
     else:
-        path =
+        path = (
+            data_directory
+            / current_app.config["PROFILER_DIRECTORY_NAME"]
+            / profiler_name
+        )

     if instance.active_report and instance.active_report.profiler_name == profiler_name:
         instance_id = request.args.get("instanceId")
-        update_instance(instance_id=instance_id,profiler_name="")
+        update_instance(instance_id=instance_id, profiler_name="")

     if path.exists() and path.is_dir():
         shutil.rmtree(path)
     else:
-        return Response(
-
-
+        return Response(
+            status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}"
+        )

+    return Response(
+        status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}"
+    )


 @api.route("/performance", methods=["GET"])
@@ -574,21 +650,37 @@ def get_performance_data_list(instance: Instance):
     if current_app.config["SERVER_MODE"]:
         session_instances = session.get("instances", [])
         instances = get_instances(session_instances)
-        db_paths = [
+        db_paths = [
+            instance.performance_path
+            for instance in instances
+            if instance.performance_path
+        ]
         db_directory_names = [str(Path(db_path).name) for db_path in db_paths]
         session_paths = session.get("performance_paths", [])
-        session_directory_names = [
+        session_directory_names = [
+            str(Path(session_path).name) for session_path in session_paths
+        ]
         demo_directory_names = []
         demo_pattern = re.compile(r"^demo", re.IGNORECASE)
         for report in path.glob("*"):
             if demo_pattern.match(report.name):
                 demo_directory_names.append(report.name)
-        directory_names = list(
+        directory_names = list(
+            set(db_directory_names + session_directory_names + demo_directory_names)
+        )
     else:
         if is_remote:
-            connection = RemoteConnection.model_validate(
-
-
+            connection = RemoteConnection.model_validate(
+                instance.remote_connection, strict=False
+            )
+            path = (
+                data_directory
+                / connection.host
+                / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+            )
+        directory_names = [
+            directory.name for directory in path.iterdir() if directory.is_dir()
+        ]

     valid_dirs = []

@@ -648,24 +740,43 @@ def delete_performance_report(performance_name, instance: Instance):
     data_directory = Path(current_app.config[config_key])

     if not performance_name:
-        return Response(
+        return Response(
+            status=HTTPStatus.BAD_REQUEST, response="Report name is required."
+        )

     if is_remote:
-        connection = RemoteConnection.model_validate(
-
+        connection = RemoteConnection.model_validate(
+            instance.remote_connection, strict=False
+        )
+        path = (
+            data_directory
+            / connection.host
+            / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+        )
     else:
-        path =
+        path = (
+            data_directory
+            / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+            / performance_name
+        )

-    if
+    if (
+        instance.active_report
+        and instance.active_report.performance_name == performance_name
+    ):
         instance_id = request.args.get("instanceId")
-        update_instance(instance_id=instance_id,performance_name="")
+        update_instance(instance_id=instance_id, performance_name="")

     if path.exists() and path.is_dir():
         shutil.rmtree(path)
     else:
-        return Response(
+        return Response(
+            status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}"
+        )

-    return Response(
+    return Response(
+        status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}"
+    )


 @api.route("/performance/perf-results/raw", methods=["GET"])
@@ -714,6 +825,7 @@ def get_performance_data_raw(instance: Instance):
         headers={"Content-Disposition": "attachment; filename=profile_log_device.csv"},
     )

+
 @api.route("/performance/npe/manifest", methods=["GET"])
 @with_instance
 def get_npe_manifest(instance: Instance):
@@ -735,6 +847,11 @@ def get_npe_timeline(instance: Instance):

     filename = request.args.get("filename", default=None)

+    if not filename:
+        return jsonify({})
+
+    filename = Path(filename).name
+
     try:
         content = NPEQueries.get_npe_timeline(instance, filename=filename)
     except FileNotFoundError:
@@ -764,8 +881,13 @@ def get_devices(instance: Instance):
 @api.route("/local/upload/profiler", methods=["POST"])
 def create_profiler_files():
     files = request.files.getlist("files")
-    folder_name = request.form.get(
-
+    folder_name = request.form.get(
+        "folderName"
+    )  # Optional folder name - Used for Safari compatibility
+    profiler_directory = (
+        current_app.config["LOCAL_DATA_DIRECTORY"]
+        / current_app.config["PROFILER_DIRECTORY_NAME"]
+    )

     if not validate_files(files, {"db.sqlite", "config.json"}, folder_name=folder_name):
         return StatusMessage(
@@ -819,10 +941,11 @@ def create_profiler_files():
         "reportName": report_name,
     }

+
 @api.route("/local/upload/performance", methods=["POST"])
 def create_performance_files():
     files = request.files.getlist("files")
-    folder_name = request.form.get("folderName")
+    folder_name = request.form.get("folderName")  # Optional folder name
     data_directory = Path(current_app.config["LOCAL_DATA_DIRECTORY"])

     if not validate_files(
@@ -867,7 +990,9 @@ def create_performance_files():
         performance_path=performance_path,
     )

-    session["performance_paths"] = session.get("performance_paths", []) + [
+    session["performance_paths"] = session.get("performance_paths", []) + [
+        str(performance_path)
+    ]
     session.permanent = True

     return StatusMessage(
@@ -881,7 +1006,9 @@ def create_npe_files():
     data_directory = current_app.config["LOCAL_DATA_DIRECTORY"]

     for file in files:
-        if not file.filename.endswith(".json") and not file.filename.endswith(
+        if not file.filename.endswith(".json") and not file.filename.endswith(
+            ".npeviz.zst"
+        ):
             return StatusMessage(
                 status=ConnectionTestStates.FAILED,
                 message="NPE requires a valid .json or .npeviz.zst file",
@@ -898,14 +1025,14 @@ def create_npe_files():

     instance_id = request.args.get("instanceId")
     npe_path = str(paths[0])
-    update_instance(
+    update_instance(
+        instance_id=instance_id, npe_name=npe_name, clear_remote=True, npe_path=npe_path
+    )

     session["npe_paths"] = session.get("npe_paths", []) + [str(npe_path)]
     session.permanent = True

-    return StatusMessage(
-        status=ConnectionTestStates.OK, message="Success"
-    ).model_dump()
+    return StatusMessage(status=ConnectionTestStates.OK, message="Success").model_dump()


 @api.route("/remote/profiler", methods=["POST"])
@@ -919,7 +1046,12 @@ def get_remote_folders_profiler():
     for rf in remote_folders:
         directory_name = Path(rf.remotePath).name
         remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-        local_path =
+        local_path = (
+            remote_data_directory
+            / current_app.config["PROFILER_DIRECTORY_NAME"]
+            / connection.host
+            / directory_name
+        )
         logger.info(f"Checking last synced for {directory_name}")
         rf.lastSynced = read_last_synced_file(str(local_path))
         if not rf.lastSynced:
@@ -938,14 +1070,21 @@ def get_remote_folders_performance():
     )

     try:
-        remote_performance_folders: List[RemoteReportFolder] =
-
+        remote_performance_folders: List[RemoteReportFolder] = (
+            get_remote_performance_folders(
+                RemoteConnection.model_validate(connection, strict=False)
+            )
         )

         for rf in remote_performance_folders:
             performance_name = Path(rf.remotePath).name
             remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-            local_path =
+            local_path = (
+                remote_data_directory
+                / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+                / connection.host
+                / performance_name
+            )
             logger.info(f"Checking last synced for {performance_name}")
             rf.lastSynced = read_last_synced_file(str(local_path))
             if not rf.lastSynced:
@@ -1009,6 +1148,10 @@ def test_remote_folder():
     try:
         test_ssh_connection(connection)
         add_status(ConnectionTestStates.OK.value, "SSH connection established")
+    except AuthenticationFailedException as e:
+        # Return 422 for authentication failures
+        add_status(ConnectionTestStates.FAILED.value, e.message)
+        return [status.model_dump() for status in statuses], e.http_status
     except RemoteConnectionException as e:
         add_status(ConnectionTestStates.FAILED.value, e.message)

@@ -1017,6 +1160,9 @@ def test_remote_folder():
     try:
         check_remote_path_exists(connection, "profilerPath")
         add_status(ConnectionTestStates.OK.value, "Memory folder path exists")
+    except AuthenticationFailedException as e:
+        add_status(ConnectionTestStates.FAILED.value, e.message)
+        return [status.model_dump() for status in statuses], e.http_status
     except RemoteConnectionException as e:
         add_status(ConnectionTestStates.FAILED.value, e.message)

@@ -1025,6 +1171,9 @@ def test_remote_folder():
     try:
         check_remote_path_exists(connection, "performancePath")
         add_status(ConnectionTestStates.OK.value, "Performance folder path exists")
+    except AuthenticationFailedException as e:
+        add_status(ConnectionTestStates.FAILED.value, e.message)
+        return [status.model_dump() for status in statuses], e.http_status
     except RemoteConnectionException as e:
         add_status(ConnectionTestStates.FAILED.value, e.message)

@@ -1032,6 +1181,9 @@ def test_remote_folder():
     if not has_failures():
         try:
             check_remote_path_for_reports(connection)
+        except AuthenticationFailedException as e:
+            add_status(ConnectionTestStates.FAILED.value, e.message)
+            return [status.model_dump() for status in statuses], e.http_status
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED.value, e.message)

@@ -1043,6 +1195,9 @@ def test_remote_folder():
         try:
             check_sqlite_path(connection)
             add_status(ConnectionTestStates.OK, "SQLite binary found.")
+        except AuthenticationFailedException as e:
+            add_status(ConnectionTestStates.FAILED.value, e.message)
+            return [status.model_dump() for status in statuses], e.http_status
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED, e.message)

@@ -1153,14 +1308,21 @@ def use_remote_folder():
     remote_performance_folder = None

     if profile:
-        remote_performance_folder = RemoteReportFolder.model_validate(
+        remote_performance_folder = RemoteReportFolder.model_validate(
+            profile, strict=False
+        )
         performance_name = remote_performance_folder.reportName

     data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
     profiler_name = folder.remotePath.split("/")[-1]
     folder_name = folder.remotePath.split("/")[-1]

-    connection_directory = Path(
+    connection_directory = Path(
+        data_directory,
+        connection.host,
+        current_app.config["PROFILER_DIRECTORY_NAME"],
+        folder_name,
+    )

     if not connection.useRemoteQuerying and not connection_directory.exists():
         return Response(
@@ -1168,7 +1330,9 @@ def use_remote_folder():
             response=f"{connection_directory} does not exist.",
         )

-    remote_path =
+    remote_path = (
+        f"{Path(data_directory).name}/{connection.host}/{connection_directory.name}"
+    )

     instance_id = request.args.get("instanceId")
     current_app.logger.info(f"Setting active reports for {instance_id} - {remote_path}")
@@ -1244,12 +1408,16 @@ def get_npe_data(instance: Instance):
     compressed_path = Path(instance.npe_path)
     uncompressed_path = Path(instance.npe_path)

-    if not (compressed_path and compressed_path.exists()) and not (
-
+    if not (compressed_path and compressed_path.exists()) and not (
+        uncompressed_path and uncompressed_path.exists()
+    ):
+        logger.error(
+            f"NPE file does not exist: {compressed_path} / {uncompressed_path}"
+        )
         return Response(status=HTTPStatus.NOT_FOUND)

     if compressed_path and compressed_path.exists():
-
+        with open(compressed_path, "rb") as file:
             compressed_data = file.read()
             uncompressed_data = zstd.uncompress(compressed_data)
             npe_data = json.loads(uncompressed_data)
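The main behavioral change above is that views.py now probes remote hosts with a key-only ssh subprocess call and maps exit code 255 onto authentication versus connection failures (auth failures surface as HTTP 422). A minimal standalone sketch of that pattern follows; SSHTestError and the function signature here are simplified stand-ins for the package's own exception and logging machinery, not its actual API.

    import subprocess


    class SSHTestError(Exception):
        """Simplified stand-in for the package's SSH/remote-connection exceptions."""


    def test_ssh_connection(host: str, username: str, port: int = 22) -> bool:
        # Force key-based auth so the check fails fast instead of waiting on a password prompt.
        ssh_cmd = ["ssh", "-o", "PasswordAuthentication=no"]
        if port != 22:
            ssh_cmd.extend(["-p", str(port)])
        ssh_cmd.extend([f"{username}@{host}", "echo 'SSH connection test'"])

        try:
            subprocess.run(ssh_cmd, capture_output=True, text=True, check=True, timeout=10)
            return True
        except subprocess.CalledProcessError as e:
            stderr = (e.stderr or "").lower()
            if e.returncode == 255 and "permission denied" in stderr:
                # views.py converts this case to AuthenticationFailedException (HTTP 422).
                raise SSHTestError(f"SSH authentication failed for {username}@{host}") from e
            raise SSHTestError(f"SSH connection test failed: {e.stderr}") from e
        except subprocess.TimeoutExpired as e:
            raise SSHTestError("SSH connection test timed out") from e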
{ttnn_visualizer-0.42.0.dist-info → ttnn_visualizer-0.43.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ttnn_visualizer
-Version: 0.42.0
+Version: 0.43.1
 Summary: TT-NN Visualizer
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
@@ -25,6 +25,10 @@ Requires-Dist: sqlalchemy==2.0.34
 Requires-Dist: PyYAML==6.0.2
 Requires-Dist: tt-perf-report==1.0.7
 Requires-Dist: zstd==1.5.7.0
+Provides-Extra: dev
+Requires-Dist: black==25.1.0; extra == "dev"
+Requires-Dist: mypy; extra == "dev"
+Requires-Dist: pytest==8.4.1; extra == "dev"


 <div align="center">
|