ttnn-visualizer 0.41.0__py3-none-any.whl → 0.43.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. ttnn_visualizer/__init__.py +0 -1
  2. ttnn_visualizer/app.py +15 -4
  3. ttnn_visualizer/csv_queries.py +150 -40
  4. ttnn_visualizer/decorators.py +42 -16
  5. ttnn_visualizer/exceptions.py +45 -1
  6. ttnn_visualizer/file_uploads.py +1 -0
  7. ttnn_visualizer/instances.py +42 -15
  8. ttnn_visualizer/models.py +12 -7
  9. ttnn_visualizer/queries.py +3 -109
  10. ttnn_visualizer/remote_sqlite_setup.py +104 -19
  11. ttnn_visualizer/requirements.txt +2 -3
  12. ttnn_visualizer/serializers.py +1 -0
  13. ttnn_visualizer/settings.py +9 -5
  14. ttnn_visualizer/sftp_operations.py +657 -220
  15. ttnn_visualizer/sockets.py +9 -3
  16. ttnn_visualizer/static/assets/{allPaths-4_pFqSAW.js → allPaths-BQN_j7ek.js} +1 -1
  17. ttnn_visualizer/static/assets/{allPathsLoader-CpLPTLlt.js → allPathsLoader-BvkkQ77q.js} +2 -2
  18. ttnn_visualizer/static/assets/index-B-fsa5Ru.js +1 -0
  19. ttnn_visualizer/static/assets/{index-DFVwehlj.js → index-Bng0kcmi.js} +214 -214
  20. ttnn_visualizer/static/assets/{index-C1rJBrMl.css → index-C-t6jBt9.css} +1 -1
  21. ttnn_visualizer/static/assets/index-DLOviMB1.js +1 -0
  22. ttnn_visualizer/static/assets/{splitPathsBySizeLoader-D-RvsTqO.js → splitPathsBySizeLoader-Cl0NRdfL.js} +1 -1
  23. ttnn_visualizer/static/index.html +2 -2
  24. ttnn_visualizer/tests/__init__.py +0 -1
  25. ttnn_visualizer/tests/test_queries.py +0 -69
  26. ttnn_visualizer/tests/test_serializers.py +2 -2
  27. ttnn_visualizer/utils.py +7 -3
  28. ttnn_visualizer/views.py +315 -52
  29. {ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/LICENSE +0 -1
  30. {ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/METADATA +6 -3
  31. ttnn_visualizer-0.43.0.dist-info/RECORD +45 -0
  32. ttnn_visualizer/ssh_client.py +0 -85
  33. ttnn_visualizer/static/assets/index-BKzgFDAn.js +0 -1
  34. ttnn_visualizer/static/assets/index-BvSuWPlB.js +0 -1
  35. ttnn_visualizer-0.41.0.dist-info/RECORD +0 -46
  36. {ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/LICENSE_understanding.txt +0 -0
  37. {ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/WHEEL +0 -0
  38. {ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/entry_points.txt +0 -0
  39. {ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/views.py CHANGED
@@ -7,6 +7,7 @@ import json
 import logging
 import re
 import shutil
+import subprocess
 import time
 from http import HTTPStatus
 from pathlib import Path
@@ -23,12 +24,22 @@ from flask import (
     request,
 )
 
-from ttnn_visualizer.csv_queries import DeviceLogProfilerQueries, OpsPerformanceQueries, OpsPerformanceReportQueries, \
-    NPEQueries
+from ttnn_visualizer.csv_queries import (
+    DeviceLogProfilerQueries,
+    OpsPerformanceQueries,
+    OpsPerformanceReportQueries,
+    NPEQueries,
+)
 from ttnn_visualizer.decorators import with_instance, local_only
 from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import DataFormatError
 from ttnn_visualizer.exceptions import RemoteConnectionException
+from ttnn_visualizer.exceptions import (
+    SSHException,
+    AuthenticationException,
+    NoValidConnectionsError,
+    AuthenticationFailedException,
+)
 from ttnn_visualizer.file_uploads import (
     extract_folder_name_from_files,
     extract_npe_name,
@@ -36,7 +47,8 @@ from ttnn_visualizer.file_uploads import (
     validate_files,
 )
 from ttnn_visualizer.instances import (
-    get_instances, update_instance,
+    get_instances,
+    update_instance,
 )
 from ttnn_visualizer.models import (
     RemoteReportFolder,
@@ -53,7 +65,8 @@ from ttnn_visualizer.serializers import (
     serialize_buffer_pages,
     serialize_operation_buffers,
     serialize_operations_buffers,
-    serialize_devices, serialize_buffer,
+    serialize_devices,
+    serialize_buffer,
 )
 from ttnn_visualizer.sftp_operations import (
     sync_remote_profiler_folders,
@@ -65,13 +78,132 @@ from ttnn_visualizer.sftp_operations import (
     sync_remote_performance_folders,
     get_cluster_desc,
 )
-from ttnn_visualizer.ssh_client import get_client
 from ttnn_visualizer.utils import (
     get_cluster_descriptor_path,
     read_last_synced_file,
     timer,
 )
 
+
+def handle_ssh_subprocess_error(e: subprocess.CalledProcessError, remote_connection):
+    """
+    Convert subprocess SSH errors to appropriate SSH exceptions.
+
+    :param e: The subprocess.CalledProcessError
+    :param remote_connection: The RemoteConnection object for context
+    :raises: SSHException, AuthenticationException, or NoValidConnectionsError
+    """
+    stderr = e.stderr.lower() if e.stderr else ""
+
+    # Check for authentication failures
+    if any(
+        auth_err in stderr
+        for auth_err in [
+            "permission denied",
+            "authentication failed",
+            "publickey",
+            "password",
+            "host key verification failed",
+        ]
+    ):
+        raise AuthenticationException(
+            f"SSH authentication failed: {remote_connection.username}@{remote_connection.host}: Permission denied (publickey,password)"
+        )
+
+    # Check for connection failures
+    elif any(
+        conn_err in stderr
+        for conn_err in [
+            "connection refused",
+            "network is unreachable",
+            "no route to host",
+            "name or service not known",
+            "connection timed out",
+        ]
+    ):
+        raise NoValidConnectionsError(f"SSH connection failed: {e.stderr}")
+
+    # Check for general SSH protocol errors
+    elif "ssh:" in stderr or "protocol" in stderr:
+        raise SSHException(f"SSH protocol error: {e.stderr}")
+
+    # Default to generic SSH exception
+    else:
+        raise SSHException(f"SSH command failed: {e.stderr}")
+
+
+def test_ssh_connection(connection) -> bool:
+    """Test SSH connection by running a simple command."""
+    ssh_cmd = ["ssh", "-o", "PasswordAuthentication=no"]
+
+    # Handle non-standard SSH port
+    if connection.port != 22:
+        ssh_cmd.extend(["-p", str(connection.port)])
+
+    ssh_cmd.extend(
+        [f"{connection.username}@{connection.host}", "echo 'SSH connection test'"]
+    )
+
+    try:
+        result = subprocess.run(
+            ssh_cmd, capture_output=True, text=True, check=True, timeout=10
+        )
+        return True
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            try:
+                handle_ssh_subprocess_error(e, connection)
+            except AuthenticationException:
+                # Convert to AuthenticationFailedException for proper HTTP 422 response
+                user_message = (
+                    "SSH authentication failed. This application requires SSH key-based authentication. "
+                    "Please ensure your SSH public key is added to the authorized_keys file on the remote server. "
+                    "Password authentication is not supported."
+                )
+                logger.info(
+                    f"SSH authentication failed for {connection.username}@{connection.host}: {user_message}"
+                )
+                raise AuthenticationFailedException(message=user_message)
+            except NoValidConnectionsError as ssh_err:
+                user_message = (
+                    f"Unable to establish SSH connection to {connection.host}. "
+                    "Please check the hostname, port, and network connectivity. "
+                    "Ensure SSH key-based authentication is properly configured."
+                )
+                logger.warning(
+                    f"SSH connection failed for {connection.username}@{connection.host}: {user_message}"
+                )
+                raise RemoteConnectionException(
+                    message=user_message, status=ConnectionTestStates.FAILED
+                )
+            except SSHException as ssh_err:
+                user_message = f"SSH connection error to {connection.host}: {str(ssh_err)}. Ensure SSH key-based authentication is properly configured."
+                logger.warning(
+                    f"SSH error for {connection.username}@{connection.host}: {user_message}"
+                )
+                raise RemoteConnectionException(
+                    message=user_message, status=ConnectionTestStates.FAILED
+                )
+        else:
+            error_message = f"SSH connection test failed: {e.stderr}"
+            logger.error(
+                f"SSH test failed for {connection.username}@{connection.host}: {error_message}"
+            )
+            raise RemoteConnectionException(
+                message=error_message, status=ConnectionTestStates.FAILED
+            )
+        return False
+    except subprocess.TimeoutExpired:
+        timeout_message = "SSH connection test timed out"
+        logger.warning(
+            f"SSH timeout for {connection.username}@{connection.host}: {timeout_message}"
+        )
+        raise RemoteConnectionException(
+            message=timeout_message, status=ConnectionTestStates.FAILED
+        )
+        return False
+
+
 logger = logging.getLogger(__name__)
 
 api = Blueprint("api", __name__)
@@ -387,7 +519,7 @@ def get_profiler_data_list(instance: Instance):
     # Doesn't handle remote at the moment
     # is_remote = True if instance.remote_connection else False
     # config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
-    config_key = 'LOCAL_DATA_DIRECTORY'
+    config_key = "LOCAL_DATA_DIRECTORY"
     data_directory = Path(current_app.config[config_key])
 
     # if is_remote:
@@ -404,18 +536,26 @@ def get_profiler_data_list(instance: Instance):
     if current_app.config["SERVER_MODE"]:
         session_instances = session.get("instances", [])
         instances = get_instances(session_instances)
-        db_paths = [instance.profiler_path for instance in instances if instance.profiler_path]
+        db_paths = [
+            instance.profiler_path for instance in instances if instance.profiler_path
+        ]
         db_directory_names = [str(Path(db_path).parent.name) for db_path in db_paths]
         session_paths = session.get("profiler_paths", [])
-        session_directory_names = [str(Path(session_path).parent.name) for session_path in session_paths]
+        session_directory_names = [
+            str(Path(session_path).parent.name) for session_path in session_paths
+        ]
         demo_directory_names = []
         demo_pattern = re.compile(r"^demo", re.IGNORECASE)
         for report in path.glob("*"):
             if demo_pattern.match(report.name):
                 demo_directory_names.append(report.name)
-        directory_names = list(set(db_directory_names + session_directory_names + demo_directory_names))
+        directory_names = list(
+            set(db_directory_names + session_directory_names + demo_directory_names)
+        )
     else:
-        directory_names = [directory.name for directory in path.iterdir() if directory.is_dir()]
+        directory_names = [
+            directory.name for directory in path.iterdir() if directory.is_dir()
+        ]
 
     # Sort directory names by modified time (most recent first)
     def get_modified_time(dir_name):
@@ -460,25 +600,40 @@ def delete_profiler_report(profiler_name, instance: Instance):
     data_directory = Path(current_app.config[config_key])
 
     if not profiler_name:
-        return Response(status=HTTPStatus.BAD_REQUEST, response="Report name is required.")
+        return Response(
+            status=HTTPStatus.BAD_REQUEST, response="Report name is required."
+        )
 
     if is_remote:
-        connection = RemoteConnection.model_validate(instance.remote_connection, strict=False)
-        path = data_directory / connection.host / current_app.config["PROFILER_DIRECTORY_NAME"]
+        connection = RemoteConnection.model_validate(
+            instance.remote_connection, strict=False
+        )
+        path = (
+            data_directory
+            / connection.host
+            / current_app.config["PROFILER_DIRECTORY_NAME"]
+        )
     else:
-        path = data_directory / current_app.config["PROFILER_DIRECTORY_NAME"] / profiler_name
+        path = (
+            data_directory
+            / current_app.config["PROFILER_DIRECTORY_NAME"]
+            / profiler_name
+        )
 
     if instance.active_report and instance.active_report.profiler_name == profiler_name:
         instance_id = request.args.get("instanceId")
-        update_instance(instance_id=instance_id,profiler_name="")
+        update_instance(instance_id=instance_id, profiler_name="")
 
     if path.exists() and path.is_dir():
         shutil.rmtree(path)
     else:
-        return Response(status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}")
-
-    return Response(status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}")
+        return Response(
+            status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}"
+        )
 
+    return Response(
+        status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}"
+    )
 
 
 @api.route("/performance", methods=["GET"])
@@ -495,21 +650,37 @@ def get_performance_data_list(instance: Instance):
     if current_app.config["SERVER_MODE"]:
         session_instances = session.get("instances", [])
         instances = get_instances(session_instances)
-        db_paths = [instance.performance_path for instance in instances if instance.performance_path]
+        db_paths = [
+            instance.performance_path
+            for instance in instances
+            if instance.performance_path
+        ]
         db_directory_names = [str(Path(db_path).name) for db_path in db_paths]
         session_paths = session.get("performance_paths", [])
-        session_directory_names = [str(Path(session_path).name) for session_path in session_paths]
+        session_directory_names = [
+            str(Path(session_path).name) for session_path in session_paths
+        ]
         demo_directory_names = []
         demo_pattern = re.compile(r"^demo", re.IGNORECASE)
         for report in path.glob("*"):
             if demo_pattern.match(report.name):
                 demo_directory_names.append(report.name)
-        directory_names = list(set(db_directory_names + session_directory_names + demo_directory_names))
+        directory_names = list(
+            set(db_directory_names + session_directory_names + demo_directory_names)
+        )
     else:
         if is_remote:
-            connection = RemoteConnection.model_validate(instance.remote_connection, strict=False)
-            path = data_directory / connection.host / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
-            directory_names = [directory.name for directory in path.iterdir() if directory.is_dir()]
+            connection = RemoteConnection.model_validate(
+                instance.remote_connection, strict=False
+            )
+            path = (
+                data_directory
+                / connection.host
+                / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+            )
+            directory_names = [
+                directory.name for directory in path.iterdir() if directory.is_dir()
+            ]
 
     valid_dirs = []
 
@@ -569,24 +740,43 @@ def delete_performance_report(performance_name, instance: Instance):
     data_directory = Path(current_app.config[config_key])
 
     if not performance_name:
-        return Response(status=HTTPStatus.BAD_REQUEST, response="Report name is required.")
+        return Response(
+            status=HTTPStatus.BAD_REQUEST, response="Report name is required."
+        )
 
     if is_remote:
-        connection = RemoteConnection.model_validate(instance.remote_connection, strict=False)
-        path = data_directory / connection.host / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+        connection = RemoteConnection.model_validate(
+            instance.remote_connection, strict=False
+        )
+        path = (
+            data_directory
+            / connection.host
+            / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+        )
     else:
-        path = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"] / performance_name
+        path = (
+            data_directory
+            / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+            / performance_name
+        )
 
-    if instance.active_report and instance.active_report.performance_name == performance_name:
+    if (
+        instance.active_report
+        and instance.active_report.performance_name == performance_name
+    ):
         instance_id = request.args.get("instanceId")
-        update_instance(instance_id=instance_id,performance_name="")
+        update_instance(instance_id=instance_id, performance_name="")
 
     if path.exists() and path.is_dir():
         shutil.rmtree(path)
     else:
-        return Response(status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}")
+        return Response(
+            status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}"
+        )
 
-    return Response(status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}")
+    return Response(
+        status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}"
+    )
 
 
 @api.route("/performance/perf-results/raw", methods=["GET"])
@@ -635,6 +825,7 @@ def get_performance_data_raw(instance: Instance):
         headers={"Content-Disposition": "attachment; filename=profile_log_device.csv"},
     )
 
+
 @api.route("/performance/npe/manifest", methods=["GET"])
 @with_instance
 def get_npe_manifest(instance: Instance):
@@ -648,6 +839,27 @@ def get_npe_manifest(instance: Instance):
     return jsonify(content)
 
 
+@api.route("/performance/npe/timeline", methods=["GET"])
+@with_instance
+def get_npe_timeline(instance: Instance):
+    if not instance.performance_path:
+        return Response(status=HTTPStatus.NOT_FOUND)
+
+    filename = request.args.get("filename", default=None)
+
+    if not filename:
+        return jsonify({})
+
+    filename = Path(filename).name
+
+    try:
+        content = NPEQueries.get_npe_timeline(instance, filename=filename)
+    except FileNotFoundError:
+        return jsonify({})
+
+    return jsonify(content)
+
+
 @api.route("/performance/device-log/zone/<zone>", methods=["GET"])
 @with_instance
 def get_zone_statistics(zone, instance: Instance):
@@ -669,8 +881,13 @@ def get_devices(instance: Instance):
 @api.route("/local/upload/profiler", methods=["POST"])
 def create_profiler_files():
     files = request.files.getlist("files")
-    folder_name = request.form.get("folderName") # Optional folder name - Used for Safari compatibility
-    profiler_directory = current_app.config["LOCAL_DATA_DIRECTORY"] / current_app.config["PROFILER_DIRECTORY_NAME"]
+    folder_name = request.form.get(
+        "folderName"
+    )  # Optional folder name - Used for Safari compatibility
+    profiler_directory = (
+        current_app.config["LOCAL_DATA_DIRECTORY"]
+        / current_app.config["PROFILER_DIRECTORY_NAME"]
+    )
 
     if not validate_files(files, {"db.sqlite", "config.json"}, folder_name=folder_name):
         return StatusMessage(
@@ -724,10 +941,11 @@ def create_profiler_files():
         "reportName": report_name,
     }
 
+
 @api.route("/local/upload/performance", methods=["POST"])
 def create_performance_files():
     files = request.files.getlist("files")
-    folder_name = request.form.get("folderName") # Optional folder name
+    folder_name = request.form.get("folderName")  # Optional folder name
     data_directory = Path(current_app.config["LOCAL_DATA_DIRECTORY"])
 
     if not validate_files(
@@ -772,7 +990,9 @@ def create_performance_files():
         performance_path=performance_path,
     )
 
-    session["performance_paths"] = session.get("performance_paths", []) + [str(performance_path)]
+    session["performance_paths"] = session.get("performance_paths", []) + [
+        str(performance_path)
+    ]
     session.permanent = True
 
     return StatusMessage(
@@ -786,7 +1006,9 @@ def create_npe_files():
     data_directory = current_app.config["LOCAL_DATA_DIRECTORY"]
 
     for file in files:
-        if not file.filename.endswith(".json") and not file.filename.endswith('.npeviz.zst'):
+        if not file.filename.endswith(".json") and not file.filename.endswith(
+            ".npeviz.zst"
+        ):
             return StatusMessage(
                 status=ConnectionTestStates.FAILED,
                 message="NPE requires a valid .json or .npeviz.zst file",
@@ -803,14 +1025,14 @@
 
     instance_id = request.args.get("instanceId")
     npe_path = str(paths[0])
-    update_instance(instance_id=instance_id, npe_name=npe_name, clear_remote=True, npe_path=npe_path)
+    update_instance(
+        instance_id=instance_id, npe_name=npe_name, clear_remote=True, npe_path=npe_path
+    )
 
     session["npe_paths"] = session.get("npe_paths", []) + [str(npe_path)]
     session.permanent = True
 
-    return StatusMessage(
-        status=ConnectionTestStates.OK, message="Success"
-    ).model_dump()
+    return StatusMessage(status=ConnectionTestStates.OK, message="Success").model_dump()
 
 
 @api.route("/remote/profiler", methods=["POST"])
@@ -824,7 +1046,12 @@ def get_remote_folders_profiler():
     for rf in remote_folders:
         directory_name = Path(rf.remotePath).name
         remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-        local_path = remote_data_directory / current_app.config["PROFILER_DIRECTORY_NAME"] / connection.host / directory_name
+        local_path = (
+            remote_data_directory
+            / current_app.config["PROFILER_DIRECTORY_NAME"]
+            / connection.host
+            / directory_name
+        )
         logger.info(f"Checking last synced for {directory_name}")
         rf.lastSynced = read_last_synced_file(str(local_path))
         if not rf.lastSynced:
@@ -843,14 +1070,21 @@ def get_remote_folders_performance():
     )
 
     try:
-        remote_performance_folders: List[RemoteReportFolder] = get_remote_performance_folders(
-            RemoteConnection.model_validate(connection, strict=False)
+        remote_performance_folders: List[RemoteReportFolder] = (
+            get_remote_performance_folders(
+                RemoteConnection.model_validate(connection, strict=False)
+            )
         )
 
         for rf in remote_performance_folders:
             performance_name = Path(rf.remotePath).name
             remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-            local_path = remote_data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"] / connection.host / performance_name
+            local_path = (
+                remote_data_directory
+                / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+                / connection.host
+                / performance_name
+            )
             logger.info(f"Checking last synced for {performance_name}")
             rf.lastSynced = read_last_synced_file(str(local_path))
             if not rf.lastSynced:
@@ -912,8 +1146,12 @@ def test_remote_folder():
 
     # Test SSH Connection
     try:
-        get_client(connection)
+        test_ssh_connection(connection)
         add_status(ConnectionTestStates.OK.value, "SSH connection established")
+    except AuthenticationFailedException as e:
+        # Return 422 for authentication failures
+        add_status(ConnectionTestStates.FAILED.value, e.message)
+        return [status.model_dump() for status in statuses], e.http_status
     except RemoteConnectionException as e:
         add_status(ConnectionTestStates.FAILED.value, e.message)
 
@@ -922,6 +1160,9 @@
         try:
             check_remote_path_exists(connection, "profilerPath")
             add_status(ConnectionTestStates.OK.value, "Memory folder path exists")
+        except AuthenticationFailedException as e:
+            add_status(ConnectionTestStates.FAILED.value, e.message)
+            return [status.model_dump() for status in statuses], e.http_status
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED.value, e.message)
 
@@ -930,6 +1171,9 @@
         try:
            check_remote_path_exists(connection, "performancePath")
            add_status(ConnectionTestStates.OK.value, "Performance folder path exists")
+        except AuthenticationFailedException as e:
+            add_status(ConnectionTestStates.FAILED.value, e.message)
+            return [status.model_dump() for status in statuses], e.http_status
         except RemoteConnectionException as e:
            add_status(ConnectionTestStates.FAILED.value, e.message)
 
@@ -937,6 +1181,9 @@
     if not has_failures():
         try:
             check_remote_path_for_reports(connection)
+        except AuthenticationFailedException as e:
+            add_status(ConnectionTestStates.FAILED.value, e.message)
+            return [status.model_dump() for status in statuses], e.http_status
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED.value, e.message)
 
@@ -948,6 +1195,9 @@
         try:
             check_sqlite_path(connection)
             add_status(ConnectionTestStates.OK, "SQLite binary found.")
+        except AuthenticationFailedException as e:
+            add_status(ConnectionTestStates.FAILED.value, e.message)
+            return [status.model_dump() for status in statuses], e.http_status
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED, e.message)
 
@@ -1058,14 +1308,21 @@ def use_remote_folder():
     remote_performance_folder = None
 
     if profile:
-        remote_performance_folder = RemoteReportFolder.model_validate(profile, strict=False)
+        remote_performance_folder = RemoteReportFolder.model_validate(
+            profile, strict=False
+        )
         performance_name = remote_performance_folder.reportName
 
     data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
     profiler_name = folder.remotePath.split("/")[-1]
     folder_name = folder.remotePath.split("/")[-1]
 
-    connection_directory = Path(data_directory, connection.host, current_app.config["PROFILER_DIRECTORY_NAME"], folder_name)
+    connection_directory = Path(
+        data_directory,
+        connection.host,
+        current_app.config["PROFILER_DIRECTORY_NAME"],
+        folder_name,
+    )
 
     if not connection.useRemoteQuerying and not connection_directory.exists():
         return Response(
@@ -1073,7 +1330,9 @@
             response=f"{connection_directory} does not exist.",
         )
 
-    remote_path = f"{Path(data_directory).name}/{connection.host}/{connection_directory.name}"
+    remote_path = (
+        f"{Path(data_directory).name}/{connection.host}/{connection_directory.name}"
+    )
 
     instance_id = request.args.get("instanceId")
     current_app.logger.info(f"Setting active reports for {instance_id} - {remote_path}")
@@ -1149,12 +1408,16 @@ def get_npe_data(instance: Instance):
     compressed_path = Path(instance.npe_path)
     uncompressed_path = Path(instance.npe_path)
 
-    if not (compressed_path and compressed_path.exists()) and not (uncompressed_path and uncompressed_path.exists()):
-        logger.error(f"NPE file does not exist: {compressed_path} / {uncompressed_path}")
+    if not (compressed_path and compressed_path.exists()) and not (
+        uncompressed_path and uncompressed_path.exists()
+    ):
+        logger.error(
+            f"NPE file does not exist: {compressed_path} / {uncompressed_path}"
+        )
         return Response(status=HTTPStatus.NOT_FOUND)
 
     if compressed_path and compressed_path.exists():
-        with open(compressed_path, "rb") as file:
+        with open(compressed_path, "rb") as file:
             compressed_data = file.read()
             uncompressed_data = zstd.uncompress(compressed_data)
             npe_data = json.loads(uncompressed_data)
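
Note on the SSH change in views.py above: the release drops the paramiko-based get_client check and instead shells out to the system OpenSSH client, classifying failures by the process exit status and stderr text. Below is a minimal standalone sketch of that probe technique, assuming an ssh binary on PATH and key-based authentication already configured; the Connection dataclass and probe helper are illustrative stand-ins, not part of the ttnn_visualizer API.

# Minimal sketch of a subprocess-based SSH probe (illustrative; not the package API).
import subprocess
from dataclasses import dataclass


@dataclass
class Connection:
    host: str
    username: str
    port: int = 22


def probe(connection: Connection, timeout: int = 10) -> str:
    # Force key-based auth so a password prompt can never hang the server process.
    cmd = ["ssh", "-o", "PasswordAuthentication=no"]
    if connection.port != 22:
        cmd.extend(["-p", str(connection.port)])
    cmd.extend([f"{connection.username}@{connection.host}", "echo ok"])
    try:
        subprocess.run(cmd, capture_output=True, text=True, check=True, timeout=timeout)
        return "ok"
    except subprocess.TimeoutExpired:
        return "timeout"
    except subprocess.CalledProcessError as e:
        # OpenSSH exits with 255 for its own connection/auth errors; any other
        # code came from the remote command itself.
        if e.returncode != 255:
            return f"remote command failed: {e.stderr}"
        stderr = (e.stderr or "").lower()
        if "permission denied" in stderr or "publickey" in stderr:
            return "authentication failed"
        if "connection refused" in stderr or "timed out" in stderr:
            return "unreachable"
        return f"ssh error: {e.stderr}"


if __name__ == "__main__":
    print(probe(Connection(host="example.com", username="user")))

OpenSSH reserves exit status 255 for its own connection and authentication errors, which is why the new test_ssh_connection helper only routes that case through handle_ssh_subprocess_error and maps authentication failures to an HTTP 422 response.
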
{ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/LICENSE CHANGED
@@ -109,7 +109,6 @@ The following separate and independent dependencies are utilized by this project
 - Flask – BSD‑3‑Clause – https://github.com/pallets/flask/blob/main/LICENSE.txt
 - gunicorn – MIT – https://github.com/benoitc/gunicorn/blob/master/LICENSE
 - uvicorn – BSD‑3‑Clause – https://github.com/encode/uvicorn/blob/master/LICENSE.md
-- paramiko~=3.4.0 – LGPL‑2.1+ – https://github.com/paramiko/paramiko/blob/main/LICENSE
 - flask_cors – MIT – https://github.com/corydolphin/flask-cors/blob/main/LICENSE
 - pydantic – MIT – https://github.com/pydantic/pydantic-settings/blob/main/LICENSE
 - setuptools – MIT – https://github.com/pypa/setuptools/blob/main/LICENSE
{ttnn_visualizer-0.41.0.dist-info → ttnn_visualizer-0.43.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ttnn_visualizer
-Version: 0.41.0
+Version: 0.43.0
 Summary: TT-NN Visualizer
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
@@ -16,7 +16,6 @@ Requires-Dist: Flask-SocketIO==5.4.1
 Requires-Dist: Flask-SQLAlchemy==3.1.1
 Requires-Dist: pandas==2.2.3
 Requires-Dist: gunicorn~=22.0.0
-Requires-Dist: paramiko~=3.4.0
 Requires-Dist: pydantic==2.7.3
 Requires-Dist: pydantic-core==2.18.4
 Requires-Dist: setuptools==65.5.0
@@ -24,8 +23,12 @@ Requires-Dist: gevent===24.10.2
 Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: sqlalchemy==2.0.34
 Requires-Dist: PyYAML==6.0.2
-Requires-Dist: tt-perf-report==1.0.6
+Requires-Dist: tt-perf-report==1.0.7
 Requires-Dist: zstd==1.5.7.0
+Provides-Extra: dev
+Requires-Dist: black==25.1.0; extra == "dev"
+Requires-Dist: mypy; extra == "dev"
+Requires-Dist: pytest==8.4.1; extra == "dev"
 
 
 <div align="center">