ttnn-visualizer 0.66.0-py3-none-any.whl → 0.67.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +31 -9
- ttnn_visualizer/models.py +1 -0
- ttnn_visualizer/queries.py +77 -15
- ttnn_visualizer/sftp_operations.py +37 -16
- ttnn_visualizer/static/assets/{allPaths-Dy4oqQPs.js → allPaths-D8fLup7j.js} +1 -1
- ttnn_visualizer/static/assets/allPathsLoader-ib5itfwn.js +2 -0
- ttnn_visualizer/static/assets/{index-BE4YTLKK.js → index-Uz_q_wTj.js} +235 -235
- ttnn_visualizer/static/assets/{index-KUv_TPx-.css → index-bdndbd_j.css} +1 -1
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-BeG6l_W1.js → splitPathsBySizeLoader-BOwtF-oO.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_serializers.py +8 -1
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/METADATA +1 -1
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/RECORD +18 -18
- ttnn_visualizer/static/assets/allPathsLoader-CLdIoVk6.js +0 -2
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/licenses/LICENSE +0 -0
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/licenses/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.66.0.dist-info → ttnn_visualizer-0.67.2.dist-info}/top_level.txt +0 -0
ttnn_visualizer/app.py CHANGED
@@ -9,10 +9,13 @@ import os
 import subprocess
 import sys
 import threading
+import time
 import webbrowser
 from os import environ
 from pathlib import Path
 from typing import cast
+from urllib.error import URLError
+from urllib.request import urlopen
 
 import flask
 from dotenv import load_dotenv
@@ -172,14 +175,33 @@ def open_browser(host, port, instance_id=None):
     if instance_id:
         url = f"{url}?instanceId={instance_id}"
 
-
-
-
-
-
-
-
-
+    max_attempts = 10
+    attempt = 0
+    server_ready = False
+
+    print(f"Waiting for server to be ready at {url}...")
+    while attempt < max_attempts and not server_ready:
+        try:
+            urlopen(url, timeout=1)
+            server_ready = True
+        except (URLError, ConnectionError, OSError):
+            attempt += 1
+            time.sleep(0.5)
+
+    if not server_ready:
+        print(f"❌ Server not ready after {max_attempts} attempts.")
+    else:
+        print(f"Launching browser with url: {url}")
+
+    try:
+        if (
+            os.name == "posix" and "DISPLAY" in os.environ
+        ):  # Checks for non-headless
+            subprocess.run(["xdg-open", url], check=True)
+        else:
+            webbrowser.open(url)
+    except webbrowser.Error as e:
+        print(f"Could not open the default browser: {e}")
 
 
 def parse_args():
@@ -382,7 +404,7 @@ def main():
     flask_env = os.getenv("FLASK_ENV", "development")
     port = config.PORT if flask_env == "production" else config.DEV_SERVER_PORT
     host = config.HOST if flask_env == "production" else config.DEV_SERVER_HOST
-    threading.
+    threading.Thread(target=open_browser, args=[host, port, instance_id]).start()
     try:
         subprocess.run(gunicorn_args)
     except KeyboardInterrupt:
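The readiness check added to `open_browser` can be exercised on its own. Below is a minimal, self-contained sketch of the same polling pattern; the URL, attempt budget, and delay are illustrative defaults, not values taken from the package.

```python
# Poll a URL until the server behind it accepts connections (a sketch of the
# pattern used by open_browser above; the parameters are assumptions).
import time
from urllib.error import URLError
from urllib.request import urlopen


def wait_for_server(url: str, max_attempts: int = 10, delay: float = 0.5) -> bool:
    """Return True once `url` answers, False after the attempt budget runs out."""
    for _ in range(max_attempts):
        try:
            urlopen(url, timeout=1)  # any HTTP response means the socket is up
            return True
        except (URLError, ConnectionError, OSError):
            time.sleep(delay)  # not accepting connections yet; retry shortly
    return False


if __name__ == "__main__":
    ready = wait_for_server("http://localhost:8000")
    print("server is ready" if ready else "gave up waiting")
```

One caveat: `HTTPError` (a 4xx/5xx response) is a subclass of `URLError`, so a server that answers with an error page is still treated as "not ready" and retried; the diff's loop behaves the same way.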
ttnn_visualizer/models.py CHANGED
ttnn_visualizer/queries.py CHANGED
@@ -185,26 +185,88 @@ class DatabaseQueries:
     def query_tensors(
         self, filters: Optional[Dict[str, Any]] = None
     ) -> Generator[Tensor, None, None]:
-
+        # Check if device_tensors table exists
+        device_tensors_exists = self._check_table_exists("device_tensors")
+
+        # Build the base query with joins to get size and optionally device_tensors
+        if device_tensors_exists:
+            query = """
+                SELECT
+                    t.*,
+                    b.max_size_per_bank as size,
+                    GROUP_CONCAT(dt.device_id || ':' || dt.address, ',') as device_tensors_data
+                FROM tensors t
+                LEFT JOIN input_tensors it ON it.tensor_id = t.tensor_id
+                LEFT JOIN output_tensors ot ON ot.tensor_id = t.tensor_id
+                LEFT JOIN buffers b ON b.operation_id = COALESCE(it.operation_id, ot.operation_id)
+                    AND t.address = b.address
+                    AND t.device_id = b.device_id
+                LEFT JOIN device_tensors dt ON dt.tensor_id = t.tensor_id
+                WHERE 1=1
+            """
+        else:
+            query = """
+                SELECT
+                    t.*,
+                    b.max_size_per_bank as size,
+                    NULL as device_tensors_data
+                FROM tensors t
+                LEFT JOIN input_tensors it ON it.tensor_id = t.tensor_id
+                LEFT JOIN output_tensors ot ON ot.tensor_id = t.tensor_id
+                LEFT JOIN buffers b ON b.operation_id = COALESCE(it.operation_id, ot.operation_id)
+                    AND t.address = b.address
+                    AND t.device_id = b.device_id
+                WHERE 1=1
+            """
+        params = []
+
+        # Apply filters to tensors table
+        if filters:
+            for column, value in filters.items():
+                if value is None:
+                    continue
+
+                if isinstance(value, list):
+                    if len(value) == 0:
+                        continue
+                    placeholders = ", ".join(["?"] * len(value))
+                    query += f" AND t.{column} IN ({placeholders})"
+                    params.extend(value)
+                else:
+                    query += f" AND t.{column} = ?"
+                    params.append(value)
+
+        query += " GROUP BY t.tensor_id"
+
+        rows = self.query_runner.execute_query(query, params)
         for row in rows:
+            # Extract size and device_tensors_data (last two columns) and tensor data
+            tensor_row = row[:-2]  # All tensor columns
+            size = row[-2]  # size column
+            device_tensors_data = row[-1]  # device_tensors_data column
+
             device_addresses = []
 
-
-
-
-
-
-
-
-
-
-
-
-
+            if device_tensors_data:
+                # Parse the concatenated device_id:address pairs
+                pairs = device_tensors_data.split(",")
+                device_tensor_list = []
+                for pair in pairs:
+                    if pair:
+                        device_id_str, address_str = pair.split(":")
+                        device_id = int(device_id_str)
+                        address = int(address_str)
+                        device_tensor_list.append((device_id, address))
+
+                # Sort by device_id and build the list with proper indexing
+                for device_id, address in sorted(
+                    device_tensor_list, key=lambda x: x[0]
+                ):
+                    while len(device_addresses) < device_id:
                         device_addresses.append(None)
-            device_addresses.append(
+                    device_addresses.append(address)
 
-        yield Tensor(*
+            yield Tensor(*tensor_row, device_addresses, size=size)
 
     def query_input_tensors(
         self, filters: Optional[Dict[str, Any]] = None
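Two pieces of surrounding context, neither shown in the diff, may help. First, `_check_table_exists` is presumably a `sqlite_master` lookup; the sketch below is an assumption, not the package's actual code. Second, the `GROUP_CONCAT` column yields strings like `"0:1024,1:2048"`, which the loop above unpacks into a device-indexed address list, padding skipped device ids with `None`.

```python
# Sketch of the two helpers implied by the diff. check_table_exists is a
# hypothetical stand-in for self._check_table_exists; parse_device_tensors
# mirrors the GROUP_CONCAT-parsing loop in query_tensors.
import sqlite3
from typing import List, Optional


def check_table_exists(conn: sqlite3.Connection, table_name: str) -> bool:
    """Return True if `table_name` exists in the connected SQLite database."""
    row = conn.execute(
        "SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = ?",
        (table_name,),
    ).fetchone()
    return row is not None


def parse_device_tensors(data: str) -> List[Optional[int]]:
    """Expand a "device_id:address,..." string into a list indexed by device_id."""
    device_addresses: List[Optional[int]] = []
    pairs = sorted(
        (int(device_id), int(address))
        for device_id, address in (pair.split(":") for pair in data.split(",") if pair)
    )
    for device_id, address in pairs:
        while len(device_addresses) < device_id:
            device_addresses.append(None)  # pad gaps for missing devices
        device_addresses.append(address)
    return device_addresses


conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE device_tensors (tensor_id, device_id, address)")
assert check_table_exists(conn, "device_tensors")
assert not check_table_exists(conn, "buffers")
assert parse_device_tensors("2:4096,0:1024") == [1024, None, 4096]
```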
ttnn_visualizer/sftp_operations.py CHANGED
@@ -620,13 +620,21 @@ def read_remote_file(
 
 @remote_exception_handler
 def check_remote_path_for_reports(remote_connection):
-    remote_profiler_paths =
-
-
+    remote_profiler_paths = []
+    if remote_connection.profilerPath:
+        remote_profiler_paths = find_folders_by_files(
+            remote_connection, remote_connection.profilerPath, [TEST_CONFIG_FILE]
+        )
+    else:
+        logger.info("No profiler path configured; skipping check.")
 
-    remote_performance_paths =
-
-
+    remote_performance_paths = []
+    if remote_connection.performancePath:
+        remote_performance_paths = find_folders_by_files(
+            remote_connection, remote_connection.performancePath, [TEST_PROFILER_FILE]
+        )
+    else:
+        logger.info("No performance path configured; skipping check.")
 
     errors = []
     if not remote_profiler_paths and remote_connection.profilerPath:
@@ -682,6 +690,9 @@ def find_folders_by_files(
     remote_connection: RemoteConnection, root_folder: str, file_names: List[str]
 ) -> List[str]:
     """Given a remote path, return a list of top-level folders that contain any of the specified files."""
+    if not root_folder:
+        return []
+
     matched_folders: List[str] = []
 
     # Build SSH command to find directories in root_folder
@@ -761,23 +772,28 @@ def get_remote_performance_folders(
     remote_connection: RemoteConnection,
 ) -> List[RemoteReportFolder]:
     """Return a list of remote folders containing a profile_log_device file."""
-
+    performance_paths = []
+
+    if remote_connection.performancePath:
+        performance_paths = find_folders_by_files(
+            remote_connection, remote_connection.performancePath, [TEST_PROFILER_FILE]
+        )
+    else:
         error = "Performance path is not configured for this connection"
         logger.error(error)
         raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
 
-    performance_paths = find_folders_by_files(
-        remote_connection, remote_connection.performancePath, [TEST_PROFILER_FILE]
-    )
     if not performance_paths:
-        error = f"No
+        error = f"No performance reports found at {remote_connection.performancePath}"
         logger.info(error)
         raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
+
     remote_folder_data = []
     for path in performance_paths:
        remote_folder_data.append(
             get_remote_performance_folder(remote_connection, path)
         )
+
     return remote_folder_data
@@ -786,19 +802,24 @@ def get_remote_profiler_folders(
     remote_connection: RemoteConnection,
 ) -> List[RemoteReportFolder]:
     """Return a list of remote folders containing a config.json file."""
-    remote_config_paths =
-
-
-
-
+    remote_config_paths = []
+
+    if remote_connection.profilerPath:
+        remote_config_paths = find_folders_by_files(
+            remote_connection, remote_connection.profilerPath, [TEST_CONFIG_FILE]
+        )
+    else:
+        error = f"No profiler reports found at {remote_connection.profilerPath}"
         logger.info(error)
         raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
+
     remote_folder_data = []
     for config_path in remote_config_paths:
         remote_folder = get_remote_profiler_folder_from_config_path(
             remote_connection, str(Path(config_path).joinpath(TEST_CONFIG_FILE))
         )
         remote_folder_data.append(remote_folder)
+
     return remote_folder_data
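All four hunks apply the same guard: an unconfigured remote path means "nothing to scan", rather than handing `None` or an empty string to the remote `find` invocation. A minimal sketch of that pattern, using a stand-in dataclass rather than the package's actual `RemoteConnection` model:

```python
# Stand-in types to illustrate the optional-path guard; only the
# profilerPath/performancePath field names mirror what the diff reads.
from dataclasses import dataclass
from typing import Callable, List, Optional


@dataclass
class RemoteConnectionStub:
    profilerPath: Optional[str] = None
    performancePath: Optional[str] = None


def scan(path: Optional[str], scanner: Callable[[str], List[str]]) -> List[str]:
    """Run the remote scan only when a path is configured; else return []."""
    return scanner(path) if path else []


# Usage: with no performancePath configured, no remote command runs at all.
conn = RemoteConnectionStub(profilerPath="/home/user/reports")
assert scan(conn.profilerPath, lambda p: [f"{p}/run1"]) == ["/home/user/reports/run1"]
assert scan(conn.performancePath, lambda p: [f"{p}/x"]) == []
```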
ttnn_visualizer/static/assets/{allPaths-Dy4oqQPs.js → allPaths-D8fLup7j.js} RENAMED
@@ -1 +1 @@
-import{I as s}from"./index-voJy5fZe.js";import{I as r}from"./index-BZITDwoa.js";import{p as n,I as c}from"./index-
+import{I as s}from"./index-voJy5fZe.js";import{I as r}from"./index-BZITDwoa.js";import{p as n,I as c}from"./index-Uz_q_wTj.js";function p(t,a){const o=n(t);return a===c.STANDARD?s[o]:r[o]}export{s as IconSvgPaths16,r as IconSvgPaths20,p as getIconPaths};

ttnn_visualizer/static/assets/allPathsLoader-ib5itfwn.js ADDED
@@ -0,0 +1,2 @@
+const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-D8fLup7j.js","assets/index-voJy5fZe.js","assets/index-BZITDwoa.js","assets/index-Uz_q_wTj.js","assets/index-bdndbd_j.css"])))=>i.map(i=>d[i]);
+import{_ as e}from"./index-Uz_q_wTj.js";const s=async(t,a)=>{const{getIconPaths:o}=await e(async()=>{const{getIconPaths:r}=await import("./allPaths-D8fLup7j.js");return{getIconPaths:r}},__vite__mapDeps([0,1,2,3,4]));return o(t,a)};export{s as allPathsLoader};