ttnn-visualizer 0.43.1__py3-none-any.whl → 0.44.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +4 -5
- ttnn_visualizer/csv_queries.py +35 -287
- ttnn_visualizer/decorators.py +5 -6
- ttnn_visualizer/extensions.py +1 -2
- ttnn_visualizer/file_uploads.py +0 -1
- ttnn_visualizer/instances.py +4 -8
- ttnn_visualizer/models.py +3 -7
- ttnn_visualizer/queries.py +11 -18
- ttnn_visualizer/remote_sqlite_setup.py +2 -2
- ttnn_visualizer/settings.py +1 -1
- ttnn_visualizer/sftp_operations.py +4 -9
- ttnn_visualizer/sockets.py +1 -3
- ttnn_visualizer/static/assets/{allPaths-CGmhlOs-.js → allPaths-CFKU23gh.js} +1 -1
- ttnn_visualizer/static/assets/{allPathsLoader-CH9za42_.js → allPathsLoader-CpaihUCo.js} +2 -2
- ttnn_visualizer/static/assets/{index-DEb3r1jy.js → index-B2fHW2_O.js} +418 -411
- ttnn_visualizer/static/assets/index-BueCaPcI.css +7 -0
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-CP-kodGu.js → splitPathsBySizeLoader-BEb-7YZm.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_queries.py +4 -10
- ttnn_visualizer/tests/test_serializers.py +11 -11
- ttnn_visualizer/utils.py +3 -4
- ttnn_visualizer/views.py +34 -83
- {ttnn_visualizer-0.43.1.dist-info → ttnn_visualizer-0.44.1.dist-info}/METADATA +13 -11
- ttnn_visualizer-0.44.1.dist-info/RECORD +42 -0
- {ttnn_visualizer-0.43.1.dist-info → ttnn_visualizer-0.44.1.dist-info}/WHEEL +1 -1
- ttnn_visualizer/bin/docker-entrypoint-web +0 -16
- ttnn_visualizer/bin/pip3-install +0 -17
- ttnn_visualizer/requirements.txt +0 -23
- ttnn_visualizer/static/assets/index-C-t6jBt9.css +0 -7
- ttnn_visualizer-0.43.1.dist-info/RECORD +0 -45
- {ttnn_visualizer-0.43.1.dist-info → ttnn_visualizer-0.44.1.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.43.1.dist-info → ttnn_visualizer-0.44.1.dist-info/licenses}/LICENSE +0 -0
- {ttnn_visualizer-0.43.1.dist-info → ttnn_visualizer-0.44.1.dist-info/licenses}/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.43.1.dist-info → ttnn_visualizer-0.44.1.dist-info}/top_level.txt +0 -0
ttnn_visualizer/app.py
CHANGED
@@ -7,20 +7,17 @@ import json
 import logging
 import os
 import subprocess
+import sys
 import threading
 import webbrowser
 from os import environ
 from pathlib import Path
-import sys
 from typing import cast
 
 import flask
 from dotenv import load_dotenv
 from flask import Flask, abort, jsonify
 from flask_cors import CORS
-from werkzeug.debug import DebuggedApplication
-from werkzeug.middleware.proxy_fix import ProxyFix
-
 from ttnn_visualizer.exceptions import (
     DatabaseFileNotFoundException,
     InvalidProfilerPath,
@@ -28,6 +25,8 @@ from ttnn_visualizer.exceptions import (
 )
 from ttnn_visualizer.instances import create_instance_from_local_paths
 from ttnn_visualizer.settings import Config, DefaultConfig
+from werkzeug.debug import DebuggedApplication
+from werkzeug.middleware.proxy_fix import ProxyFix
 
 logger = logging.getLogger(__name__)
 
@@ -104,7 +103,7 @@ def create_app(settings_override=None):
 
 
 def extensions(app: flask.Flask):
-    from ttnn_visualizer.extensions import
+    from ttnn_visualizer.extensions import db, flask_static_digest, socketio
     from ttnn_visualizer.sockets import register_handlers
 
     """
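For context on the one-line import fix above: the truncated import is replaced with an explicit list of the three shared extension objects declared in ttnn_visualizer/extensions.py. A minimal sketch of how extensions() presumably consumes them, assuming the conventional Flask init_app pattern and a register_handlers(socketio) signature (neither is spelled out in this diff):

    import flask

    def extensions(app: flask.Flask):
        from ttnn_visualizer.extensions import db, flask_static_digest, socketio
        from ttnn_visualizer.sockets import register_handlers

        # Bind each shared extension object to this app instance
        # (assumed init_app pattern; not shown in the diff).
        db.init_app(app)                   # flask_sqlalchemy.SQLAlchemy
        flask_static_digest.init_app(app)  # flask_static_digest.FlaskStaticDigest
        socketio.init_app(app)             # flask_socketio.SocketIO

        # Hypothetical: attach the WebSocket event handlers to the shared SocketIO object.
        register_handlers(socketio)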
ttnn_visualizer/csv_queries.py
CHANGED
@@ -8,20 +8,18 @@ import subprocess
 import tempfile
 from io import StringIO
 from pathlib import Path
-from typing import List,
+from typing import Dict, List, Optional, Union
 
 import pandas as pd
 import zstd
 from tt_perf_report import perf_report
-
-from ttnn_visualizer.exceptions import DataFormatError
 from ttnn_visualizer.exceptions import (
-    SSHException,
     AuthenticationException,
+    DataFormatError,
     NoValidConnectionsError,
+    SSHException,
 )
 from ttnn_visualizer.models import Instance, RemoteConnection
-from ttnn_visualizer.sftp_operations import read_remote_file
 
 
 def handle_ssh_subprocess_error(
@@ -148,180 +146,6 @@ class LocalCSVQueryRunner:
         return sanitized_df.values.tolist()
 
 
-class RemoteCSVQueryRunner:
-    def __init__(
-        self, file_path: str, remote_connection, sep: str = ",", offset: int = 0
-    ):
-        """
-        Initialize the RemoteCSVQueryRunner.
-
-        :param file_path: Path to the remote file.
-        :param remote_connection: RemoteConnection object for SSH access.
-        :param sep: Separator used in the CSV file.
-        :param offset: Number of lines to skip before treating the first valid line as headers.
-        """
-        self.file_path = file_path
-        self.remote_connection = remote_connection
-        self.sep = sep
-        self.offset = offset
-
-    def _execute_ssh_command(self, command: str) -> str:
-        """Execute an SSH command and return the output."""
-        ssh_cmd = ["ssh", "-o", "PasswordAuthentication=no"]
-
-        # Handle non-standard SSH port
-        if self.remote_connection.port != 22:
-            ssh_cmd.extend(["-p", str(self.remote_connection.port)])
-
-        ssh_cmd.extend(
-            [
-                f"{self.remote_connection.username}@{self.remote_connection.host}",
-                command,
-            ]
-        )
-
-        try:
-            result = subprocess.run(
-                ssh_cmd, capture_output=True, text=True, check=True, timeout=30
-            )
-            return result.stdout
-        except subprocess.CalledProcessError as e:
-            if e.returncode == 255:  # SSH protocol errors
-                handle_ssh_subprocess_error(e, self.remote_connection)
-                # This line should never be reached as handle_ssh_subprocess_error raises an exception
-                raise RuntimeError(f"SSH command failed: {e.stderr}")
-            else:
-                raise RuntimeError(f"SSH command failed: {e.stderr}")
-        except subprocess.TimeoutExpired:
-            raise RuntimeError(f"SSH command timed out: {command}")
-
-    def execute_query(
-        self,
-        filters: Optional[Dict[str, str]] = None,  # Allow unsanitized filter keys
-        as_dict: bool = False,  # Convert rows to dictionaries if True
-        limit: int = None,
-        columns=None,
-    ) -> Union[List[List[str]], List[Dict[str, str]]]:
-        """
-        Fetch rows with optional filtering and limit, returning either raw rows or dictionaries.
-        :param filters: Dictionary of unsanitized column filters (e.g., {"zone name": "BRISC-FW"}).
-        :param as_dict: Whether to return results as a list of dictionaries.
-        :param limit: Maximum number of rows to return.
-        :return: List of rows as lists or dictionaries.
-        """
-        # Fetch header row, accounting for the offset
-        header_cmd = f"head -n {self.offset + 1} {self.file_path} | tail -n 1"
-        raw_header = self._execute_ssh_command(header_cmd).strip()
-
-        # Sanitize headers
-        headers = [
-            col.strip().replace(" ", "_").lower() for col in raw_header.split(self.sep)
-        ]
-
-        # Build the AWK command for filtering
-        awk_filter = ""
-        if filters:
-            filter_conditions = []
-            for unsanitized_col, value in filters.items():
-                # Sanitize the filter key
-                sanitized_col = unsanitized_col.strip().replace(" ", "_").lower()
-                if sanitized_col in headers:
-                    col_idx = headers.index(sanitized_col) + 1
-                    filter_conditions.append(f'${col_idx} == "{value}"')
-                else:
-                    print(
-                        f"WARNING: Column '{unsanitized_col}' (sanitized: '{sanitized_col}') not found in headers."
-                    )
-            awk_filter = " && ".join(filter_conditions)
-
-        # Build AWK command
-        limit_clause = f"| head -n {limit}" if limit else ""
-        awk_cmd = f"awk -F'{self.sep}' 'NR > {self.offset + 1} {f'&& {awk_filter}' if awk_filter else ''} {{print}}' {self.file_path} {limit_clause}"
-
-        output = self._execute_ssh_command(awk_cmd).strip()
-
-        # Split rows into lists of strings
-        rows = [
-            [field.strip().strip('"') for field in line.split(self.sep)]
-            for line in output.splitlines()
-        ]
-        if as_dict:
-            # Convert rows to dictionaries
-            result = [dict(zip(headers, row)) for row in rows]
-
-            if columns:
-                sanitized_columns = [
-                    col.strip().replace(" ", "_").lower() for col in columns
-                ]
-                result = [
-                    {
-                        key: value
-                        for key, value in row.items()
-                        if key in sanitized_columns
-                    }
-                    for row in result
-                ]
-                print(f"DEBUG: Filtered columns: {sanitized_columns}")
-            return result
-        return rows
-
-    def execute_query_raw(self, limit: int = None) -> List[str]:
-        """
-        Fetch raw lines from the remote CSV file, accounting for the offset.
-
-        :param limit: Maximum number of rows to fetch (including offset rows).
-        :return: List of raw rows as strings.
-        """
-        total_lines = self.offset + limit if limit else ""
-        cmd = (
-            f"head -n {total_lines} {self.file_path}"
-            if total_lines
-            else f"cat {self.file_path}"
-        )
-        output = self._execute_ssh_command(cmd).strip()
-
-        return output.splitlines()[self.offset :]
-
-    def get_csv_header(self) -> Dict[str, int]:
-        """
-        Retrieve the CSV headers as a dictionary mapping column names to their indices (1-based).
-        :return: Dictionary of headers.
-        """
-        header_cmd = f"head -n {self.offset + 1} {self.file_path} | tail -n 1"
-        header = self._execute_ssh_command(header_cmd).strip()
-
-        # Trim spaces in header names
-        column_names = [name.strip() for name in header.split(self.sep)]
-        return {name: idx + 1 for idx, name in enumerate(column_names)}
-
-    def build_awk_filter(
-        self, column_indices: Dict[str, int], filters: Dict[str, str]
-    ) -> str:
-        if not filters:
-            return ""
-        conditions = [
-            f'${column_indices[col]} == "{val}"' for col, val in filters.items()
-        ]
-        return " && ".join(conditions)
-
-    def build_awk_columns(
-        self, column_indices: Dict[str, int], columns: List[str]
-    ) -> str:
-        return ", ".join([f"${column_indices[col]}" for col in columns])
-
-    def __enter__(self):
-        """
-        Enable usage with context management.
-        """
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """
-        Clean up resources when exiting context.
-        """
-        pass
-
-
 class NPEQueries:
     NPE_FOLDER = "npe_viz"
     MANIFEST_FILE = "manifest.json"
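To make the deletion above concrete: RemoteCSVQueryRunner.execute_query() compiled each filtered read into a single awk pipeline and ran it over ssh, so filtering happened on the remote host and only matching rows crossed the wire. A sketch of the command the removed code would have built for one filter; the header names, file path, and limit below are illustrative only:

    # Mirrors the command construction in the removed execute_query().
    sep = ","
    offset = 1  # device logs skip one preamble line before the header row
    file_path = "/remote/profiler/profile_log_device.csv"   # hypothetical path
    headers = ["pcie_slot", "zone_name", "core_x"]          # sanitized headers (hypothetical)

    col_idx = headers.index("zone_name") + 1  # awk fields are 1-based
    awk_filter = f'${col_idx} == "BRISC-FW"'
    limit_clause = "| head -n 10"
    awk_cmd = (
        f"awk -F'{sep}' 'NR > {offset + 1} && {awk_filter} {{print}}' "
        f"{file_path} {limit_clause}"
    )
    print(awk_cmd)
    # awk -F',' 'NR > 2 && $2 == "BRISC-FW" {print}' /remote/profiler/profile_log_device.csv | head -n 10

The resulting string was then passed as a single remote command to ssh -o PasswordAuthentication=no.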
@@ -329,24 +153,13 @@ class NPEQueries:
     @staticmethod
     def get_npe_manifest(instance: Instance):
 
-        if (
-            not instance.remote_connection
-            or instance.remote_connection
-            and not instance.remote_connection.useRemoteQuerying
-        ):
-            file_path = Path(
-                instance.performance_path,
-                NPEQueries.NPE_FOLDER,
-                NPEQueries.MANIFEST_FILE,
-            )
-            with open(file_path, "r") as f:
-                return json.load(f)
-        else:
-            profiler_folder = instance.remote_profile_folder
-            return read_remote_file(
-                instance.remote_connection,
-                f"{profiler_folder.remotePath}/{NPEQueries.NPE_FOLDER}/{NPEQueries.MANIFEST_FILE}",
-            )
+        file_path = Path(
+            instance.performance_path,
+            NPEQueries.NPE_FOLDER,
+            NPEQueries.MANIFEST_FILE,
+        )
+        with open(file_path, "r") as f:
+            return json.load(f)
 
     @staticmethod
     def get_npe_timeline(instance: Instance, filename: str):
@@ -355,40 +168,19 @@ class NPEQueries:
             "filename parameter is required and cannot be None or empty"
         )
 
-        if (
-            not instance.remote_connection
-            or not instance.remote_connection.useRemoteQuerying
-        ):
-            if not instance.performance_path:
-                raise ValueError("instance.performance_path is None")
-
-            file_path = Path(instance.performance_path, NPEQueries.NPE_FOLDER, filename)
+        if not instance.performance_path:
+            raise ValueError("instance.performance_path is None")
 
-            if filename.endswith(".zst"):
-                with open(file_path, "rb") as file:
-                    compressed_data = file.read()
-                    uncompressed_data = zstd.uncompress(compressed_data)
-                    return json.loads(uncompressed_data)
-            else:
-                with open(file_path, "r") as f:
-                    return json.load(f)
-
-        else:
-            profiler_folder = instance.remote_profile_folder
-            remote_path = (
-                f"{profiler_folder.remotePath}/{NPEQueries.NPE_FOLDER}/{filename}"
-            )
-            remote_data = read_remote_file(instance.remote_connection, remote_path)
+        file_path = Path(instance.performance_path, NPEQueries.NPE_FOLDER, filename)
 
-            …
-            …
-            …
-                uncompressed_data = zstd.
+        if filename.endswith(".zst"):
+            with open(file_path, "rb") as file:
+                compressed_data = file.read()
+                uncompressed_data = zstd.uncompress(compressed_data)
                 return json.loads(uncompressed_data)
-            …
-            …
-            …
-            return json.loads(remote_data)
+        else:
+            with open(file_path, "r") as f:
+                return json.load(f)
 
 
 class DeviceLogProfilerQueries:
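The rewritten loader now picks a decompression strategy from the file extension alone. A round-trip sketch of a fixture the .zst branch above would accept, using the same zstd module this file already imports (the filename and payload are hypothetical):

    import json
    import zstd

    payload = {"timeline": []}  # stand-in for real NPE timeline data
    with open("timeline.json.zst", "wb") as f:
        f.write(zstd.compress(json.dumps(payload).encode("utf-8")))

    # NPEQueries.get_npe_timeline(instance, "timeline.json.zst") reads this back
    # via zstd.uncompress() + json.loads(); plain .json files take the
    # open()/json.load() branch instead.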
@@ -421,36 +213,17 @@ class DeviceLogProfilerQueries:
         """
         Determine the appropriate query runner based on the instance's remote connection.
         """
-        …
-        …
-        …
-        …
-        …
-        …
-        # use_remote_querying = self.instance.remote_connection.useRemoteQuerying
-        …
-        # Determine if this is a local or remote operation
-        if is_remote and use_remote_querying:
-            remote_profiler_folder = self.instance.remote_profile_folder
-            file_path = f"{remote_profiler_folder.remotePath}/{self.DEVICE_LOG_FILE}"
-            self.runner = RemoteCSVQueryRunner(
-                file_path=file_path,
-                remote_connection=self.instance.remote_connection,
-                offset=1,  # Skip the first line for device log files
-            )
-        else:
-            self.runner = LocalCSVQueryRunner(
-                file_path=Path(self.instance.performance_path).joinpath(
-                    self.DEVICE_LOG_FILE
-                ),
-                offset=1,  # Skip the first line for device log files
-            )
+        self.runner = LocalCSVQueryRunner(
+            file_path=Path(self.instance.performance_path).joinpath(
+                self.DEVICE_LOG_FILE
+            ),
+            offset=1,  # Skip the first line for device log files
+        )
 
         self.runner.__enter__()
 
-        …
-        …
-            self.runner.df.columns = self.runner.df.columns.str.strip()
+        self.runner.df.columns = self.DEVICE_LOG_COLUMNS
+        self.runner.df.columns = self.runner.df.columns.str.strip()
 
         return self
 
@@ -498,24 +271,11 @@ class DeviceLogProfilerQueries:
 
     @staticmethod
     def get_raw_csv(instance: Instance):
-        …
-        …
-        if (
-            not instance.remote_connection
-            or instance.remote_connection
-            and not instance.remote_connection.useRemoteQuerying
-        ):
-            file_path = Path(
-                instance.performance_path, DeviceLogProfilerQueries.DEVICE_LOG_FILE
-            )
-            with open(file_path, "r") as f:
-                return f.read()
-        else:
-            profiler_folder = instance.remote_profile_folder
-            return read_remote_file(
-                instance.remote_connection,
-                f"{profiler_folder.remotePath}/{DeviceLogProfilerQueries.DEVICE_LOG_FILE}",
-            )
+        file_path = Path(
+            instance.performance_path, DeviceLogProfilerQueries.DEVICE_LOG_FILE
+        )
+        with open(file_path, "r") as f:
+            return f.read()
 
 
 class OpsPerformanceQueries:
@@ -637,20 +397,8 @@ class OpsPerformanceQueries:
 
     @staticmethod
    def get_raw_csv(instance):
-        …
-        …
-        if (
-            not instance.remote_connection
-            or instance.remote_connection
-            and not instance.remote_connection.useRemoteQuerying
-        ):
-            with open(
-                OpsPerformanceQueries.get_local_ops_perf_file_path(instance)
-            ) as f:
-                return f.read()
-        else:
-            path = OpsPerformanceQueries.get_remote_ops_perf_file_path(instance)
-            return read_remote_file(instance.remote_connection, path)
+        with open(OpsPerformanceQueries.get_local_ops_perf_file_path(instance)) as f:
+            return f.read()
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         """
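After these changes every code path in this module reads CSVs from local disk. A short usage sketch: the stand-in object below carries only the attribute the local get_raw_csv path actually touches, and the context-manager constructor argument is inferred from the __enter__/__exit__ protocol shown above rather than spelled out in this diff:

    from pathlib import Path
    from types import SimpleNamespace

    from ttnn_visualizer.csv_queries import DeviceLogProfilerQueries

    # Stand-in for a real Instance (normally built by ttnn_visualizer.instances);
    # only performance_path is used by the local-only get_raw_csv path.
    instance = SimpleNamespace(performance_path=Path("generated/profiler/reports/demo"))

    raw_log = DeviceLogProfilerQueries.get_raw_csv(instance)  # profile_log_device.csv as text

    with DeviceLogProfilerQueries(instance) as queries:  # assumed constructor signature
        # queries.runner is a LocalCSVQueryRunner (offset=1, columns normalized
        # to DEVICE_LOG_COLUMNS and stripped of surrounding whitespace).
        ...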
ttnn_visualizer/decorators.py
CHANGED
@@ -4,19 +4,18 @@
 
 import logging
 import re
-from ttnn_visualizer.enums import ConnectionTestStates
-
-
 from functools import wraps
+
 from flask import abort, request, session
+from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import (
     AuthenticationException,
-    NoValidConnectionsError,
-    SSHException,
-    RemoteConnectionException,
     AuthenticationFailedException,
     NoProjectsException,
+    NoValidConnectionsError,
+    RemoteConnectionException,
     RemoteSqliteException,
+    SSHException,
 )
 from ttnn_visualizer.instances import get_or_create_instance
 
ttnn_visualizer/extensions.py
CHANGED
@@ -3,9 +3,8 @@
 # SPDX-FileCopyrightText: © 2025 Tenstorrent AI ULC
 
 from flask_socketio import SocketIO
-from flask_static_digest import FlaskStaticDigest
 from flask_sqlalchemy import SQLAlchemy
-
+from flask_static_digest import FlaskStaticDigest
 
 flask_static_digest = FlaskStaticDigest()
 # Initialize Flask SQLAlchemy
ttnn_visualizer/file_uploads.py
CHANGED
ttnn_visualizer/instances.py
CHANGED
@@ -9,20 +9,16 @@ from logging import getLogger
 from pathlib import Path
 
 from flask import request
-
-from ttnn_visualizer.exceptions import InvalidReportPath, InvalidProfilerPath
-from ttnn_visualizer.utils import get_profiler_path, get_performance_path, get_npe_path
-from ttnn_visualizer.models import (
-    InstanceTable,
-)
+from ttnn_visualizer.exceptions import InvalidProfilerPath, InvalidReportPath
 from ttnn_visualizer.extensions import db
+from ttnn_visualizer.models import InstanceTable
+from ttnn_visualizer.utils import get_npe_path, get_performance_path, get_profiler_path
 
 logger = getLogger(__name__)
 
-from flask import
+from flask import current_app, jsonify
 from sqlalchemy.exc import IntegrityError, SQLAlchemyError
 
-
 _sentinel = object()
 
 
ttnn_visualizer/models.py
CHANGED
@@ -6,17 +6,14 @@ import dataclasses
 import enum
 import json
 from json import JSONDecodeError
-from typing import
+from typing import Any, Optional
 
 from pydantic import BaseModel, Field
-from sqlalchemy import
+from sqlalchemy import JSON, Column, Integer, String
 from sqlalchemy.ext.mutable import MutableDict
-
-from ttnn_visualizer.utils import SerializeableDataclass
 from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.extensions import db
-
-from ttnn_visualizer.utils import parse_memory_config
+from ttnn_visualizer.utils import SerializeableDataclass, parse_memory_config
 
 
 class BufferType(enum.Enum):
@@ -172,7 +169,6 @@ class RemoteConnection(SerializeableModel):
     profilerPath: str
     performancePath: Optional[str] = None
     sqliteBinaryPath: Optional[str] = None
-    useRemoteQuerying: bool = False
 
 
 class StatusMessage(SerializeableModel):
ttnn_visualizer/queries.py
CHANGED
@@ -2,29 +2,26 @@
 #
 # SPDX-FileCopyrightText: © 2025 Tenstorrent AI ULC
 
-
+import sqlite3
+from pathlib import Path
+from typing import Any, Dict, Generator, List, Optional, Union
 
-from ttnn_visualizer.exceptions import
-    DatabaseFileNotFoundException,
-)
+from ttnn_visualizer.exceptions import DatabaseFileNotFoundException
 from ttnn_visualizer.models import (
-    Operation,
-    DeviceOperation,
     Buffer,
     BufferPage,
+    Device,
+    DeviceOperation,
+    InputTensor,
     Instance,
-    Tensor,
+    Operation,
     OperationArgument,
-    StackTrace,
-    InputTensor,
     OutputTensor,
-    Device,
     ProducersConsumers,
+    StackTrace,
+    Tensor,
     TensorComparisonRecord,
 )
-import sqlite3
-from typing import List, Optional
-from pathlib import Path
 
 
 class LocalQueryRunner:
@@ -75,11 +72,7 @@ class DatabaseQueries:
             raise ValueError(
                 "Must provide either an existing connection or instance"
             )
-
-        if remote_connection and remote_connection.useRemoteQuerying:
-            raise NotImplementedError("Remote querying is not implemented yet")
-        else:
-            self.query_runner = LocalQueryRunner(instance=instance)
+        self.query_runner = LocalQueryRunner(instance=instance)
 
     def _check_table_exists(self, table_name: str) -> bool:
         """
ttnn_visualizer/remote_sqlite_setup.py
CHANGED
@@ -8,10 +8,10 @@ import subprocess
 from ttnn_visualizer.decorators import remote_exception_handler
 from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import (
-    RemoteSqliteException,
-    SSHException,
     AuthenticationException,
     NoValidConnectionsError,
+    RemoteSqliteException,
+    SSHException,
 )
 from ttnn_visualizer.models import RemoteConnection
 
ttnn_visualizer/settings.py
CHANGED
ttnn_visualizer/sftp_operations.py
CHANGED
@@ -5,30 +5,25 @@
 import json
 import logging
 import re
-import time
 import subprocess
+import time
 from pathlib import Path
 from stat import S_ISDIR
 from threading import Thread
 from typing import List, Optional
 
 from flask import current_app
-
 from ttnn_visualizer.decorators import remote_exception_handler
 from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import (
+    AuthenticationException,
     NoProjectsException,
+    NoValidConnectionsError,
     RemoteConnectionException,
     SSHException,
-    AuthenticationException,
-    NoValidConnectionsError,
 )
 from ttnn_visualizer.models import RemoteConnection, RemoteReportFolder
-from ttnn_visualizer.sockets import (
-    FileProgress,
-    FileStatus,
-    emit_file_status,
-)
+from ttnn_visualizer.sockets import FileProgress, FileStatus, emit_file_status
 from ttnn_visualizer.utils import update_last_synced
 
 logger = logging.getLogger(__name__)
ttnn_visualizer/sockets.py
CHANGED
@@ -9,11 +9,9 @@ from datetime import datetime
 from enum import Enum
 from logging import getLogger
 
-from flask_socketio import
-
+from flask_socketio import disconnect, join_room, leave_room
 from ttnn_visualizer.utils import SerializeableDataclass
 
-
 logger = getLogger(__name__)
 
 
ttnn_visualizer/static/assets/{allPaths-CGmhlOs-.js → allPaths-CFKU23gh.js}
CHANGED
@@ -1 +1 @@
-import{I as n}from"./index-DLOviMB1.js";import{I as e}from"./index-B-fsa5Ru.js";import{p as r,I as s}from"./index-
+import{I as n}from"./index-DLOviMB1.js";import{I as e}from"./index-B-fsa5Ru.js";import{p as r,I as s}from"./index-B2fHW2_O.js";function I(o,t){var a=r(o);return t===s.STANDARD?n[a]:e[a]}function p(o){return r(o)}export{n as IconSvgPaths16,e as IconSvgPaths20,I as getIconPaths,p as iconNameToPathsRecordKey};
ttnn_visualizer/static/assets/{allPathsLoader-CH9za42_.js → allPathsLoader-CpaihUCo.js}
CHANGED
@@ -1,2 +1,2 @@
-const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-
-import{_ as o,a as n,b as i}from"./index-
+const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-CFKU23gh.js","assets/index-DLOviMB1.js","assets/index-B-fsa5Ru.js","assets/index-B2fHW2_O.js","assets/index-BueCaPcI.css"])))=>i.map(i=>d[i]);
+import{_ as o,a as n,b as i}from"./index-B2fHW2_O.js";var _=function(e,a){return o(void 0,void 0,void 0,function(){var t;return n(this,function(r){switch(r.label){case 0:return[4,i(()=>import("./allPaths-CFKU23gh.js"),__vite__mapDeps([0,1,2,3,4]))];case 1:return t=r.sent().getIconPaths,[2,t(e,a)]}})})};export{_ as allPathsLoader};