ttnn-visualizer 0.49.0__py3-none-any.whl → 0.64.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- ttnn_visualizer/app.py +151 -49
- ttnn_visualizer/csv_queries.py +154 -45
- ttnn_visualizer/decorators.py +0 -9
- ttnn_visualizer/exceptions.py +0 -7
- ttnn_visualizer/models.py +20 -1
- ttnn_visualizer/queries.py +8 -0
- ttnn_visualizer/serializers.py +53 -9
- ttnn_visualizer/settings.py +24 -10
- ttnn_visualizer/ssh_client.py +1 -4
- ttnn_visualizer/static/assets/allPaths-DWjqav_8.js +1 -0
- ttnn_visualizer/static/assets/allPathsLoader-B0eRT9aL.js +2 -0
- ttnn_visualizer/static/assets/index-BE2R-cuu.css +1 -0
- ttnn_visualizer/static/assets/index-BZITDwoa.js +1 -0
- ttnn_visualizer/static/assets/{index-DVrPLQJ7.js → index-DDrUX09k.js} +274 -479
- ttnn_visualizer/static/assets/index-voJy5fZe.js +1 -0
- ttnn_visualizer/static/assets/splitPathsBySizeLoader-_GpmIkFm.js +1 -0
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_serializers.py +2 -0
- ttnn_visualizer/tests/test_utils.py +362 -0
- ttnn_visualizer/utils.py +142 -0
- ttnn_visualizer/views.py +181 -87
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/METADATA +58 -30
- ttnn_visualizer-0.64.0.dist-info/RECORD +44 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/licenses/LICENSE +6 -0
- ttnn_visualizer/remote_sqlite_setup.py +0 -100
- ttnn_visualizer/static/assets/allPaths-G_CNx_x1.js +0 -1
- ttnn_visualizer/static/assets/allPathsLoader-s_Yfmxfp.js +0 -2
- ttnn_visualizer/static/assets/index-CnPrfHYh.js +0 -1
- ttnn_visualizer/static/assets/index-Cnc1EkDo.js +0 -1
- ttnn_visualizer/static/assets/index-UuXdrHif.css +0 -7
- ttnn_visualizer/static/assets/splitPathsBySizeLoader-ivxxaHxa.js +0 -1
- ttnn_visualizer-0.49.0.dist-info/RECORD +0 -44
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/licenses/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/serializers.py
CHANGED
@@ -19,6 +19,7 @@ def serialize_operations(
     devices,
     producers_consumers,
     device_operations,
+    error_records=None,
 ):
     tensors_dict = {t.tensor_id: t for t in tensors}
     device_operations_dict = {
@@ -29,6 +30,11 @@ def serialize_operations(
 
     stack_traces_dict = {st.operation_id: st.stack_trace for st in stack_traces}
 
+    errors_dict = {}
+    if error_records:
+        for error in error_records:
+            errors_dict[error.operation_id] = error.to_nested_dict()
+
     arguments_dict = defaultdict(list)
     for argument in operation_arguments:
         arguments_dict[argument.operation_id].append(argument)
@@ -49,6 +55,8 @@ def serialize_operations(
         )
         id = operation_data.pop("operation_id", None)
 
+        error_data = errors_dict.get(operation.operation_id)
+
         results.append(
             {
                 **operation_data,
@@ -58,6 +66,7 @@ def serialize_operations(
                 "arguments": arguments,
                 "inputs": inputs,
                 "outputs": outputs,
+                "error": error_data,
             }
         )
     return results
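The new `error_records` parameter is optional, so existing callers keep working; when records are supplied, each is keyed by `operation_id` and attached to the serialized operation under `"error"`. A minimal sketch of a record shape that would satisfy this code path — the field names are illustrative assumptions, since the real model lives in ttnn_visualizer/models.py, which this diff touches but does not show:

```python
# Hypothetical error record compatible with the serializer change above.
# Field names are assumptions for illustration, not the project's actual model.
from dataclasses import dataclass


@dataclass
class ErrorRecord:
    operation_id: int
    operation_name: str
    error_type: str
    error_message: str

    def to_nested_dict(self):
        # Mirrors the diff's comment: operation_id and operation_name are
        # excluded, leaving only the error payload to nest under "error".
        return {"error_type": self.error_type, "error_message": self.error_message}


records = [ErrorRecord(7, "ttnn.add", "RuntimeError", "out of memory")]
errors_dict = {e.operation_id: e.to_nested_dict() for e in records}
assert errors_dict[7]["error_type"] == "RuntimeError"
```

Operations without a matching record serialize with `"error": None`, since `errors_dict.get()` returns `None` on a miss.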
@@ -144,6 +153,7 @@ def serialize_operation(
     devices,
     producers_consumers,
     device_operations,
+    error_record=None,
 ):
     tensors_dict = {t.tensor_id: t for t in tensors}
     comparisons = comparisons_by_tensor_id(
@@ -176,6 +186,9 @@ def serialize_operation(
             device_operations_data = do.captured_graph
             break
 
+    # Convert error record to nested dict if it exists (excludes operation_id and operation_name)
+    error_data = error_record.to_nested_dict() if error_record else None
+
     return {
         **operation_data,
         "id": id,
@@ -186,18 +199,30 @@ def serialize_operation(
         "arguments": arguments_data,
         "inputs": inputs_data or [],
         "outputs": outputs_data or [],
+        "error": error_data,
     }
 
 
 def serialize_operation_buffers(operation: Operation, operation_buffers):
-    buffer_data = [
-        for b in
-
-
+    buffer_data = []
+    for b in operation_buffers:
+        buffer_dict = {
+            "device_id": b.device_id,
+            "address": b.address,
+            "buffer_type": (
+                b.buffer_type.value
+                if hasattr(b.buffer_type, "value")
+                else b.buffer_type
+            ),
+            "buffer_layout": b.buffer_layout,
+            "size": b.max_size_per_bank,
+        }
+        buffer_data.append(buffer_dict)
+
     return {
         "id": operation.operation_id,
         "name": operation.name,
-        "buffers":
+        "buffers": buffer_data,
     }
 
 
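The `hasattr(b.buffer_type, "value")` guard makes the serializer tolerant of `buffer_type` arriving either as an enum member or as a raw integer, depending on how the row was loaded. A short sketch of that normalization in isolation, with assumed enum values:

```python
# Normalization used by the buffer serializers above, extracted for clarity.
# The BufferType members here are illustrative assumptions.
from enum import Enum


class BufferType(Enum):
    DRAM = 0
    L1 = 1


def normalize_buffer_type(buffer_type):
    # Enum members expose .value; plain ints pass through unchanged.
    return buffer_type.value if hasattr(buffer_type, "value") else buffer_type


assert normalize_buffer_type(BufferType.L1) == 1
assert normalize_buffer_type(0) == 0
```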
@@ -206,14 +231,33 @@ def serialize_devices(devices):
 
 
 def serialize_operations_buffers(operations, buffers):
-
+    # Pre-serialize all buffers once using optimized method with defaultdict
+    serialized_buffers = defaultdict(list)
     for b in buffers:
-        buffer_dict
+        buffer_dict = {
+            "device_id": b.device_id,
+            "address": b.address,
+            "buffer_type": (
+                b.buffer_type.value
+                if hasattr(b.buffer_type, "value")
+                else b.buffer_type
+            ),
+            "buffer_layout": b.buffer_layout,
+            "size": b.max_size_per_bank,
+        }
+        serialized_buffers[b.operation_id].append(buffer_dict)
 
     results = []
     for operation in operations:
-        operation_buffers =
-        results.append(
+        operation_buffers = serialized_buffers[operation.operation_id]
+        results.append(
+            {
+                "id": operation.operation_id,
+                "name": operation.name,
+                "buffers": operation_buffers,
+            }
+        )
+
     return results
 
 
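The rewritten `serialize_operations_buffers` replaces a per-operation scan with a single bucketing pass: buffers are grouped by `operation_id` once, so the cost drops from O(operations × buffers) to O(operations + buffers). The core of the pattern, reduced to a runnable sketch:

```python
# One pass to bucket buffers by operation_id, then O(1) lookups per operation.
from collections import defaultdict

buffers = [
    {"operation_id": 1, "address": 0x10},
    {"operation_id": 1, "address": 0x20},
    {"operation_id": 2, "address": 0x30},
]

grouped = defaultdict(list)
for b in buffers:
    grouped[b["operation_id"]].append(b)

# Operations with no buffers get an empty list for free from defaultdict.
assert len(grouped[1]) == 2
assert grouped[99] == []
```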
ttnn_visualizer/settings.py
CHANGED
@@ -6,7 +6,12 @@ import os
 from pathlib import Path
 
 from dotenv import load_dotenv
-from
+from sqlalchemy.pool import NullPool
+from ttnn_visualizer.utils import (
+    get_app_data_directory,
+    is_running_in_container,
+    str_to_bool,
+)
 
 load_dotenv()
 
@@ -40,9 +45,13 @@ class DefaultConfig(object):
     PERFORMANCE_DIRECTORY_NAME = "performance-reports"
     NPE_DIRECTORY_NAME = "npe-reports"
     APPLICATION_DIR = os.path.abspath(os.path.join(__file__, "..", os.pardir))
-    APP_DATA_DIRECTORY = os.getenv("APP_DATA_DIRECTORY", APPLICATION_DIR)
-    STATIC_ASSETS_DIR = Path(APPLICATION_DIR).joinpath("ttnn_visualizer", "static")
     TT_METAL_HOME = os.getenv("TT_METAL_HOME", None)
+    APP_DATA_DIRECTORY = os.getenv(
+        "APP_DATA_DIRECTORY",
+        get_app_data_directory(TT_METAL_HOME, APPLICATION_DIR),
+    )
+
+    STATIC_ASSETS_DIR = Path(APPLICATION_DIR).joinpath("ttnn_visualizer", "static")
     SEND_FILE_MAX_AGE_DEFAULT = 0
 
     LAUNCH_BROWSER_ON_START = str_to_bool(os.getenv("LAUNCH_BROWSER_ON_START", "true"))
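`APP_DATA_DIRECTORY` now falls back to a helper that consults `TT_METAL_HOME` before defaulting to the application directory. The helper's implementation is in ttnn_visualizer/utils.py (expanded by 142 lines in this release) and is not shown in this diff, so the sketch below is only a plausible shape, with the subdirectory name invented for illustration:

```python
# Plausible shape of get_app_data_directory; the actual logic lives in
# ttnn_visualizer/utils.py and may differ. The "ttnn-visualizer" subdirectory
# name is an assumption, not taken from the source.
import os


def get_app_data_directory(tt_metal_home, fallback_dir):
    if tt_metal_home:
        return os.path.join(tt_metal_home, "ttnn-visualizer")
    return fallback_dir


assert get_app_data_directory(None, "/opt/app") == "/opt/app"
assert get_app_data_directory("/home/user/tt-metal", "/opt/app").endswith("ttnn-visualizer")
```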
@@ -55,21 +64,26 @@ class DefaultConfig(object):
     USE_WEBSOCKETS = str_to_bool(os.getenv("USE_WEBSOCKETS", "true"))
 
     # SQL Alchemy Settings
-
-
-
+    # Build database path - use absolute path to avoid any ambiguity
+    _db_file_path = str(Path(APP_DATA_DIRECTORY) / f"ttnn_{DB_VERSION}.db")
+    SQLALCHEMY_DATABASE_URI = f"sqlite:///{_db_file_path}"
     SQLALCHEMY_ENGINE_OPTIONS = {
-
-
-
+        # SQLite-specific settings for multi-process/worker environments
+        # NullPool: Each worker gets its own connection, avoiding file locking issues
+        # This is critical for gunicorn's multi-worker mode with SQLite
+        "poolclass": NullPool,
+        "connect_args": {
+            "check_same_thread": False,  # Allow SQLite to be used across threads in gevent
+        },
     }
     SQLALCHEMY_TRACK_MODIFICATIONS = False
 
     # Gunicorn settings
     GUNICORN_WORKER_CLASS = os.getenv("GUNICORN_WORKER_CLASS", "gevent")
     GUNICORN_WORKERS = os.getenv("GUNICORN_WORKERS", "1")
+    GUNICORN_TIMEOUT = os.getenv("GUNICORN_TIMEOUT", "60")
     PORT = os.getenv("PORT", "8000")
-    HOST = os.getenv("HOST", "localhost")
+    HOST = os.getenv("HOST", "0.0.0.0" if is_running_in_container() else "localhost")
     DEV_SERVER_PORT = "5173"
     DEV_SERVER_HOST = "localhost"
 
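These engine options matter because gunicorn's gevent workers would otherwise share pooled SQLite connections across greenlets and processes, which is exactly where SQLite's file locking bites. `NullPool` opens a fresh connection per checkout and closes it on release, and `check_same_thread=False` lifts SQLite's same-thread restriction for greenlets. A standalone SQLAlchemy equivalent of the engine Flask builds from these settings (the database path is illustrative only):

```python
# Standalone equivalent of the engine Flask-SQLAlchemy builds from the
# settings above; the database path here is illustrative, not the app's.
from sqlalchemy import create_engine
from sqlalchemy.pool import NullPool

engine = create_engine(
    "sqlite:////tmp/ttnn_example.db",
    poolclass=NullPool,  # no pooled handles shared between workers
    connect_args={"check_same_thread": False},  # allow use across greenlets
)

with engine.connect() as conn:
    conn.exec_driver_sql("SELECT 1")
```

The new `HOST` default (`0.0.0.0` inside a container, `localhost` otherwise) keeps the server reachable through Docker port mapping without exposing it by default on bare metal.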
ttnn_visualizer/ssh_client.py
CHANGED
@@ -210,9 +210,7 @@ class SSHClient:
             detail=raw_error,
         )
 
-    def read_file(
-        self, remote_path: Union[str, Path], timeout: int = 30
-    ) -> Optional[bytes]:
+    def read_file(self, remote_path: str, timeout: int = 30) -> Optional[bytes]:
         """
         Read a remote file using SSH cat command.
 
@@ -229,7 +227,6 @@ class SSHClient:
             return result.encode("utf-8")
         except SSHException as e:
             if "No such file" in str(e) or "cannot open" in str(e):
-                logger.error(f"File not found or cannot be read: {path}")
                 return None
             raise
 
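The narrowed `read_file` signature drops the `Union[str, Path]` annotation, and the removed log line interpolated a `path` variable that does not appear elsewhere in the hunk, so missing files now simply return `None` without logging. A sketch of a call site under the new contract — only the `read_file` call shown in the diff is used; everything else is assumed for illustration:

```python
# Given an SSHClient instance (construction not shown in this diff),
# the simplified read_file contract looks like this. The remote path
# is illustrative.
from ttnn_visualizer.ssh_client import SSHClient


def fetch_report(client: SSHClient) -> bytes:
    data = client.read_file("/remote/path/db.sqlite", timeout=30)
    if data is None:
        # Missing or unreadable remote files return None rather than raising;
        # other SSH failures still propagate as SSHException.
        raise FileNotFoundError("remote report not found")
    return data
```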
ttnn_visualizer/static/assets/allPaths-DWjqav_8.js
ADDED

@@ -0,0 +1 @@
+import{I as s}from"./index-voJy5fZe.js";import{I as r}from"./index-BZITDwoa.js";import{p as n,I as c}from"./index-DDrUX09k.js";function p(t,a){const o=n(t);return a===c.STANDARD?s[o]:r[o]}export{s as IconSvgPaths16,r as IconSvgPaths20,p as getIconPaths};

ttnn_visualizer/static/assets/allPathsLoader-B0eRT9aL.js
ADDED

@@ -0,0 +1,2 @@
+const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-DWjqav_8.js","assets/index-voJy5fZe.js","assets/index-BZITDwoa.js","assets/index-DDrUX09k.js","assets/index-BE2R-cuu.css"])))=>i.map(i=>d[i]);
+import{_ as e}from"./index-DDrUX09k.js";const s=async(t,a)=>{const{getIconPaths:o}=await e(async()=>{const{getIconPaths:r}=await import("./allPaths-DWjqav_8.js");return{getIconPaths:r}},__vite__mapDeps([0,1,2,3,4]));return o(t,a)};export{s as allPathsLoader};