ttnn-visualizer 0.52.1__py3-none-any.whl → 0.54.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +2 -0
- ttnn_visualizer/csv_queries.py +102 -54
- ttnn_visualizer/serializers.py +39 -9
- ttnn_visualizer/settings.py +1 -0
- ttnn_visualizer/static/assets/{allPaths-DOFQJGIv.js → allPaths-ClJTDoXf.js} +1 -1
- ttnn_visualizer/static/assets/allPathsLoader-DLMimLnJ.js +2 -0
- ttnn_visualizer/static/assets/index-DdmYzvVg.css +7 -0
- ttnn_visualizer/static/assets/{index-WayH7MCF.js → index-eW1dMfMf.js} +288 -288
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-BwHoJMaV.js → splitPathsBySizeLoader-DjzfvTVt.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/views.py +20 -9
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/METADATA +2 -2
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/RECORD +18 -18
- ttnn_visualizer/static/assets/allPathsLoader-Bca8XiL0.js +0 -2
- ttnn_visualizer/static/assets/index-BxeIYL6g.css +0 -7
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/licenses/LICENSE +0 -0
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/licenses/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.52.1.dist-info → ttnn_visualizer-0.54.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/app.py
CHANGED
ttnn_visualizer/csv_queries.py
CHANGED
@@ -4,6 +4,7 @@
 
 import csv
 import json
+import logging
 import os
 import tempfile
 from io import StringIO
@@ -16,6 +17,8 @@ from tt_perf_report import perf_report
 from ttnn_visualizer.exceptions import DataFormatError
 from ttnn_visualizer.models import Instance
 
+logger = logging.getLogger(__name__)
+
 
 class LocalCSVQueryRunner:
     def __init__(self, file_path: str, offset: int = 0):
@@ -432,38 +435,50 @@ class OpsPerformanceReportQueries:
     }
 
     DEFAULT_SIGNPOST = None
-    DEFAULT_IGNORE_SIGNPOSTS =
+    DEFAULT_IGNORE_SIGNPOSTS = True
     DEFAULT_MIN_PERCENTAGE = 0.5
     DEFAULT_ID_RANGE = None
     DEFAULT_NO_ADVICE = False
     DEFAULT_TRACING_MODE = False
+    DEFAULT_RAW_OP_CODES = True
+    DEFAULT_NO_HOST_OPS = False
+    DEFAULT_NO_STACKED_REPORT = False
+    DEFAULT_NO_STACK_BY_IN0 = True
 
     @classmethod
-    def generate_report(cls, instance):
+    def generate_report(cls, instance, **kwargs):
         raw_csv = OpsPerformanceQueries.get_raw_csv(instance)
         csv_file = StringIO(raw_csv)
         csv_output_file = tempfile.mktemp(suffix=".csv")
         csv_stacked_output_file = tempfile.mktemp(suffix=".csv")
+        signpost = kwargs.get("signpost", cls.DEFAULT_SIGNPOST)
+        ignore_signposts = cls.DEFAULT_IGNORE_SIGNPOSTS
+        stack_by_in0 = kwargs.get("stack_by_in0", cls.DEFAULT_NO_STACK_BY_IN0)
+
+        if signpost:
+            ignore_signposts = False
+
         # perf_report currently generates a PNG alongside the CSV using the same temp name - we'll just delete it afterwards
         stacked_png_file = os.path.splitext(csv_output_file)[0] + ".png"
 
         try:
             perf_report.generate_perf_report(
                 csv_file,
-
-
+                signpost,
+                ignore_signposts,
                 cls.DEFAULT_MIN_PERCENTAGE,
                 cls.DEFAULT_ID_RANGE,
                 csv_output_file,
                 cls.DEFAULT_NO_ADVICE,
                 cls.DEFAULT_TRACING_MODE,
-
-
-
-
+                cls.DEFAULT_RAW_OP_CODES,
+                cls.DEFAULT_NO_HOST_OPS,
+                cls.DEFAULT_NO_STACKED_REPORT,
+                stack_by_in0,
                 csv_stacked_output_file,
             )
         except Exception as e:
+            logger.error(f"Error generating performance report: {e}")
             raise DataFormatError(f"Error generating performance report: {e}") from e
 
         ops_perf_results = []
@@ -472,56 +487,89 @@
             for row in ops_perf_results_reader:
                 ops_perf_results.append(row)
 
+        # Returns a list of unique signposts in the order they appear
+        # TODO: Signpost names are not unique but tt-perf-report treats them as such
+        captured_signposts = set()
+        signposts = []
+        for index, row in enumerate(ops_perf_results):
+            if row.get("OP TYPE") == "signpost":
+                op_code = row["OP CODE"]
+                op_id = index + 2  # Match IDs with row numbers in ops perf results csv
+                if not any(s["op_code"] == op_code for s in signposts):
+                    captured_signposts.add(op_code)
+                    signposts.append({"id": op_id, "op_code": op_code})
+
         report = []
 
- (27 removed lines; original content not shown in this extract)
+        if os.path.exists(csv_output_file):
+            try:
+                with open(csv_output_file, newline="") as csvfile:
+                    reader = csv.reader(csvfile, delimiter=",")
+                    next(reader, None)
+                    for row in reader:
+                        processed_row = {
+                            column: row[index]
+                            for index, column in enumerate(cls.REPORT_COLUMNS)
+                            if index < len(row)
+                        }
+                        if "advice" in processed_row and processed_row["advice"]:
+                            processed_row["advice"] = processed_row["advice"].split(
+                                " • "
+                            )
+                        else:
+                            processed_row["advice"] = []
+
+                        # Get the op type from the raw file for this row as it is not returned from tt-perf-report
+                        op_id = int(row[0])
+                        raw_idx = op_id - 2
+                        if 0 <= raw_idx < len(ops_perf_results):
+                            processed_row["op_type"] = ops_perf_results[
+                                raw_idx
+                            ].get("OP TYPE")
+                        else:
+                            processed_row["op_type"] = None
+
+                        report.append(processed_row)
+            except csv.Error as e:
+                raise DataFormatError() from e
+            finally:
+                os.unlink(csv_output_file)
 
         stacked_report = []
 
- (11 removed lines; original content not shown in this extract)
+        if os.path.exists(csv_stacked_output_file):
+            try:
+                with open(csv_stacked_output_file, newline="") as csvfile:
+                    reader = csv.reader(csvfile, delimiter=",")
+                    next(reader, None)
+
+                    for row in reader:
+                        processed_row = {
+                            column: row[index]
+                            for index, column in enumerate(cls.STACKED_REPORT_COLUMNS)
+                            if index < len(row)
+                        }
+
+                        if "op_code" in processed_row and any(
+                            processed_row["op_code"] in signpost["op_code"]
+                            for signpost in signposts
+                        ):
+                            processed_row["op_type"] = "signpost"
+                        else:
+                            processed_row["op_type"] = "unknown"
+
+                        stacked_report.append(processed_row)
+            except csv.Error as e:
+                raise DataFormatError() from e
+            finally:
+                os.unlink(csv_stacked_output_file)
+                if os.path.exists(stacked_png_file):
+                    os.unlink(stacked_png_file)
 
                         stacked_report.append(processed_row)
- (7 removed lines; original content not shown in this extract)
-        return {"report": report, "stacked_report": stacked_report}
+
+        return {
+            "report": report,
+            "stacked_report": stacked_report,
+            "signposts": signposts,
+        }
ttnn_visualizer/serializers.py
CHANGED
@@ -190,14 +190,25 @@ def serialize_operation(
 
 
 def serialize_operation_buffers(operation: Operation, operation_buffers):
-    buffer_data = [
-        for b in
-
-
+    buffer_data = []
+    for b in operation_buffers:
+        buffer_dict = {
+            "device_id": b.device_id,
+            "address": b.address,
+            "buffer_type": (
+                b.buffer_type.value
+                if hasattr(b.buffer_type, "value")
+                else b.buffer_type
+            ),
+            "buffer_layout": b.buffer_layout,
+            "size": b.max_size_per_bank,
+        }
+        buffer_data.append(buffer_dict)
+
     return {
         "id": operation.operation_id,
         "name": operation.name,
-        "buffers":
+        "buffers": buffer_data,
     }
 
 
@@ -206,14 +217,33 @@ def serialize_devices(devices):
 
 
 def serialize_operations_buffers(operations, buffers):
-
+    # Pre-serialize all buffers once using optimized method with defaultdict
+    serialized_buffers = defaultdict(list)
     for b in buffers:
-        buffer_dict
+        buffer_dict = {
+            "device_id": b.device_id,
+            "address": b.address,
+            "buffer_type": (
+                b.buffer_type.value
+                if hasattr(b.buffer_type, "value")
+                else b.buffer_type
+            ),
+            "buffer_layout": b.buffer_layout,
+            "size": b.max_size_per_bank,
+        }
+        serialized_buffers[b.operation_id].append(buffer_dict)
 
     results = []
     for operation in operations:
-        operation_buffers =
-        results.append(
+        operation_buffers = serialized_buffers[operation.operation_id]
+        results.append(
+            {
+                "id": operation.operation_id,
+                "name": operation.name,
+                "buffers": operation_buffers,
+            }
+        )
+
     return results
 
 
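The second serializer hunk replaces a per-operation scan of the full buffer list with a single pass that groups buffers by operation_id in a defaultdict. Below is a small self-contained sketch of that grouping pattern, using a simplified Buffer stand-in rather than the package's actual model class.

from collections import defaultdict
from dataclasses import dataclass

@dataclass
class Buffer:  # simplified stand-in for the buffer model used by the serializers
    operation_id: int
    device_id: int
    address: int
    size: int

def group_buffers(buffers):
    # One pass over all buffers instead of filtering the whole list for every operation.
    grouped = defaultdict(list)
    for b in buffers:
        grouped[b.operation_id].append(
            {"device_id": b.device_id, "address": b.address, "size": b.size}
        )
    return grouped

buffers = [Buffer(1, 0, 4096, 2048), Buffer(2, 0, 8192, 1024), Buffer(1, 1, 4096, 2048)]
by_op = group_buffers(buffers)
assert len(by_op[1]) == 2  # operations with no buffers simply yield an empty list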
ttnn_visualizer/settings.py
CHANGED
@@ -68,6 +68,7 @@ class DefaultConfig(object):
     # Gunicorn settings
     GUNICORN_WORKER_CLASS = os.getenv("GUNICORN_WORKER_CLASS", "gevent")
     GUNICORN_WORKERS = os.getenv("GUNICORN_WORKERS", "1")
+    GUNICORN_TIMEOUT = os.getenv("GUNICORN_TIMEOUT", "60")
     PORT = os.getenv("PORT", "8000")
     HOST = os.getenv("HOST", "localhost")
     DEV_SERVER_PORT = "5173"
ttnn_visualizer/static/assets/{allPaths-DOFQJGIv.js → allPaths-ClJTDoXf.js}
CHANGED

@@ -1 +1 @@
-import{I as s}from"./index-CnPrfHYh.js";import{I as r}from"./index-Cnc1EkDo.js";import{p as n,I as c}from"./index-
+import{I as s}from"./index-CnPrfHYh.js";import{I as r}from"./index-Cnc1EkDo.js";import{p as n,I as c}from"./index-eW1dMfMf.js";function p(t,a){const o=n(t);return a===c.STANDARD?s[o]:r[o]}export{s as IconSvgPaths16,r as IconSvgPaths20,p as getIconPaths};

ttnn_visualizer/static/assets/allPathsLoader-DLMimLnJ.js
ADDED

@@ -0,0 +1,2 @@
+const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-ClJTDoXf.js","assets/index-CnPrfHYh.js","assets/index-Cnc1EkDo.js","assets/index-eW1dMfMf.js","assets/index-DdmYzvVg.css"])))=>i.map(i=>d[i]);
+import{_ as e}from"./index-eW1dMfMf.js";const s=async(t,a)=>{const{getIconPaths:o}=await e(async()=>{const{getIconPaths:r}=await import("./allPaths-ClJTDoXf.js");return{getIconPaths:r}},__vite__mapDeps([0,1,2,3,4]));return o(t,a)};export{s as allPathsLoader};