ttnn-visualizer 0.29.0__py3-none-any.whl → 0.31.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +4 -4
- ttnn_visualizer/csv_queries.py +20 -5
- ttnn_visualizer/decorators.py +0 -7
- ttnn_visualizer/file_uploads.py +5 -7
- ttnn_visualizer/models.py +32 -32
- ttnn_visualizer/queries.py +6 -6
- ttnn_visualizer/sessions.py +78 -69
- ttnn_visualizer/settings.py +4 -1
- ttnn_visualizer/sftp_operations.py +24 -25
- ttnn_visualizer/static/assets/{allPaths-CJHbl9k5.js → allPaths-LtEU5_sy.js} +1 -1
- ttnn_visualizer/static/assets/{allPathsLoader-BMROdgRm.js → allPathsLoader-B4F0G1kZ.js} +2 -2
- ttnn_visualizer/static/assets/{index-DRqEueCH.js → index-CC7TIRx-.js} +249 -249
- ttnn_visualizer/static/assets/{index-CINMcROY.css → index-T38m8LD9.css} +2 -2
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-Bff1kHt3.js → splitPathsBySizeLoader-gOHSu5Y3.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_queries.py +4 -4
- ttnn_visualizer/utils.py +17 -29
- ttnn_visualizer/views.py +251 -90
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/METADATA +4 -4
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/RECORD +25 -25
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/LICENSE +0 -0
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/views.py
CHANGED
@@ -9,6 +9,7 @@ import time
 from http import HTTPStatus
 from pathlib import Path
 from typing import List
+import shutil

 from flask import Blueprint
 from flask import request, current_app

@@ -19,7 +20,7 @@ from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import DataFormatError
 from ttnn_visualizer.exceptions import RemoteConnectionException
 from ttnn_visualizer.file_uploads import (
-
+    extract_profiler_name,
     extract_npe_name,
     save_uploaded_files,
     validate_files,

@@ -45,13 +46,13 @@ from ttnn_visualizer.sessions import (
     update_instance,
 )
 from ttnn_visualizer.sftp_operations import (
-
+    sync_remote_profiler_folders,
     read_remote_file,
     check_remote_path_for_reports,
-    get_remote_report_folders,
-    check_remote_path_exists,
     get_remote_profiler_folders,
-
+    check_remote_path_exists,
+    get_remote_performance_folders,
+    sync_remote_performance_folders,
     get_cluster_desc,
 )
 from ttnn_visualizer.ssh_client import get_client
@@ -168,12 +169,7 @@ def operation_detail(operation_id, session):
     )


-@api.route(
-    "operation-history",
-    methods=[
-        "GET",
-    ],
-)
+@api.route("operation-history", methods=["GET"])
 @with_session
 @timer
 def operation_history(session: Instance):

@@ -192,7 +188,7 @@ def operation_history(session: Instance):
         return json.loads(operation_history)
     else:
         operation_history_file = (
-            Path(str(session.
+            Path(str(session.profiler_path)).parent / operation_history_filename
         )
         if not operation_history_file.exists():
             return []

@@ -205,17 +201,17 @@
 @timer
 def get_config(session: Instance):
     if session.remote_connection and session.remote_connection.useRemoteQuerying:
-        if not session.
+        if not session.remote_profiler_folder:
             return {}
         config = read_remote_file(
             remote_connection=session.remote_connection,
-            remote_path=Path(session.
+            remote_path=Path(session.remote_profiler_folder.remotePath, "config.json"),
         )
         if not config:
             return {}
         return config
     else:
-        config_file = Path(str(session.
+        config_file = Path(str(session.profiler_path)).parent.joinpath("config.json")
         if not config_file.exists():
             return {}
         with open(config_file, "r") as file:
@@ -375,20 +371,124 @@ def get_operation_buffers(operation_id, session: Instance):
     return serialize_operation_buffers(operation, buffers)


-@api.route("/profiler
+@api.route("/profiler", methods=["GET"])
+@with_session
+def get_profiler_data_list(session: Instance):
+    # Doesn't handle remote at the moment
+    # is_remote = True if session.remote_connection else False
+    # config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    config_key = 'LOCAL_DATA_DIRECTORY'
+    data_directory = Path(current_app.config[config_key])
+
+    # if is_remote:
+    #     connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+    #     path = data_directory / connection.host / current_app.config["PROFILER_DIRECTORY_NAME"]
+    # else:
+    path = data_directory / current_app.config["PROFILER_DIRECTORY_NAME"]
+
+    if not path.exists():
+        path.mkdir(parents=True, exist_ok=True)
+
+    directory_names = [directory.name for directory in path.iterdir() if directory.is_dir()]
+
+    valid_dirs = []
+
+    for dir_name in directory_names:
+        dir_path = Path(path) / dir_name
+        files = list(dir_path.glob("**/*"))
+
+        # Would like to use the existing validate_files function but there's a type difference I'm not sure how to handle
+        if not any(file.name == "db.sqlite" for file in files):
+            continue
+        if not any(file.name == "config.json" for file in files):
+            continue
+
+        valid_dirs.append(dir_name)
+
+    return jsonify(valid_dirs)
+
+
+@api.route("/profiler/<profiler_name>", methods=["DELETE"])
 @with_session
-def
-
+def delete_profiler_report(profiler_name, session: Instance):
+    is_remote = bool(session.remote_connection)
+    config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    data_directory = Path(current_app.config[config_key])
+
+    if not profiler_name:
+        return Response(status=HTTPStatus.BAD_REQUEST, response="Report name is required.")
+
+    if is_remote:
+        connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+        path = data_directory / connection.host / current_app.config["PROFILER_DIRECTORY_NAME"]
+    else:
+        path = data_directory / current_app.config["PROFILER_DIRECTORY_NAME"] / profiler_name
+
+    if session.active_report.profiler_name == profiler_name:
+        instance_id = request.args.get("instanceId")
+        update_instance(instance_id=instance_id,profiler_name="")
+
+    if path.exists() and path.is_dir():
+        shutil.rmtree(path)
+    else:
+        return Response(status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}")
+
+    return Response(status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}")
+
+
+
+@api.route("/performance", methods=["GET"])
+@with_session
+def get_performance_data_list(session: Instance):
+    is_remote = True if session.remote_connection else False
+    config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    config_key = 'LOCAL_DATA_DIRECTORY'
+    data_directory = Path(current_app.config[config_key])
+
+    if is_remote:
+        connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+        path = data_directory / connection.host / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+    else:
+        path = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+
+    if not path.exists():
+        path.mkdir(parents=True, exist_ok=True)
+
+    directory_names = [directory.name for directory in path.iterdir() if directory.is_dir()]
+
+    valid_dirs = []
+
+    for dir_name in directory_names:
+        dir_path = Path(path) / dir_name
+        files = list(dir_path.glob("**/*"))
+
+        # Would like to use the existing validate_files function but there's a type difference I'm not sure how to handle
+        if not any(file.name == "profile_log_device.csv" for file in files):
+            continue
+        if not any(file.name == "tracy_profile_log_host.tracy" for file in files):
+            continue
+        if not any(file.name.startswith("ops_perf_results") for file in files):
+            continue
+
+        valid_dirs.append(dir_name)
+
+    return jsonify(valid_dirs)
+
+
+@api.route("/performance/device-log", methods=["GET"])
+@with_session
+def get_performance_data(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     with DeviceLogProfilerQueries(session) as csv:
         result = csv.get_all_entries(as_dict=True, limit=100)
         return jsonify(result)


-@api.route("/
+@api.route("/performance/perf-results", methods=["GET"])
 @with_session
 def get_profiler_performance_data(session: Instance):
-    if not session.
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     with OpsPerformanceQueries(session) as csv:
         # result = csv.query_by_op_code(op_code="(torch) contiguous", as_dict=True)
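For orientation, here is a minimal client sketch against the two new listing routes added in this hunk. The `/api` URL prefix, the port, and the use of an `instanceId` query parameter to satisfy `@with_session` are assumptions, not taken from this diff. A folder is only listed if it contains the files the loops above check for: `db.sqlite` and `config.json` for profiler reports; `profile_log_device.csv`, `tracy_profile_log_host.tracy`, and an `ops_perf_results*` file for performance reports.

```python
# Sketch only: the base URL, /api prefix, and instanceId handling are assumptions.
import requests

BASE_URL = "http://localhost:8000/api"  # hypothetical deployment

def list_reports(instance_id: str) -> dict:
    """List local profiler and performance report folders via the new endpoints."""
    params = {"instanceId": instance_id}
    profiler = requests.get(f"{BASE_URL}/profiler", params=params, timeout=10)
    performance = requests.get(f"{BASE_URL}/performance", params=params, timeout=10)
    profiler.raise_for_status()
    performance.raise_for_status()
    # Both endpoints return a JSON array of valid directory names.
    return {"profiler": profiler.json(), "performance": performance.json()}

if __name__ == "__main__":
    print(list_reports("example-instance-id"))
```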
@@ -396,10 +496,38 @@ def get_profiler_performance_data(session: Instance):
         return jsonify(result)


-@api.route("/
+@api.route("/performance/<performance_name>", methods=["DELETE"])
+@with_session
+def delete_performance_report(performance_name, session: Instance):
+    is_remote = bool(session.remote_connection)
+    config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    data_directory = Path(current_app.config[config_key])
+
+    if not performance_name:
+        return Response(status=HTTPStatus.BAD_REQUEST, response="Report name is required.")
+
+    if is_remote:
+        connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+        path = data_directory / connection.host / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+    else:
+        path = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"] / performance_name
+
+    if session.active_report.performance_name == performance_name:
+        instance_id = request.args.get("instanceId")
+        update_instance(instance_id=instance_id,performance_name="")
+
+    if path.exists() and path.is_dir():
+        shutil.rmtree(path)
+    else:
+        return Response(status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}")
+
+    return Response(status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}")
+
+
+@api.route("/performance/perf-results/raw", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_results_data_raw(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     content = OpsPerformanceQueries.get_raw_csv(session)
     return Response(
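A companion sketch for the two new DELETE routes (this hunk and the previous one). Per the handlers, a successful delete returns 204 No Content, an unknown folder returns 404, a missing name returns 400, and deleting the currently active report also clears it from the instance. The base URL and `instanceId` handling are the same assumptions as in the listing sketch above.

```python
# Sketch only: base URL and instanceId handling are assumptions.
import requests

BASE_URL = "http://localhost:8000/api"  # hypothetical deployment

def delete_report(kind: str, name: str, instance_id: str) -> None:
    """Delete a local report folder; kind is 'profiler' or 'performance'."""
    response = requests.delete(
        f"{BASE_URL}/{kind}/{name}",
        params={"instanceId": instance_id},
        timeout=10,
    )
    if response.status_code == 404:
        raise FileNotFoundError(f"No {kind} report named {name!r}")
    response.raise_for_status()  # 204 No Content on success

delete_report("performance", "old_run", "example-instance-id")
```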
@@ -409,12 +537,19 @@ def get_profiler_perf_results_data_raw(session: Instance):
     )


-@api.route("/
+@api.route("/performance/perf-results/report", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_results_report(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)

+    name = request.args.get("name", None)
+    performance_path = Path(session.performance_path)
+    if name:
+        performance_path = performance_path.parent / name
+        session.performance_path = str(performance_path)
+        logger.info(f"************ Profiler path set to {session.performance_path}")
+
     try:
         report = OpsPerformanceReportQueries.generate_report(session)
     except DataFormatError:
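The report route now accepts an optional `name` query parameter; when present, the handler re-points `session.performance_path` at the sibling folder `<parent>/<name>` before generating the report. A usage sketch under the same base-URL assumption as above:

```python
# Sketch only: base URL and instanceId handling are assumptions.
import requests

BASE_URL = "http://localhost:8000/api"  # hypothetical deployment

def fetch_perf_report(instance_id: str, name: str | None = None):
    """Generate the ops performance report, optionally for a sibling performance run."""
    params = {"instanceId": instance_id}
    if name:
        # Server-side this switches session.performance_path to <parent>/<name>.
        params["name"] = name
    response = requests.get(
        f"{BASE_URL}/performance/perf-results/report", params=params, timeout=30
    )
    response.raise_for_status()
    return response.json()
```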
@@ -423,10 +558,10 @@ def get_profiler_perf_results_report(session: Instance):
     return jsonify(report), 200


-@api.route("/
+@api.route("/performance/device-log/raw", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_data_raw(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     content = DeviceLogProfilerQueries.get_raw_csv(session)
     return Response(

@@ -436,10 +571,10 @@ def get_profiler_data_raw(session: Instance):
     )


-@api.route("/
+@api.route("/performance/device-log/zone/<zone>", methods=["GET"])
 @with_session
 def get_zone_statistics(zone, session: Instance):
-    if not session.
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     with DeviceLogProfilerQueries(session) as csv:
         result = csv.query_zone_statistics(zone_name=zone, as_dict=True)

@@ -454,10 +589,10 @@ def get_devices(session: Instance):
     return serialize_devices(devices)


-@api.route("/local/upload/
-def
+@api.route("/local/upload/profiler", methods=["POST"])
+def create_profiler_files():
     files = request.files.getlist("files")
-
+    profiler_directory = current_app.config["LOCAL_DATA_DIRECTORY"] / current_app.config["PROFILER_DIRECTORY_NAME"]

     if not validate_files(files, {"db.sqlite", "config.json"}):
         return StatusMessage(
@@ -465,23 +600,26 @@ def create_report_files():
             message="Invalid project directory.",
         ).model_dump()

-
-
+    if not profiler_directory.exists():
+        profiler_directory.mkdir(parents=True, exist_ok=True)
+
+    profiler_name = extract_profiler_name(files)
+    logger.info(f"Writing report files to {profiler_directory}/{profiler_name}")

-    save_uploaded_files(files,
+    save_uploaded_files(files, profiler_directory, profiler_name)

     instance_id = request.args.get("instanceId")
-    update_instance(instance_id=instance_id,
+    update_instance(instance_id=instance_id, profiler_name=profiler_name, clear_remote=True)

     return StatusMessage(
         status=ConnectionTestStates.OK, message="Success."
     ).model_dump()


-@api.route("/local/upload/
+@api.route("/local/upload/performance", methods=["POST"])
 def create_profile_files():
     files = request.files.getlist("files")
-
+    data_directory = Path(current_app.config["LOCAL_DATA_DIRECTORY"])
     instance_id = request.args.get("instanceId")

     if not validate_files(

@@ -494,10 +632,10 @@ def create_profile_files():
             message="Invalid project directory.",
         ).model_dump()

-    logger.info(f"Writing profile files to {
+    logger.info(f"Writing profile files to {data_directory} / {current_app.config['PERFORMANCE_DIRECTORY_NAME']}")

-    # Construct the base directory with
-    target_directory =
+    # Construct the base directory with profiler_name first
+    target_directory = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
     target_directory.mkdir(parents=True, exist_ok=True)

     if files:

@@ -516,11 +654,11 @@ def create_profile_files():

         save_uploaded_files(
             updated_files,
-            str(
+            str(data_directory),
         )

     update_instance(
-        instance_id=instance_id,
+        instance_id=instance_id, performance_name=profiler_folder_name, clear_remote=True
     )

     return StatusMessage(
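The local upload routes now live at `/local/upload/profiler` and `/local/upload/performance`, take a multipart `files` field, and derive the report folder name from the uploaded paths (`extract_profiler_name`). A hedged upload sketch follows; the base URL and the convention of prefixing each filename with the report directory name are assumptions about what the server expects, not taken from this diff.

```python
# Sketch only: URL, port, and the relative-path filename convention are assumptions.
from pathlib import Path
import requests

BASE_URL = "http://localhost:8000/api"  # hypothetical deployment

def upload_profiler_report(report_dir: Path, instance_id: str) -> dict:
    """Upload a local profiler report directory (db.sqlite, config.json, ...)."""
    files = []
    for file_path in report_dir.rglob("*"):
        if file_path.is_file():
            # Keep "<report name>/<relative path>" so the server can derive
            # the report name from the uploaded file paths (assumption).
            relative_name = f"{report_dir.name}/{file_path.relative_to(report_dir)}"
            files.append(("files", (relative_name, file_path.read_bytes())))
    response = requests.post(
        f"{BASE_URL}/local/upload/profiler",
        params={"instanceId": instance_id},
        files=files,
        timeout=60,
    )
    response.raise_for_status()
    return response.json()  # StatusMessage, e.g. {"status": ..., "message": "Success."}
```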
@@ -531,7 +669,7 @@ def create_profile_files():
 @api.route("/local/upload/npe", methods=["POST"])
 def create_npe_files():
     files = request.files.getlist("files")
-
+    data_directory = current_app.config["LOCAL_DATA_DIRECTORY"]

     for file in files:
         if not file.filename.endswith(".json"):

@@ -541,7 +679,7 @@ def create_npe_files():
         ).model_dump()

     npe_name = extract_npe_name(files)
-    target_directory =
+    target_directory = data_directory / current_app.config["NPE_DIRECTORY_NAME"]
     target_directory.mkdir(parents=True, exist_ok=True)

     save_uploaded_files(files, target_directory, npe_name)

@@ -554,22 +692,18 @@ def create_npe_files():
     ).model_dump()


-@api.route("/remote/
-def
+@api.route("/remote/profiler", methods=["POST"])
+def get_remote_folders_profiler():
     connection = RemoteConnection.model_validate(request.json, strict=False)
     try:
-        remote_folders: List[RemoteReportFolder] =
+        remote_folders: List[RemoteReportFolder] = get_remote_profiler_folders(
             RemoteConnection.model_validate(connection, strict=False)
         )

         for rf in remote_folders:
             directory_name = Path(rf.remotePath).name
             remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-            local_path =
-                Path(remote_data_directory)
-                .joinpath(connection.host)
-                .joinpath(directory_name)
-            )
+            local_path = remote_data_directory / current_app.config["PROFILER_DIRECTORY_NAME"] / connection.host / directory_name
             logger.info(f"Checking last synced for {directory_name}")
             rf.lastSynced = read_last_synced_file(str(local_path))
             if not rf.lastSynced:
@@ -580,33 +714,28 @@ def get_remote_folders():
         return Response(status=e.http_status, response=e.message)


-@api.route("/remote/
-def
+@api.route("/remote/performance", methods=["POST"])
+def get_remote_folders_performance():
     request_body = request.get_json()
     connection = RemoteConnection.model_validate(
         request_body.get("connection"), strict=False
     )

     try:
-
+        remote_performance_folders: List[RemoteReportFolder] = get_remote_performance_folders(
             RemoteConnection.model_validate(connection, strict=False)
         )

-        for rf in
-
+        for rf in remote_performance_folders:
+            performance_name = Path(rf.remotePath).name
             remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-            local_path =
-
-                .joinpath(connection.host)
-                .joinpath("profiler")
-                .joinpath(profile_name)
-            )
-            logger.info(f"Checking last synced for {profile_name}")
+            local_path = remote_data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"] / connection.host / performance_name
+            logger.info(f"Checking last synced for {performance_name}")
             rf.lastSynced = read_last_synced_file(str(local_path))
             if not rf.lastSynced:
-                logger.info(f"{
+                logger.info(f"{performance_name} not yet synced")

-        return [r.model_dump() for r in
+        return [r.model_dump() for r in remote_performance_folders]
     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)

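Both remote listing routes expect a POSTed `RemoteConnection`: `/remote/profiler` takes the connection object as the JSON body itself, while `/remote/performance` wraps it under a `connection` key (see `request_body.get("connection")` above). A sketch of the two calls; apart from `host`, `profilerPath`, and `useRemoteQuerying`, the connection fields shown are assumptions about the model, not taken from this diff.

```python
# Sketch only: base URL and most connection fields are assumptions.
import requests

BASE_URL = "http://localhost:8000/api"  # hypothetical deployment

connection = {
    "host": "remote-box.example.com",     # hypothetical values
    "port": 22,
    "username": "user",
    "profilerPath": "/home/user/generated/ttnn/reports",
    "useRemoteQuerying": False,
}

# /remote/profiler takes the connection directly as the body.
profiler_folders = requests.post(f"{BASE_URL}/remote/profiler", json=connection, timeout=30)
# /remote/performance expects the connection nested under "connection".
performance_folders = requests.post(
    f"{BASE_URL}/remote/performance", json={"connection": connection}, timeout=30
)
# Each response is a list of RemoteReportFolder dumps, including a lastSynced timestamp.
print(profiler_folders.json(), performance_folders.json())
```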
@@ -674,8 +803,8 @@ def test_remote_folder():
     # Test Directory Configuration
     if not has_failures():
         try:
-            check_remote_path_exists(connection, "
-            add_status(ConnectionTestStates.OK.value, "
+            check_remote_path_exists(connection, "profilerPath")
+            add_status(ConnectionTestStates.OK.value, "Memory folder path exists")
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED.value, e.message)

@@ -737,7 +866,7 @@ def sync_remote_folder():
     if profile:
         profile_folder = RemoteReportFolder.model_validate(profile, strict=False)
         try:
-
+            sync_remote_performance_folders(
                 connection,
                 remote_dir,
                 profile=profile_folder,

@@ -753,19 +882,19 @@ def sync_remote_folder():
         return Response(status=e.http_status, response=e.message)

     try:
-
+        remote_profiler_folder = RemoteReportFolder.model_validate(folder, strict=False)

-
+        sync_remote_profiler_folders(
             connection,
-
+            remote_profiler_folder.remotePath,
             remote_dir,
             exclude_patterns=[r"/tensors(/|$)"],
             sid=instance_id,
         )

-
+        remote_profiler_folder.lastSynced = int(time.time())

-        return
+        return remote_profiler_folder.model_dump()

     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)

@@ -808,15 +937,15 @@ def use_remote_folder():

     connection = RemoteConnection.model_validate(connection, strict=False)
     folder = RemoteReportFolder.model_validate(folder, strict=False)
-
-
+    performance_name = None
+    remote_performance_folder = None
     if profile:
-
-
-
-
+        remote_performance_folder = RemoteReportFolder.model_validate(profile, strict=False)
+        performance_name = remote_performance_folder.testName
+    data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
+    profiler_name = Path(folder.remotePath).name

-    connection_directory = Path(
+    connection_directory = Path(data_directory, connection.host, current_app.config["PROFILER_DIRECTORY_NAME"], profiler_name)

     if not connection.useRemoteQuerying and not connection_directory.exists():
         return Response(

@@ -824,18 +953,18 @@ def use_remote_folder():
             response=f"{connection_directory} does not exist.",
         )

-    remote_path = f"{Path(
+    remote_path = f"{Path(data_directory).name}/{connection.host}/{connection_directory.name}"

     instance_id = request.args.get("instanceId")
-    current_app.logger.info(f"Setting active
+    current_app.logger.info(f"Setting active reports for {instance_id} - {remote_path}")

     update_instance(
         instance_id=instance_id,
-
-
+        profiler_name=profiler_name,
+        performance_name=performance_name,
         remote_connection=connection,
-
-
+        remote_profiler_folder=folder,
+        remote_performance_folder=remote_performance_folder,
     )

     return Response(status=HTTPStatus.OK)
@@ -853,6 +982,35 @@ def get_instance(session: Instance):
     return session.model_dump()


+@api.route("/session", methods=["PUT"])
+def update_current_instance():
+    try:
+        update_data = request.get_json()
+
+        if not update_data:
+            return Response(status=HTTPStatus.BAD_REQUEST, response="No data provided.")
+
+        update_instance(
+            instance_id=update_data.get("instance_id"),
+            profiler_name=update_data["active_report"].get("profiler_name"),
+            performance_name=update_data["active_report"].get("performance_name"),
+            npe_name=update_data["active_report"].get("npe_name"),
+            # Doesn't handle remote at the moment
+            remote_connection=None,
+            remote_profiler_folder=None,
+            remote_performance_folder=None,
+        )
+
+        return Response(status=HTTPStatus.OK)
+    except Exception as e:
+        logger.error(f"Error updating session: {str(e)}")
+
+        return Response(
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+            response="An error occurred while updating the session.",
+        )
+
+
 @api.route("/npe", methods=["GET"])
 @with_session
 @timer
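The new `PUT /session` route reads `instance_id` and a nested `active_report` object from the JSON body and forwards the three report names to `update_instance`, leaving remote state untouched (it does not handle remote connections yet). A payload sketch under the usual base-URL assumption; the report names are illustrative only.

```python
# Sketch only: base URL is an assumption; report names are illustrative.
import requests

BASE_URL = "http://localhost:8000/api"  # hypothetical deployment

payload = {
    "instance_id": "example-instance-id",
    "active_report": {
        "profiler_name": "segformer_encoder",
        "performance_name": "llama_attn_32l",
        "npe_name": None,
    },
}

response = requests.put(f"{BASE_URL}/session", json=payload, timeout=10)
response.raise_for_status()  # 200 OK, 400 if the body is empty, 500 on server error
```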
@@ -862,9 +1020,12 @@ def get_npe_data(session: Instance):
         return Response(status=HTTPStatus.NOT_FOUND)

     npe_file = Path(f"{session.npe_path}/{session.active_report.npe_name}.json")
+
     if not npe_file.exists():
         logger.error(f"NPE file does not exist: {npe_file}")
         return Response(status=HTTPStatus.NOT_FOUND)
+
     with open(npe_file, "r") as file:
         npe_data = json.load(file)
+
         return jsonify(npe_data)
{ttnn_visualizer-0.29.0.dist-info → ttnn_visualizer-0.31.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ttnn_visualizer
-Version: 0.29.0
+Version: 0.31.0
 Summary: TT-NN Visualizer
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License

@@ -123,13 +123,13 @@ You may test the application using the following sample reports.
 Unzip the files into their own directories and select them with the local folder selector, or load the NPE data on the `/npe` route.

 **Segformer encoder**
-[report](https://github.com/user-attachments/files/17996493/segformer_encoder.zip)
+[memory report](https://github.com/user-attachments/files/17996493/segformer_encoder.zip)

 **Segformer decoder**
-[report](https://github.com/user-attachments/files/17996491/segformer_decoder_good.zip)
+[memory report](https://github.com/user-attachments/files/17996491/segformer_decoder_good.zip)

 **Llama mlp**
-[
+[memory + performance report](https://github.com/user-attachments/files/18770763/llama_attn_32l_10iter_30jan.zip)

 ### NPE report
