ttnn-visualizer 0.28.1__py3-none-any.whl → 0.30.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +4 -4
- ttnn_visualizer/csv_queries.py +20 -5
- ttnn_visualizer/decorators.py +0 -7
- ttnn_visualizer/file_uploads.py +5 -7
- ttnn_visualizer/models.py +32 -32
- ttnn_visualizer/queries.py +6 -6
- ttnn_visualizer/sessions.py +78 -69
- ttnn_visualizer/settings.py +4 -1
- ttnn_visualizer/sftp_operations.py +24 -25
- ttnn_visualizer/static/assets/{allPaths-CVqIgTho.js → allPaths-BFtaymsj.js} +1 -1
- ttnn_visualizer/static/assets/{allPathsLoader-C-VIuErI.js → allPathsLoader-krdfUMe8.js} +2 -2
- ttnn_visualizer/static/assets/{index-DWVfq2z5.css → index-Bq0nVwFb.css} +2 -2
- ttnn_visualizer/static/assets/{index-DBCJOx5j.js → index-BsP_KNDi.js} +255 -255
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-FZqEFRrU.js → splitPathsBySizeLoader-Exj9Tfv3.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_queries.py +4 -4
- ttnn_visualizer/utils.py +30 -27
- ttnn_visualizer/views.py +280 -106
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/METADATA +4 -4
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/RECORD +25 -25
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/LICENSE +0 -0
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.28.1.dist-info → ttnn_visualizer-0.30.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/views.py
CHANGED
@@ -9,6 +9,7 @@ import time
 from http import HTTPStatus
 from pathlib import Path
 from typing import List
+import shutil
 
 from flask import Blueprint
 from flask import request, current_app
@@ -19,7 +20,7 @@ from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import DataFormatError
 from ttnn_visualizer.exceptions import RemoteConnectionException
 from ttnn_visualizer.file_uploads import (
-
+    extract_profiler_name,
     extract_npe_name,
     save_uploaded_files,
     validate_files,
@@ -45,17 +46,18 @@ from ttnn_visualizer.sessions import (
     update_instance,
 )
 from ttnn_visualizer.sftp_operations import (
-
+    sync_remote_profiler_folders,
     read_remote_file,
     check_remote_path_for_reports,
-    get_remote_report_folders,
-    check_remote_path_exists,
     get_remote_profiler_folders,
-
+    check_remote_path_exists,
+    get_remote_performance_folders,
+    sync_remote_performance_folders,
     get_cluster_desc,
 )
 from ttnn_visualizer.ssh_client import get_client
 from ttnn_visualizer.utils import (
+    get_cluster_descriptor_path,
     read_last_synced_file,
     timer,
 )
@@ -167,12 +169,7 @@ def operation_detail(operation_id, session):
     )
 
 
-@api.route(
-    "operation-history",
-    methods=[
-        "GET",
-    ],
-)
+@api.route("operation-history", methods=["GET"])
 @with_session
 @timer
 def operation_history(session: Instance):
@@ -191,7 +188,7 @@ def operation_history(session: Instance):
         return json.loads(operation_history)
     else:
         operation_history_file = (
-            Path(str(session.
+            Path(str(session.profiler_path)).parent / operation_history_filename
         )
         if not operation_history_file.exists():
             return []
@@ -204,17 +201,17 @@ def operation_history(session: Instance):
 @timer
 def get_config(session: Instance):
     if session.remote_connection and session.remote_connection.useRemoteQuerying:
-        if not session.
+        if not session.remote_profiler_folder:
             return {}
         config = read_remote_file(
             remote_connection=session.remote_connection,
-            remote_path=Path(session.
+            remote_path=Path(session.remote_profiler_folder.remotePath, "config.json"),
         )
         if not config:
             return {}
         return config
     else:
-        config_file = Path(str(session.
+        config_file = Path(str(session.profiler_path)).parent.joinpath("config.json")
        if not config_file.exists():
             return {}
         with open(config_file, "r") as file:
@@ -374,20 +371,85 @@ def get_operation_buffers(operation_id, session: Instance):
     return serialize_operation_buffers(operation, buffers)
 
 
-@api.route("/profiler
+@api.route("/profiler", methods=["GET"])
+@with_session
+def get_profiler_data_list(session: Instance):
+    # Doesn't handle remote at the moment
+    # is_remote = True if session.remote_connection else False
+    # config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    config_key = 'LOCAL_DATA_DIRECTORY'
+    data_directory = Path(current_app.config[config_key])
+
+    # if is_remote:
+    #     connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+    #     path = data_directory / connection.host / current_app.config["PROFILER_DIRECTORY_NAME"]
+    # else:
+    path = data_directory / current_app.config["PROFILER_DIRECTORY_NAME"]
+
+    if not path.exists():
+        path.mkdir(parents=True, exist_ok=True)
+
+    directory_names = [directory.name for directory in path.iterdir() if directory.is_dir()]
+
+    valid_dirs = []
+
+    for dir_name in directory_names:
+        dir_path = Path(path) / dir_name
+        files = list(dir_path.glob("**/*"))
+
+        # Would like to use the existing validate_files function but there's a type difference I'm not sure how to handle
+        if not any(file.name == "db.sqlite" for file in files):
+            continue
+        if not any(file.name == "config.json" for file in files):
+            continue
+
+        valid_dirs.append(dir_name)
+
+    return jsonify(valid_dirs)
+
+
+@api.route("/profiler/<profiler_name>", methods=["DELETE"])
+@with_session
+def delete_profiler_report(profiler_name, session: Instance):
+    is_remote = bool(session.remote_connection)
+    config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    data_directory = Path(current_app.config[config_key])
+
+    if not profiler_name:
+        return Response(status=HTTPStatus.BAD_REQUEST, response="Report name is required.")
+
+    if is_remote:
+        connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+        path = data_directory / connection.host / current_app.config["PROFILER_DIRECTORY_NAME"]
+    else:
+        path = data_directory / current_app.config["PROFILER_DIRECTORY_NAME"] / profiler_name
+
+    if session.active_report.profiler_name == profiler_name:
+        instance_id = request.args.get("instanceId")
+        update_instance(instance_id=instance_id,profiler_name="")
+
+    if path.exists() and path.is_dir():
+        shutil.rmtree(path)
+    else:
+        return Response(status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}")
+
+    return Response(status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}")
+
+
+@api.route("/performance/device-log", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_data(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     with DeviceLogProfilerQueries(session) as csv:
         result = csv.get_all_entries(as_dict=True, limit=100)
         return jsonify(result)
 
 
-@api.route("/
+@api.route("/performance/perf-results", methods=["GET"])
 @with_session
 def get_profiler_performance_data(session: Instance):
-    if not session.
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     with OpsPerformanceQueries(session) as csv:
         # result = csv.query_by_op_code(op_code="(torch) contiguous", as_dict=True)
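For reference, a minimal client-side sketch of exercising the profiler report routes shown above. The base URL, port, blueprint prefix, and the use of the requests library are assumptions for illustration, not taken from the package:

import requests

BASE = "http://localhost:8000/api"  # hypothetical host, port, and prefix

# List local profiler report directories that contain db.sqlite and config.json
reports = requests.get(f"{BASE}/profiler", params={"instanceId": "example-instance"}).json()

# Delete one report; instanceId mirrors the query parameter read by the view
if reports:
    requests.delete(f"{BASE}/profiler/{reports[0]}", params={"instanceId": "example-instance"})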
@@ -395,10 +457,77 @@ def get_profiler_performance_data(session: Instance):
         return jsonify(result)
 
 
-@api.route("/
+@api.route("/performance/<performance_name>", methods=["DELETE"])
+@with_session
+def delete_performance_report(performance_name, session: Instance):
+    is_remote = bool(session.remote_connection)
+    config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    data_directory = Path(current_app.config[config_key])
+
+    if not performance_name:
+        return Response(status=HTTPStatus.BAD_REQUEST, response="Report name is required.")
+
+    if is_remote:
+        connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+        path = data_directory / connection.host / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+    else:
+        path = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"] / performance_name
+
+    if session.active_report.performance_name == performance_name:
+        instance_id = request.args.get("instanceId")
+        update_instance(instance_id=instance_id,performance_name="")
+
+    if path.exists() and path.is_dir():
+        shutil.rmtree(path)
+    else:
+        return Response(status=HTTPStatus.NOT_FOUND, response=f"Report does not exist: {path}")
+
+    return Response(status=HTTPStatus.NO_CONTENT, response=f"Report deleted successfully: {path}")
+
+
+@api.route("/performance", methods=["GET"])
+@with_session
+def get_performance_data_list(session: Instance):
+    # Doesn't handle remote at the moment
+    # is_remote = True if session.remote_connection else False
+    # config_key = "REMOTE_DATA_DIRECTORY" if is_remote else "LOCAL_DATA_DIRECTORY"
+    config_key = 'LOCAL_DATA_DIRECTORY'
+    data_directory = Path(current_app.config[config_key])
+
+    # if is_remote:
+    #     connection = RemoteConnection.model_validate(session.remote_connection, strict=False)
+    #     path = data_directory / connection.host / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+    # else:
+    path = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
+
+    if not path.exists():
+        path.mkdir(parents=True, exist_ok=True)
+
+    directory_names = [directory.name for directory in path.iterdir() if directory.is_dir()]
+
+    valid_dirs = []
+
+    for dir_name in directory_names:
+        dir_path = Path(path) / dir_name
+        files = list(dir_path.glob("**/*"))
+
+        # Would like to use the existing validate_files function but there's a type difference I'm not sure how to handle
+        if not any(file.name == "profile_log_device.csv" for file in files):
+            continue
+        if not any(file.name == "tracy_profile_log_host.tracy" for file in files):
+            continue
+        if not any(file.name.startswith("ops_perf_results") for file in files):
+            continue
+
+        valid_dirs.append(dir_name)
+
+    return jsonify(valid_dirs)
+
+
+@api.route("/performance/perf-results/raw", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_results_data_raw(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     content = OpsPerformanceQueries.get_raw_csv(session)
     return Response(
@@ -408,12 +537,19 @@ def get_profiler_perf_results_data_raw(session: Instance):
     )
 
 
-@api.route("/
+@api.route("/performance/perf-results/report", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_results_report(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
 
+    name = request.args.get("name", None)
+    performance_path = Path(session.performance_path)
+    if name:
+        performance_path = performance_path.parent / name
+        session.performance_path = str(performance_path)
+        logger.info(f"************ Profiler path set to {session.performance_path}")
+
     try:
         report = OpsPerformanceReportQueries.generate_report(session)
     except DataFormatError:
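A hedged sketch of how the optional "name" query parameter on the perf-results report route might be used to point report generation at a sibling performance folder (host, port, prefix, and folder name are placeholders):

import requests

BASE = "http://localhost:8000/api"  # hypothetical host, port, and prefix

report = requests.get(
    f"{BASE}/performance/perf-results/report",
    params={"instanceId": "example-instance", "name": "my_perf_run"},  # placeholder values
).json()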
@@ -422,10 +558,10 @@ def get_profiler_perf_results_report(session: Instance):
     return jsonify(report), 200
 
 
-@api.route("/
+@api.route("/performance/device-log/raw", methods=["GET"])
 @with_session
-def
-    if not session.
+def get_performance_data_raw(session: Instance):
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     content = DeviceLogProfilerQueries.get_raw_csv(session)
     return Response(
@@ -435,10 +571,10 @@ def get_profiler_data_raw(session: Instance):
     )
 
 
-@api.route("/
+@api.route("/performance/device-log/zone/<zone>", methods=["GET"])
 @with_session
 def get_zone_statistics(zone, session: Instance):
-    if not session.
+    if not session.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
     with DeviceLogProfilerQueries(session) as csv:
         result = csv.query_zone_statistics(zone_name=zone, as_dict=True)
@@ -453,10 +589,10 @@ def get_devices(session: Instance):
     return serialize_devices(devices)
 
 
-@api.route("/local/upload/
-def
+@api.route("/local/upload/profiler", methods=["POST"])
+def create_profiler_files():
     files = request.files.getlist("files")
-
+    profiler_directory = current_app.config["LOCAL_DATA_DIRECTORY"] / current_app.config["PROFILER_DIRECTORY_NAME"]
 
     if not validate_files(files, {"db.sqlite", "config.json"}):
         return StatusMessage(
@@ -464,23 +600,26 @@ def create_report_files():
             message="Invalid project directory.",
         ).model_dump()
 
-
-
+    if not profiler_directory.exists():
+        profiler_directory.mkdir(parents=True, exist_ok=True)
+
+    profiler_name = extract_profiler_name(files)
+    logger.info(f"Writing report files to {profiler_directory}/{profiler_name}")
 
-    save_uploaded_files(files,
+    save_uploaded_files(files, profiler_directory, profiler_name)
 
     instance_id = request.args.get("instanceId")
-    update_instance(instance_id=instance_id,
+    update_instance(instance_id=instance_id, profiler_name=profiler_name, clear_remote=True)
 
     return StatusMessage(
         status=ConnectionTestStates.OK, message="Success."
     ).model_dump()
 
 
-@api.route("/local/upload/
+@api.route("/local/upload/performance", methods=["POST"])
 def create_profile_files():
     files = request.files.getlist("files")
-
+    data_directory = Path(current_app.config["LOCAL_DATA_DIRECTORY"])
     instance_id = request.args.get("instanceId")
 
     if not validate_files(
@@ -493,10 +632,10 @@ def create_profile_files():
             message="Invalid project directory.",
         ).model_dump()
 
-    logger.info(f"Writing profile files to {
+    logger.info(f"Writing profile files to {data_directory} / {current_app.config['PERFORMANCE_DIRECTORY_NAME']}")
 
-    # Construct the base directory with
-    target_directory =
+    # Construct the base directory with profiler_name first
+    target_directory = data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"]
     target_directory.mkdir(parents=True, exist_ok=True)
 
     if files:
@@ -515,11 +654,11 @@ def create_profile_files():
 
         save_uploaded_files(
             updated_files,
-            str(
+            str(data_directory),
         )
 
         update_instance(
-            instance_id=instance_id,
+            instance_id=instance_id, performance_name=profiler_folder_name, clear_remote=True
         )
 
     return StatusMessage(
@@ -530,7 +669,7 @@ def create_profile_files():
 @api.route("/local/upload/npe", methods=["POST"])
 def create_npe_files():
     files = request.files.getlist("files")
-
+    data_directory = current_app.config["LOCAL_DATA_DIRECTORY"]
 
     for file in files:
         if not file.filename.endswith(".json"):
@@ -540,7 +679,7 @@ def create_npe_files():
             ).model_dump()
 
     npe_name = extract_npe_name(files)
-    target_directory =
+    target_directory = data_directory / current_app.config["NPE_DIRECTORY_NAME"]
     target_directory.mkdir(parents=True, exist_ok=True)
 
     save_uploaded_files(files, target_directory, npe_name)
@@ -553,22 +692,18 @@ def create_npe_files():
     ).model_dump()
 
 
-@api.route("/remote/
-def
+@api.route("/remote/profiler", methods=["POST"])
+def get_remote_folders_profiler():
     connection = RemoteConnection.model_validate(request.json, strict=False)
     try:
-        remote_folders: List[RemoteReportFolder] =
+        remote_folders: List[RemoteReportFolder] = get_remote_profiler_folders(
             RemoteConnection.model_validate(connection, strict=False)
         )
 
         for rf in remote_folders:
             directory_name = Path(rf.remotePath).name
             remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-            local_path =
-                Path(remote_data_directory)
-                .joinpath(connection.host)
-                .joinpath(directory_name)
-            )
+            local_path = remote_data_directory / current_app.config["PROFILER_DIRECTORY_NAME"] / connection.host / directory_name
             logger.info(f"Checking last synced for {directory_name}")
             rf.lastSynced = read_last_synced_file(str(local_path))
             if not rf.lastSynced:
@@ -579,33 +714,28 @@ def get_remote_folders():
             return Response(status=e.http_status, response=e.message)
 
 
-@api.route("/remote/
-def
+@api.route("/remote/performance", methods=["POST"])
+def get_remote_folders_performance():
     request_body = request.get_json()
     connection = RemoteConnection.model_validate(
         request_body.get("connection"), strict=False
     )
 
     try:
-
+        remote_performance_folders: List[RemoteReportFolder] = get_remote_performance_folders(
             RemoteConnection.model_validate(connection, strict=False)
         )
 
-        for rf in
-
+        for rf in remote_performance_folders:
+            performance_name = Path(rf.remotePath).name
             remote_data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
-            local_path =
-
-                .joinpath(connection.host)
-                .joinpath("profiler")
-                .joinpath(profile_name)
-            )
-            logger.info(f"Checking last synced for {profile_name}")
+            local_path = remote_data_directory / current_app.config["PERFORMANCE_DIRECTORY_NAME"] / connection.host / performance_name
+            logger.info(f"Checking last synced for {performance_name}")
             rf.lastSynced = read_last_synced_file(str(local_path))
             if not rf.lastSynced:
-                logger.info(f"{
+                logger.info(f"{performance_name} not yet synced")
 
-        return [r.model_dump() for r in
+        return [r.model_dump() for r in remote_performance_folders]
     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)
 
@@ -614,27 +744,39 @@ from flask import Response, jsonify
 import yaml
 
 
-@api.route("/
+@api.route("/cluster-descriptor", methods=["GET"])
 @with_session
-def
-    if
-
+def get_cluster_descriptor(session: Instance):
+    if session.remote_connection:
+        try:
+            cluster_desc_file = get_cluster_desc(session.remote_connection)
+            if not cluster_desc_file:
+                return jsonify({"error": "cluster_descriptor.yaml not found"}), 404
+            yaml_data = yaml.safe_load(cluster_desc_file.decode("utf-8"))
+            return jsonify(yaml_data), 200
 
-
-
-    if not cluster_desc_file:
-        return jsonify({"error": "cluster_descriptor.yaml not found"}), 404
-    yaml_data = yaml.safe_load(cluster_desc_file.decode("utf-8"))
-    return jsonify(yaml_data), 200
+        except yaml.YAMLError as e:
+            return jsonify({"error": f"Failed to parse YAML: {str(e)}"}), 400
 
-
-
+        except RemoteConnectionException as e:
+            return jsonify({"error": e.message}), e.http_status
 
-
-
+        except Exception as e:
+            return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500
+    else:
+        local_path = get_cluster_descriptor_path(session)
 
-
-
+        if not local_path:
+            return jsonify({"error": "cluster_descriptor.yaml not found"}), 404
+
+        try:
+            with open(local_path) as cluster_desc_file:
+                yaml_data = yaml.safe_load(cluster_desc_file)
+                return jsonify(yaml_data), 200
+        except yaml.YAMLError as e:
+            return jsonify({"error": f"Failed to parse YAML: {str(e)}"}), 400
+
+    return jsonify({"error": "Cluster descriptor not found"}), 404
 
 
 @api.route("/remote/test", methods=["POST"])
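A minimal sketch of fetching the parsed cluster descriptor from the endpoint above; error responses carry an "error" key as in the handler shown. The host, port, prefix, and requests usage are assumptions for illustration:

import requests

BASE = "http://localhost:8000/api"  # hypothetical host, port, and prefix

resp = requests.get(f"{BASE}/cluster-descriptor", params={"instanceId": "example-instance"})
if resp.ok:
    cluster = resp.json()  # cluster_descriptor.yaml parsed to JSON
else:
    print(resp.status_code, resp.json().get("error"))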
@@ -661,8 +803,8 @@ def test_remote_folder():
     # Test Directory Configuration
     if not has_failures():
         try:
-            check_remote_path_exists(connection, "
-            add_status(ConnectionTestStates.OK.value, "
+            check_remote_path_exists(connection, "profilerPath")
+            add_status(ConnectionTestStates.OK.value, "Profiler folder path exists")
         except RemoteConnectionException as e:
             add_status(ConnectionTestStates.FAILED.value, e.message)
 
@@ -724,7 +866,7 @@ def sync_remote_folder():
     if profile:
         profile_folder = RemoteReportFolder.model_validate(profile, strict=False)
         try:
-
+            sync_remote_performance_folders(
                 connection,
                 remote_dir,
                 profile=profile_folder,
@@ -740,19 +882,19 @@ def sync_remote_folder():
             return Response(status=e.http_status, response=e.message)
 
     try:
-
+        remote_profiler_folder = RemoteReportFolder.model_validate(folder, strict=False)
 
-
+        sync_remote_profiler_folders(
             connection,
-
+            remote_profiler_folder.remotePath,
             remote_dir,
             exclude_patterns=[r"/tensors(/|$)"],
             sid=instance_id,
         )
 
-
+        remote_profiler_folder.lastSynced = int(time.time())
 
-        return
+        return remote_profiler_folder.model_dump()
 
     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)
@@ -795,15 +937,15 @@ def use_remote_folder():
 
     connection = RemoteConnection.model_validate(connection, strict=False)
     folder = RemoteReportFolder.model_validate(folder, strict=False)
-
-
+    performance_name = None
+    remote_performance_folder = None
     if profile:
-
-
-
-
+        remote_performance_folder = RemoteReportFolder.model_validate(profile, strict=False)
+        performance_name = remote_performance_folder.testName
+    data_directory = current_app.config["REMOTE_DATA_DIRECTORY"]
+    profiler_name = Path(folder.remotePath).name
 
-    connection_directory = Path(
+    connection_directory = Path(data_directory, connection.host, current_app.config["PROFILER_DIRECTORY_NAME"], profiler_name)
 
     if not connection.useRemoteQuerying and not connection_directory.exists():
         return Response(
@@ -811,18 +953,18 @@ def use_remote_folder():
             response=f"{connection_directory} does not exist.",
         )
 
-    remote_path = f"{Path(
+    remote_path = f"{Path(data_directory).name}/{connection.host}/{connection_directory.name}"
 
     instance_id = request.args.get("instanceId")
-    current_app.logger.info(f"Setting active
+    current_app.logger.info(f"Setting active reports for {instance_id} - {remote_path}")
 
     update_instance(
         instance_id=instance_id,
-
-
+        profiler_name=profiler_name,
+        performance_name=performance_name,
         remote_connection=connection,
-
-
+        remote_profiler_folder=folder,
+        remote_performance_folder=remote_performance_folder,
     )
 
     return Response(status=HTTPStatus.OK)
@@ -840,6 +982,35 @@ def get_instance(session: Instance):
     return session.model_dump()
 
 
+@api.route("/session", methods=["PUT"])
+def update_current_instance():
+    try:
+        update_data = request.get_json()
+
+        if not update_data:
+            return Response(status=HTTPStatus.BAD_REQUEST, response="No data provided.")
+
+        update_instance(
+            instance_id=update_data.get("instance_id"),
+            profiler_name=update_data["active_report"].get("profiler_name"),
+            performance_name=update_data["active_report"].get("performance_name"),
+            npe_name=update_data["active_report"].get("npe_name"),
+            # Doesn't handle remote at the moment
+            remote_connection=None,
+            remote_profiler_folder=None,
+            remote_performance_folder=None,
+        )
+
+        return Response(status=HTTPStatus.OK)
+    except Exception as e:
+        logger.error(f"Error updating session: {str(e)}")
+
+        return Response(
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+            response="An error occurred while updating the session.",
+        )
+
+
 @api.route("/npe", methods=["GET"])
 @with_session
 @timer
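The PUT /session handler above reads "instance_id" and an "active_report" object from the JSON body. A hedged sketch of such a request (all values are placeholders; host, port, prefix, and requests usage are assumptions):

import requests

BASE = "http://localhost:8000/api"  # hypothetical host, port, and prefix

payload = {
    "instance_id": "example-instance",
    "active_report": {
        "profiler_name": "my_profiler_report",
        "performance_name": "my_perf_run",
        "npe_name": None,
    },
}
requests.put(f"{BASE}/session", json=payload)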
@@ -849,9 +1020,12 @@ def get_npe_data(session: Instance):
         return Response(status=HTTPStatus.NOT_FOUND)
 
     npe_file = Path(f"{session.npe_path}/{session.active_report.npe_name}.json")
+
     if not npe_file.exists():
         logger.error(f"NPE file does not exist: {npe_file}")
         return Response(status=HTTPStatus.NOT_FOUND)
+
     with open(npe_file, "r") as file:
         npe_data = json.load(file)
+
     return jsonify(npe_data)