ttnn-visualizer 0.49.0-py3-none-any.whl → 0.64.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +151 -49
- ttnn_visualizer/csv_queries.py +154 -45
- ttnn_visualizer/decorators.py +0 -9
- ttnn_visualizer/exceptions.py +0 -7
- ttnn_visualizer/models.py +20 -1
- ttnn_visualizer/queries.py +8 -0
- ttnn_visualizer/serializers.py +53 -9
- ttnn_visualizer/settings.py +24 -10
- ttnn_visualizer/ssh_client.py +1 -4
- ttnn_visualizer/static/assets/allPaths-DWjqav_8.js +1 -0
- ttnn_visualizer/static/assets/allPathsLoader-B0eRT9aL.js +2 -0
- ttnn_visualizer/static/assets/index-BE2R-cuu.css +1 -0
- ttnn_visualizer/static/assets/index-BZITDwoa.js +1 -0
- ttnn_visualizer/static/assets/{index-DVrPLQJ7.js → index-DDrUX09k.js} +274 -479
- ttnn_visualizer/static/assets/index-voJy5fZe.js +1 -0
- ttnn_visualizer/static/assets/splitPathsBySizeLoader-_GpmIkFm.js +1 -0
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_serializers.py +2 -0
- ttnn_visualizer/tests/test_utils.py +362 -0
- ttnn_visualizer/utils.py +142 -0
- ttnn_visualizer/views.py +181 -87
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/METADATA +58 -30
- ttnn_visualizer-0.64.0.dist-info/RECORD +44 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/licenses/LICENSE +6 -0
- ttnn_visualizer/remote_sqlite_setup.py +0 -100
- ttnn_visualizer/static/assets/allPaths-G_CNx_x1.js +0 -1
- ttnn_visualizer/static/assets/allPathsLoader-s_Yfmxfp.js +0 -2
- ttnn_visualizer/static/assets/index-CnPrfHYh.js +0 -1
- ttnn_visualizer/static/assets/index-Cnc1EkDo.js +0 -1
- ttnn_visualizer/static/assets/index-UuXdrHif.css +0 -7
- ttnn_visualizer/static/assets/splitPathsBySizeLoader-ivxxaHxa.js +0 -1
- ttnn_visualizer-0.49.0.dist-info/RECORD +0 -44
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/licenses/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.49.0.dist-info → ttnn_visualizer-0.64.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/views.py
CHANGED
@@ -25,12 +25,9 @@ from ttnn_visualizer.csv_queries import (
 from ttnn_visualizer.decorators import local_only, with_instance
 from ttnn_visualizer.enums import ConnectionTestStates
 from ttnn_visualizer.exceptions import (
-    AuthenticationException,
     AuthenticationFailedException,
     DataFormatError,
-    NoValidConnectionsError,
     RemoteConnectionException,
-    SSHException,
 )
 from ttnn_visualizer.file_uploads import (
     extract_folder_name_from_files,
@@ -46,7 +43,6 @@ from ttnn_visualizer.models import (
     StatusMessage,
 )
 from ttnn_visualizer.queries import DatabaseQueries
-from ttnn_visualizer.remote_sqlite_setup import check_sqlite_path, get_sqlite_path
 from ttnn_visualizer.serializers import (
     serialize_buffer,
     serialize_buffer_pages,
@@ -72,6 +68,7 @@ from ttnn_visualizer.utils import (
     create_path_resolver,
     get_cluster_descriptor_path,
     read_last_synced_file,
+    str_to_bool,
     timer,
 )

@@ -103,7 +100,11 @@ def operation_list(instance: Instance):
         devices = list(db.query_devices())
         producers_consumers = list(db.query_producers_consumers())

-
+        error_records = None
+        if db._check_table_exists("errors"):
+            error_records = list(db.query_error_records())
+
+        serialized_operations = serialize_operations(
             inputs,
             operation_arguments,
             operations,
@@ -113,6 +114,11 @@ def operation_list(instance: Instance):
             devices,
             producers_consumers,
             device_operations,
+            error_records,
+        )
+        return Response(
+            orjson.dumps(serialized_operations),
+            mimetype="application/json",
         )


@@ -173,7 +179,15 @@ def operation_detail(operation_id, instance: Instance):

         devices = list(db.query_devices())

-
+        error_record = None
+        if db._check_table_exists("errors"):
+            error_records = list(
+                db.query_error_records(filters={"operation_id": operation_id})
+            )
+            if error_records:
+                error_record = error_records[0]
+
+        serialized_operation = serialize_operation(
             buffers,
             inputs,
             operation,
@@ -186,6 +200,12 @@ def operation_detail(operation_id, instance: Instance):
             devices,
             producers_consumers,
             device_operations,
+            error_record,
+        )
+
+        return Response(
+            orjson.dumps(serialized_operation),
+            mimetype="application/json",
         )


@@ -198,9 +218,36 @@ def operation_history(instance: Instance):
         Path(str(instance.profiler_path)).parent / operation_history_filename
     )
     if not operation_history_file.exists():
-        return []
+        return jsonify([])
     with open(operation_history_file, "r") as file:
-        return
+        return Response(
+            orjson.dumps(json.load(file)),
+            mimetype="application/json",
+        )
+
+
+@api.route("/errors", methods=["GET"])
+@with_instance
+@timer
+def errors_list(instance: Instance):
+    with DatabaseQueries(instance) as db:
+        if not db._check_table_exists("errors"):
+            return (
+                jsonify(
+                    {
+                        "error": "Error records table does not exist in this report database."
+                    }
+                ),
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
+        error_records = list(db.query_error_records())
+        serialized_errors = [dataclasses.asdict(error) for error in error_records]
+
+        return Response(
+            orjson.dumps(serialized_errors),
+            mimetype="application/json",
+        )


 @api.route("/config")
@@ -211,7 +258,10 @@ def get_config(instance: Instance):
     if not config_file.exists():
         return {}
     with open(config_file, "r") as file:
-        return
+        return Response(
+            orjson.dumps(json.load(file)),
+            mimetype="application/json",
+        )


 @api.route("/tensors", methods=["GET"])
@@ -224,9 +274,13 @@ def tensors_list(instance: Instance):
         local_comparisons = list(db.query_tensor_comparisons())
         global_comparisons = list(db.query_tensor_comparisons(local=False))
         producers_consumers = list(db.query_producers_consumers())
-
+        serialized_tensors = serialize_tensors(
             tensors, producers_consumers, local_comparisons, global_comparisons
         )
+        return Response(
+            orjson.dumps(serialized_tensors),
+            mimetype="application/json",
+        )


 @api.route("/buffer", methods=["GET"])
@@ -248,7 +302,10 @@ def buffer_detail(instance: Instance):
         buffer = db.query_next_buffer(operation_id, address)
         if not buffer:
             return Response(status=HTTPStatus.NOT_FOUND)
-        return
+        return Response(
+            orjson.dumps(dataclasses.asdict(buffer)),
+            mimetype="application/json",
+        )


 @api.route("/buffer-pages", methods=["GET"])
@@ -283,7 +340,10 @@ def buffer_pages(instance: Instance):
                 )
             )
         )
-        return
+        return Response(
+            orjson.dumps(serialize_buffer_pages(buffers)),
+            mimetype="application/json",
+        )


 @api.route("/tensors/<tensor_id>", methods=["GET"])
@@ -295,7 +355,10 @@ def tensor_detail(tensor_id, instance: Instance):
         if not tensors:
             return Response(status=HTTPStatus.NOT_FOUND)

-        return
+        return Response(
+            orjson.dumps(dataclasses.asdict(tensors[0])),
+            mimetype="application/json",
+        )


 @api.route("/buffers", methods=["GET"])
@@ -315,7 +378,7 @@ def get_all_buffers(instance: Instance):
             )
         )
         serialized = [serialize_buffer(b) for b in buffers]
-        return
+        return Response(orjson.dumps(serialized), mimetype="application/json")


 @api.route("/operation-buffers", methods=["GET"])
@@ -323,6 +386,7 @@ def get_all_buffers(instance: Instance):
 def get_operations_buffers(instance: Instance):
     buffer_type = request.args.get("buffer_type", "")
     device_id = request.args.get("device_id", None)
+
     if buffer_type and str.isdigit(buffer_type):
         buffer_type = int(buffer_type)
     else:
@@ -335,7 +399,10 @@ def get_operations_buffers(instance: Instance):
             )
         )
         operations = list(db.query_operations())
-        return
+        return Response(
+            orjson.dumps(serialize_operations_buffers(operations, buffers)),
+            mimetype="application/json",
+        )


 @api.route("/operation-buffers/<operation_id>", methods=["GET"])
@@ -364,7 +431,11 @@ def get_operation_buffers(operation_id, instance: Instance):
         )
         if not operation:
             return Response(status=HTTPStatus.NOT_FOUND)
-
+
+        return Response(
+            orjson.dumps(serialize_operation_buffers(operation, buffers)),
+            mimetype="application/json",
+        )


 @api.route("/profiler", methods=["GET"])
@@ -374,12 +445,12 @@ def get_profiler_data_list(instance: Instance):
     resolver = create_path_resolver(current_app)

     # Note: "profiler" in app terminology maps to tt-metal's ttnn/reports
-    path = resolver.get_base_report_path("profiler"
+    path = resolver.get_base_report_path("profiler")

     if not path.exists():
         if resolver.is_direct_report_mode:
             logger.warning(f"TT-Metal profiler reports not found: {path}")
-            return
+            return []
         else:
             path.mkdir(parents=True, exist_ok=True)

@@ -439,7 +510,7 @@ def get_profiler_data_list(instance: Instance):
             continue
         valid_dirs.append({"path": dir_path.name, "reportName": report_name})

-    return
+    return Response(orjson.dumps(valid_dirs), mimetype="application/json")


 @api.route("/profiler/<profiler_name>", methods=["DELETE"])
@@ -494,16 +565,12 @@ def get_performance_data_list(instance: Instance):
     resolver = create_path_resolver(current_app)

     # Note: "performance" in app terminology maps to tt-metal's profiler/reports
-    path = resolver.get_base_report_path("performance"
-
-    is_remote = True if instance.remote_connection else False
+    path = resolver.get_base_report_path("performance")

     if not path.exists():
         if resolver.is_direct_report_mode:
             logger.warning(f"TT-Metal performance reports not found: {path}")
             return jsonify([])
-        elif not is_remote:
-            path.mkdir(parents=True, exist_ok=True)

     if current_app.config["SERVER_MODE"]:
         session_instances = session.get("instances", [])
@@ -564,7 +631,7 @@ def get_performance_data_list(instance: Instance):
             }
         )

-    return
+    return Response(orjson.dumps(valid_dirs), mimetype="application/json")


 @api.route("/performance/device-log", methods=["GET"])
@@ -572,9 +639,10 @@ def get_performance_data_list(instance: Instance):
 def get_performance_data(instance: Instance):
     if not instance.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
+
     with DeviceLogProfilerQueries(instance) as csv:
         result = csv.get_all_entries(as_dict=True, limit=100)
-        return
+        return Response(orjson.dumps(result), mimetype="application/json")


 @api.route("/performance/perf-results", methods=["GET"])
@@ -585,7 +653,7 @@ def get_profiler_performance_data(instance: Instance):
     with OpsPerformanceQueries(instance) as csv:
         # result = csv.query_by_op_code(op_code="(torch) contiguous", as_dict=True)
         result = csv.get_all_entries(as_dict=True, limit=100)
-        return
+        return Response(orjson.dumps(result), mimetype="application/json")


 @api.route("/performance/<performance_name>", methods=["DELETE"])
@@ -659,6 +727,10 @@ def get_performance_results_report(instance: Instance):
     )

     name = request.args.get("name", None)
+    start_signpost = request.args.get("start_signpost", None)
+    end_signpost = request.args.get("end_signpost", None)
+    stack_by_in0 = str_to_bool(request.args.get("stack_by_in0", "true"))
+    hide_host_ops = str_to_bool(request.args.get("hide_host_ops", "true"))

     if name and not current_app.config["SERVER_MODE"]:
         performance_path = Path(instance.performance_path).parent / name
@@ -666,11 +738,17 @@ def get_performance_results_report(instance: Instance):
         logger.info(f"************ Performance path set to {instance.performance_path}")

     try:
-        report = OpsPerformanceReportQueries.generate_report(
+        report = OpsPerformanceReportQueries.generate_report(
+            instance,
+            stack_by_in0=stack_by_in0,
+            start_signpost=start_signpost,
+            end_signpost=end_signpost,
+            hide_host_ops=hide_host_ops,
+        )
     except DataFormatError:
         return Response(status=HTTPStatus.UNPROCESSABLE_ENTITY)

-    return
+    return Response(orjson.dumps(report), mimetype="application/json")


 @api.route("/performance/device-log/raw", methods=["GET"])
@@ -678,7 +756,16 @@ def get_performance_results_report(instance: Instance):
 def get_performance_data_raw(instance: Instance):
     if not instance.performance_path:
         return Response(status=HTTPStatus.NOT_FOUND)
+
+    name = request.args.get("name", None)
+
+    if name and not current_app.config["SERVER_MODE"]:
+        performance_path = Path(instance.performance_path).parent / name
+        instance.performance_path = str(performance_path)
+        logger.info(f"************ Performance path set to {instance.performance_path}")
+
     content = DeviceLogProfilerQueries.get_raw_csv(instance)
+
     return Response(
         content,
         mimetype="text/csv",
@@ -696,7 +783,7 @@ def get_npe_manifest(instance: Instance):
     except FileNotFoundError:
         return jsonify([])

-    return
+    return Response(orjson.dumps(content), mimetype="application/json")


 @api.route("/performance/npe/timeline", methods=["GET"])
@@ -708,16 +795,16 @@ def get_npe_timeline(instance: Instance):
     filename = request.args.get("filename", default=None)

     if not filename:
-        return
+        return Response(orjson.dumps({}), mimetype="application/json")

     filename = Path(filename).name

     try:
         content = NPEQueries.get_npe_timeline(instance, filename=filename)
     except FileNotFoundError:
-        return
+        return Response(orjson.dumps({}), mimetype="application/json")

-    return
+    return Response(orjson.dumps(content), mimetype="application/json")


 @api.route("/performance/device-log/zone/<zone>", methods=["GET"])
@@ -727,7 +814,7 @@ def get_zone_statistics(zone, instance: Instance):
         return Response(status=HTTPStatus.NOT_FOUND)
     with DeviceLogProfilerQueries(instance) as csv:
         result = csv.query_zone_statistics(zone_name=zone, as_dict=True)
-        return
+        return Response(orjson.dumps(result), mimetype="application/json")


 @api.route("/devices", methods=["GET"])
@@ -735,7 +822,10 @@ def get_zone_statistics(zone, instance: Instance):
 def get_devices(instance: Instance):
     with DatabaseQueries(instance) as db:
         devices = list(db.query_devices())
-        return
+        return Response(
+            orjson.dumps(serialize_devices(devices)),
+            mimetype="application/json",
+        )


 @api.route("/local/upload/profiler", methods=["POST"])
@@ -866,7 +956,11 @@ def create_npe_files():
     data_directory = current_app.config["LOCAL_DATA_DIRECTORY"]

     for file in files:
-        if
+        if (
+            not file.filename.endswith(".json")
+            and not file.filename.endswith(".zst")
+            and not file.filename.endswith(".npeviz")
+        ):
             return StatusMessage(
                 status=ConnectionTestStates.FAILED,
                 message="NPE requires a valid .json or .zst file",
@@ -915,7 +1009,10 @@ def get_remote_folders_profiler():
             if not rf.lastSynced:
                 logger.info(f"{directory_name} not yet synced")

-        return
+        return Response(
+            orjson.dumps([r.model_dump() for r in remote_folders]),
+            mimetype="application/json",
+        )
     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)

@@ -948,7 +1045,10 @@ def get_remote_folders_performance():
             if not rf.lastSynced:
                 logger.info(f"{performance_name} not yet synced")

-        return
+        return Response(
+            orjson.dumps([r.model_dump() for r in remote_performance_folders]),
+            mimetype="application/json",
+        )
     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)

@@ -981,7 +1081,7 @@ def get_cluster_descriptor(instance: Instance):
     try:
         with open(local_path) as cluster_desc_file:
             yaml_data = yaml.safe_load(cluster_desc_file)
-            return jsonify(yaml_data)
+            return jsonify(yaml_data)  # yaml_data is not compatible with orjson
     except yaml.YAMLError as e:
         return jsonify({"error": f"Failed to parse YAML: {str(e)}"}), 400

@@ -1011,7 +1111,7 @@ def test_remote_folder():
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
         )
-        return [status.model_dump() for status in statuses], e.http_status
+        return jsonify([status.model_dump() for status in statuses]), e.http_status
     except RemoteConnectionException as e:
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
@@ -1026,7 +1126,7 @@ def test_remote_folder():
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
         )
-        return [status.model_dump() for status in statuses], e.http_status
+        return jsonify([status.model_dump() for status in statuses]), e.http_status
     except RemoteConnectionException as e:
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
@@ -1041,7 +1141,7 @@ def test_remote_folder():
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
         )
-        return [status.model_dump() for status in statuses], e.http_status
+        return jsonify([status.model_dump() for status in statuses]), e.http_status
     except RemoteConnectionException as e:
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
@@ -1055,21 +1155,32 @@ def test_remote_folder():
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
         )
-        return [status.model_dump() for status in statuses], e.http_status
+        return jsonify([status.model_dump() for status in statuses]), e.http_status
     except RemoteConnectionException as e:
         add_status(
             ConnectionTestStates.FAILED.value, e.message, getattr(e, "detail", None)
         )

-    return
+    return Response(
+        orjson.dumps([status.model_dump() for status in statuses]),
+        mimetype="application/json",
+    )


 @api.route("/remote/read", methods=["POST"])
-
-
+@with_instance
+def read_remote_folder(instance: Instance):
+    file_path = request.json.get("filePath")
+
+    remote_connection = instance.remote_connection
+    if not remote_connection:
+        return Response(
+            status=HTTPStatus.BAD_REQUEST,
+            response="No remote connection found in instance.",
+        )
+
     try:
-
-        content = read_remote_file(connection, remote_path=connection.profilerPath)
+        content = read_remote_file(remote_connection, remote_path=file_path)
     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)
     return Response(status=200, response=content)
@@ -1126,37 +1237,15 @@ def sync_remote_folder():

         remote_profiler_folder.lastSynced = int(time.time())

-        return
+        return Response(
+            orjson.dumps(remote_profiler_folder.model_dump()),
+            mimetype="application/json",
+        )

     except RemoteConnectionException as e:
         return Response(status=e.http_status, response=e.message)


-@api.route("/remote/sqlite/detect-path", methods=["POST"])
-def detect_sqlite_path():
-    connection = request.json
-    connection = RemoteConnection.model_validate(connection, strict=False)
-    status_message = StatusMessage(
-        status=ConnectionTestStates.OK, message="Unable to Detect Path"
-    )
-    try:
-        path = get_sqlite_path(connection=connection)
-        if path:
-            status_message = StatusMessage(status=ConnectionTestStates.OK, message=path)
-        else:
-            status_message = StatusMessage(
-                status=ConnectionTestStates.OK, message="Unable to Detect Path"
-            )
-    except RemoteConnectionException as e:
-        current_app.logger.error(f"Unable to detect SQLite3 path {str(e)}")
-        status_message = StatusMessage(
-            status=ConnectionTestStates.FAILED,
-            message="Unable to detect SQLite3 path. See logs",
-        )
-    finally:
-        return status_message.model_dump()
-
-
 @api.route("/remote/use", methods=["POST"])
 def use_remote_folder():
     data = request.get_json(force=True)
@@ -1204,7 +1293,10 @@ def health_check():
 @with_instance
 def get_instance(instance: Instance):
     # Used to gate UI functions if no report is active
-    return
+    return Response(
+        orjson.dumps(instance.model_dump()),
+        mimetype="application/json",
+    )


 @api.route("/instance", methods=["PUT"])
@@ -1247,7 +1339,7 @@ def get_npe_data(instance: Instance):
     if instance.npe_path.endswith(".zst"):
         compressed_path = Path(instance.npe_path)
         uncompressed_path = None
-    elif instance.npe_path.endswith(".json"):
+    elif instance.npe_path.endswith(".json") or instance.npe_path.endswith(".npeviz"):
         compressed_path = None
         uncompressed_path = Path(instance.npe_path)
     else:
@@ -1266,17 +1358,15 @@ def get_npe_data(instance: Instance):
         if compressed_path and compressed_path.exists():
             with open(compressed_path, "rb") as file:
                 compressed_data = file.read()
-
-            npe_data = json.loads(uncompressed_data)
+                npe_data = zstd.uncompress(compressed_data)
         else:
             with open(uncompressed_path, "r") as file:
-                npe_data =
-    except
-        logger.error(f"
+                npe_data = file.read()
+    except Exception as e:
+        logger.error(f"Error reading NPE file: {e}")
         return Response(status=HTTPStatus.UNPROCESSABLE_ENTITY)

-
-    return Response(orjson.dumps(npe_data), mimetype="application/json")
+    return Response(npe_data, mimetype="application/json")


 @api.route("/notify", methods=["POST"])
@@ -1323,8 +1413,8 @@ def notify_report_update():

         logger.info(f"Report generated notification processed: {report_name}")

-        return (
-
+        return Response(
+            orjson.dumps(
                 {
                     "report_name": report_name,
                     "profiler_path": report_generated.profiler_path,
@@ -1333,9 +1423,13 @@ def notify_report_update():
                     "timestamp": report_generated.timestamp,
                 }
             ),
-
+            mimetype="application/json",
         )

     except Exception as e:
         logger.error(f"Error processing report update notification: {str(e)}")
-        return
+        return Response(
+            orjson.dumps({"error": "Internal server error"}),
+            mimetype="application/json",
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )