ttnn-visualizer 0.36.0__py3-none-any.whl → 0.37.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +2 -2
- ttnn_visualizer/csv_queries.py +35 -35
- ttnn_visualizer/decorators.py +13 -6
- ttnn_visualizer/file_uploads.py +89 -23
- ttnn_visualizer/{sessions.py → instances.py} +70 -54
- ttnn_visualizer/models.py +1 -1
- ttnn_visualizer/queries.py +26 -26
- ttnn_visualizer/serializers.py +4 -6
- ttnn_visualizer/settings.py +9 -2
- ttnn_visualizer/sftp_operations.py +7 -3
- ttnn_visualizer/static/assets/{allPaths-ChIeDZ5t.js → allPaths-Z03s-OPC.js} +1 -1
- ttnn_visualizer/static/assets/{allPathsLoader-C4OHN8TU.js → allPathsLoader-BnryPsjm.js} +2 -2
- ttnn_visualizer/static/assets/{index-D8zG3DIo.js → index-BgzPx-DB.js} +198 -198
- ttnn_visualizer/static/assets/{index-BnUuxY3c.css → index-je2tF5Bg.css} +1 -1
- ttnn_visualizer/static/assets/{splitPathsBySizeLoader-BL6wqcCx.js → splitPathsBySizeLoader-Ru7hJnSI.js} +1 -1
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_queries.py +28 -28
- ttnn_visualizer/views.py +213 -146
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/METADATA +1 -1
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/RECORD +25 -25
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/LICENSE +0 -0
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.36.0.dist-info → ttnn_visualizer-0.37.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/app.py
CHANGED
@@ -21,7 +21,7 @@ from werkzeug.debug import DebuggedApplication
from werkzeug.middleware.proxy_fix import ProxyFix

from ttnn_visualizer.exceptions import DatabaseFileNotFoundException, InvalidProfilerPath, InvalidReportPath
-from ttnn_visualizer.
+from ttnn_visualizer.instances import create_instance_from_local_paths
from ttnn_visualizer.settings import Config, DefaultConfig

logger = logging.getLogger(__name__)
@@ -127,7 +127,7 @@ def middleware(app: flask.Flask):
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # CORS configuration
-    origins = ["
+    origins = app.config["ALLOWED_ORIGINS"]

    CORS(
        app,
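The hard-coded origin list is replaced with a value read from Flask configuration (`ALLOWED_ORIGINS`, presumably defined in the updated settings.py, whose contents are not shown in this diff). A minimal sketch of the resulting behaviour, assuming ALLOWED_ORIGINS is a list of origin strings:

    from flask import Flask
    from flask_cors import CORS

    app = Flask(__name__)
    # Assumption: settings.py supplies ALLOWED_ORIGINS; a literal list stands in here.
    app.config["ALLOWED_ORIGINS"] = ["http://localhost:5173"]

    origins = app.config["ALLOWED_ORIGINS"]
    CORS(app, origins=origins)  # only the configured origins may call the API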
ttnn_visualizer/csv_queries.py
CHANGED
@@ -277,38 +277,38 @@ class DeviceLogProfilerQueries:
        "source file",
    ]

-    def __init__(self,
+    def __init__(self, instance: Instance):
        """
-        Initialize the profiler with a
-        The
+        Initialize the profiler with a instance object.
+        The instance determines whether to use a local or remote runner.
        """
-        self.
+        self.instance = instance
        self.runner = None

    def __enter__(self):
        """
-        Determine the appropriate query runner based on the
+        Determine the appropriate query runner based on the instance's remote connection.
        """

-        is_remote = self.
+        is_remote = self.instance.remote_connection
        use_remote_querying = False

        # Disabled until we resolve the issue with sqlite versions
        # if is_remote:
-        #     use_remote_querying = self.
+        #     use_remote_querying = self.instance.remote_connection.useRemoteQuerying

        # Determine if this is a local or remote operation
        if is_remote and use_remote_querying:
-            remote_profiler_folder = self.
+            remote_profiler_folder = self.instance.remote_profile_folder
            file_path = f"{remote_profiler_folder.remotePath}/{self.DEVICE_LOG_FILE}"
            self.runner = RemoteCSVQueryRunner(
                file_path=file_path,
-                remote_connection=self.
+                remote_connection=self.instance.remote_connection,
                offset=1,  # Skip the first line for device log files
            )
        else:
            self.runner = LocalCSVQueryRunner(
-                file_path=Path(self.
+                file_path=Path(self.instance.performance_path).joinpath(
                    self.DEVICE_LOG_FILE
                ),
                offset=1,  # Skip the first line for device log files
@@ -365,23 +365,23 @@ class DeviceLogProfilerQueries:
        )

    @staticmethod
-    def get_raw_csv(
+    def get_raw_csv(instance: Instance):
        from ttnn_visualizer.sftp_operations import read_remote_file

        if (
-            not
-            or
-            and not
+            not instance.remote_connection
+            or instance.remote_connection
+            and not instance.remote_connection.useRemoteQuerying
        ):
            file_path = Path(
-
+                instance.performance_path, DeviceLogProfilerQueries.DEVICE_LOG_FILE
            )
            with open(file_path, "r") as f:
                return f.read()
        else:
-            profiler_folder =
+            profiler_folder = instance.remote_profile_folder
            return read_remote_file(
-
+                instance.remote_connection,
                f"{profiler_folder.remotePath}/{DeviceLogProfilerQueries.DEVICE_LOG_FILE}",
            )

@@ -454,11 +454,11 @@ class OpsPerformanceQueries:
        "HWCommandQueue_write_buffer_TT_HOST_FUNC [ns]",
    ]

-    def __init__(self,
+    def __init__(self, instance: Instance):
        """
-        Initialize the performance profiler with a
+        Initialize the performance profiler with a instance object.
        """
-        self.
+        self.instance = instance
        self.runner = None

    def __enter__(self):
@@ -466,7 +466,7 @@ class OpsPerformanceQueries:

        :return:
        """
-        file_path = OpsPerformanceQueries.get_local_ops_perf_file_path(self.
+        file_path = OpsPerformanceQueries.get_local_ops_perf_file_path(self.instance)
        self.runner = LocalCSVQueryRunner(file_path=file_path, offset=1)
        self.runner.__enter__()

@@ -477,8 +477,8 @@ class OpsPerformanceQueries:
        return self

    @staticmethod
-    def get_local_ops_perf_file_path(
-        performance_path = Path(
+    def get_local_ops_perf_file_path(instance):
+        performance_path = Path(instance.performance_path)

        # Find the latest file with the correct prefix
        perf_files = list(
@@ -494,29 +494,29 @@ class OpsPerformanceQueries:
        return str(latest_file)

    @staticmethod
-    def get_remote_ops_perf_file_path(
+    def get_remote_ops_perf_file_path(instance):
        from ttnn_visualizer.sftp_operations import resolve_file_path

-        remote_profile_folder =
+        remote_profile_folder = instance.remote_profile_folder.remotePath
        return resolve_file_path(
-
+            instance.remote_connection,
            f"{remote_profile_folder}/{OpsPerformanceQueries.PERF_RESULTS_PREFIX}*",
        )

    @staticmethod
-    def get_raw_csv(
+    def get_raw_csv(instance):
        from ttnn_visualizer.sftp_operations import read_remote_file

        if (
-            not
-            or
-            and not
+            not instance.remote_connection
+            or instance.remote_connection
+            and not instance.remote_connection.useRemoteQuerying
        ):
-            with open(OpsPerformanceQueries.get_local_ops_perf_file_path(
+            with open(OpsPerformanceQueries.get_local_ops_perf_file_path(instance)) as f:
                return f.read()
        else:
-            path = OpsPerformanceQueries.get_remote_ops_perf_file_path(
-            return read_remote_file(
+            path = OpsPerformanceQueries.get_remote_ops_perf_file_path(instance)
+            return read_remote_file(instance.remote_connection, path)


    def __exit__(self, exc_type, exc_val, exc_tb):
        """
@@ -600,8 +600,8 @@ class OpsPerformanceReportQueries:
    DEFAULT_TRACING_MODE = False

    @classmethod
-    def generate_report(cls,
-        raw_csv = OpsPerformanceQueries.get_raw_csv(
+    def generate_report(cls, instance):
+        raw_csv = OpsPerformanceQueries.get_raw_csv(instance)
        csv_file = StringIO(raw_csv)
        csv_output_file = tempfile.mktemp(suffix=".csv")
        perf_report.generate_perf_report(
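Taken together, the profiler query helpers now accept the renamed Instance object rather than a session. A usage sketch based only on the signatures visible in this diff (how `instance` is obtained is shown under decorators.py below):

    from ttnn_visualizer.csv_queries import (
        DeviceLogProfilerQueries,
        OpsPerformanceQueries,
        OpsPerformanceReportQueries,
    )

    # `instance` is the pydantic Instance injected by the with_instance decorator.
    raw_device_log = DeviceLogProfilerQueries.get_raw_csv(instance)  # local file or remote read
    raw_ops_perf = OpsPerformanceQueries.get_raw_csv(instance)       # latest perf results CSV
    report = OpsPerformanceReportQueries.generate_report(instance)   # post-processed report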
ttnn_visualizer/decorators.py
CHANGED
@@ -7,7 +7,7 @@ from ttnn_visualizer.enums import ConnectionTestStates


from functools import wraps
-from flask import request,
+from flask import abort, request, session
from paramiko.ssh_exception import (
    AuthenticationException,
    NoValidConnectionsError,
@@ -19,10 +19,10 @@ from ttnn_visualizer.exceptions import (
    NoProjectsException,
    RemoteSqliteException,
)
-from ttnn_visualizer.
+from ttnn_visualizer.instances import get_or_create_instance


-def
+def with_instance(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        from flask import current_app
@@ -33,10 +33,17 @@ def with_session(func):
            current_app.logger.error("No instanceId present on request, returning 404")
            abort(404)

-
-
+        instance_query_data = get_or_create_instance(instance_id=instance_id)
+        instance = instance_query_data.to_pydantic()
+
+        kwargs["instance"] = instance
+
+        if 'instances' not in session:
+            session['instances'] = []
+
+        if instance.instance_id not in session['instances']:
+            session['instances'].append(instance.instance_id)

-        kwargs["session"] = session
        return func(*args, **kwargs)

    return wrapper
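Where routes previously received a `session` keyword, `with_instance` now resolves the Instance from the request's instanceId, injects it as `instance`, and tracks seen instance ids in the Flask session. A hypothetical route sketch (the blueprint and endpoint are illustrative; only the decorator contract comes from this diff):

    from flask import Blueprint, jsonify

    from ttnn_visualizer.decorators import with_instance

    api = Blueprint("api", __name__)  # assumption: an API blueprint of this name

    @api.route("/operations")  # assumption: illustrative endpoint
    @with_instance
    def list_operations(instance):
        # `instance` is the pydantic Instance resolved for the request's instanceId
        return jsonify({"instance_id": instance.instance_id})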
ttnn_visualizer/file_uploads.py
CHANGED
@@ -2,9 +2,18 @@
#
# SPDX-FileCopyrightText: © 2024 Tenstorrent Inc.

-from pathlib import Path
import logging
+import os
import re
+import shlex
+import shutil
+import subprocess
+import tempfile
+from pathlib import Path
+
+from flask import current_app
+
+from ttnn_visualizer.exceptions import DataFormatError

logger = logging.getLogger(__name__)

@@ -37,13 +46,12 @@ def validate_files(files, required_files, pattern=None, folder_name=None):
    return True


-def
+def extract_folder_name_from_files(files):
    """Extract the report name from the first file."""
    if not files:
        return None
-
-    return
-
+    unsplit_name = str(files[0].filename)
+    return unsplit_name.split("/")[0]

def extract_npe_name(files):
    if not files:
@@ -54,8 +62,8 @@ def extract_npe_name(files):

def save_uploaded_files(
    files,
-
-
+    base_directory,
+    parent_folder_name=None,
):
    """
    Save uploaded files to the target directory.
@@ -64,24 +72,82 @@ def save_uploaded_files(
    :param target_directory: The base directory for saving the files.
    :param folder_name: The name to use for the directory.
    """
-
-
-
+    if current_app.config["MALWARE_SCANNER"]:
+        scanned_files = scan_uploaded_files(files, base_directory, parent_folder_name)
+
+        for temp_path, dest_path in scanned_files:
+            if dest_path.parent.exists():
+                dest_path.parent.mkdir(exist_ok=True, parents=True)
+
+            logger.info(f"Saving uploaded file (clean): {dest_path}")
+            shutil.move(temp_path, dest_path)
+    else:
+        for file in files:
+            dest_path = construct_dest_path(file, base_directory, parent_folder_name)
+            logger.info(f"Writing file to {dest_path}")
+
+            # Create directory if it doesn't exist
+            if not dest_path.parent.exists():
+                logger.info(
+                    f"{dest_path.parent.name} does not exist. Creating directory"
+                )
+                dest_path.parent.mkdir(exist_ok=True, parents=True)

-
+            logger.info(f"Saving uploaded file: {dest_path}")
+            file.save(dest_path)

-
-
-        else:
-            destination_file = Path(target_directory) / str(file_path)
+    # Update the modified time of the parent directory (for sorting purposes)
+    os.utime(dest_path.parent, None)

-
+    # Update the modified time of the uploaded directory
+    if parent_folder_name:
+        uploaded_dir = Path(base_directory) / parent_folder_name
+    else:
+        uploaded_dir = Path(base_directory)
+    if uploaded_dir.exists():
+        os.utime(uploaded_dir, None)

-        # Create directory if it doesn't exist
-        if not destination_file.parent.exists():
-            logger.info(
-                f"{destination_file.parent.name} does not exist. Creating directory"
-            )
-            destination_file.parent.mkdir(exist_ok=True, parents=True)

-
+def scan_uploaded_files(
+    files,
+    target_directory,
+    folder_name=None,
+):
+    scanned_files = []
+
+    for file in files:
+        (_, temp_path) = tempfile.mkstemp()
+        file.save(temp_path)
+        dest_path = construct_dest_path(file, target_directory, folder_name)
+
+        cmd_list = shlex.split(current_app.config["MALWARE_SCANNER"])
+        cmd_list.append(temp_path)
+
+        try:
+            result = subprocess.run(
+                cmd_list,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True,
+            )
+            if result.returncode == 0:
+                scanned_files.append((temp_path, dest_path))
+            else:
+                os.unlink(temp_path)
+                logger.warning(f"Malware scanner flagged file: {file.filename}")
+                raise DataFormatError()
+        except Exception as e:
+            if os.path.exists(temp_path):
+                os.unlink(temp_path)
+            raise
+
+    return scanned_files
+
+
+def construct_dest_path(file, target_directory, folder_name):
+    if folder_name:
+        dest_path = Path(target_directory) / folder_name / str(file.filename)
+    else:
+        dest_path = Path(target_directory) / str(file.filename)
+
+    return dest_path
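Uploads can now be passed through an external malware scanner before being moved into place. A sketch of the expected contract, assuming MALWARE_SCANNER holds a shell command string that exits 0 for a clean file (clamscan is only an illustrative choice; the diff does not name a scanner):

    # Assumption: any CLI that accepts a file path and returns exit code 0 when
    # the file is clean can be configured here.
    app.config["MALWARE_SCANNER"] = "clamscan --no-summary"

    # With the scanner configured, save_uploaded_files():
    #   1. writes each upload to a tempfile.mkstemp() path,
    #   2. runs shlex.split(MALWARE_SCANNER) + [temp_path] via subprocess.run,
    #   3. moves files whose scan returned exit code 0 into place, and
    #   4. deletes the temp file and raises DataFormatError for anything flagged.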
ttnn_visualizer/{sessions.py → instances.py}
RENAMED
@@ -20,11 +20,11 @@ from ttnn_visualizer.extensions import db
logger = getLogger(__name__)

from flask import jsonify, current_app
-from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.exc import IntegrityError, SQLAlchemyError


def update_existing_instance(
-
+    instance_data,
    profiler_name,
    performance_name,
    npe_name,
@@ -33,7 +33,7 @@ def update_existing_instance(
    remote_performance_folder,
    clear_remote,
):
-    active_report =
+    active_report = instance_data.active_report or {}

    # First ifs are explicit deletes and elifs are updates
    if profiler_name == "":
@@ -51,62 +51,62 @@ def update_existing_instance(
    elif npe_name is not None:
        active_report["npe_name"] = npe_name

-
+    instance_data.active_report = active_report

    if remote_connection:
-
+        instance_data.remote_connection = remote_connection.model_dump()
    if remote_profiler_folder:
-
+        instance_data.remote_profiler_folder = remote_profiler_folder.model_dump()
    if remote_performance_folder:
-
+        instance_data.remote_performance_folder = remote_performance_folder.model_dump()

    if clear_remote:
-        clear_remote_data(
+        clear_remote_data(instance_data)

    update_paths(
-
+        instance_data, active_report, remote_connection
    )


-def clear_remote_data(
-
-
-
+def clear_remote_data(instance_data):
+    instance_data.remote_connection = None
+    instance_data.remote_profiler_folder = None
+    instance_data.remote_performance_folder = None


def handle_sqlalchemy_error(error):
-    current_app.logger.error(f"Failed to update tab
+    current_app.logger.error(f"Failed to update tab instance: {str(error)}")
    db.session.rollback()


-def commit_and_log_session(
+def commit_and_log_session(instance_data, instance_id):
    db.session.commit()

-
+    instance_data = InstanceTable.query.filter_by(instance_id=instance_id).first()
    current_app.logger.info(
-        f"
+        f"Data for instance {instance_id}: {json.dumps(instance_data.to_dict(), indent=4)}"
    )


def update_paths(
-
+    instance_data, active_report, remote_connection
):
    if active_report.get("performance_name"):
-
+        instance_data.performance_path = get_performance_path(
            performance_name=active_report["performance_name"],
            current_app=current_app,
            remote_connection=remote_connection,
        )

    if active_report.get("profiler_name"):
-
+        instance_data.profiler_path = get_profiler_path(
            profiler_name=active_report["profiler_name"],
            current_app=current_app,
            remote_connection=remote_connection,
        )

    if active_report.get("npe_name"):
-
+        instance_data.npe_path = get_npe_path(
            npe_name=active_report["npe_name"],
            current_app=current_app
        )
@@ -135,7 +135,7 @@ def create_new_instance(
    remote_profiler_folder = None
    remote_performance_folder = None

-
+    instance_data = InstanceTable(
        instance_id=instance_id,
        active_report=active_report,
        profiler_path=get_profiler_path(
@@ -151,8 +151,8 @@ def create_new_instance(
            remote_performance_folder.model_dump() if remote_performance_folder else None
        ),
    )
-    db.session.add(
-    return
+    db.session.add(instance_data)
+    return instance_data


def update_instance(
@@ -166,11 +166,11 @@ def update_instance(
    clear_remote=False,
):
    try:
-
+        instance_data = get_or_create_instance(instance_id)

-        if
+        if instance_data:
            update_existing_instance(
-
+                instance_data,
                profiler_name,
                performance_name,
                npe_name,
@@ -180,7 +180,7 @@ def update_instance(
                clear_remote,
            )
        else:
-
+            instance_data = create_new_instance(
                instance_id,
                profiler_name,
                performance_name,
@@ -191,12 +191,12 @@ def update_instance(
                clear_remote,
            )

-        commit_and_log_session(
-        return jsonify({"message": "Tab
+        commit_and_log_session(instance_data, instance_id)
+        return jsonify({"message": "Tab instance updated successfully"}), 200

    except SQLAlchemyError as e:
        handle_sqlalchemy_error(e)
-        return jsonify({"error": "Failed to update tab
+        return jsonify({"error": "Failed to update tab instance"}), 500


def get_or_create_instance(
@@ -208,25 +208,30 @@ def get_or_create_instance(
    remote_profiler_folder=None,
):
    """
-    Retrieve an existing tab
-    Uses the Instance model to manage
+    Retrieve an existing tab instance or create a new one if it doesn't exist.
+    Uses the Instance model to manage instance data and supports conditional updates.
    """
    try:
-        # Query the database for the tab
-
+        # Query the database for the tab instance
+        instance_data = InstanceTable.query.filter_by(instance_id=instance_id).first()

-        # If
-        if not
-
+        # If instance doesn't exist, initialize it
+        if not instance_data:
+            instance_data = InstanceTable(
                instance_id=instance_id,
                active_report={},
                remote_connection=None,
                remote_profiler_folder=None,
            )
-            db.session.add(
-            db.session.commit()
+            db.session.add(instance_data)

-
+            try:
+                db.session.commit()
+            except IntegrityError:
+                db.session.rollback()
+                instance_data = InstanceTable.query.filter_by(instance_id=instance_id).first()
+
+        # Update the instance if any new data is provided
        if profiler_name or performance_name or npe_name or remote_connection or remote_profiler_folder:
            update_instance(
                instance_id=instance_id,
@@ -237,20 +242,20 @@ def get_or_create_instance(
                remote_profiler_folder=remote_profiler_folder,
            )

-
-
+            # Query again to get the updated instance data
+            instance_data = InstanceTable.query.filter_by(instance_id=instance_id).first()

-        return
+        return instance_data

    except SQLAlchemyError as e:
-        current_app.logger.error(f"Failed to get or create tab
+        current_app.logger.error(f"Failed to get or create tab instance: {str(e)}")
        db.session.rollback()
        return None


def get_instance():
    """
-    Middleware to retrieve or create a tab
+    Middleware to retrieve or create a tab instance based on the instance_id.
    """
    instance_id = request.args.get("instanceId", None)

@@ -261,22 +266,33 @@ def get_instance():

    active_report = get_or_create_instance(instance_id)
    current_app.logger.info(
-        f"get_instance:
+        f"get_instance: active report retrieved: {active_report.active_report}"
    )

    return jsonify({"active_report": active_report.active_report}), 200


-def
+def get_instances(instance_ids):
+    instances = []
+
+    for instance_id in instance_ids:
+        instance = InstanceTable.query.filter_by(instance_id=instance_id).first()
+        if instance:
+            instances.append(instance)
+
+    return instances
+
+
+def init_instances(app):
    """
-    Initializes
+    Initializes instance middleware and hooks it into Flask.
    """
    app.before_request(get_instance)
-    app.logger.info("
+    app.logger.info("Instances middleware initialized.")


def create_random_instance_id():
-    return ''.join(random.choices(string.ascii_lowercase + string.digits, k=
+    return ''.join(random.choices(string.ascii_lowercase + string.digits, k=45))


def create_instance_from_local_paths(profiler_path, performance_path):
@@ -291,7 +307,7 @@ def create_instance_from_local_paths(profiler_path, performance_path):

    profiler_name = _profiler_path.parts[-1] if _profiler_path and len(_profiler_path.parts) > 2 else ""
    performance_name = _performance_path.parts[-1] if _performance_path and len(_performance_path.parts) > 2 else ""
-
+    instance_data = InstanceTable(
        instance_id=create_random_instance_id(),
        active_report={
            "profiler_name": profiler_name,
@@ -304,6 +320,6 @@ def create_instance_from_local_paths(profiler_path, performance_path):
        remote_profiler_folder=None,
        remote_performance_folder=None,
    )
-    db.session.add(
+    db.session.add(instance_data)
    db.session.commit()
-    return
+    return instance_data