ttnn-visualizer 0.24.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ttnn_visualizer/__init__.py +4 -0
- ttnn_visualizer/app.py +193 -0
- ttnn_visualizer/bin/docker-entrypoint-web +16 -0
- ttnn_visualizer/bin/pip3-install +17 -0
- ttnn_visualizer/csv_queries.py +618 -0
- ttnn_visualizer/decorators.py +117 -0
- ttnn_visualizer/enums.py +12 -0
- ttnn_visualizer/exceptions.py +40 -0
- ttnn_visualizer/extensions.py +14 -0
- ttnn_visualizer/file_uploads.py +78 -0
- ttnn_visualizer/models.py +275 -0
- ttnn_visualizer/queries.py +388 -0
- ttnn_visualizer/remote_sqlite_setup.py +91 -0
- ttnn_visualizer/requirements.txt +24 -0
- ttnn_visualizer/serializers.py +249 -0
- ttnn_visualizer/sessions.py +245 -0
- ttnn_visualizer/settings.py +118 -0
- ttnn_visualizer/sftp_operations.py +486 -0
- ttnn_visualizer/sockets.py +118 -0
- ttnn_visualizer/ssh_client.py +85 -0
- ttnn_visualizer/static/assets/allPaths-CKt4gwo3.js +1 -0
- ttnn_visualizer/static/assets/allPathsLoader-Dzw0zTnr.js +2 -0
- ttnn_visualizer/static/assets/index-BXlT2rEV.js +5247 -0
- ttnn_visualizer/static/assets/index-CsS_OkTl.js +1 -0
- ttnn_visualizer/static/assets/index-DTKBo2Os.css +7 -0
- ttnn_visualizer/static/assets/index-DxLGmC6o.js +1 -0
- ttnn_visualizer/static/assets/site-BTBrvHC5.webmanifest +19 -0
- ttnn_visualizer/static/assets/splitPathsBySizeLoader-HHqSPeQM.js +1 -0
- ttnn_visualizer/static/favicon/android-chrome-192x192.png +0 -0
- ttnn_visualizer/static/favicon/android-chrome-512x512.png +0 -0
- ttnn_visualizer/static/favicon/favicon-32x32.png +0 -0
- ttnn_visualizer/static/favicon/favicon.svg +3 -0
- ttnn_visualizer/static/index.html +36 -0
- ttnn_visualizer/static/sample-data/cluster-desc.yaml +763 -0
- ttnn_visualizer/tests/__init__.py +4 -0
- ttnn_visualizer/tests/test_queries.py +444 -0
- ttnn_visualizer/tests/test_serializers.py +582 -0
- ttnn_visualizer/utils.py +185 -0
- ttnn_visualizer/views.py +794 -0
- ttnn_visualizer-0.24.0.dist-info/LICENSE +202 -0
- ttnn_visualizer-0.24.0.dist-info/LICENSE_understanding.txt +3 -0
- ttnn_visualizer-0.24.0.dist-info/METADATA +144 -0
- ttnn_visualizer-0.24.0.dist-info/RECORD +46 -0
- ttnn_visualizer-0.24.0.dist-info/WHEEL +5 -0
- ttnn_visualizer-0.24.0.dist-info/entry_points.txt +2 -0
- ttnn_visualizer-0.24.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,486 @@
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0
|
2
|
+
#
|
3
|
+
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
|
4
|
+
|
5
|
+
import json
|
6
|
+
import logging
|
7
|
+
import re
|
8
|
+
import time
|
9
|
+
from pathlib import Path
|
10
|
+
from stat import S_ISDIR
|
11
|
+
from threading import Thread
|
12
|
+
from typing import List, Optional
|
13
|
+
|
14
|
+
from flask import current_app
|
15
|
+
from paramiko.client import SSHClient
|
16
|
+
from paramiko.sftp_client import SFTPClient
|
17
|
+
|
18
|
+
from ttnn_visualizer.decorators import remote_exception_handler
|
19
|
+
from ttnn_visualizer.enums import ConnectionTestStates
|
20
|
+
from ttnn_visualizer.exceptions import NoProjectsException, RemoteConnectionException
|
21
|
+
from ttnn_visualizer.models import RemoteConnection, RemoteReportFolder
|
22
|
+
from ttnn_visualizer.sockets import (
|
23
|
+
FileProgress,
|
24
|
+
FileStatus,
|
25
|
+
emit_file_status,
|
26
|
+
)
|
27
|
+
from ttnn_visualizer.ssh_client import get_client
|
28
|
+
from ttnn_visualizer.utils import update_last_synced
|
29
|
+
|
30
|
+
logger = logging.getLogger(__name__)
|
31
|
+
|
32
|
+
TEST_CONFIG_FILE = "config.json"
|
33
|
+
TEST_PROFILER_FILE = "profile_log_device.csv"
|
34
|
+
PROFILER_DIRECTORY = "profiler"
|
35
|
+
REPORT_DATA_DIRECTORY = Path(__file__).parent.absolute().joinpath("data")
|
36
|
+
|
37
|
+
|
38
|
+
def start_background_task(task, *args):
    """Run *task* in the background.

    Uses Flask-SocketIO's background-task helper when WebSockets are
    enabled; otherwise falls back to a plain Thread.

    :param task: Callable to execute.
    :param args: Positional arguments forwarded to ``task``.
    """
    # The original entered current_app.app_context() twice (once around the
    # whole body and again inside the websocket branch); once is enough.
    with current_app.app_context():
        if current_app.config["USE_WEBSOCKETS"]:
            # Use SocketIO's background task mechanism if available.
            # Local import keeps socket support optional.
            from ttnn_visualizer.extensions import socketio

            socketio.start_background_task(task, *args)
        else:
            # Use a basic thread if WebSockets are not enabled
            thread = Thread(target=task, args=args)
            thread.start()
|
50
|
+
|
51
|
+
|
52
|
+
def resolve_file_path(remote_connection, file_path: str) -> str:
    """
    Resolve the file path if it contains a wildcard ('*') by listing matches
    on the remote machine.

    :param remote_connection: RemoteConnection with the SSH connection info.
    :param file_path: The file path, which may include wildcards.
    :return: The resolved file path (first match when a wildcard was used).
    :raises FileNotFoundError: If no files match the pattern.
    """
    ssh_client = get_client(remote_connection)

    try:
        if "*" in file_path:
            # The remote shell expands the glob; one match per line.
            command = f"ls -1 {file_path}"
            stdin, stdout, stderr = ssh_client.exec_command(command)
            files = stdout.read().decode().splitlines()

            if not files:
                raise FileNotFoundError(f"No files found matching pattern: {file_path}")

            # Return the first file found
            return files[0]

        return file_path
    finally:
        # Close the client on every path — the original only closed it on
        # the wildcard branch, leaking the connection otherwise.
        ssh_client.close()
|
76
|
+
|
77
|
+
|
78
|
+
def calculate_folder_size(client: SSHClient, folder_path: str) -> int:
    """Return the total size in bytes of a remote folder (before compression)."""
    _, stdout, _ = client.exec_command(f"du -sb {folder_path}")
    du_output = stdout.read().decode().strip()
    # `du -sb` prints "<bytes>\t<path>"; keep only the byte count.
    size_field, _, _ = du_output.partition("\t")
    return int(size_field)
|
83
|
+
|
84
|
+
|
85
|
+
def get_cluster_desc_path(ssh_client) -> Optional[str]:
    """
    List all folders matching '/tmp/umd_*' on the remote machine, filter for those
    containing 'cluster_descriptor.yaml', and return the full path to the most
    recently modified YAML file.

    :param ssh_client: Connected paramiko SSHClient; closed before returning.
    :return: Full path to the most recently modified 'cluster_descriptor.yaml'
        file, or None when no candidate exists.
    :raises RemoteConnectionException: On any error while listing or stat-ing.
    """
    latest_yaml_path = None
    latest_mod_time = 0
    cluster_desc_file = "cluster_descriptor.yaml"

    try:
        # List all folders matching '/tmp/umd_*'; "no match" errors are
        # suppressed so stdout is simply empty.
        list_folders_command = "ls -1d /tmp/umd_* 2>/dev/null"
        stdin, stdout, stderr = ssh_client.exec_command(list_folders_command)

        # Get the list of folders
        folder_paths = stdout.read().decode().splitlines()

        if not folder_paths:
            logger.info("No folders found matching the pattern '/tmp/umd_*'")
            return None

        # Check each folder for 'cluster_descriptor.yaml' and track the most recent one
        with ssh_client.open_sftp() as sftp:
            for folder in folder_paths:
                yaml_file_path = f"{folder}/{cluster_desc_file}"
                try:
                    # Check if the file exists and get its modification time
                    attributes = sftp.stat(yaml_file_path)
                    mod_time = attributes.st_mtime  # May be None on some servers

                    # Update the latest file if this one is newer.
                    # Guard against a None st_mtime, which previously raised
                    # TypeError on comparison.
                    if mod_time and mod_time > latest_mod_time:
                        latest_mod_time = mod_time
                        latest_yaml_path = yaml_file_path
                        # (Fixed a stray quote that was in this log message.)
                        logger.info(
                            f"Found newer {cluster_desc_file}: {yaml_file_path}"
                        )

                except FileNotFoundError:
                    logger.debug(f"'{cluster_desc_file}' not found in: {folder}")
                    continue

        if latest_yaml_path:
            logger.info(
                f"Most recently modified {cluster_desc_file}: {latest_yaml_path}"
            )
        else:
            logger.info(
                f"No {cluster_desc_file} files found in any '/tmp/umd_*' folders"
            )
        return latest_yaml_path

    except Exception as e:
        logger.error(f"Error retrieving {cluster_desc_file} path: {e}")
        raise RemoteConnectionException(
            message=f"Failed to get '{cluster_desc_file}' path",
            status=ConnectionTestStates.FAILED,
        )
    finally:
        ssh_client.close()
|
148
|
+
|
149
|
+
|
150
|
+
@remote_exception_handler
def get_cluster_desc(remote_connection: RemoteConnection):
    """Fetch the cluster descriptor file contents, or None when absent."""
    ssh_client = get_client(remote_connection)
    descriptor_path = get_cluster_desc_path(ssh_client)
    return (
        read_remote_file(remote_connection, descriptor_path)
        if descriptor_path
        else None
    )
|
158
|
+
|
159
|
+
|
160
|
+
def walk_sftp_directory(sftp: SFTPClient, remote_path: str):
    """SFTP analogue of os.walk: return (file_names, folder_names) in remote_path."""
    file_names, folder_names = [], []
    for entry in sftp.listdir_attr(remote_path):
        # st_mode can be None; treat that as "not a directory".
        bucket = folder_names if S_ISDIR(entry.st_mode or 0) else file_names
        bucket.append(entry.filename)
    return file_names, folder_names
|
169
|
+
|
170
|
+
|
171
|
+
def is_excluded(file_path, exclude_patterns):
    """Return True when *file_path* matches at least one exclusion regex."""
    for pattern in exclude_patterns:
        if re.search(pattern, file_path):
            return True
    return False
|
174
|
+
|
175
|
+
|
176
|
+
@remote_exception_handler
def sync_files_and_directories(
    client, remote_folder: str, destination_dir: Path, exclude_patterns=None, sid=None
):
    """Download files and directories sequentially in one unified loop.

    Recursively mirrors ``remote_folder`` into ``destination_dir`` over SFTP,
    skipping any path matching one of ``exclude_patterns`` (regexes), writing a
    last-synced marker, and emitting a final FINISHED progress event when
    WebSockets are enabled.

    :param client: Connected paramiko SSHClient.
    :param remote_folder: Remote directory to mirror.
    :param destination_dir: Local directory to mirror into (created if needed).
    :param exclude_patterns: Optional list of regex patterns to skip.
    :param sid: Socket room/tab id used for progress events (may be None).
    """
    exclude_patterns = (
        exclude_patterns or []
    )  # Default to an empty list if not provided

    with client.open_sftp() as sftp:
        # Ensure the destination directory exists
        destination_dir.mkdir(parents=True, exist_ok=True)
        finished_files = 0  # Initialize finished files counter

        # Recursively handle files and folders in the current directory
        def download_directory_contents(remote_dir, local_dir):
            # Ensure the local directory exists
            local_dir.mkdir(parents=True, exist_ok=True)

            # Get files and folders in the remote directory
            files, folders = walk_sftp_directory(sftp, remote_dir)
            # NOTE(review): total_files counts only this directory, not the
            # whole tree, so client-side progress totals reset per directory.
            total_files = len(files)

            # Function to download a file with progress reporting
            def download_file(remote_file_path, local_file_path, index):
                nonlocal finished_files
                # Download file with progress callback
                logger.info(f"Downloading {remote_file_path}")
                # finished_files is passed by value: the callback reports the
                # count as of the start of this file's transfer.
                download_file_with_progress(
                    sftp,
                    remote_file_path,
                    local_file_path,
                    sid,
                    total_files,
                    finished_files,
                )
                logger.info(f"Finished downloading {remote_file_path}")
                finished_files += 1

            # Download all files in the current directory
            for index, file in enumerate(files, start=1):
                remote_file_path = f"{remote_dir}/{file}"
                local_file_path = Path(local_dir, file)

                # Skip files that match any exclusion pattern
                if is_excluded(remote_file_path, exclude_patterns):
                    logger.info(f"Skipping {remote_file_path} (excluded by pattern)")
                    continue

                download_file(remote_file_path, local_file_path, index)

            # Recursively handle subdirectories
            for folder in folders:
                remote_subdir = f"{remote_dir}/{folder}"
                local_subdir = local_dir / folder
                if is_excluded(remote_subdir, exclude_patterns):
                    logger.info(
                        f"Skipping directory {remote_subdir} (excluded by pattern)"
                    )
                    continue
                download_directory_contents(remote_subdir, local_subdir)

        # Start downloading from the root folder
        download_directory_contents(remote_folder, destination_dir)

        # Create a .last-synced file in directory
        update_last_synced(destination_dir)

        # Emit final status
        final_progress = FileProgress(
            current_file_name="",  # No specific file for the final status
            number_of_files=0,
            percent_of_current=100,
            finished_files=finished_files,
            status=FileStatus.FINISHED,
        )

        if current_app.config["USE_WEBSOCKETS"]:
            emit_file_status(final_progress, sid)
        logger.info("All files downloaded. Final progress emitted.")
|
256
|
+
|
257
|
+
|
258
|
+
def download_file_with_progress(
    sftp, remote_path, local_path, sid, total_files, finished_files
):
    """Download a file and emit progress using FileProgress.

    :param sftp: Open paramiko SFTP client.
    :param remote_path: Remote file path to fetch.
    :param local_path: Local destination path.
    :param sid: Socket room/tab id used for progress events (may be None).
    :param total_files: Number of files in the current batch, for display.
    :param finished_files: Count of files already completed.
    :raises OSError: Propagated (after logging) when the transfer fails.
    """
    try:

        def download_progress_callback(transferred, total):
            # Guard against zero-byte files: paramiko reports total == 0 for
            # them, which previously caused a ZeroDivisionError here.
            percent_of_current = (transferred / total) * 100 if total else 100.0
            progress = FileProgress(
                current_file_name=remote_path,
                number_of_files=total_files,
                percent_of_current=percent_of_current,
                finished_files=finished_files,
                status=FileStatus.DOWNLOADING,
            )
            emit_file_status(progress, sid)

        # Perform the download
        sftp.get(remote_path, str(local_path), callback=download_progress_callback)

    except OSError as e:
        logger.error(f"Error downloading file {remote_path} to {local_path}: {str(e)}")
        raise
|
281
|
+
|
282
|
+
|
283
|
+
def get_remote_report_folder_from_config_path(
    sftp: SFTPClient, config_path: str
) -> RemoteReportFolder:
    """Build a RemoteReportFolder from a remote report config.json file."""
    stat_result = sftp.lstat(str(config_path))
    with sftp.open(str(config_path), "rb") as config_file:
        config = json.loads(config_file.read())
        # Fall back to "now" when the server does not report a mtime.
        modified = (
            int(stat_result.st_mtime) if stat_result.st_mtime else int(time.time())
        )
        return RemoteReportFolder(
            remotePath=str(Path(config_path).parent),
            testName=config["report_name"],
            lastModified=modified,
        )
|
297
|
+
|
298
|
+
|
299
|
+
def get_remote_profile_folder(
    sftp: SFTPClient, profile_folder: str
) -> RemoteReportFolder:
    """Build a RemoteReportFolder describing a remote profiler results folder."""
    stat_result = sftp.stat(str(profile_folder))
    # Fall back to "now" when the server does not report a mtime.
    if stat_result.st_mtime:
        modified = int(stat_result.st_mtime)
    else:
        modified = int(time.time())
    return RemoteReportFolder(
        remotePath=str(profile_folder),
        testName=str(profile_folder.split("/")[-1]),
        lastModified=modified,
    )
|
314
|
+
|
315
|
+
|
316
|
+
@remote_exception_handler
def read_remote_file(
    remote_connection,
    remote_path=None,
):
    """Read a remote file and return its bytes, or None on read failure."""
    ssh_client = get_client(remote_connection)
    with ssh_client.open_sftp() as sftp:
        target = (
            Path(remote_path) if remote_path else Path(remote_connection.reportPath)
        )

        logger.info(f"Opening remote file {target}")

        try:
            sftp.chdir(path=str(target.parent))
            with sftp.open(filename=str(target.name)) as remote_file:
                return remote_file.read()
        except FileNotFoundError:
            logger.error(f"File not found: {target}")
            return None
        except IOError as e:
            logger.error(f"Error reading remote file {target}: {e}")
            return None
|
344
|
+
|
345
|
+
|
346
|
+
@remote_exception_handler
def check_remote_path_for_reports(remote_connection):
    """Check the remote path for config files."""
    ssh_client = get_client(remote_connection)
    matching_folders = find_folders_by_files(
        ssh_client, remote_connection.reportPath, [TEST_CONFIG_FILE]
    )
    if matching_folders:
        return True
    raise NoProjectsException(
        message="No projects found at path", status=ConnectionTestStates.FAILED
    )
|
358
|
+
|
359
|
+
|
360
|
+
@remote_exception_handler
def check_remote_path_exists(remote_connection: RemoteConnection, path_key: str):
    """Verify that the directory referenced by *path_key* exists remotely.

    :param remote_connection: Connection whose attribute named ``path_key``
        holds the remote directory path.
    :param path_key: Attribute name, e.g. "reportPath" or "performancePath".
    :raises RemoteConnectionException: When the directory is missing or
        inaccessible.
    """
    client = get_client(remote_connection)
    # Use a context manager so the SFTP channel is always closed
    # (the original leaked it).
    with client.open_sftp() as sftp:
        # Attempt to stat the directory to see if it exists
        try:
            sftp.stat(getattr(remote_connection, path_key))
        except IOError as e:
            # Directory does not exist or is inaccessible
            if path_key == "performancePath":
                message = "Performance directory does not exist or cannot be accessed"
            else:
                message = "Report directory does not exist or cannot be accessed"

            logger.error(message)
            # Chain the original error for debuggability.
            raise RemoteConnectionException(
                message=message, status=ConnectionTestStates.FAILED
            ) from e
|
378
|
+
|
379
|
+
|
380
|
+
def find_folders_by_files(
    ssh_client, root_folder: str, file_names: List[str]
) -> List[str]:
    """Given a remote path, return a list of top-level folders that contain any of the specified files.

    :param ssh_client: Connected paramiko SSHClient.
    :param root_folder: Remote directory whose immediate children are scanned.
    :param file_names: File names, any one of which marks a folder as a match.
    :return: Full paths of the matching top-level folders.
    """
    matched_folders: List[str] = []
    with ssh_client.open_sftp() as sftp:
        all_entries = sftp.listdir_attr(root_folder)
        # Guard st_mode against None (paramiko may omit it) — previously this
        # raised TypeError; now it matches walk_sftp_directory's handling.
        top_level_directories = filter(
            lambda e: S_ISDIR(e.st_mode if e.st_mode else 0), all_entries
        )

        for directory in top_level_directories:
            dirname = Path(root_folder, directory.filename)
            directory_files = sftp.listdir(str(dirname))

            # Check if any of the specified file names exist in the directory
            if any(file_name in directory_files for file_name in file_names):
                matched_folders.append(str(dirname))

        return matched_folders
|
398
|
+
|
399
|
+
|
400
|
+
@remote_exception_handler
def get_remote_profiler_folders(
    remote_connection: RemoteConnection,
) -> List[RemoteReportFolder]:
    """Return a list of remote folders containing a profile_log_device file."""
    client = get_client(remote_connection)
    profiler_paths = find_folders_by_files(
        client, remote_connection.performancePath, [TEST_PROFILER_FILE]
    )
    if not profiler_paths:
        error = f"No profiler paths found at {remote_connection.performancePath}"
        logger.info(error)
        raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
    with client.open_sftp() as sftp:
        return [get_remote_profile_folder(sftp, path) for path in profiler_paths]
|
418
|
+
|
419
|
+
|
420
|
+
@remote_exception_handler
def get_remote_report_folders(
    remote_connection: RemoteConnection,
) -> List[RemoteReportFolder]:
    """Return a list of remote folders containing a config.json file."""
    client = get_client(remote_connection)
    config_folders = find_folders_by_files(
        client, remote_connection.reportPath, [TEST_CONFIG_FILE]
    )
    if not config_folders:
        error = f"No projects found at {remote_connection.reportPath}"
        logger.info(error)
        raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
    with client.open_sftp() as sftp:
        return [
            get_remote_report_folder_from_config_path(
                sftp, str(Path(folder).joinpath(TEST_CONFIG_FILE))
            )
            for folder in config_folders
        ]
|
441
|
+
|
442
|
+
|
443
|
+
@remote_exception_handler
def sync_remote_folders(
    remote_connection: RemoteConnection,
    remote_folder_path: str,
    path_prefix: str,
    exclude_patterns: Optional[List[str]] = None,
    sid=None,
):
    """Main function to sync test folders, handles both compressed and individual syncs."""
    client = get_client(remote_connection)
    # Mirror into data/<prefix>/<host>/<remote folder name>.
    local_target = Path(
        REPORT_DATA_DIRECTORY,
        path_prefix,
        remote_connection.host,
        Path(remote_folder_path).name,
    )
    local_target.mkdir(parents=True, exist_ok=True)

    sync_files_and_directories(
        client, remote_folder_path, local_target, exclude_patterns, sid
    )
|
462
|
+
|
463
|
+
|
464
|
+
@remote_exception_handler
def sync_remote_profiler_folders(
    remote_connection: RemoteConnection,
    path_prefix: str,
    profile: RemoteReportFolder,
    exclude_patterns: Optional[List[str]] = None,
    sid=None,
):
    """Sync a remote profiler results folder into the local data directory."""
    client = get_client(remote_connection)
    remote_folder_path = profile.remotePath
    # Mirror into data/<prefix>/<host>/profiler/<remote folder name>.
    local_target = Path(
        REPORT_DATA_DIRECTORY,
        path_prefix,
        remote_connection.host,
        PROFILER_DIRECTORY,
        Path(remote_folder_path).name,
    )
    local_target.mkdir(parents=True, exist_ok=True)

    sync_files_and_directories(
        client, remote_folder_path, local_target, exclude_patterns, sid
    )
|
@@ -0,0 +1,118 @@
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0
|
2
|
+
#
|
3
|
+
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
|
4
|
+
|
5
|
+
import threading
|
6
|
+
import time
|
7
|
+
from dataclasses import dataclass, field
|
8
|
+
from datetime import datetime
|
9
|
+
from enum import Enum
|
10
|
+
from logging import getLogger
|
11
|
+
|
12
|
+
from flask_socketio import join_room, disconnect, leave_room
|
13
|
+
|
14
|
+
from ttnn_visualizer.utils import SerializeableDataclass
|
15
|
+
|
16
|
+
|
17
|
+
logger = getLogger(__name__)
|
18
|
+
|
19
|
+
|
20
|
+
class Messages(object):
    """Socket.IO event names emitted to clients."""

    # Progress updates for remote file transfers.
    FILE_TRANSFER_PROGRESS = "fileTransferProgress"
|
22
|
+
|
23
|
+
|
24
|
+
class FileStatus(Enum):
    """Lifecycle states reported for a remote file transfer."""

    DOWNLOADING = "DOWNLOADING"
    FAILED = "FAILED"
    COMPRESSING = "COMPRESSING"
    FINISHED = "FINISHED"
    STARTED = "STARTED"
|
30
|
+
|
31
|
+
|
32
|
+
@dataclass
class FileProgress(SerializeableDataclass):
    """Progress snapshot for a single file transfer, serialized to clients."""

    current_file_name: str  # Remote path of the file being transferred
    number_of_files: int  # Total files in the current batch
    percent_of_current: float  # 0-100 progress of the current file
    finished_files: int  # Files completed so far
    status: FileStatus  # Use the FileStatus Enum
    # NOTE(review): datetime.utcnow() is naive and deprecated since 3.12;
    # switching to datetime.now(timezone.utc) would change the emitted ISO
    # string (adds an offset) — confirm the frontend before changing.
    timestamp: str = field(default_factory=lambda: datetime.utcnow().isoformat())

    def __post_init__(self):
        # Normalize to two decimal places for stable client display.
        self.percent_of_current = round(self.percent_of_current, 2)
|
43
|
+
|
44
|
+
|
45
|
+
# For tracking connected clients subscriber ID
|
46
|
+
tab_clients = {}
|
47
|
+
|
48
|
+
# Global variables for debouncing
|
49
|
+
debounce_timer = None
|
50
|
+
debounce_delay = 0.5 # Delay in seconds (adjust as needed)
|
51
|
+
last_emit_time = 0
|
52
|
+
|
53
|
+
|
54
|
+
def emit_file_status(progress: FileProgress, tab_id=None):
    """Debounced emit for file status updates using a debounce timer.

    Emits immediately when the last emit is older than ``debounce_delay``;
    otherwise (re)schedules a timer so rapid updates coalesce.

    :param progress: Snapshot to send to the client.
    :param tab_id: Socket.IO room (browser tab) to target; None broadcasts.
    """
    global debounce_timer, last_emit_time

    def emit_now():
        global last_emit_time
        last_emit_time = time.time()
        data = progress.to_dict()
        data.update({"tab_id": tab_id})
        try:
            # `socketio` is a module global assigned by register_handlers();
            # when sockets were never registered, the name does not exist
            # and the NameError below fires.
            if socketio is not None and hasattr(socketio, "emit"):
                socketio.emit(Messages.FILE_TRANSFER_PROGRESS, data, to=tab_id)
        except NameError:
            pass  # Can silently pass since we know the NameError is from sockets being disabled

    # Cancel any existing debounce timer if it exists and is still active
    if debounce_timer and isinstance(debounce_timer, threading.Timer):
        debounce_timer.cancel()

    # Check if the last emit was longer than debounce_delay
    if time.time() - last_emit_time > debounce_delay:
        emit_now()
    else:
        # Set a new debounce timer
        debounce_timer = threading.Timer(debounce_delay, emit_now)
        debounce_timer.start()
|
80
|
+
|
81
|
+
|
82
|
+
def register_handlers(socketio_instance):
    """Attach connect/disconnect handlers to the given Socket.IO instance.

    Also publishes the instance as the module-global ``socketio`` consumed by
    ``emit_file_status``.

    :param socketio_instance: Initialized Flask-SocketIO server object.
    """
    global socketio
    socketio = socketio_instance

    @socketio.on("connect")
    def handle_connect():
        from flask import request

        sid = getattr(request, "sid", "")

        tab_id = request.args.get("tabId")
        # Use the module logger instead of print() so output follows the
        # application's logging configuration.
        logger.info(f"Received tabId: {tab_id}, socket ID: {sid}")

        if tab_id:
            join_room(tab_id)  # Join the room identified by the tabId
            tab_clients[tab_id] = sid  # Store the socket ID associated with this tabId
            logger.info(f"Joined room: {tab_id}")
        else:
            logger.info("No tabId provided, disconnecting client.")
            disconnect()

    @socketio.on("disconnect")
    def handle_disconnect():
        from flask import request

        tab_id = None
        # Find and remove the socket ID associated with this tabId
        sid = getattr(request, "sid", "")
        for key, value in tab_clients.items():
            if value == sid:
                tab_id = key
                break
        if tab_id:
            leave_room(tab_id)
            del tab_clients[tab_id]
            logger.info(f"Client disconnected from tabId: {tab_id}, Socket ID: {sid}")
|
@@ -0,0 +1,85 @@
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0
|
2
|
+
#
|
3
|
+
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
|
4
|
+
|
5
|
+
import paramiko
|
6
|
+
import os
|
7
|
+
from pathlib import Path
|
8
|
+
from paramiko.agent import Agent
|
9
|
+
from paramiko.ssh_exception import SSHException
|
10
|
+
|
11
|
+
from ttnn_visualizer.decorators import remote_exception_handler
|
12
|
+
from ttnn_visualizer.models import RemoteConnection
|
13
|
+
import logging
|
14
|
+
|
15
|
+
logger = logging.getLogger(__name__)
|
16
|
+
|
17
|
+
|
18
|
+
def initialize_ssh_client():
    """Create an SSHClient preloaded with system host keys and an auto-add policy."""
    client = paramiko.SSHClient()
    # Unknown hosts are accepted automatically rather than rejected.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.load_system_host_keys()
    return client
|
23
|
+
|
24
|
+
|
25
|
+
def get_connection_args(remote_connection: RemoteConnection) -> dict:
    """Build keyword arguments for SSHClient.connect authentication.

    With USE_SSH_AGENT (default "true") the running ssh-agent supplies keys;
    otherwise the host's IdentityFile is read from the SSH config file
    (SSH_CONFIG_PATH, default ~/.ssh/config).

    :param remote_connection: Connection whose host is looked up in SSH config.
    :return: kwargs suitable for ``SSHClient.connect``.
    :raises SSHException: If the agent holds no keys, the host is absent from
        the SSH config, or no IdentityFile is configured for it.
    """
    use_agent = os.getenv("USE_SSH_AGENT", "true").lower() == "true"
    ssh_config_path = Path(os.getenv("SSH_CONFIG_PATH", "~/.ssh/config")).expanduser()

    if use_agent:
        agent = Agent()
        keys = agent.get_keys()
        if not keys:
            logger.error("No keys found in agent")
            raise SSHException("No keys found")
        return {"look_for_keys": True}

    config = paramiko.SSHConfig.from_path(ssh_config_path).lookup(
        remote_connection.host
    )
    if not config:
        raise SSHException(f"Host not found in SSH config {remote_connection.host}")

    identity_files = config.get("identityfile")
    if not identity_files:
        # Previously a bare KeyError; surface a clear SSH error instead.
        raise SSHException(
            f"No IdentityFile configured for host {remote_connection.host}"
        )

    return {"key_filename": identity_files.pop(), "look_for_keys": False}  # type: ignore
|
44
|
+
|
45
|
+
|
46
|
+
@remote_exception_handler
def get_client(remote_connection: RemoteConnection) -> paramiko.SSHClient:
    """Open and return a connected SSH client for the given remote host."""
    client = initialize_ssh_client()
    auth_kwargs = get_connection_args(remote_connection)

    client.connect(
        remote_connection.host,
        port=remote_connection.port,
        username=remote_connection.username,
        **auth_kwargs,
    )
    return client
|
58
|
+
|
59
|
+
|
60
|
+
def check_permissions(client, directory):
    """Check if write and delete permissions are available in the remote directory.

    Creates and then removes a throwaway file via the remote shell.

    :param client: Connected paramiko SSHClient.
    :param directory: Remote directory to probe.
    :return: True when both operations succeed.
    :raises Exception: When either creating or deleting the test file fails.
    """
    import shlex  # local import; only needed here

    # Quote the path so directories containing spaces or shell
    # metacharacters don't break (or alter) the remote command.
    test_file = shlex.quote(f"{directory}/test_permission_file.txt")
    touch_command = f"touch {test_file}"
    remove_command = f"rm {test_file}"

    stdin, stdout, stderr = client.exec_command(touch_command)
    error = stderr.read().decode().strip()
    if error:
        raise Exception(f"No permission to create files in {directory}: {error}")

    stdin, stdout, stderr = client.exec_command(remove_command)
    error = stderr.read().decode().strip()
    if error:
        raise Exception(f"No permission to delete files in {directory}: {error}")

    return True
|
77
|
+
|
78
|
+
|
79
|
+
def check_gzip_exists(client):
    """Check if gzip and tar exist on the remote server."""
    # `which a && which b` prints output only when both binaries resolve.
    _, stdout, _ = client.exec_command("which gzip && which tar")
    return bool(stdout.read().decode().strip())
|
@@ -0,0 +1 @@
|
|
1
|
+
import{I as n}from"./index-DxLGmC6o.js";import{I as e}from"./index-CsS_OkTl.js";import{p as r,I as s}from"./index-BXlT2rEV.js";function I(o,t){var a=r(o);return t===s.STANDARD?n[a]:e[a]}function p(o){return r(o)}export{n as IconSvgPaths16,e as IconSvgPaths20,I as getIconPaths,p as iconNameToPathsRecordKey};
|
@@ -0,0 +1,2 @@
|
|
1
|
+
const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-CKt4gwo3.js","assets/index-DxLGmC6o.js","assets/index-CsS_OkTl.js","assets/index-BXlT2rEV.js","assets/index-DTKBo2Os.css"])))=>i.map(i=>d[i]);
|
2
|
+
import{_ as o,a as n,b as i}from"./index-BXlT2rEV.js";var _=function(e,a){return o(void 0,void 0,void 0,function(){var t;return n(this,function(r){switch(r.label){case 0:return[4,i(()=>import("./allPaths-CKt4gwo3.js"),__vite__mapDeps([0,1,2,3,4]))];case 1:return t=r.sent().getIconPaths,[2,t(e,a)]}})})};export{_ as allPathsLoader};
|