ttnn-visualizer 0.41.0__py3-none-any.whl → 0.42.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,25 +6,29 @@ import json
 import logging
 import re
 import time
+import subprocess
 from pathlib import Path
 from stat import S_ISDIR
 from threading import Thread
 from typing import List, Optional

 from flask import current_app
-from paramiko.client import SSHClient
-from paramiko.sftp_client import SFTPClient

 from ttnn_visualizer.decorators import remote_exception_handler
 from ttnn_visualizer.enums import ConnectionTestStates
-from ttnn_visualizer.exceptions import NoProjectsException, RemoteConnectionException
+from ttnn_visualizer.exceptions import (
+    NoProjectsException,
+    RemoteConnectionException,
+    SSHException,
+    AuthenticationException,
+    NoValidConnectionsError
+)
 from ttnn_visualizer.models import RemoteConnection, RemoteReportFolder
 from ttnn_visualizer.sockets import (
     FileProgress,
     FileStatus,
     emit_file_status,
 )
-from ttnn_visualizer.ssh_client import get_client
 from ttnn_visualizer.utils import update_last_synced

 logger = logging.getLogger(__name__)
@@ -34,6 +38,45 @@ TEST_PROFILER_FILE = "profile_log_device.csv"
 REPORT_DATA_DIRECTORY = Path(__file__).parent.absolute().joinpath("data")


+def handle_ssh_subprocess_error(e: subprocess.CalledProcessError, remote_connection: RemoteConnection):
+    """
+    Convert subprocess SSH errors to appropriate SSH exceptions.
+
+    :param e: The subprocess.CalledProcessError
+    :param remote_connection: The RemoteConnection object for context
+    :raises: SSHException, AuthenticationException, or NoValidConnectionsError
+    """
+    stderr = e.stderr.lower() if e.stderr else ""
+
+    # Check for authentication failures
+    if any(auth_err in stderr for auth_err in [
+        "permission denied",
+        "authentication failed",
+        "publickey",
+        "password",
+        "host key verification failed"
+    ]):
+        raise AuthenticationException(f"SSH authentication failed: {e.stderr}")
+
+    # Check for connection failures
+    elif any(conn_err in stderr for conn_err in [
+        "connection refused",
+        "network is unreachable",
+        "no route to host",
+        "name or service not known",
+        "connection timed out"
+    ]):
+        raise NoValidConnectionsError(f"SSH connection failed: {e.stderr}")
+
+    # Check for general SSH protocol errors
+    elif "ssh:" in stderr or "protocol" in stderr:
+        raise SSHException(f"SSH protocol error: {e.stderr}")
+
+    # Default to generic SSH exception
+    else:
+        raise SSHException(f"SSH command failed: {e.stderr}")
+
+
 def start_background_task(task, *args):
     with current_app.app_context():
         if current_app.config["USE_WEBSOCKETS"]:
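For context, a minimal sketch (not part of the diff) of how the new error handler is meant to wrap a subprocess-based SSH call; the later hunks follow this same pattern. `run_remote_command` and `conn` are illustrative stand-ins, while `handle_ssh_subprocess_error` and the `username`/`host`/`port` attributes come from the code above:

import subprocess

def run_remote_command(conn, command: str) -> str:
    # Build the ssh invocation the same way the helpers in this module do.
    ssh_cmd = ["ssh", f"{conn.username}@{conn.host}"]
    if conn.port != 22:
        ssh_cmd.extend(["-p", str(conn.port)])
    ssh_cmd.append(command)
    try:
        result = subprocess.run(ssh_cmd, capture_output=True, text=True, check=True)
        return result.stdout
    except subprocess.CalledProcessError as e:
        if e.returncode == 255:  # exit status 255 means ssh itself failed
            handle_ssh_subprocess_error(e, conn)
        raise  # any other exit status came from the remote command itself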
@@ -57,31 +100,55 @@ def resolve_file_path(remote_connection, file_path: str) -> str:
     :return: The resolved file path.
     :raises FileNotFoundError: If no files match the pattern.
     """
-    ssh_client = get_client(remote_connection)
-
     if "*" in file_path:
-        command = f"ls -1 {file_path}"
-        stdin, stdout, stderr = ssh_client.exec_command(command)
-        files = stdout.read().decode().splitlines()
-        ssh_client.close()
+        # Build SSH command to list files matching the pattern
+        ssh_cmd = [
+            "ssh",
+            f"{remote_connection.username}@{remote_connection.host}",
+        ]

-        if not files:
-            raise FileNotFoundError(f"No files found matching pattern: {file_path}")
+        # Handle non-standard SSH port
+        if remote_connection.port != 22:
+            ssh_cmd.extend(["-p", str(remote_connection.port)])

-        # Return the first file found
-        return files[0]
+        # Add the ls command
+        ssh_cmd.append(f"ls -1 {file_path}")

-    return file_path
+        try:
+            result = subprocess.run(
+                ssh_cmd,
+                capture_output=True,
+                text=True,
+                check=True
+            )
+
+            files = result.stdout.strip().splitlines()

+            if not files or (len(files) == 1 and files[0] == ""):
+                raise FileNotFoundError(f"No files found matching pattern: {file_path}")

-def calculate_folder_size(client: SSHClient, folder_path: str) -> int:
-    """Calculate the total size of the folder before compression."""
-    stdin, stdout, stderr = client.exec_command(f"du -sb {folder_path}")
-    size_info = stdout.read().decode().strip().split("\t")[0]
-    return int(size_info)
+            # Return the first file found
+            return files[0]

+        except subprocess.CalledProcessError as e:
+            logger.error(f"SSH command failed: {e}")
+            logger.error(f"stderr: {e.stderr}")

-def get_cluster_desc_path(ssh_client) -> Optional[str]:
+            # Check if it's an SSH-specific error (authentication, connection, etc.)
+            if e.returncode == 255:  # SSH returns 255 for SSH protocol errors
+                handle_ssh_subprocess_error(e, remote_connection)
+            else:
+                # File not found or other command error
+                raise FileNotFoundError(f"No files found matching pattern: {file_path}")
+        except Exception as e:
+            logger.error(f"Error resolving file path: {e}")
+            raise FileNotFoundError(f"Error resolving file path: {file_path}")
+
+    return file_path
+
+
+
+def get_cluster_desc_path(remote_connection: RemoteConnection) -> Optional[str]:
     """
     List all folders matching '/tmp/umd_*' on the remote machine, filter for those containing
     'cluster_descriptor.yaml', and return the full path to the most recently modified YAML file.
@@ -94,37 +161,79 @@ def get_cluster_desc_path(ssh_client) -> Optional[str]:
     cluster_desc_file = "cluster_descriptor.yaml"

     try:
-        # Command to list all folders matching '/tmp/umd_*'
-        list_folders_command = "ls -1d /tmp/umd_* 2>/dev/null"
-        stdin, stdout, stderr = ssh_client.exec_command(list_folders_command)
+        # Build SSH command to list folders matching '/tmp/umd_*'
+        ssh_cmd = [
+            "ssh",
+            f"{remote_connection.username}@{remote_connection.host}",
+        ]
+
+        # Handle non-standard SSH port
+        if remote_connection.port != 22:
+            ssh_cmd.extend(["-p", str(remote_connection.port)])
+
+        # Add the ls command
+        ssh_cmd.append("ls -1d /tmp/umd_* 2>/dev/null")
+
+        # Execute SSH command to list folders
+        result = subprocess.run(
+            ssh_cmd,
+            capture_output=True,
+            text=True,
+            check=False  # Don't raise exception on non-zero exit (in case no folders found)
+        )

         # Get the list of folders
-        folder_paths = stdout.read().decode().splitlines()
+        folder_paths = result.stdout.strip().splitlines() if result.stdout.strip() else []

         if not folder_paths:
            logger.info("No folders found matching the pattern '/tmp/umd_*'")
            return None

         # Check each folder for 'cluster_descriptor.yaml' and track the most recent one
-        with ssh_client.open_sftp() as sftp:
-            for folder in folder_paths:
-                yaml_file_path = f"{folder}/{cluster_desc_file}"
-                try:
-                    # Check if 'cluster_descriptor.yaml' exists and get its modification time
-                    attributes = sftp.stat(yaml_file_path)
-                    mod_time = attributes.st_mtime  # Modification time
-
-                    # Update the latest file if this one is newer
-                    if mod_time > latest_mod_time:
-                        latest_mod_time = mod_time
-                        latest_yaml_path = yaml_file_path
-                        logger.info(
-                            f"Found newer {cluster_desc_file}': {yaml_file_path}"
-                        )
-
-                except FileNotFoundError:
+        for folder in folder_paths:
+            yaml_file_path = f"{folder}/{cluster_desc_file}"
+
+            # Build SSH command to check if file exists and get its modification time
+            stat_cmd = [
+                "ssh",
+                f"{remote_connection.username}@{remote_connection.host}",
+            ]
+
+            if remote_connection.port != 22:
+                stat_cmd.extend(["-p", str(remote_connection.port)])
+
+            # Use stat to get modification time (seconds since epoch)
+            stat_cmd.append(f"stat -c %Y '{yaml_file_path}' 2>/dev/null")
+
+            try:
+                stat_result = subprocess.run(
+                    stat_cmd,
+                    capture_output=True,
+                    text=True,
+                    check=True
+                )
+
+                mod_time = float(stat_result.stdout.strip())
+
+                # Update the latest file if this one is newer
+                if mod_time > latest_mod_time:
+                    latest_mod_time = mod_time
+                    latest_yaml_path = yaml_file_path
+                    logger.info(
+                        f"Found newer {cluster_desc_file}: {yaml_file_path}"
+                    )
+
+            except subprocess.CalledProcessError as e:
+                # Check if it's an SSH-specific error
+                if e.returncode == 255:  # SSH returns 255 for SSH protocol errors
+                    handle_ssh_subprocess_error(e, remote_connection)
+                else:
+                    # File not found or other command error
                     logger.debug(f"'{cluster_desc_file}' not found in: {folder}")
                     continue
+            except ValueError:
+                logger.debug(f"'{cluster_desc_file}' not found in: {folder}")
+                continue

         if latest_yaml_path:
             logger.info(
@@ -142,173 +251,377 @@ def get_cluster_desc_path(ssh_client) -> Optional[str]:
             message=f"Failed to get '{cluster_desc_file}' path",
             status=ConnectionTestStates.FAILED,
         )
-    finally:
-        ssh_client.close()


 @remote_exception_handler
 def get_cluster_desc(remote_connection: RemoteConnection):
-    client = get_client(remote_connection)
-    cluster_path = get_cluster_desc_path(client)
+    cluster_path = get_cluster_desc_path(remote_connection)
     if cluster_path:
         return read_remote_file(remote_connection, cluster_path)
     else:
         return None


-def walk_sftp_directory(sftp: SFTPClient, remote_path: str):
-    """SFTP implementation of os.walk."""
-    files, folders = [], []
-    for f in sftp.listdir_attr(remote_path):
-        if S_ISDIR(f.st_mode if f.st_mode else 0):
-            folders.append(f.filename)
-        else:
-            files.append(f.filename)
-    return files, folders
-
-
 def is_excluded(file_path, exclude_patterns):
-    """Check if the file matches any exclusion pattern."""
-    return any(re.search(pattern, file_path) for pattern in exclude_patterns)
+    """Check if a file path should be excluded based on patterns."""
+    for pattern in exclude_patterns:
+        if pattern in file_path:
+            return True
+    return False


 @remote_exception_handler
 def sync_files_and_directories(
-    client, remote_profiler_folder: str, destination_dir: Path, exclude_patterns=None, sid=None
+    remote_connection: RemoteConnection, remote_profiler_folder: str, destination_dir: Path, exclude_patterns=None, sid=None
 ):
-    """Download files and directories sequentially in one unified loop."""
-    exclude_patterns = (
-        exclude_patterns or []
-    )  # Default to an empty list if not provided
-
-    with client.open_sftp() as sftp:
-        # Ensure the destination directory exists
-        destination_dir.mkdir(parents=True, exist_ok=True)
-        finished_files = 0  # Initialize finished files counter
-
-        # Recursively handle files and folders in the current directory
-        def download_directory_contents(remote_dir, local_dir):
-            # Ensure the local directory exists
-            local_dir.mkdir(parents=True, exist_ok=True)
-
-            # Get files and folders in the remote directory
-            files, folders = walk_sftp_directory(sftp, remote_dir)
-            total_files = len(files)
-
-            # Function to download a file with progress reporting
-            def download_file(remote_file_path, local_file_path, index):
-                nonlocal finished_files
-                # Download file with progress callback
-                logger.info(f"Downloading {remote_file_path}")
-                download_file_with_progress(
-                    sftp,
-                    remote_file_path,
-                    local_file_path,
-                    sid,
-                    total_files,
-                    finished_files,
-                )
-                logger.info(f"Finished downloading {remote_file_path}")
-                finished_files += 1
+    """Download files and directories using SFTP with progress reporting."""
+    exclude_patterns = exclude_patterns or []

-            # Download all files in the current directory
-            for index, file in enumerate(files, start=1):
-                remote_file_path = f"{remote_dir}/{file}"
-                local_file_path = Path(local_dir, file)
+    # Ensure the destination directory exists
+    destination_dir.mkdir(parents=True, exist_ok=True)

-                # Skip files that match any exclusion pattern
-                if is_excluded(remote_file_path, exclude_patterns):
-                    logger.info(f"Skipping {remote_file_path} (excluded by pattern)")
-                    continue
+    logger.info(f"Starting SFTP sync from {remote_profiler_folder} to {destination_dir}")

-                download_file(remote_file_path, local_file_path, index)
+    # First, get list of all files and directories
+    logger.info("Getting remote file and directory lists...")
+    all_files = get_remote_file_list(remote_connection, remote_profiler_folder, exclude_patterns)
+    all_dirs = get_remote_directory_list(remote_connection, remote_profiler_folder, exclude_patterns)

-            # Recursively handle subdirectories
-            for folder in folders:
-                remote_subdir = f"{remote_dir}/{folder}"
-                local_subdir = local_dir / folder
-                if is_excluded(remote_subdir, exclude_patterns):
-                    logger.info(
-                        f"Skipping directory {remote_subdir} (excluded by pattern)"
-                    )
-                    continue
-                download_directory_contents(remote_subdir, local_subdir)
+    logger.info(f"Found {len(all_files)} files and {len(all_dirs)} directories to sync")

-        # Start downloading from the root folder
-        download_directory_contents(remote_profiler_folder, destination_dir)
+    # Create local directory structure
+    logger.info("Creating local directory structure...")
+    for remote_dir in all_dirs:
+        try:
+            # Calculate relative path from the base remote folder
+            relative_path = Path(remote_dir).relative_to(remote_profiler_folder)
+            local_dir = destination_dir / relative_path
+            local_dir.mkdir(parents=True, exist_ok=True)
+        except ValueError:
+            # Skip if remote_dir is not relative to remote_profiler_folder
+            continue

-        # Create a .last-synced file in directory
-        update_last_synced(destination_dir)
+    # Download files with progress reporting
+    total_files = len(all_files)
+    finished_files = 0

-        # Emit final status
-        final_progress = FileProgress(
-            current_file_name="",  # No specific file for the final status
-            number_of_files=0,
-            percent_of_current=100,
-            finished_files=finished_files,
-            status=FileStatus.FINISHED,
-        )
+    logger.info(f"Starting download of {total_files} files...")

-        if current_app.config["USE_WEBSOCKETS"]:
-            emit_file_status(final_progress, sid)
-        logger.info("All files downloaded. Final progress emitted.")
+    for remote_file in all_files:
+        try:
+            # Calculate relative path from the base remote folder
+            relative_path = Path(remote_file).relative_to(remote_profiler_folder)
+            local_file = destination_dir / relative_path

+            # Download the file using SFTP
+            download_single_file_sftp(remote_connection, remote_file, local_file)

-def download_file_with_progress(
-    sftp, remote_path, local_path, sid, total_files, finished_files
-):
-    """Download a file and emit progress using FileProgress."""
-    try:
+            finished_files += 1

-        def download_progress_callback(transferred, total):
-            percent_of_current = (transferred / total) * 100
+            # Emit progress
             progress = FileProgress(
-                current_file_name=remote_path,
+                current_file_name=str(relative_path),
                 number_of_files=total_files,
-                percent_of_current=percent_of_current,
+                percent_of_current=100,  # We don't get per-file progress with SFTP
                 finished_files=finished_files,
                 status=FileStatus.DOWNLOADING,
             )
-            emit_file_status(progress, sid)

-        # Perform the download
-        sftp.get(remote_path, str(local_path), callback=download_progress_callback)
+            if current_app.config["USE_WEBSOCKETS"]:
+                emit_file_status(progress, sid)
+
+            if finished_files % 10 == 0:  # Log every 10 files
+                logger.info(f"Downloaded {finished_files}/{total_files} files")
+
+        except ValueError:
+            # Skip if remote_file is not relative to remote_profiler_folder
+            logger.warning(f"Skipping file outside base folder: {remote_file}")
+            continue
+        except Exception as e:
+            logger.error(f"Failed to download {remote_file}: {e}")
+            # Continue with other files rather than failing completely
+            continue
+
+    # Create a .last-synced file in directory
+    update_last_synced(destination_dir)
+
+    # Emit final status
+    final_progress = FileProgress(
+        current_file_name="",
+        number_of_files=total_files,
+        percent_of_current=100,
+        finished_files=finished_files,
+        status=FileStatus.FINISHED,
+    )
+
+    if current_app.config["USE_WEBSOCKETS"]:
+        emit_file_status(final_progress, sid)
+
+    logger.info(f"SFTP sync completed. Downloaded {finished_files}/{total_files} files.")
+
+
+def get_remote_file_list(remote_connection: RemoteConnection, remote_folder: str, exclude_patterns=None) -> List[str]:
+    """Get a list of all files in the remote directory recursively, applying exclusion patterns."""
+    exclude_patterns = exclude_patterns or []
+
+    # Build SSH command to find all files recursively
+    ssh_cmd = ["ssh"]
+
+    # Handle non-standard SSH port
+    if remote_connection.port != 22:
+        ssh_cmd.extend(["-p", str(remote_connection.port)])
+
+    ssh_cmd.extend([
+        f"{remote_connection.username}@{remote_connection.host}",
+        f"find '{remote_folder}' -type f"
+    ])
+
+    try:
+        result = subprocess.run(
+            ssh_cmd,
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=60
+        )
+
+        all_files = result.stdout.strip().splitlines()
+
+        # Filter out excluded files
+        filtered_files = []
+        for file_path in all_files:
+            if not is_excluded(file_path, exclude_patterns):
+                filtered_files.append(file_path.strip())
+
+        return filtered_files
+
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
+            return []
+        else:
+            logger.error(f"Error getting file list: {e.stderr}")
+            return []
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout getting file list from: {remote_folder}")
+        return []
+    except Exception as e:
+        logger.error(f"Error getting file list: {e}")
+        return []
+
+
+def get_remote_directory_list(remote_connection: RemoteConnection, remote_folder: str, exclude_patterns=None) -> List[str]:
+    """Get a list of all directories in the remote directory recursively, applying exclusion patterns."""
+    exclude_patterns = exclude_patterns or []
+
+    # Build SSH command to find all directories recursively
+    ssh_cmd = ["ssh"]
+
+    # Handle non-standard SSH port
+    if remote_connection.port != 22:
+        ssh_cmd.extend(["-p", str(remote_connection.port)])
+
+    ssh_cmd.extend([
+        f"{remote_connection.username}@{remote_connection.host}",
+        f"find '{remote_folder}' -type d"
+    ])
+
+    try:
+        result = subprocess.run(
+            ssh_cmd,
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=60
+        )
+
+        all_dirs = result.stdout.strip().splitlines()
+
+        # Filter out excluded directories
+        filtered_dirs = []
+        for dir_path in all_dirs:
+            if not is_excluded(dir_path, exclude_patterns):
+                filtered_dirs.append(dir_path.strip())
+
+        return filtered_dirs

-    except OSError as e:
-        logger.error(f"Error downloading file {remote_path} to {local_path}: {str(e)}")
-        raise
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
+            return []
+        else:
+            logger.error(f"Error getting directory list: {e.stderr}")
+            return []
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout getting directory list from: {remote_folder}")
+        return []
+    except Exception as e:
+        logger.error(f"Error getting directory list: {e}")
+        return []
+
+
+def download_single_file_sftp(remote_connection: RemoteConnection, remote_file: str, local_file: Path):
+    """Download a single file using SFTP."""
+    # Ensure local directory exists
+    local_file.parent.mkdir(parents=True, exist_ok=True)
+
+    # Build SFTP command
+    sftp_cmd = ["sftp"]
+
+    # Handle non-standard SSH port
+    if remote_connection.port != 22:
+        sftp_cmd.extend(["-P", str(remote_connection.port)])
+
+    # Add batch mode and other options
+    sftp_cmd.extend([
+        "-b", "-",  # Read commands from stdin
+        f"{remote_connection.username}@{remote_connection.host}"
+    ])
+
+    # SFTP commands to execute
+    sftp_commands = f"get '{remote_file}' '{local_file}'\nquit\n"
+
+    try:
+        result = subprocess.run(
+            sftp_cmd,
+            input=sftp_commands,
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=300  # 5 minute timeout per file
+        )
+
+        logger.debug(f"Downloaded: {remote_file} -> {local_file}")
+
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
+        else:
+            logger.error(f"Error downloading file {remote_file}: {e.stderr}")
+            raise RuntimeError(f"Failed to download {remote_file}")
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout downloading file: {remote_file}")
+        raise RuntimeError(f"Timeout downloading {remote_file}")
+    except Exception as e:
+        logger.error(f"Error downloading file {remote_file}: {e}")
+        raise RuntimeError(f"Failed to download {remote_file}")


 def get_remote_profiler_folder_from_config_path(
-    sftp: SFTPClient, config_path: str
+    remote_connection: RemoteConnection, config_path: str
 ) -> RemoteReportFolder:
     """Read a remote config file and return RemoteFolder object."""
-    attributes = sftp.lstat(str(config_path))
-    with sftp.open(str(config_path), "rb") as config_file:
-        data = json.loads(config_file.read())
+    try:
+        # Build SSH command to get file modification time
+        stat_cmd = [
+            "ssh",
+            f"{remote_connection.username}@{remote_connection.host}",
+        ]
+
+        # Handle non-standard SSH port
+        if remote_connection.port != 22:
+            stat_cmd.extend(["-p", str(remote_connection.port)])
+
+        # Get modification time using stat command
+        stat_cmd.append(f"stat -c %Y '{config_path}' 2>/dev/null")
+
+        stat_result = subprocess.run(
+            stat_cmd,
+            capture_output=True,
+            text=True,
+            check=True
+        )
+
+        last_modified = int(float(stat_result.stdout.strip()))

+        # Build SSH command to read file content
+        cat_cmd = [
+            "ssh",
+            f"{remote_connection.username}@{remote_connection.host}",
+        ]
+
+        if remote_connection.port != 22:
+            cat_cmd.extend(["-p", str(remote_connection.port)])
+
+        # Read file content using cat command
+        cat_cmd.append(f"cat '{config_path}'")
+
+        cat_result = subprocess.run(
+            cat_cmd,
+            capture_output=True,
+            text=True,
+            check=True
+        )
+
+        # Parse JSON data
+        data = json.loads(cat_result.stdout)
         report_name = data.get("report_name")
         logger.info(f"********* report_name: {report_name}")

         return RemoteReportFolder(
             remotePath=str(Path(config_path).parent),
             reportName=report_name,
-        lastModified=(
-            int(attributes.st_mtime) if attributes.st_mtime else int(time.time())
-        ),
+            lastModified=last_modified,
+        )
+
+    except subprocess.CalledProcessError as e:
+        logger.error(f"SSH command failed while reading config: {e}")
+        logger.error(f"stderr: {e.stderr}")
+
+        # Check if it's an SSH-specific error (authentication, connection, etc.)
+        if e.returncode == 255:  # SSH returns 255 for SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
+            # This line never executes as handle_ssh_subprocess_error raises an exception
+            return RemoteReportFolder(
+                remotePath=str(Path(config_path).parent),
+                reportName="",
+                lastModified=int(time.time()),
+            )
+        else:
+            # Fall back to current time if we can't get modification time
+            return RemoteReportFolder(
+                remotePath=str(Path(config_path).parent),
+                reportName="",
+                lastModified=int(time.time()),
+            )
+    except (json.JSONDecodeError, ValueError) as e:
+        logger.error(f"Error parsing config file {config_path}: {e}")
+        # Fall back to current time and no report name
+        return RemoteReportFolder(
+            remotePath=str(Path(config_path).parent),
+            reportName="",
+            lastModified=int(time.time()),
         )


 def get_remote_performance_folder(
-    sftp: SFTPClient, profile_folder: str
+    remote_connection: RemoteConnection, profile_folder: str
 ) -> RemoteReportFolder:
-    """Read a remote config file and return RemoteFolder object."""
-    attributes = sftp.stat(str(profile_folder))
+    """Get remote performance folder info and return RemoteFolder object."""
     performance_name = profile_folder.split("/")[-1]
     remote_path = profile_folder
-    last_modified = (
-        int(attributes.st_mtime) if attributes.st_mtime else int(time.time())
-    )
+
+    # Get modification time using subprocess SSH command
+    try:
+        ssh_command = ["ssh"]
+        if remote_connection.port != 22:
+            ssh_command.extend(["-p", str(remote_connection.port)])
+        ssh_command.extend([f"{remote_connection.username}@{remote_connection.host}", f"stat -c %Y '{profile_folder}'"])
+
+        result = subprocess.run(ssh_command, capture_output=True, text=True, timeout=30)
+
+        if result.returncode == 0:
+            last_modified = int(result.stdout.strip())
+        else:
+            # If stat fails, handle SSH errors
+            if result.returncode == 255:
+                handle_ssh_subprocess_error(subprocess.CalledProcessError(result.returncode, ssh_command, result.stdout, result.stderr), remote_connection)
+            logger.warning(f"Could not get modification time for {profile_folder}, using current time")
+            last_modified = int(time.time())
+    except (subprocess.TimeoutExpired, subprocess.CalledProcessError, ValueError) as e:
+        logger.warning(f"Error getting modification time for {profile_folder}: {e}, using current time")
+        last_modified = int(time.time())
+
     return RemoteReportFolder(
         remotePath=str(remote_path),
         reportName=str(performance_name),
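The sync logic above hinges on Path.relative_to for mapping remote files onto the local destination. A small illustration (not part of the diff, with hypothetical paths) of how a remote file lands under destination_dir and why anything outside the base folder falls into the except ValueError branches:

from pathlib import Path

remote_profiler_folder = "/home/user/profiler/reports/run_a"      # hypothetical
destination_dir = Path("/opt/ttnn-visualizer/data/remote/run_a")  # hypothetical

remote_file = f"{remote_profiler_folder}/device/ops_perf.csv"
relative_path = Path(remote_file).relative_to(remote_profiler_folder)
local_file = destination_dir / relative_path
print(local_file)  # /opt/ttnn-visualizer/data/remote/run_a/device/ops_perf.csv

try:
    Path("/etc/hosts").relative_to(remote_profiler_folder)
except ValueError:
    pass  # paths outside the base folder raise ValueError and are skipped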
@@ -321,37 +634,55 @@ def read_remote_file(
     remote_connection,
     remote_path=None,
 ):
-    """Read a remote file."""
-    ssh_client = get_client(remote_connection)
-    with ssh_client.open_sftp() as sftp:
-        if remote_path:
-            path = Path(remote_path)
-        else:
-            path = Path(remote_connection.profilerPath)
+    """Read a remote file using SSH cat command."""
+    if remote_path:
+        path = Path(remote_path)
+    else:
+        path = Path(remote_connection.profilerPath)

-        logger.info(f"Opening remote file {path}")
-        directory_path = str(path.parent)
-        file_name = str(path.name)
+    logger.info(f"Reading remote file {path}")

-        try:
-            sftp.chdir(path=directory_path)
-            with sftp.open(filename=file_name) as file:
-                content = file.read()
-                return content
-        except FileNotFoundError:
-            logger.error(f"File not found: {path}")
+    # Build SSH command to read the file
+    ssh_cmd = ["ssh"]
+
+    # Handle non-standard SSH port
+    if remote_connection.port != 22:
+        ssh_cmd.extend(["-p", str(remote_connection.port)])
+
+    ssh_cmd.extend([
+        f"{remote_connection.username}@{remote_connection.host}",
+        f"cat '{path}'"
+    ])
+
+    try:
+        result = subprocess.run(
+            ssh_cmd,
+            capture_output=True,
+            check=True,
+            timeout=30
+        )
+        return result.stdout
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
             return None
-        except IOError as e:
-            logger.error(f"Error reading remote file {path}: {e}")
+        else:
+            # File not found or other command error
+            logger.error(f"File not found or cannot be read: {path}")
             return None
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout reading remote file: {path}")
+        return None
+    except Exception as e:
+        logger.error(f"Error reading remote file {path}: {e}")
+        return None


 @remote_exception_handler
 def check_remote_path_for_reports(remote_connection):
     """Check the remote path for config files."""
-    ssh_client = get_client(remote_connection)
     remote_config_paths = find_folders_by_files(
-        ssh_client, remote_connection.profilerPath, [TEST_CONFIG_FILE]
+        remote_connection, remote_connection.profilerPath, [TEST_CONFIG_FILE]
     )
     if not remote_config_paths:
         raise NoProjectsException(
@@ -362,42 +693,131 @@ def check_remote_path_for_reports(remote_connection):

 @remote_exception_handler
 def check_remote_path_exists(remote_connection: RemoteConnection, path_key: str):
-    client = get_client(remote_connection)
-    sftp = client.open_sftp()
-    # Attempt to list the directory to see if it exists
+    """Check if a remote path exists using SSH test command."""
+    path = getattr(remote_connection, path_key)
+
+    # Build SSH command to test if path exists
+    ssh_cmd = ["ssh"]
+
+    # Handle non-standard SSH port
+    if remote_connection.port != 22:
+        ssh_cmd.extend(["-p", str(remote_connection.port)])
+
+    ssh_cmd.extend([
+        f"{remote_connection.username}@{remote_connection.host}",
+        f"test -d '{path}'"
+    ])
+
     try:
-        sftp.stat(getattr(remote_connection, path_key))
-    except IOError as e:
-        # Directory does not exist or is inaccessible
-        if path_key == "performancePath":
-            message = "Performance directory does not exist or cannot be accessed"
+        result = subprocess.run(
+            ssh_cmd,
+            capture_output=True,
+            check=True,
+            timeout=10
+        )
+        # If command succeeds, directory exists
+        return True
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
         else:
-            message = "Profiler directory does not exist or cannot be accessed"
-
-        logger.error(message)
+            # Directory does not exist or is inaccessible
+            if path_key == "performancePath":
+                message = "Performance directory does not exist or cannot be accessed"
+            else:
+                message = "Profiler directory does not exist or cannot be accessed"
+
+            logger.error(message)
+            raise RemoteConnectionException(
+                message=message, status=ConnectionTestStates.FAILED
+            )
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout checking remote path: {path}")
         raise RemoteConnectionException(
-            message=message, status=ConnectionTestStates.FAILED
+            message=f"Timeout checking remote path: {path}",
+            status=ConnectionTestStates.FAILED
         )


 def find_folders_by_files(
-    ssh_client, root_folder: str, file_names: List[str]
+    remote_connection: RemoteConnection, root_folder: str, file_names: List[str]
 ) -> List[str]:
     """Given a remote path, return a list of top-level folders that contain any of the specified files."""
     matched_folders: List[str] = []
-    with ssh_client.open_sftp() as sftp:
-        all_files = sftp.listdir_attr(root_folder)
-        top_level_directories = filter(lambda e: S_ISDIR(e.st_mode), all_files)

-        for directory in top_level_directories:
-            dirname = Path(root_folder, directory.filename)
-            directory_files = sftp.listdir(str(dirname))
+    # Build SSH command to find directories in root_folder
+    ssh_cmd = ["ssh"]

-            # Check if any of the specified file names exist in the directory
-            if any(file_name in directory_files for file_name in file_names):
-                matched_folders.append(str(dirname))
+    # Handle non-standard SSH port
+    if remote_connection.port != 22:
+        ssh_cmd.extend(["-p", str(remote_connection.port)])

-    return matched_folders
+    ssh_cmd.extend([
+        f"{remote_connection.username}@{remote_connection.host}",
+        f"find '{root_folder}' -maxdepth 1 -type d -not -path '{root_folder}'"
+    ])
+
+    try:
+        result = subprocess.run(
+            ssh_cmd,
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=30
+        )
+
+        directories = result.stdout.strip().splitlines()
+
+        # For each directory, check if it contains any of the specified files
+        for directory in directories:
+            directory = directory.strip()
+            if not directory:
+                continue
+
+            # Build SSH command to check for files in this directory
+            file_checks = []
+            for file_name in file_names:
+                file_checks.append(f"test -f '{directory}/{file_name}'")
+
+            # Use OR logic to check if any of the files exist
+            check_cmd = ["ssh"]
+            if remote_connection.port != 22:
+                check_cmd.extend(["-p", str(remote_connection.port)])
+
+            check_cmd.extend([
+                f"{remote_connection.username}@{remote_connection.host}",
+                f"({' || '.join(file_checks)})"
+            ])
+
+            try:
+                check_result = subprocess.run(
+                    check_cmd,
+                    capture_output=True,
+                    check=True,
+                    timeout=10
+                )
+                # If command succeeds, at least one file exists
+                matched_folders.append(directory)
+            except subprocess.CalledProcessError:
+                # None of the files exist in this directory, skip it
+                continue
+
+        return matched_folders
+
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 255:  # SSH protocol errors
+            handle_ssh_subprocess_error(e, remote_connection)
+            # This line should never be reached as handle_ssh_subprocess_error raises an exception
+            return []
+        else:
+            logger.error(f"Error finding folders: {e.stderr}")
+            return []
+    except subprocess.TimeoutExpired:
+        logger.error(f"Timeout finding folders in: {root_folder}")
+        return []
+    except Exception as e:
+        logger.error(f"Error finding folders: {e}")
+        return []


 @remote_exception_handler
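A quick illustration (not part of the diff) of the remote shell expression find_folders_by_files assembles per directory; the file and directory names here are hypothetical:

file_names = ["config.json", "profile_log_device.csv"]
directory = "/home/user/profiler/reports/run_a"  # hypothetical

file_checks = [f"test -f '{directory}/{name}'" for name in file_names]
remote_expr = f"({' || '.join(file_checks)})"
print(remote_expr)
# (test -f '/home/user/profiler/reports/run_a/config.json' || test -f '/home/user/profiler/reports/run_a/profile_log_device.csv')
# ssh exits 0 if at least one test succeeds, so the directory is kept; a non-zero,
# non-255 exit simply means none of the files exist there.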
@@ -405,19 +825,22 @@ def get_remote_performance_folders(
     remote_connection: RemoteConnection,
 ) -> List[RemoteReportFolder]:
     """Return a list of remote folders containing a profile_log_device file."""
-    client = get_client(remote_connection)
+    if remote_connection.performancePath is None:
+        error = "Performance path is not configured for this connection"
+        logger.error(error)
+        raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
+
     performance_paths = find_folders_by_files(
-        client, remote_connection.performancePath, [TEST_PROFILER_FILE]
+        remote_connection, remote_connection.performancePath, [TEST_PROFILER_FILE]
     )
     if not performance_paths:
         error = f"No profiler paths found at {remote_connection.performancePath}"
         logger.info(error)
         raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
     remote_folder_data = []
-    with client.open_sftp() as sftp:
-        for path in performance_paths:
-            remote_folder_data.append(get_remote_performance_folder(sftp, path))
-    return remote_folder_data
+    for path in performance_paths:
+        remote_folder_data.append(get_remote_performance_folder(remote_connection, path))
+    return remote_folder_data


 @remote_exception_handler
@@ -425,21 +848,19 @@ def get_remote_profiler_folders(
     remote_connection: RemoteConnection,
 ) -> List[RemoteReportFolder]:
     """Return a list of remote folders containing a config.json file."""
-    client = get_client(remote_connection)
     remote_config_paths = find_folders_by_files(
-        client, remote_connection.profilerPath, [TEST_CONFIG_FILE]
+        remote_connection, remote_connection.profilerPath, [TEST_CONFIG_FILE]
     )
     if not remote_config_paths:
         error = f"No projects found at {remote_connection.profilerPath}"
         logger.info(error)
         raise NoProjectsException(status=ConnectionTestStates.FAILED, message=error)
     remote_folder_data = []
-    with client.open_sftp() as sftp:
-        for config_path in remote_config_paths:
-            remote_folder = get_remote_profiler_folder_from_config_path(
-                sftp, str(Path(config_path).joinpath(TEST_CONFIG_FILE))
-            )
-            remote_folder_data.append(remote_folder)
+    for config_path in remote_config_paths:
+        remote_folder = get_remote_profiler_folder_from_config_path(
+            remote_connection, str(Path(config_path).joinpath(TEST_CONFIG_FILE))
+        )
+        remote_folder_data.append(remote_folder)
     return remote_folder_data


@@ -452,7 +873,6 @@ def sync_remote_profiler_folders(
     sid=None,
 ):
     """Main function to sync test folders, handles both compressed and individual syncs."""
-    client = get_client(remote_connection)
     profiler_folder = Path(remote_folder_path).name
     destination_dir = Path(
         REPORT_DATA_DIRECTORY, path_prefix, remote_connection.host, current_app.config["PROFILER_DIRECTORY_NAME"], profiler_folder
@@ -460,7 +880,7 @@ def sync_remote_profiler_folders(
     destination_dir.mkdir(parents=True, exist_ok=True)

     sync_files_and_directories(
-        client, remote_folder_path, destination_dir, exclude_patterns, sid
+        remote_connection, remote_folder_path, destination_dir, exclude_patterns, sid
     )


@@ -472,7 +892,6 @@ def sync_remote_performance_folders(
     exclude_patterns: Optional[List[str]] = None,
     sid=None,
 ):
-    client = get_client(remote_connection)
     remote_folder_path = profile.remotePath
     profile_folder = Path(remote_folder_path).name
     destination_dir = Path(
@@ -483,7 +902,6 @@ def sync_remote_performance_folders(
         profile_folder,
     )
     destination_dir.mkdir(parents=True, exist_ok=True)
-
     sync_files_and_directories(
-        client, remote_folder_path, destination_dir, exclude_patterns, sid
+        remote_connection, remote_folder_path, destination_dir, exclude_patterns, sid
     )
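Taken together, the new helpers only read a handful of attributes from the RemoteConnection model: username, host, port, profilerPath and performancePath. A sketch (not part of the diff) with a hypothetical stand-in object shows the shape they expect; the real model lives in ttnn_visualizer.models and may carry additional fields:

from dataclasses import dataclass

@dataclass
class ConnectionStandIn:  # hypothetical stand-in for ttnn_visualizer.models.RemoteConnection
    username: str
    host: str
    port: int = 22
    profilerPath: str = ""
    performancePath: str = ""

conn = ConnectionStandIn(
    username="user",
    host="remote.example.com",
    profilerPath="/home/user/generated/ttnn/reports",        # hypothetical paths
    performancePath="/home/user/generated/profiler/reports",
)

# With a real RemoteConnection instance, the same attributes drive calls such as:
#   check_remote_path_exists(conn, "profilerPath")
#   get_remote_profiler_folders(conn)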