slurmray-6.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of slurmray might be problematic.

slurmray/detection.py ADDED
@@ -0,0 +1 @@
+
slurmray/file_sync.py ADDED
@@ -0,0 +1,276 @@
+ """
+ File synchronization manager for local packages.
+ Handles hash computation, comparison, and incremental upload.
+ """
+
+ import os
+ import json
+ import hashlib
+ import logging
+ from typing import Any, Dict, List
+ from pathlib import Path
+
+
+ class FileHashManager:
+     """Manages file hashes for synchronization."""
+
+     def __init__(self, project_root: str, logger: logging.Logger = None):
+         self.project_root = os.path.abspath(project_root)
+         self.logger = logger or logging.getLogger(__name__)
+         self.cache_dir = os.path.join(self.project_root, ".slogs")
+         self.local_hash_file = os.path.join(self.cache_dir, ".local_file_hashes.json")
+         self.remote_hash_file = os.path.join(
+             self.cache_dir, ".remote_file_hashes.json"
+         )
+
+         # Ensure cache directory exists
+         if not os.path.exists(self.cache_dir):
+             os.makedirs(self.cache_dir)
+
+     def compute_file_hash(self, file_path: str) -> str:
+         """Compute SHA256 hash of a file."""
+         sha256_hash = hashlib.sha256()
+         try:
+             with open(file_path, "rb") as f:
+                 # Read file in chunks to handle large files
+                 for byte_block in iter(lambda: f.read(4096), b""):
+                     sha256_hash.update(byte_block)
+             return sha256_hash.hexdigest()
+         except Exception as e:
+             if self.logger:
+                 self.logger.warning(f"Failed to compute hash for {file_path}: {e}")
+             return ""
+
+     def compute_hashes(self, file_paths: List[str]) -> Dict[str, Dict[str, Any]]:
+         """
+         Compute hashes for multiple files and directories.
+         For directories, recursively computes hashes for all files within.
+         Returns dict: {rel_path: {"hash": "...", "mtime": ..., "size": ...}}
+         """
+         hashes = {}
+         files_to_process = set()  # Use set to avoid duplicates
+
+         for file_path in file_paths:
+             # Convert to absolute path
+             if not os.path.isabs(file_path):
+                 abs_path = os.path.join(self.project_root, file_path)
+             else:
+                 abs_path = file_path
+
+             if not os.path.exists(abs_path):
+                 continue
+
+             # Get relative path
+             try:
+                 rel_path = os.path.relpath(abs_path, self.project_root)
+             except ValueError:
+                 continue
+
+             # Skip if outside project
+             if rel_path.startswith(".."):
+                 continue
+
+             # If it's a directory, recursively collect all files
+             if os.path.isdir(abs_path):
+                 for root, dirs, files in os.walk(abs_path):
+                     # Skip __pycache__ directories
+                     dirs[:] = [d for d in dirs if d != "__pycache__"]
+                     for file in files:
+                         file_abs_path = os.path.join(root, file)
+                         try:
+                             file_rel_path = os.path.relpath(file_abs_path, self.project_root)
+                             # Skip if outside project
+                             if not file_rel_path.startswith(".."):
+                                 files_to_process.add(file_abs_path)
+                         except ValueError:
+                             continue
+             else:
+                 # It's a file, add it directly
+                 files_to_process.add(abs_path)
+
+         # Compute hashes for all collected files
+         for abs_path in files_to_process:
+             try:
+                 rel_path = os.path.relpath(abs_path, self.project_root)
+                 # Skip if outside project (double check)
+                 if rel_path.startswith(".."):
+                     continue
+
+                 # Compute hash and metadata
+                 file_hash = self.compute_file_hash(abs_path)
+                 if file_hash:
+                     stat = os.stat(abs_path)
+                     hashes[rel_path] = {
+                         "hash": file_hash,
+                         "mtime": stat.st_mtime,
+                         "size": stat.st_size,
+                     }
+             except Exception as e:
+                 if self.logger:
+                     self.logger.debug(f"Skipping file {abs_path}: {e}")
+                 continue
+
+         return hashes
+
+     def load_local_hashes(self) -> Dict[str, Dict[str, Any]]:
+         """Load local file hashes from cache."""
+         if not os.path.exists(self.local_hash_file):
+             return {}
+         try:
+             with open(self.local_hash_file, "r") as f:
+                 return json.load(f)
+         except Exception as e:
+             if self.logger:
+                 self.logger.warning(f"Failed to load local hashes: {e}")
+             return {}
+
+     def save_local_hashes(self, hashes: Dict[str, Dict[str, Any]]):
+         """Save local file hashes to cache."""
+         try:
+             with open(self.local_hash_file, "w") as f:
+                 json.dump(hashes, f, indent=2)
+         except Exception as e:
+             if self.logger:
+                 self.logger.warning(f"Failed to save local hashes: {e}")
+
+     def load_remote_hashes(self) -> Dict[str, Dict[str, Any]]:
+         """Load remote file hashes from cache."""
+         if not os.path.exists(self.remote_hash_file):
+             return {}
+         try:
+             with open(self.remote_hash_file, "r") as f:
+                 return json.load(f)
+         except Exception as e:
+             if self.logger:
+                 self.logger.warning(f"Failed to load remote hashes: {e}")
+             return {}
+
+     def save_remote_hashes(self, hashes: Dict[str, Dict[str, Any]]):
+         """Save remote file hashes to cache."""
+         try:
+             with open(self.remote_hash_file, "w") as f:
+                 json.dump(hashes, f, indent=2)
+         except Exception as e:
+             if self.logger:
+                 self.logger.warning(f"Failed to save remote hashes: {e}")
+
+
+ class LocalFileSyncManager:
+     """Manages incremental synchronization of local files."""
+
+     def __init__(
+         self,
+         project_root: str,
+         hash_manager: FileHashManager,
+         logger: logging.Logger = None,
+     ):
+         self.project_root = os.path.abspath(project_root)
+         self.hash_manager = hash_manager
+         self.logger = logger or logging.getLogger(__name__)
+
+     def get_files_to_upload(
+         self, local_files: List[str], remote_hashes: Dict[str, Dict[str, Any]] = None
+     ) -> List[str]:
+         """
+         Compare local and remote hashes to determine which files need uploading.
+         Returns list of relative paths to files that need uploading.
+         """
+         if remote_hashes is None:
+             remote_hashes = self.hash_manager.load_remote_hashes()
+
+         # Compute current local hashes
+         local_hashes = self.hash_manager.compute_hashes(local_files)
+
+         # Compare hashes
+         files_to_upload = []
+         for rel_path, local_info in local_hashes.items():
+             remote_info = remote_hashes.get(rel_path)
+
+             # File needs upload if:
+             # 1. Not present remotely
+             # 2. Hash differs
+             if remote_info is None:
+                 files_to_upload.append(rel_path)
+                 if self.logger:
+                     self.logger.debug(f"New file detected: {rel_path}")
+             elif remote_info.get("hash") != local_info["hash"]:
+                 files_to_upload.append(rel_path)
+                 if self.logger:
+                     self.logger.debug(f"File modified: {rel_path} (hash changed)")
+
+         # Save updated local hashes
+         self.hash_manager.save_local_hashes(local_hashes)
+
+         return files_to_upload
+
+     def update_remote_hashes(
+         self,
+         uploaded_files: List[str],
+         remote_hashes: Dict[str, Dict[str, Any]] = None,
+     ):
+         """
+         Update remote hash cache after successful upload.
+         """
+         if remote_hashes is None:
+             remote_hashes = self.hash_manager.load_remote_hashes()
+
+         # Get current local hashes for uploaded files
+         local_hashes = self.hash_manager.compute_hashes(uploaded_files)
+
+         # Update remote hashes with local hashes
+         for rel_path in uploaded_files:
+             if rel_path in local_hashes:
+                 remote_hashes[rel_path] = local_hashes[rel_path]
+
+         # Save updated remote hashes
+         self.hash_manager.save_remote_hashes(remote_hashes)
+
+     def fetch_remote_hashes(self, ssh_client, remote_hash_file_path: str) -> Dict[str, Dict[str, Any]]:
+         """
+         Fetch remote file hashes from the server via SSH.
+         Returns dict of remote hashes or empty dict if file doesn't exist.
+         """
+         try:
+             stdin, stdout, stderr = ssh_client.exec_command(
+                 f"cat '{remote_hash_file_path}' 2>/dev/null || echo '{{}}'"
+             )
+             exit_status = stdout.channel.recv_exit_status()
+             if exit_status == 0:
+                 content = stdout.read().decode("utf-8").strip()
+                 if content:
+                     return json.loads(content)
+         except Exception as e:
+             if self.logger:
+                 self.logger.debug(f"Could not fetch remote hashes: {e}")
+         return {}
+
+     def save_remote_hashes_to_server(
+         self, ssh_client, remote_hash_file_path: str, hashes: Dict[str, Dict[str, Any]]
+     ):
+         """Save remote file hashes to the server via SSH."""
+         try:
+             # Create JSON content
+             content = json.dumps(hashes, indent=2)
+
+             # Write to temporary file first, then move (atomic operation)
+             temp_path = remote_hash_file_path + ".tmp"
+             stdin, stdout, stderr = ssh_client.exec_command(
+                 f"mkdir -p '{os.path.dirname(remote_hash_file_path)}'"
+             )
+             stdout.channel.recv_exit_status()
+
+             # Write content via a quoted heredoc (keeps the JSON verbatim)
+             stdin, stdout, stderr = ssh_client.exec_command(
+                 f"cat > '{temp_path}' << 'EOF'\n{content}\nEOF"
+             )
+             exit_status = stdout.channel.recv_exit_status()
+             if exit_status == 0:
+                 # Move temp file to final location
+                 stdin, stdout, stderr = ssh_client.exec_command(
+                     f"mv '{temp_path}' '{remote_hash_file_path}'"
+                 )
+                 stdout.channel.recv_exit_status()
+         except Exception as e:
+             if self.logger:
+                 self.logger.warning(f"Failed to save remote hashes to server: {e}")
+
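
For orientation, here is a minimal sketch of how the two classes added in this release might be wired together for one sync pass. It is an illustration, not code shipped in the package: the project root, file list, host, and remote cache path are hypothetical, while the class and method names come from the diff above. The SSH side assumes a paramiko SSHClient, which matches the exec_command / recv_exit_status usage in the module.

    import logging
    import paramiko  # assumed transport; any client with paramiko's exec_command interface should do

    from slurmray.file_sync import FileHashManager, LocalFileSyncManager

    logger = logging.getLogger("slurmray.sync")
    hash_manager = FileHashManager("./myproject", logger=logger)  # hypothetical project root
    sync_manager = LocalFileSyncManager("./myproject", hash_manager, logger=logger)

    # Connect to the cluster (placeholder host and user).
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect("cluster.example.org", username="me")

    # Ask the server what it already has, then diff local state against it.
    remote_cache = "/home/me/project/.slogs/.remote_file_hashes.json"  # hypothetical path
    remote_hashes = sync_manager.fetch_remote_hashes(client, remote_cache)
    to_upload = sync_manager.get_files_to_upload(
        ["src/", "requirements.txt"], remote_hashes=remote_hashes
    )
    print(f"{len(to_upload)} file(s) need uploading: {to_upload}")

    # ... transfer to_upload with your own mechanism (e.g. SFTP) ...

    # Record the new state locally, then publish it back to the server
    # so the next pass only sends fresh changes.
    sync_manager.update_remote_hashes(to_upload, remote_hashes=remote_hashes)
    sync_manager.save_remote_hashes_to_server(client, remote_cache, remote_hashes)
    client.close()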