portacode 0.3.19.dev4__py3-none-any.whl → 1.4.11.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of portacode might be problematic. Click here for more details.
- portacode/_version.py +16 -3
- portacode/cli.py +143 -17
- portacode/connection/client.py +149 -10
- portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +824 -21
- portacode/connection/handlers/__init__.py +28 -1
- portacode/connection/handlers/base.py +78 -16
- portacode/connection/handlers/chunked_content.py +244 -0
- portacode/connection/handlers/diff_handlers.py +603 -0
- portacode/connection/handlers/file_handlers.py +902 -17
- portacode/connection/handlers/project_aware_file_handlers.py +226 -0
- portacode/connection/handlers/project_state/README.md +312 -0
- portacode/connection/handlers/project_state/__init__.py +92 -0
- portacode/connection/handlers/project_state/file_system_watcher.py +179 -0
- portacode/connection/handlers/project_state/git_manager.py +1502 -0
- portacode/connection/handlers/project_state/handlers.py +875 -0
- portacode/connection/handlers/project_state/manager.py +1331 -0
- portacode/connection/handlers/project_state/models.py +108 -0
- portacode/connection/handlers/project_state/utils.py +50 -0
- portacode/connection/handlers/project_state_handlers.py +45 -2185
- portacode/connection/handlers/proxmox_infra.py +361 -0
- portacode/connection/handlers/registry.py +15 -4
- portacode/connection/handlers/session.py +483 -32
- portacode/connection/handlers/system_handlers.py +147 -8
- portacode/connection/handlers/tab_factory.py +53 -46
- portacode/connection/handlers/terminal_handlers.py +21 -8
- portacode/connection/handlers/update_handler.py +61 -0
- portacode/connection/multiplex.py +60 -2
- portacode/connection/terminal.py +214 -24
- portacode/keypair.py +63 -1
- portacode/link_capture/__init__.py +38 -0
- portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
- portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
- portacode/link_capture/bin/elinks +3 -0
- portacode/link_capture/bin/gio-open +3 -0
- portacode/link_capture/bin/gnome-open +3 -0
- portacode/link_capture/bin/gvfs-open +3 -0
- portacode/link_capture/bin/kde-open +3 -0
- portacode/link_capture/bin/kfmclient +3 -0
- portacode/link_capture/bin/link_capture_exec.sh +11 -0
- portacode/link_capture/bin/link_capture_wrapper.py +75 -0
- portacode/link_capture/bin/links +3 -0
- portacode/link_capture/bin/links2 +3 -0
- portacode/link_capture/bin/lynx +3 -0
- portacode/link_capture/bin/mate-open +3 -0
- portacode/link_capture/bin/netsurf +3 -0
- portacode/link_capture/bin/sensible-browser +3 -0
- portacode/link_capture/bin/w3m +3 -0
- portacode/link_capture/bin/x-www-browser +3 -0
- portacode/link_capture/bin/xdg-open +3 -0
- portacode/logging_categories.py +140 -0
- portacode/pairing.py +103 -0
- portacode/static/js/test-ntp-clock.html +63 -0
- portacode/static/js/utils/ntp-clock.js +232 -0
- portacode/utils/NTP_ARCHITECTURE.md +136 -0
- portacode/utils/__init__.py +1 -0
- portacode/utils/diff_apply.py +456 -0
- portacode/utils/diff_renderer.py +371 -0
- portacode/utils/ntp_clock.py +65 -0
- portacode-1.4.11.dev1.dist-info/METADATA +298 -0
- portacode-1.4.11.dev1.dist-info/RECORD +97 -0
- {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info}/WHEEL +1 -1
- portacode-1.4.11.dev1.dist-info/top_level.txt +3 -0
- test_modules/README.md +296 -0
- test_modules/__init__.py +1 -0
- test_modules/test_device_online.py +44 -0
- test_modules/test_file_operations.py +743 -0
- test_modules/test_git_status_ui.py +370 -0
- test_modules/test_login_flow.py +50 -0
- test_modules/test_navigate_testing_folder.py +361 -0
- test_modules/test_play_store_screenshots.py +294 -0
- test_modules/test_terminal_buffer_performance.py +261 -0
- test_modules/test_terminal_interaction.py +80 -0
- test_modules/test_terminal_loading_race_condition.py +95 -0
- test_modules/test_terminal_start.py +56 -0
- testing_framework/.env.example +21 -0
- testing_framework/README.md +334 -0
- testing_framework/__init__.py +17 -0
- testing_framework/cli.py +326 -0
- testing_framework/core/__init__.py +1 -0
- testing_framework/core/base_test.py +336 -0
- testing_framework/core/cli_manager.py +177 -0
- testing_framework/core/hierarchical_runner.py +577 -0
- testing_framework/core/playwright_manager.py +520 -0
- testing_framework/core/runner.py +447 -0
- testing_framework/core/shared_cli_manager.py +234 -0
- testing_framework/core/test_discovery.py +112 -0
- testing_framework/requirements.txt +12 -0
- portacode-0.3.19.dev4.dist-info/METADATA +0 -241
- portacode-0.3.19.dev4.dist-info/RECORD +0 -30
- portacode-0.3.19.dev4.dist-info/top_level.txt +0 -1
- {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info}/entry_points.txt +0 -0
- {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info/licenses}/LICENSE +0 -0
|
@@ -1,2185 +1,45 @@
|
|
|
1
|
-
"""Project state handlers
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
from
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
PYGMENTS_AVAILABLE = False
|
|
47
|
-
highlight = None
|
|
48
|
-
get_lexer_for_filename = None
|
|
49
|
-
get_lexer_by_name = None
|
|
50
|
-
HtmlFormatter = None
|
|
51
|
-
ClassNotFound = Exception
|
|
52
|
-
|
|
53
|
-
# Cross-platform file system monitoring
|
|
54
|
-
try:
|
|
55
|
-
from watchdog.observers import Observer
|
|
56
|
-
from watchdog.events import FileSystemEventHandler
|
|
57
|
-
WATCHDOG_AVAILABLE = True
|
|
58
|
-
logger.info("Watchdog library available for file system monitoring")
|
|
59
|
-
except ImportError:
|
|
60
|
-
WATCHDOG_AVAILABLE = False
|
|
61
|
-
Observer = None
|
|
62
|
-
FileSystemEventHandler = None
|
|
63
|
-
logger.warning("Watchdog library not available - file system monitoring disabled")
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
@dataclass
|
|
67
|
-
class TabInfo:
|
|
68
|
-
"""Represents an editor tab with content and metadata."""
|
|
69
|
-
tab_id: str # Unique identifier for the tab
|
|
70
|
-
tab_type: str # 'file', 'diff', 'untitled', 'image', 'audio', 'video'
|
|
71
|
-
title: str # Display title for the tab
|
|
72
|
-
file_path: Optional[str] = None # Path for file-based tabs
|
|
73
|
-
content: Optional[str] = None # Text content or base64 for media
|
|
74
|
-
original_content: Optional[str] = None # For diff view
|
|
75
|
-
modified_content: Optional[str] = None # For diff view
|
|
76
|
-
is_dirty: bool = False # Has unsaved changes
|
|
77
|
-
mime_type: Optional[str] = None # For media files
|
|
78
|
-
encoding: Optional[str] = None # Content encoding (base64, utf-8, etc.)
|
|
79
|
-
metadata: Optional[Dict[str, Any]] = None # Additional metadata
|
|
80
|
-
|
|
81
|
-
@dataclass
|
|
82
|
-
class MonitoredFolder:
|
|
83
|
-
"""Represents a folder that is being monitored for changes."""
|
|
84
|
-
folder_path: str
|
|
85
|
-
is_expanded: bool = False
|
|
86
|
-
|
|
87
|
-
@dataclass
|
|
88
|
-
class FileItem:
|
|
89
|
-
"""Represents a file or directory item with metadata."""
|
|
90
|
-
name: str
|
|
91
|
-
path: str
|
|
92
|
-
is_directory: bool
|
|
93
|
-
parent_path: str
|
|
94
|
-
size: Optional[int] = None
|
|
95
|
-
modified_time: Optional[float] = None
|
|
96
|
-
is_git_tracked: Optional[bool] = None
|
|
97
|
-
git_status: Optional[str] = None
|
|
98
|
-
is_hidden: bool = False
|
|
99
|
-
is_ignored: bool = False
|
|
100
|
-
children: Optional[List['FileItem']] = None
|
|
101
|
-
is_expanded: bool = False
|
|
102
|
-
is_loaded: bool = False
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
@dataclass
|
|
106
|
-
class GitFileChange:
|
|
107
|
-
"""Represents a single file change in git."""
|
|
108
|
-
file_repo_path: str # Relative path from repository root
|
|
109
|
-
file_name: str # Just the filename (basename)
|
|
110
|
-
file_abs_path: str # Absolute path to the file
|
|
111
|
-
change_type: str # 'added', 'modified', 'deleted', 'untracked' - follows git's native types
|
|
112
|
-
content_hash: Optional[str] = None # SHA256 hash of current file content
|
|
113
|
-
is_staged: bool = False # Whether this change is staged
|
|
114
|
-
diff_details: Optional[Dict[str, Any]] = None # Per-character diff information using diff-match-patch
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
@dataclass
|
|
118
|
-
class GitDetailedStatus:
|
|
119
|
-
"""Represents detailed git status with file hashes."""
|
|
120
|
-
head_commit_hash: Optional[str] = None # Hash of HEAD commit
|
|
121
|
-
staged_changes: List[GitFileChange] = None # Changes in the staging area
|
|
122
|
-
unstaged_changes: List[GitFileChange] = None # Changes in working directory
|
|
123
|
-
untracked_files: List[GitFileChange] = None # Untracked files
|
|
124
|
-
|
|
125
|
-
def __post_init__(self):
|
|
126
|
-
if self.staged_changes is None:
|
|
127
|
-
self.staged_changes = []
|
|
128
|
-
if self.unstaged_changes is None:
|
|
129
|
-
self.unstaged_changes = []
|
|
130
|
-
if self.untracked_files is None:
|
|
131
|
-
self.untracked_files = []
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
@dataclass
|
|
135
|
-
class ProjectState:
|
|
136
|
-
"""Represents the complete state of a project."""
|
|
137
|
-
client_session_key: str # The composite key: client_session_id + "_" + hash(project_folder_path)
|
|
138
|
-
project_folder_path: str
|
|
139
|
-
items: List[FileItem]
|
|
140
|
-
monitored_folders: List[MonitoredFolder] = None
|
|
141
|
-
is_git_repo: bool = False
|
|
142
|
-
git_branch: Optional[str] = None
|
|
143
|
-
git_status_summary: Optional[Dict[str, int]] = None # Kept for backward compatibility
|
|
144
|
-
git_detailed_status: Optional[GitDetailedStatus] = None # New detailed git state
|
|
145
|
-
open_tabs: Dict[str, 'TabInfo'] = None # Changed from List to Dict with unique keys
|
|
146
|
-
active_tab: Optional['TabInfo'] = None
|
|
147
|
-
|
|
148
|
-
def __post_init__(self):
|
|
149
|
-
if self.open_tabs is None:
|
|
150
|
-
self.open_tabs = {}
|
|
151
|
-
if self.monitored_folders is None:
|
|
152
|
-
self.monitored_folders = []
|
|
153
|
-
|
|
154
|
-
@property
|
|
155
|
-
def client_session_id(self) -> str:
|
|
156
|
-
"""Extract the clean client session ID from the composite key."""
|
|
157
|
-
return self.client_session_key.split('_')[0]
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
class GitManager:
|
|
161
|
-
"""Manages Git operations for project state."""
|
|
162
|
-
|
|
163
|
-
def __init__(self, project_path: str):
|
|
164
|
-
self.project_path = project_path
|
|
165
|
-
self.repo: Optional[Repo] = None
|
|
166
|
-
self.is_git_repo = False
|
|
167
|
-
self._initialize_repo()
|
|
168
|
-
|
|
169
|
-
def _initialize_repo(self):
|
|
170
|
-
"""Initialize Git repository if available."""
|
|
171
|
-
if not GIT_AVAILABLE:
|
|
172
|
-
logger.warning("GitPython not available, Git features disabled")
|
|
173
|
-
return
|
|
174
|
-
|
|
175
|
-
try:
|
|
176
|
-
self.repo = Repo(self.project_path)
|
|
177
|
-
self.is_git_repo = True
|
|
178
|
-
logger.info("Initialized Git repo for project: %s", self.project_path)
|
|
179
|
-
except (InvalidGitRepositoryError, Exception) as e:
|
|
180
|
-
logger.debug("Not a Git repository or Git error: %s", e)
|
|
181
|
-
|
|
182
|
-
def get_branch_name(self) -> Optional[str]:
|
|
183
|
-
"""Get current Git branch name."""
|
|
184
|
-
if not self.is_git_repo or not self.repo:
|
|
185
|
-
return None
|
|
186
|
-
|
|
187
|
-
try:
|
|
188
|
-
return self.repo.active_branch.name
|
|
189
|
-
except Exception as e:
|
|
190
|
-
logger.debug("Could not get Git branch: %s", e)
|
|
191
|
-
return None
|
|
192
|
-
|
|
193
|
-
def get_file_status(self, file_path: str) -> Dict[str, Any]:
|
|
194
|
-
"""Get Git status for a specific file or directory."""
|
|
195
|
-
if not self.is_git_repo or not self.repo:
|
|
196
|
-
return {"is_tracked": False, "status": None, "is_ignored": False}
|
|
197
|
-
|
|
198
|
-
try:
|
|
199
|
-
rel_path = os.path.relpath(file_path, self.repo.working_dir)
|
|
200
|
-
|
|
201
|
-
# Check if ignored - GitPython handles path normalization internally
|
|
202
|
-
is_ignored = self.repo.ignored(rel_path)
|
|
203
|
-
if is_ignored:
|
|
204
|
-
return {"is_tracked": False, "status": "ignored", "is_ignored": True}
|
|
205
|
-
|
|
206
|
-
# For directories, only report status if they contain tracked or untracked files
|
|
207
|
-
if os.path.isdir(file_path):
|
|
208
|
-
# Check if directory contains any untracked files using path.startswith()
|
|
209
|
-
# This handles cross-platform path separators correctly
|
|
210
|
-
has_untracked = any(
|
|
211
|
-
os.path.commonpath([f, rel_path]) == rel_path and f != rel_path
|
|
212
|
-
for f in self.repo.untracked_files
|
|
213
|
-
)
|
|
214
|
-
if has_untracked:
|
|
215
|
-
return {"is_tracked": False, "status": "untracked", "is_ignored": False}
|
|
216
|
-
|
|
217
|
-
# Check if directory is dirty - GitPython handles path normalization
|
|
218
|
-
if self.repo.is_dirty(path=rel_path):
|
|
219
|
-
return {"is_tracked": True, "status": "modified", "is_ignored": False}
|
|
220
|
-
|
|
221
|
-
# Check if directory has tracked files - let GitPython handle paths
|
|
222
|
-
try:
|
|
223
|
-
tracked_files = self.repo.git.ls_files(rel_path)
|
|
224
|
-
is_tracked = bool(tracked_files.strip())
|
|
225
|
-
status = "clean" if is_tracked else None
|
|
226
|
-
return {"is_tracked": is_tracked, "status": status, "is_ignored": False}
|
|
227
|
-
except Exception:
|
|
228
|
-
return {"is_tracked": False, "status": None, "is_ignored": False}
|
|
229
|
-
|
|
230
|
-
# For files
|
|
231
|
-
else:
|
|
232
|
-
# Check if untracked - direct comparison works cross-platform
|
|
233
|
-
if rel_path in self.repo.untracked_files:
|
|
234
|
-
return {"is_tracked": False, "status": "untracked", "is_ignored": False}
|
|
235
|
-
|
|
236
|
-
# Check if tracked and dirty - GitPython handles path normalization
|
|
237
|
-
if self.repo.is_dirty(path=rel_path):
|
|
238
|
-
return {"is_tracked": True, "status": "modified", "is_ignored": False}
|
|
239
|
-
|
|
240
|
-
# Check if tracked and clean - GitPython handles paths
|
|
241
|
-
try:
|
|
242
|
-
self.repo.git.ls_files(rel_path, error_unmatch=True)
|
|
243
|
-
return {"is_tracked": True, "status": "clean", "is_ignored": False}
|
|
244
|
-
except Exception:
|
|
245
|
-
return {"is_tracked": False, "status": None, "is_ignored": False}
|
|
246
|
-
|
|
247
|
-
except Exception as e:
|
|
248
|
-
logger.debug("Error getting Git status for %s: %s", file_path, e)
|
|
249
|
-
return {"is_tracked": False, "status": None, "is_ignored": False}
|
|
250
|
-
|
|
251
|
-
def get_status_summary(self) -> Dict[str, int]:
|
|
252
|
-
"""Get summary of Git status."""
|
|
253
|
-
if not self.is_git_repo or not self.repo:
|
|
254
|
-
return {}
|
|
255
|
-
|
|
256
|
-
try:
|
|
257
|
-
status = self.repo.git.status(porcelain=True).strip()
|
|
258
|
-
if not status:
|
|
259
|
-
return {"clean": 0}
|
|
260
|
-
|
|
261
|
-
summary = {"modified": 0, "added": 0, "deleted": 0, "untracked": 0}
|
|
262
|
-
|
|
263
|
-
for line in status.split('\n'):
|
|
264
|
-
if len(line) >= 2:
|
|
265
|
-
index_status = line[0]
|
|
266
|
-
worktree_status = line[1]
|
|
267
|
-
|
|
268
|
-
if index_status == 'A' or worktree_status == 'A':
|
|
269
|
-
summary["added"] += 1
|
|
270
|
-
elif index_status == 'M' or worktree_status == 'M':
|
|
271
|
-
summary["modified"] += 1
|
|
272
|
-
elif index_status == 'D' or worktree_status == 'D':
|
|
273
|
-
summary["deleted"] += 1
|
|
274
|
-
elif index_status == '?' and worktree_status == '?':
|
|
275
|
-
summary["untracked"] += 1
|
|
276
|
-
|
|
277
|
-
return summary
|
|
278
|
-
|
|
279
|
-
except Exception as e:
|
|
280
|
-
logger.debug("Error getting Git status summary: %s", e)
|
|
281
|
-
return {}
|
|
282
|
-
|
|
283
|
-
def _compute_file_hash(self, file_path: str) -> Optional[str]:
|
|
284
|
-
"""Compute SHA256 hash of file content."""
|
|
285
|
-
try:
|
|
286
|
-
with open(file_path, 'rb') as f:
|
|
287
|
-
file_hash = hashlib.sha256()
|
|
288
|
-
chunk = f.read(8192)
|
|
289
|
-
while chunk:
|
|
290
|
-
file_hash.update(chunk)
|
|
291
|
-
chunk = f.read(8192)
|
|
292
|
-
return file_hash.hexdigest()
|
|
293
|
-
except (OSError, IOError) as e:
|
|
294
|
-
logger.debug("Error computing hash for %s: %s", file_path, e)
|
|
295
|
-
return None
|
|
296
|
-
|
|
297
|
-
def _compute_diff_details(self, original_content: str, modified_content: str) -> Optional[Dict[str, Any]]:
|
|
298
|
-
"""Compute per-character diff details using diff-match-patch."""
|
|
299
|
-
if not DIFF_MATCH_PATCH_AVAILABLE:
|
|
300
|
-
logger.debug("diff-match-patch not available, skipping diff details computation")
|
|
301
|
-
return None
|
|
302
|
-
|
|
303
|
-
try:
|
|
304
|
-
dmp = diff_match_patch()
|
|
305
|
-
|
|
306
|
-
# Compute the diff
|
|
307
|
-
diffs = dmp.diff_main(original_content, modified_content)
|
|
308
|
-
|
|
309
|
-
# Clean up the diff for efficiency
|
|
310
|
-
dmp.diff_cleanupSemantic(diffs)
|
|
311
|
-
|
|
312
|
-
# Convert the diff to a serializable format
|
|
313
|
-
diff_data = []
|
|
314
|
-
for operation, text in diffs:
|
|
315
|
-
diff_data.append({
|
|
316
|
-
"operation": operation, # -1 = delete, 0 = equal, 1 = insert
|
|
317
|
-
"text": text
|
|
318
|
-
})
|
|
319
|
-
|
|
320
|
-
# Also compute some useful statistics
|
|
321
|
-
char_additions = sum(len(text) for op, text in diffs if op == 1)
|
|
322
|
-
char_deletions = sum(len(text) for op, text in diffs if op == -1)
|
|
323
|
-
char_unchanged = sum(len(text) for op, text in diffs if op == 0)
|
|
324
|
-
|
|
325
|
-
return {
|
|
326
|
-
"diffs": diff_data,
|
|
327
|
-
"stats": {
|
|
328
|
-
"char_additions": char_additions,
|
|
329
|
-
"char_deletions": char_deletions,
|
|
330
|
-
"char_unchanged": char_unchanged,
|
|
331
|
-
"total_changes": char_additions + char_deletions
|
|
332
|
-
},
|
|
333
|
-
"algorithm": "diff-match-patch"
|
|
334
|
-
}
|
|
335
|
-
|
|
336
|
-
except Exception as e:
|
|
337
|
-
logger.error("Error computing diff details: %s", e)
|
|
338
|
-
return None
|
|
339
|
-
|
|
340
|
-
def _get_pygments_lexer(self, file_path: str) -> Optional[object]:
|
|
341
|
-
"""Get Pygments lexer for a file path."""
|
|
342
|
-
if not PYGMENTS_AVAILABLE:
|
|
343
|
-
return None
|
|
344
|
-
|
|
345
|
-
try:
|
|
346
|
-
# Try to get lexer by filename
|
|
347
|
-
return get_lexer_for_filename(file_path)
|
|
348
|
-
except ClassNotFound:
|
|
349
|
-
# Fallback to common extensions
|
|
350
|
-
extension = os.path.splitext(file_path)[1].lower()
|
|
351
|
-
lexer_map = {
|
|
352
|
-
'.py': 'python',
|
|
353
|
-
'.js': 'javascript',
|
|
354
|
-
'.jsx': 'jsx',
|
|
355
|
-
'.ts': 'typescript',
|
|
356
|
-
'.tsx': 'tsx',
|
|
357
|
-
'.html': 'html',
|
|
358
|
-
'.htm': 'html',
|
|
359
|
-
'.css': 'css',
|
|
360
|
-
'.scss': 'scss',
|
|
361
|
-
'.sass': 'sass',
|
|
362
|
-
'.json': 'json',
|
|
363
|
-
'.xml': 'xml',
|
|
364
|
-
'.yaml': 'yaml',
|
|
365
|
-
'.yml': 'yaml',
|
|
366
|
-
'.java': 'java',
|
|
367
|
-
'.c': 'c',
|
|
368
|
-
'.cpp': 'cpp',
|
|
369
|
-
'.cc': 'cpp',
|
|
370
|
-
'.cxx': 'cpp',
|
|
371
|
-
'.h': 'c',
|
|
372
|
-
'.hpp': 'cpp',
|
|
373
|
-
'.cs': 'csharp',
|
|
374
|
-
'.php': 'php',
|
|
375
|
-
'.rb': 'ruby',
|
|
376
|
-
'.go': 'go',
|
|
377
|
-
'.rs': 'rust',
|
|
378
|
-
'.sh': 'bash',
|
|
379
|
-
'.bash': 'bash',
|
|
380
|
-
'.zsh': 'zsh',
|
|
381
|
-
'.fish': 'fish',
|
|
382
|
-
'.sql': 'sql',
|
|
383
|
-
'.md': 'markdown',
|
|
384
|
-
'.rst': 'rst'
|
|
385
|
-
}
|
|
386
|
-
|
|
387
|
-
lexer_name = lexer_map.get(extension)
|
|
388
|
-
if lexer_name:
|
|
389
|
-
try:
|
|
390
|
-
return get_lexer_by_name(lexer_name)
|
|
391
|
-
except ClassNotFound:
|
|
392
|
-
pass
|
|
393
|
-
|
|
394
|
-
# Final fallback to text
|
|
395
|
-
try:
|
|
396
|
-
return get_lexer_by_name('text')
|
|
397
|
-
except ClassNotFound:
|
|
398
|
-
return None
|
|
399
|
-
except Exception as e:
|
|
400
|
-
logger.debug("Error getting Pygments lexer: %s", e)
|
|
401
|
-
return None
|
|
402
|
-
|
|
403
|
-
def _generate_html_diff(self, original_content: str, modified_content: str, file_path: str) -> Optional[str]:
|
|
404
|
-
"""Generate unified HTML diff with intra-line highlighting using GitPython and diff-match-patch."""
|
|
405
|
-
if not PYGMENTS_AVAILABLE:
|
|
406
|
-
logger.debug("Pygments not available for HTML diff generation")
|
|
407
|
-
return None
|
|
408
|
-
|
|
409
|
-
try:
|
|
410
|
-
import difflib
|
|
411
|
-
|
|
412
|
-
# Get line-based diff using Python's difflib (similar to git diff)
|
|
413
|
-
original_lines = original_content.splitlines(keepends=True)
|
|
414
|
-
modified_lines = modified_content.splitlines(keepends=True)
|
|
415
|
-
|
|
416
|
-
# Generate unified diff
|
|
417
|
-
diff_lines = list(difflib.unified_diff(
|
|
418
|
-
original_lines,
|
|
419
|
-
modified_lines,
|
|
420
|
-
fromfile='a/' + os.path.basename(file_path),
|
|
421
|
-
tofile='b/' + os.path.basename(file_path),
|
|
422
|
-
lineterm=''
|
|
423
|
-
))
|
|
424
|
-
|
|
425
|
-
# Parse the unified diff and add intra-line highlighting
|
|
426
|
-
parsed_diff = self._parse_unified_diff_with_intraline(diff_lines, original_lines, modified_lines)
|
|
427
|
-
|
|
428
|
-
# Get Pygments lexer for syntax highlighting
|
|
429
|
-
lexer = self._get_pygments_lexer(file_path)
|
|
430
|
-
|
|
431
|
-
# Build HTML
|
|
432
|
-
html_parts = []
|
|
433
|
-
html_parts.append('<div class="unified-diff-container">')
|
|
434
|
-
|
|
435
|
-
# Add stats header
|
|
436
|
-
line_additions = sum(1 for line in parsed_diff if line['type'] == 'add')
|
|
437
|
-
line_deletions = sum(1 for line in parsed_diff if line['type'] == 'delete')
|
|
438
|
-
|
|
439
|
-
html_parts.append(f'''
|
|
440
|
-
<div class="diff-stats">
|
|
441
|
-
<span class="additions">+{line_additions}</span>
|
|
442
|
-
<span class="deletions">-{line_deletions}</span>
|
|
443
|
-
<span class="file-path">{os.path.basename(file_path)}</span>
|
|
444
|
-
</div>
|
|
445
|
-
''')
|
|
446
|
-
|
|
447
|
-
# Generate unified diff view
|
|
448
|
-
html_parts.append('<div class="diff-content">')
|
|
449
|
-
html_parts.append('<table class="diff-table">')
|
|
450
|
-
|
|
451
|
-
for line_info in parsed_diff:
|
|
452
|
-
if line_info['type'] == 'header':
|
|
453
|
-
continue # Skip diff headers
|
|
454
|
-
|
|
455
|
-
line_type = line_info['type']
|
|
456
|
-
old_line_num = line_info.get('old_line_num', '')
|
|
457
|
-
new_line_num = line_info.get('new_line_num', '')
|
|
458
|
-
content = line_info['content']
|
|
459
|
-
intraline_html = line_info.get('intraline_html', '')
|
|
460
|
-
|
|
461
|
-
# Use intra-line highlighted content if available, otherwise apply syntax highlighting
|
|
462
|
-
if intraline_html:
|
|
463
|
-
final_content = intraline_html
|
|
464
|
-
elif lexer and content.strip():
|
|
465
|
-
try:
|
|
466
|
-
# Apply syntax highlighting to the content (without the +/- prefix)
|
|
467
|
-
clean_content = content[1:] if content and content[0] in '+-' else content
|
|
468
|
-
highlighted = highlight(clean_content, lexer, HtmlFormatter(nowrap=True, noclasses=False))
|
|
469
|
-
final_content = content[0] + highlighted if content and content[0] in '+-' else highlighted
|
|
470
|
-
except Exception:
|
|
471
|
-
final_content = self._escape_html(content)
|
|
472
|
-
else:
|
|
473
|
-
final_content = self._escape_html(content)
|
|
474
|
-
|
|
475
|
-
# CSS classes for different line types
|
|
476
|
-
row_class = f'diff-line diff-{line_type}'
|
|
477
|
-
|
|
478
|
-
html_parts.append(f'''
|
|
479
|
-
<tr class="{row_class}">
|
|
480
|
-
<td class="line-num old-line-num">{old_line_num}</td>
|
|
481
|
-
<td class="line-num new-line-num">{new_line_num}</td>
|
|
482
|
-
<td class="line-content">{final_content}</td>
|
|
483
|
-
</tr>
|
|
484
|
-
''')
|
|
485
|
-
|
|
486
|
-
html_parts.append('</table>')
|
|
487
|
-
html_parts.append('</div>')
|
|
488
|
-
html_parts.append('</div>')
|
|
489
|
-
|
|
490
|
-
return ''.join(html_parts)
|
|
491
|
-
|
|
492
|
-
except Exception as e:
|
|
493
|
-
logger.error("Error generating HTML diff: %s", e)
|
|
494
|
-
return None
|
|
495
|
-
|
|
496
|
-
def _parse_unified_diff_with_intraline(self, diff_lines, original_lines, modified_lines):
|
|
497
|
-
"""Parse unified diff and add intra-line character highlighting."""
|
|
498
|
-
parsed = []
|
|
499
|
-
old_line_num = 0
|
|
500
|
-
new_line_num = 0
|
|
501
|
-
|
|
502
|
-
pending_deletes = []
|
|
503
|
-
pending_adds = []
|
|
504
|
-
|
|
505
|
-
def flush_pending():
|
|
506
|
-
"""Process pending delete/add pairs for intra-line highlighting."""
|
|
507
|
-
if pending_deletes and pending_adds:
|
|
508
|
-
# Apply intra-line highlighting to delete/add pairs
|
|
509
|
-
for i, (del_line, add_line) in enumerate(zip(pending_deletes, pending_adds)):
|
|
510
|
-
del_content = del_line['content'][1:] # Remove '-' prefix
|
|
511
|
-
add_content = add_line['content'][1:] # Remove '+' prefix
|
|
512
|
-
|
|
513
|
-
del_highlighted, add_highlighted = self._generate_intraline_diff(del_content, add_content)
|
|
514
|
-
|
|
515
|
-
# Update the parsed lines with intra-line highlighting
|
|
516
|
-
del_line['intraline_html'] = '-' + del_highlighted
|
|
517
|
-
add_line['intraline_html'] = '+' + add_highlighted
|
|
518
|
-
|
|
519
|
-
parsed.append(del_line)
|
|
520
|
-
parsed.append(add_line)
|
|
521
|
-
|
|
522
|
-
# Handle remaining unmatched deletes/adds
|
|
523
|
-
for del_line in pending_deletes[len(pending_adds):]:
|
|
524
|
-
parsed.append(del_line)
|
|
525
|
-
for add_line in pending_adds[len(pending_deletes):]:
|
|
526
|
-
parsed.append(add_line)
|
|
527
|
-
else:
|
|
528
|
-
# No pairs to highlight, just add them as-is
|
|
529
|
-
parsed.extend(pending_deletes)
|
|
530
|
-
parsed.extend(pending_adds)
|
|
531
|
-
|
|
532
|
-
pending_deletes.clear()
|
|
533
|
-
pending_adds.clear()
|
|
534
|
-
|
|
535
|
-
for line in diff_lines:
|
|
536
|
-
if line.startswith('@@'):
|
|
537
|
-
# Flush any pending changes before hunk header
|
|
538
|
-
flush_pending()
|
|
539
|
-
|
|
540
|
-
# Parse hunk header to get line numbers
|
|
541
|
-
import re
|
|
542
|
-
match = re.match(r'@@ -(\d+)(?:,\d+)? \+(\d+)(?:,\d+)? @@', line)
|
|
543
|
-
if match:
|
|
544
|
-
old_line_num = int(match.group(1)) - 1
|
|
545
|
-
new_line_num = int(match.group(2)) - 1
|
|
546
|
-
|
|
547
|
-
parsed.append({
|
|
548
|
-
'type': 'header',
|
|
549
|
-
'content': line,
|
|
550
|
-
'old_line_num': '',
|
|
551
|
-
'new_line_num': ''
|
|
552
|
-
})
|
|
553
|
-
elif line.startswith('-'):
|
|
554
|
-
pending_deletes.append({
|
|
555
|
-
'type': 'delete',
|
|
556
|
-
'old_line_num': old_line_num + 1,
|
|
557
|
-
'new_line_num': '',
|
|
558
|
-
'content': line
|
|
559
|
-
})
|
|
560
|
-
old_line_num += 1
|
|
561
|
-
elif line.startswith('+'):
|
|
562
|
-
pending_adds.append({
|
|
563
|
-
'type': 'add',
|
|
564
|
-
'old_line_num': '',
|
|
565
|
-
'new_line_num': new_line_num + 1,
|
|
566
|
-
'content': line
|
|
567
|
-
})
|
|
568
|
-
new_line_num += 1
|
|
569
|
-
elif line.startswith(' '):
|
|
570
|
-
# Flush pending changes before context line
|
|
571
|
-
flush_pending()
|
|
572
|
-
|
|
573
|
-
old_line_num += 1
|
|
574
|
-
new_line_num += 1
|
|
575
|
-
parsed.append({
|
|
576
|
-
'type': 'context',
|
|
577
|
-
'old_line_num': old_line_num,
|
|
578
|
-
'new_line_num': new_line_num,
|
|
579
|
-
'content': line
|
|
580
|
-
})
|
|
581
|
-
elif line.startswith('---') or line.startswith('+++'):
|
|
582
|
-
parsed.append({
|
|
583
|
-
'type': 'header',
|
|
584
|
-
'content': line,
|
|
585
|
-
'old_line_num': '',
|
|
586
|
-
'new_line_num': ''
|
|
587
|
-
})
|
|
588
|
-
|
|
589
|
-
# Flush any remaining pending changes
|
|
590
|
-
flush_pending()
|
|
591
|
-
|
|
592
|
-
return parsed
|
|
593
|
-
|
|
594
|
-
def _generate_intraline_diff(self, old_text: str, new_text: str) -> Tuple[str, str]:
|
|
595
|
-
"""Generate intra-line character-level diff highlighting."""
|
|
596
|
-
if not DIFF_MATCH_PATCH_AVAILABLE:
|
|
597
|
-
return self._escape_html(old_text), self._escape_html(new_text)
|
|
598
|
-
|
|
599
|
-
try:
|
|
600
|
-
dmp = diff_match_patch()
|
|
601
|
-
diffs = dmp.diff_main(old_text, new_text)
|
|
602
|
-
dmp.diff_cleanupSemantic(diffs)
|
|
603
|
-
|
|
604
|
-
old_parts = []
|
|
605
|
-
new_parts = []
|
|
606
|
-
|
|
607
|
-
for op, text in diffs:
|
|
608
|
-
escaped_text = self._escape_html(text)
|
|
609
|
-
|
|
610
|
-
if op == 0: # EQUAL
|
|
611
|
-
old_parts.append(escaped_text)
|
|
612
|
-
new_parts.append(escaped_text)
|
|
613
|
-
elif op == -1: # DELETE
|
|
614
|
-
old_parts.append(f'<span class="intraline-delete">{escaped_text}</span>')
|
|
615
|
-
elif op == 1: # INSERT
|
|
616
|
-
new_parts.append(f'<span class="intraline-add">{escaped_text}</span>')
|
|
617
|
-
|
|
618
|
-
return ''.join(old_parts), ''.join(new_parts)
|
|
619
|
-
|
|
620
|
-
except Exception as e:
|
|
621
|
-
logger.debug("Error generating intra-line diff: %s", e)
|
|
622
|
-
return self._escape_html(old_text), self._escape_html(new_text)
|
|
623
|
-
|
|
624
|
-
def _escape_html(self, text: str) -> str:
|
|
625
|
-
"""Escape HTML special characters."""
|
|
626
|
-
return (text.replace('&', '&')
|
|
627
|
-
.replace('<', '<')
|
|
628
|
-
.replace('>', '>')
|
|
629
|
-
.replace('"', '"')
|
|
630
|
-
.replace("'", '''))
|
|
631
|
-
|
|
632
|
-
def get_head_commit_hash(self) -> Optional[str]:
|
|
633
|
-
"""Get the hash of the HEAD commit."""
|
|
634
|
-
if not self.is_git_repo or not self.repo:
|
|
635
|
-
return None
|
|
636
|
-
|
|
637
|
-
try:
|
|
638
|
-
return self.repo.head.commit.hexsha
|
|
639
|
-
except Exception as e:
|
|
640
|
-
logger.debug("Error getting HEAD commit hash: %s", e)
|
|
641
|
-
return None
|
|
642
|
-
|
|
643
|
-
def get_detailed_status(self) -> GitDetailedStatus:
    """Get detailed Git status with file hashes using GitPython APIs.

    Collects three change sets into a GitDetailedStatus: staged changes
    (index vs HEAD), unstaged changes (working tree vs index) and
    untracked files. Each entry carries the working file's content hash
    (when the file exists) and, where readable as UTF-8 text, a computed
    diff via _compute_diff_details(). Returns an empty GitDetailedStatus
    for non-repositories or when any unexpected error occurs.
    """
    if not self.is_git_repo or not self.repo:
        return GitDetailedStatus()

    try:
        detailed_status = GitDetailedStatus()
        detailed_status.head_commit_hash = self.get_head_commit_hash()

        # Get all changed files using GitPython's index diff
        # Get staged changes (index vs HEAD)
        staged_files = self.repo.index.diff("HEAD")
        for diff_item in staged_files:
            # a_path/b_path may be None on add/delete; take whichever is set.
            file_repo_path = diff_item.a_path or diff_item.b_path
            file_abs_path = os.path.join(self.project_path, file_repo_path)
            file_name = os.path.basename(file_repo_path)

            # Determine change type - stick to git's native types
            if diff_item.deleted_file:
                change_type = 'deleted'
                content_hash = None
                diff_details = None  # No diff for deleted files
            elif diff_item.new_file:
                change_type = 'added'
                content_hash = self._compute_file_hash(file_abs_path) if os.path.exists(file_abs_path) else None
                # For new files, compare empty content vs current staged content
                if content_hash:
                    staged_content = self.get_file_content_staged(file_abs_path) or ""
                    diff_details = self._compute_diff_details("", staged_content)
                else:
                    diff_details = None
            else:
                # For modified files (including renames that git detected)
                change_type = 'modified'
                content_hash = self._compute_file_hash(file_abs_path) if os.path.exists(file_abs_path) else None
                # Compare HEAD content vs staged content
                head_content = self.get_file_content_at_commit(file_abs_path) or ""
                staged_content = self.get_file_content_staged(file_abs_path) or ""
                diff_details = self._compute_diff_details(head_content, staged_content)

            change = GitFileChange(
                file_repo_path=file_repo_path,
                file_name=file_name,
                file_abs_path=file_abs_path,
                change_type=change_type,
                content_hash=content_hash,
                is_staged=True,
                diff_details=diff_details
            )
            logger.info("GIT STATUS DEBUG - Created staged change: %s", asdict(change))
            detailed_status.staged_changes.append(change)

        # Get unstaged changes (working tree vs index)
        unstaged_files = self.repo.index.diff(None)
        for diff_item in unstaged_files:
            file_repo_path = diff_item.a_path or diff_item.b_path
            file_abs_path = os.path.join(self.project_path, file_repo_path)
            file_name = os.path.basename(file_repo_path)

            # Determine change type - stick to git's native types
            if diff_item.deleted_file:
                change_type = 'deleted'
                content_hash = None
                diff_details = None  # No diff for deleted files
            elif diff_item.new_file:
                change_type = 'added'
                content_hash = self._compute_file_hash(file_abs_path) if os.path.exists(file_abs_path) else None
                # For new files, compare empty content vs current working content
                if content_hash and os.path.exists(file_abs_path):
                    try:
                        with open(file_abs_path, 'r', encoding='utf-8') as f:
                            working_content = f.read()
                        diff_details = self._compute_diff_details("", working_content)
                    except (OSError, UnicodeDecodeError):
                        # Unreadable or binary file: no text diff available.
                        diff_details = None
                else:
                    diff_details = None
            else:
                change_type = 'modified'
                content_hash = self._compute_file_hash(file_abs_path) if os.path.exists(file_abs_path) else None
                # Compare staged/index content vs working content
                staged_content = self.get_file_content_staged(file_abs_path) or ""
                if os.path.exists(file_abs_path):
                    try:
                        with open(file_abs_path, 'r', encoding='utf-8') as f:
                            working_content = f.read()
                        diff_details = self._compute_diff_details(staged_content, working_content)
                    except (OSError, UnicodeDecodeError):
                        diff_details = None
                else:
                    diff_details = None

            change = GitFileChange(
                file_repo_path=file_repo_path,
                file_name=file_name,
                file_abs_path=file_abs_path,
                change_type=change_type,
                content_hash=content_hash,
                is_staged=False,
                diff_details=diff_details
            )
            logger.info("GIT STATUS DEBUG - Created unstaged change: %s", asdict(change))
            detailed_status.unstaged_changes.append(change)

        # Get untracked files
        untracked_files = self.repo.untracked_files
        for file_repo_path in untracked_files:
            file_abs_path = os.path.join(self.project_path, file_repo_path)
            file_name = os.path.basename(file_repo_path)
            content_hash = self._compute_file_hash(file_abs_path) if os.path.exists(file_abs_path) else None

            # For untracked files, compare empty content vs current file content
            diff_details = None
            if content_hash and os.path.exists(file_abs_path):
                try:
                    with open(file_abs_path, 'r', encoding='utf-8') as f:
                        working_content = f.read()
                    diff_details = self._compute_diff_details("", working_content)
                except (OSError, UnicodeDecodeError):
                    diff_details = None

            change = GitFileChange(
                file_repo_path=file_repo_path,
                file_name=file_name,
                file_abs_path=file_abs_path,
                change_type='untracked',
                content_hash=content_hash,
                is_staged=False,
                diff_details=diff_details
            )
            logger.info("GIT STATUS DEBUG - Created untracked change: %s", asdict(change))
            detailed_status.untracked_files.append(change)

        return detailed_status

    except Exception as e:
        logger.error("Error getting detailed Git status: %s", e)
        return GitDetailedStatus()
|
|
781
|
-
|
|
782
|
-
def _get_change_type(self, status_char: str) -> str:
|
|
783
|
-
"""Convert git status character to change type."""
|
|
784
|
-
status_map = {
|
|
785
|
-
'A': 'added',
|
|
786
|
-
'M': 'modified',
|
|
787
|
-
'D': 'deleted',
|
|
788
|
-
'R': 'renamed',
|
|
789
|
-
'C': 'copied',
|
|
790
|
-
'U': 'unmerged',
|
|
791
|
-
'?': 'untracked'
|
|
792
|
-
}
|
|
793
|
-
return status_map.get(status_char, 'unknown')
|
|
794
|
-
|
|
795
|
-
def get_file_content_at_commit(self, file_path: str, commit_hash: Optional[str] = None) -> Optional[str]:
    """Return the content of *file_path* at *commit_hash* (HEAD by default).

    None is returned for non-repositories, for paths that do not exist at
    the requested commit, and on any unexpected git failure.
    """
    if not self.is_git_repo or not self.repo:
        return None

    try:
        if commit_hash is None:
            commit_hash = 'HEAD'

        # `git show <rev>:<path>` needs a path relative to the repo root.
        rel_path = os.path.relpath(file_path, self.repo.working_dir)

        try:
            return self.repo.git.show(f"{commit_hash}:{rel_path}")
        except Exception as e:
            logger.debug("File %s not found at commit %s: %s", rel_path, commit_hash, e)
            return None

    except Exception as e:
        logger.error("Error getting file content at commit %s for %s: %s", commit_hash, file_path, e)
        return None
|
|
818
|
-
|
|
819
|
-
def get_file_content_staged(self, file_path: str) -> Optional[str]:
    """Return the content of *file_path* as recorded in the git index.

    None is returned for non-repositories, for paths absent from the
    staging area, and on any unexpected git failure.
    """
    if not self.is_git_repo or not self.repo:
        return None

    try:
        # `git show :<path>` reads the staged (index) version; path must
        # be relative to the repository root.
        rel_path = os.path.relpath(file_path, self.repo.working_dir)

        try:
            return self.repo.git.show(f":{rel_path}")
        except Exception as e:
            logger.debug("File %s not found in staging area: %s", rel_path, e)
            return None

    except Exception as e:
        logger.error("Error getting staged content for %s: %s", file_path, e)
        return None
|
|
839
|
-
|
|
840
|
-
|
|
841
|
-
class FileSystemWatcher:
    """Watches file system changes for project folders.

    Wraps a watchdog Observer. Watchdog delivers events on its own thread,
    so the constructor captures the running asyncio event loop and event
    handlers forward work into it via run_coroutine_threadsafe().
    """

    def __init__(self, project_manager: 'ProjectStateManager'):
        self.project_manager = project_manager
        # Created lazily in _initialize_watcher(); stays None without watchdog.
        self.observer: Optional[Observer] = None
        self.event_handler: Optional[FileSystemEventHandler] = None
        self.watched_paths: Set[str] = set()
        # Store reference to the event loop for thread-safe async task creation
        try:
            self.event_loop = asyncio.get_running_loop()
            logger.info("Captured event loop reference for file system watcher")
        except RuntimeError:
            # Constructed outside an event loop: events cannot be forwarded.
            self.event_loop = None
            logger.warning("No running event loop found - file system events may not work correctly")

        if WATCHDOG_AVAILABLE:
            self._initialize_watcher()

    def _initialize_watcher(self):
        """Initialize file system watcher.

        Builds the event handler class and the (not yet started) Observer.
        The observer is started on first start_watching() call.
        """
        if not WATCHDOG_AVAILABLE:
            logger.warning("Watchdog not available, file monitoring disabled")
            return

        class ProjectEventHandler(FileSystemEventHandler):
            # Runs on the watchdog thread; must not touch asyncio state
            # directly, only via run_coroutine_threadsafe.
            def __init__(self, manager, watcher):
                self.manager = manager
                self.watcher = watcher
                super().__init__()

            def on_any_event(self, event):
                # Skip debug files to avoid feedback loops
                if event.src_path.endswith('project_state_debug.json'):
                    return

                # Only process events that represent actual content changes
                # Skip opened/closed events that don't indicate file modifications
                if event.event_type in ('opened', 'closed'):
                    return

                # Handle .git folder events separately for git status monitoring
                path_parts = Path(event.src_path).parts
                if '.git' in path_parts:
                    # Get the relative path within .git directory
                    try:
                        git_index = path_parts.index('.git')
                        git_relative_path = '/'.join(path_parts[git_index + 1:])
                        git_file = Path(event.src_path).name

                        # Monitor git files that indicate repository state changes
                        should_monitor_git_file = (
                            git_file == 'index' or  # Staging area changes
                            git_file == 'HEAD' or  # Branch switches
                            git_relative_path.startswith('refs/heads/') or  # Branch updates
                            git_relative_path.startswith('refs/remotes/') or  # Remote tracking branches
                            git_relative_path.startswith('logs/refs/heads/') or  # Branch history
                            git_relative_path.startswith('logs/HEAD')  # HEAD history
                        )

                        if should_monitor_git_file:
                            logger.info("Git status change detected: %s - %s", event.event_type, event.src_path)
                        else:
                            return  # Skip other .git files
                    except (ValueError, IndexError):
                        return  # Skip if can't parse .git path
                else:
                    logger.info("File system event: %s - %s", event.event_type, event.src_path)

                # Schedule async task in the main event loop from this watchdog thread
                if self.watcher.event_loop and not self.watcher.event_loop.is_closed():
                    try:
                        future = asyncio.run_coroutine_threadsafe(
                            self.manager._handle_file_change(event),
                            self.watcher.event_loop
                        )
                        logger.debug("Successfully scheduled file change handler for: %s", event.src_path)
                    except Exception as e:
                        logger.error("Failed to schedule file change handler: %s", e)
                else:
                    logger.warning("No event loop available to handle file change: %s", event.src_path)

        self.event_handler = ProjectEventHandler(self.project_manager, self)
        self.observer = Observer()

    def start_watching(self, path: str):
        """Start watching a specific path (non-recursive).

        Starts the shared observer thread on the first watched path.
        """
        if not WATCHDOG_AVAILABLE or not self.observer:
            logger.warning("Watchdog not available, cannot start watching: %s", path)
            return

        if path not in self.watched_paths:
            try:
                # Use recursive=False to watch only direct contents of each folder
                self.observer.schedule(self.event_handler, path, recursive=False)
                self.watched_paths.add(path)
                logger.info("Started watching path (non-recursive): %s", path)

                if not self.observer.is_alive():
                    self.observer.start()
                    logger.info("Started file system observer")
            except Exception as e:
                logger.error("Error starting file watcher for %s: %s", path, e)
        else:
            logger.debug("Path already being watched: %s", path)

    def start_watching_git_directory(self, git_path: str):
        """Start watching a .git directory for git status changes.

        Unlike start_watching(), this is recursive so changes under
        refs/, logs/ etc. are observed.
        """
        if not WATCHDOG_AVAILABLE or not self.observer:
            logger.warning("Watchdog not available, cannot start watching git directory: %s", git_path)
            return

        if git_path not in self.watched_paths:
            try:
                # Watch .git directory recursively to catch changes in refs/, logs/, etc.
                self.observer.schedule(self.event_handler, git_path, recursive=True)
                self.watched_paths.add(git_path)
                logger.info("Started watching git directory (recursive): %s", git_path)

                if not self.observer.is_alive():
                    self.observer.start()
                    logger.info("Started file system observer")
            except Exception as e:
                logger.error("Error starting git directory watcher for %s: %s", git_path, e)
        else:
            logger.debug("Git directory already being watched: %s", git_path)

    def stop_watching(self, path: str):
        """Stop watching a specific path (bookkeeping only — see note)."""
        if not WATCHDOG_AVAILABLE or not self.observer:
            return

        if path in self.watched_paths:
            # Note: watchdog doesn't have direct path removal, would need to recreate observer
            self.watched_paths.discard(path)
            logger.debug("Stopped watching path: %s", path)

    def stop_all(self):
        """Stop all file watching and shut down the observer thread."""
        if self.observer and self.observer.is_alive():
            self.observer.stop()
            self.observer.join()
        self.watched_paths.clear()
|
|
984
|
-
|
|
985
|
-
|
|
986
|
-
class ProjectStateManager:
|
|
987
|
-
"""Manages project state for client sessions."""
|
|
988
|
-
|
|
989
|
-
def __init__(self, control_channel, context: Dict[str, Any]):
    # Channel used to communicate project-state updates with clients.
    self.control_channel = control_channel
    self.context = context
    # One ProjectState per client-session/folder pair; keys are built as
    # f"{client_session}_{hash(project_folder_path)}" (see
    # initialize_project_state).
    self.projects: Dict[str, ProjectState] = {}
    self.git_managers: Dict[str, GitManager] = {}
    # Single watcher shared by all projects; calls back into this manager.
    self.file_watcher = FileSystemWatcher(self)
    # Optional JSON state dump for debugging (see set_debug_mode).
    self.debug_mode = False
    self.debug_file_path: Optional[str] = None

    # Debouncing for file changes
    self._change_debounce_timer: Optional[asyncio.Task] = None
    self._pending_changes: Set[str] = set()
|
|
1001
|
-
|
|
1002
|
-
def set_debug_mode(self, enabled: bool, debug_file_path: Optional[str] = None):
    """Turn the JSON debug dump on or off.

    While enabled, _write_debug_state() serializes all tracked project
    state to *debug_file_path*.
    """
    self.debug_mode = enabled
    self.debug_file_path = debug_file_path
    if not enabled:
        return
    logger.info("Project state debug mode enabled, output to: %s", debug_file_path)
|
|
1008
|
-
|
|
1009
|
-
def _write_debug_state(self):
    """Write current state to debug JSON file.

    No-op unless set_debug_mode() enabled the dump and supplied a path.
    Serializes every tracked project (git status, tabs, monitored
    folders, items) and writes it as indented JSON; any failure is
    logged and swallowed so debugging never breaks normal operation.
    """
    logger.debug("_write_debug_state called: debug_mode=%s, debug_file_path=%s", self.debug_mode, self.debug_file_path)
    if not self.debug_mode or not self.debug_file_path:
        logger.debug("Debug mode not enabled or no debug file path, skipping debug write")
        return

    try:
        debug_data = {}
        for project_id, state in self.projects.items():
            debug_data[project_id] = {
                "project_folder_path": state.project_folder_path,
                "is_git_repo": state.is_git_repo,
                "git_branch": state.git_branch,
                "git_status_summary": state.git_status_summary,
                "git_detailed_status": asdict(state.git_detailed_status) if state.git_detailed_status else None,
                "open_tabs": [self._serialize_tab_info(tab) for tab in state.open_tabs.values()],
                "active_tab": self._serialize_tab_info(state.active_tab) if state.active_tab else None,
                "monitored_folders": [asdict(mf) for mf in state.monitored_folders],
                "items": [self._serialize_file_item(item) for item in state.items]
            }

        # default=str stringifies anything json can't encode natively.
        with open(self.debug_file_path, 'w', encoding='utf-8') as f:
            json.dump(debug_data, f, indent=2, default=str)

        logger.debug("Debug state written successfully to: %s", self.debug_file_path)
        logger.debug("Debug data summary: %d projects", len(debug_data))
        for project_id, data in debug_data.items():
            logger.debug("Project %s: %d monitored_folders, %d items",
                         project_id, len(data.get('monitored_folders', [])), len(data.get('items', [])))

    except Exception as e:
        logger.error("Error writing debug state: %s", e)
|
|
1042
|
-
|
|
1043
|
-
def _serialize_file_item(self, item: FileItem) -> Dict[str, Any]:
|
|
1044
|
-
"""Serialize FileItem for JSON output."""
|
|
1045
|
-
result = asdict(item)
|
|
1046
|
-
if item.children:
|
|
1047
|
-
result["children"] = [self._serialize_file_item(child) for child in item.children]
|
|
1048
|
-
return result
|
|
1049
|
-
|
|
1050
|
-
def _serialize_tab_info(self, tab: TabInfo) -> Dict[str, Any]:
    """Serialize TabInfo for JSON output.

    Plain dataclasses.asdict() conversion — unlike _serialize_file_item,
    no custom recursive handling is needed here.
    """
    return asdict(tab)
|
|
1053
|
-
|
|
1054
|
-
async def initialize_project_state(self, client_session: str, project_folder_path: str) -> ProjectState:
    """Initialize project state for a client session.

    Idempotent per (session, folder) pair: an existing state is returned
    unchanged. Otherwise creates a GitManager, snapshots git status,
    seeds monitored folders, and syncs items/watchdog before caching the
    new state.
    """
    # NOTE(review): hash() of a str is randomized per process
    # (PYTHONHASHSEED), so these keys are not stable across restarts —
    # confirm nothing persists them.
    client_session_key = f"{client_session}_{hash(project_folder_path)}"

    if client_session_key in self.projects:
        return self.projects[client_session_key]

    logger.info("Initializing project state for client session: %s, folder: %s", client_session, project_folder_path)

    # Initialize Git manager
    git_manager = GitManager(project_folder_path)
    self.git_managers[client_session_key] = git_manager

    # Create project state
    project_state = ProjectState(
        client_session_key=client_session_key,
        project_folder_path=project_folder_path,
        items=[],
        is_git_repo=git_manager.is_git_repo,
        git_branch=git_manager.get_branch_name(),
        git_status_summary=git_manager.get_status_summary(),
        git_detailed_status=git_manager.get_detailed_status()
    )

    # Initialize monitored folders with project root and its immediate subdirectories
    await self._initialize_monitored_folders(project_state)

    # Sync all dependent state (items, watchdog)
    await self._sync_all_state_with_monitored_folders(project_state)

    self.projects[client_session_key] = project_state
    self._write_debug_state()

    return project_state
|
|
1088
|
-
|
|
1089
|
-
async def _initialize_monitored_folders(self, project_state: ProjectState):
    """Seed monitored_folders with the project root (expanded) plus each
    of its immediate subdirectories (collapsed).

    Only '.git' is excluded; other dot-folders are monitored like any
    normal directory. Scan errors are logged and leave whatever was
    already appended in place.
    """
    root_path = project_state.project_folder_path
    folders = project_state.monitored_folders

    # The root itself starts out expanded.
    folders.append(MonitoredFolder(folder_path=root_path, is_expanded=True))

    try:
        with os.scandir(root_path) as listing:
            for child in listing:
                if not child.is_dir() or child.name == '.git':
                    continue
                folders.append(MonitoredFolder(folder_path=child.path, is_expanded=False))
    except (OSError, PermissionError) as e:
        logger.error("Error scanning project root for subdirectories: %s", e)
|
|
1106
|
-
|
|
1107
|
-
async def _start_watching_monitored_folders(self, project_state: ProjectState):
|
|
1108
|
-
"""Start watching all monitored folders."""
|
|
1109
|
-
for monitored_folder in project_state.monitored_folders:
|
|
1110
|
-
self.file_watcher.start_watching(monitored_folder.folder_path)
|
|
1111
|
-
|
|
1112
|
-
async def _sync_watchdog_with_monitored_folders(self, project_state: ProjectState):
    """Bring the watchdog observer in line with monitored_folders.

    Each monitored folder is watched individually (non-recursively), so
    watch coverage mirrors the monitored_folders structure exactly. For
    git repositories the .git directory is watched as well so repository
    state changes are noticed.
    """
    for folder in project_state.monitored_folders:
        self.file_watcher.start_watching(folder.folder_path)

    # Git repos: additionally observe .git so commits/stages surface
    # without any working-tree edits.
    if project_state.is_git_repo:
        git_dir = os.path.join(project_state.project_folder_path, '.git')
        if os.path.exists(git_dir):
            self.file_watcher.start_watching_git_directory(git_dir)
            logger.debug("Started monitoring .git directory for git status changes: %s", git_dir)

    logger.debug("Watchdog synchronized: watching %d monitored folders individually", len(project_state.monitored_folders))
|
|
1126
|
-
|
|
1127
|
-
async def _sync_all_state_with_monitored_folders(self, project_state: ProjectState):
    """Synchronize all dependent state (watchdog, items) with monitored_folders changes.

    Call this after any mutation of project_state.monitored_folders:
    it re-registers watch paths, rebuilds the flattened items list, and
    refreshes the debug dump, in that order.
    """
    logger.debug("_sync_all_state_with_monitored_folders called")
    logger.debug("Current monitored_folders count: %d", len(project_state.monitored_folders))

    # Sync watchdog monitoring
    logger.debug("Syncing watchdog monitoring")
    await self._sync_watchdog_with_monitored_folders(project_state)

    # Rebuild items structure from all monitored folders
    logger.debug("Rebuilding items structure")
    await self._build_flattened_items_structure(project_state)
    logger.debug("Items count after rebuild: %d", len(project_state.items))

    # Update debug state
    logger.debug("Writing debug state")
    self._write_debug_state()
    logger.debug("_sync_all_state_with_monitored_folders completed")
|
|
1145
|
-
|
|
1146
|
-
async def _add_subdirectories_to_monitored(self, project_state: ProjectState, parent_folder_path: str):
    """Add every subdirectory of *parent_folder_path* to monitored_folders
    unless it is already present.

    New folders are added collapsed; '.git' is skipped. The caller is
    responsible for running the state sync afterwards.
    """
    logger.info("_add_subdirectories_to_monitored called for: %s", parent_folder_path)
    try:
        already_monitored = {mf.folder_path for mf in project_state.monitored_folders}
        logger.info("Existing monitored paths: %s", already_monitored)
        added_new = False

        with os.scandir(parent_folder_path) as listing:
            for child in listing:
                # Only exclude .git; other dot folders are fair game.
                if not child.is_dir() or child.name == '.git':
                    continue
                logger.info("Found subdirectory: %s", child.path)
                if child.path in already_monitored:
                    logger.info("Subdirectory already monitored: %s", child.path)
                    continue
                logger.info("Adding new monitored folder: %s", child.path)
                project_state.monitored_folders.append(
                    MonitoredFolder(folder_path=child.path, is_expanded=False)
                )
                added_new = True

        logger.info("Added any new folders: %s", added_new)
        # Note: sync will be handled by the caller, no need to sync here

    except (OSError, PermissionError) as e:
        logger.error("Error scanning folder %s for subdirectories: %s", parent_folder_path, e)
|
|
1171
|
-
|
|
1172
|
-
def _find_monitored_folder(self, project_state: ProjectState, folder_path: str) -> Optional[MonitoredFolder]:
|
|
1173
|
-
"""Find a monitored folder by path."""
|
|
1174
|
-
for monitored_folder in project_state.monitored_folders:
|
|
1175
|
-
if monitored_folder.folder_path == folder_path:
|
|
1176
|
-
return monitored_folder
|
|
1177
|
-
return None
|
|
1178
|
-
|
|
1179
|
-
async def _load_directory_items(self, project_state: ProjectState, directory_path: str, is_root: bool = False, parent_item: Optional[FileItem] = None):
    """Load directory items with Git metadata.

    Scans one level of *directory_path* (skipping '.git'), building a
    FileItem per entry annotated with git tracking/status info when a
    GitManager exists for this project. Results are stored on
    project_state.items when is_root, otherwise on parent_item.children;
    with neither, the scan result is discarded.
    """
    git_manager = self.git_managers.get(project_state.client_session_key)

    try:
        items = []

        # Use os.scandir for better performance
        with os.scandir(directory_path) as entries:
            for entry in entries:
                try:
                    # Skip .git folders and their contents
                    if entry.name == '.git' and entry.is_dir():
                        continue

                    stat_info = entry.stat()
                    is_hidden = entry.name.startswith('.')

                    # Get Git status if available
                    git_info = {"is_tracked": False, "status": None, "is_ignored": False}
                    if git_manager:
                        git_info = git_manager.get_file_status(entry.path)

                    # Check if this directory is expanded and loaded
                    is_expanded = False
                    is_loaded = True  # Files are always loaded; for directories, will be set based on monitored_folders

                    file_item = FileItem(
                        name=entry.name,
                        path=entry.path,
                        is_directory=entry.is_dir(),
                        parent_path=directory_path,
                        size=stat_info.st_size if entry.is_file() else None,
                        modified_time=stat_info.st_mtime,
                        is_git_tracked=git_info["is_tracked"],
                        git_status=git_info["status"],
                        is_hidden=is_hidden,
                        is_ignored=git_info["is_ignored"],
                        is_expanded=is_expanded,
                        is_loaded=is_loaded
                    )

                    items.append(file_item)

                except (OSError, PermissionError) as e:
                    # Entry vanished mid-scan or is unreadable: skip it
                    # rather than failing the whole directory load.
                    logger.debug("Error reading entry %s: %s", entry.path, e)
                    continue

        # Sort items: directories first, then files, both alphabetically
        items.sort(key=lambda x: (not x.is_directory, x.name.lower()))

        if is_root:
            project_state.items = items
        elif parent_item:
            parent_item.children = items
            # Don't set is_loaded here - it's set in _build_flattened_items_structure based on monitored_folders

    except (OSError, PermissionError) as e:
        logger.error("Error loading directory %s: %s", directory_path, e)
|
|
1238
|
-
|
|
1239
|
-
async def _build_flattened_items_structure(self, project_state: ProjectState):
|
|
1240
|
-
"""Build a flattened items structure including ALL items from ALL monitored folders."""
|
|
1241
|
-
all_items = []
|
|
1242
|
-
|
|
1243
|
-
# Create sets for quick lookup
|
|
1244
|
-
expanded_paths = {mf.folder_path for mf in project_state.monitored_folders if mf.is_expanded}
|
|
1245
|
-
monitored_paths = {mf.folder_path for mf in project_state.monitored_folders}
|
|
1246
|
-
|
|
1247
|
-
# Load items from ALL monitored folders
|
|
1248
|
-
for monitored_folder in project_state.monitored_folders:
|
|
1249
|
-
# Load direct children of this monitored folder
|
|
1250
|
-
children = await self._load_directory_items_list(monitored_folder.folder_path, monitored_folder.folder_path)
|
|
1251
|
-
|
|
1252
|
-
# Set correct expansion and loading states for each child
|
|
1253
|
-
for child in children:
|
|
1254
|
-
if child.is_directory:
|
|
1255
|
-
# Set is_expanded based on expanded_paths
|
|
1256
|
-
child.is_expanded = child.path in expanded_paths
|
|
1257
|
-
# Set is_loaded based on monitored_paths (content loaded = in monitored folders)
|
|
1258
|
-
child.is_loaded = child.path in monitored_paths
|
|
1259
|
-
else:
|
|
1260
|
-
# Files are always loaded
|
|
1261
|
-
child.is_loaded = True
|
|
1262
|
-
all_items.append(child)
|
|
1263
|
-
|
|
1264
|
-
# Remove duplicates (items might be loaded multiple times due to nested monitoring)
|
|
1265
|
-
# Use a dict to deduplicate by path while preserving the last loaded state
|
|
1266
|
-
items_dict = {}
|
|
1267
|
-
for item in all_items:
|
|
1268
|
-
items_dict[item.path] = item
|
|
1269
|
-
|
|
1270
|
-
# Convert back to list and sort for consistent ordering
|
|
1271
|
-
project_state.items = list(items_dict.values())
|
|
1272
|
-
project_state.items.sort(key=lambda x: (x.parent_path, not x.is_directory, x.name.lower()))
|
|
1273
|
-
|
|
1274
|
-
async def _load_directory_items_list(self, directory_path: str, parent_path: str) -> List[FileItem]:
    """Load directory items and return as a list with parent_path.

    Like _load_directory_items but returns the list instead of attaching
    it to state; every returned FileItem records *parent_path* as its
    parent. is_loaded is set True provisionally and corrected later in
    _build_flattened_items_structure.
    """
    # Pick the GitManager whose project contains this directory.
    # NOTE(review): plain startswith() prefix matching can also match a
    # sibling path (e.g. '/proj' vs '/proj2') — confirm this cannot
    # happen with the paths used here.
    git_manager = None
    for manager in self.git_managers.values():
        if directory_path.startswith(manager.project_path):
            git_manager = manager
            break

    items = []

    try:
        with os.scandir(directory_path) as entries:
            for entry in entries:
                try:
                    # Skip .git folders and their contents
                    if entry.name == '.git' and entry.is_dir():
                        continue

                    stat_info = entry.stat()
                    is_hidden = entry.name.startswith('.')

                    # Get Git status if available
                    git_info = {"is_tracked": False, "status": None, "is_ignored": False}
                    if git_manager:
                        git_info = git_manager.get_file_status(entry.path)

                    file_item = FileItem(
                        name=entry.name,
                        path=entry.path,
                        is_directory=entry.is_dir(),
                        parent_path=parent_path,
                        size=stat_info.st_size if entry.is_file() else None,
                        modified_time=stat_info.st_mtime,
                        is_git_tracked=git_info["is_tracked"],
                        git_status=git_info["status"],
                        is_hidden=is_hidden,
                        is_ignored=git_info["is_ignored"],
                        is_expanded=False,
                        is_loaded=True  # Will be set correctly in _build_flattened_items_structure
                    )

                    items.append(file_item)

                except (OSError, PermissionError) as e:
                    # Unreadable or vanished entry: skip it, keep scanning.
                    logger.debug("Error reading entry %s: %s", entry.path, e)
                    continue

        # Sort items: directories first, then files, both alphabetically
        items.sort(key=lambda x: (not x.is_directory, x.name.lower()))

    except (OSError, PermissionError) as e:
        logger.error("Error loading directory %s: %s", directory_path, e)

    return items
|
|
1328
|
-
|
|
1329
|
-
async def expand_folder(self, client_session_key: str, folder_path: str) -> bool:
|
|
1330
|
-
"""Expand a folder and load its contents."""
|
|
1331
|
-
logger.info("expand_folder called: client_session_key=%s, folder_path=%s", client_session_key, folder_path)
|
|
1332
|
-
|
|
1333
|
-
if client_session_key not in self.projects:
|
|
1334
|
-
logger.error("Project state not found for key: %s", client_session_key)
|
|
1335
|
-
return False
|
|
1336
|
-
|
|
1337
|
-
project_state = self.projects[client_session_key]
|
|
1338
|
-
logger.info("Found project state. Current monitored_folders count: %d", len(project_state.monitored_folders))
|
|
1339
|
-
|
|
1340
|
-
# Debug: log all monitored folders
|
|
1341
|
-
for i, mf in enumerate(project_state.monitored_folders):
|
|
1342
|
-
logger.info("Monitored folder %d: path=%s, is_expanded=%s", i, mf.folder_path, mf.is_expanded)
|
|
1343
|
-
|
|
1344
|
-
# Update the monitored folder to expanded state
|
|
1345
|
-
monitored_folder = self._find_monitored_folder(project_state, folder_path)
|
|
1346
|
-
if not monitored_folder:
|
|
1347
|
-
logger.error("Monitored folder not found for path: %s", folder_path)
|
|
1348
|
-
return False
|
|
1349
|
-
|
|
1350
|
-
logger.info("Found monitored folder: %s, current is_expanded: %s", monitored_folder.folder_path, monitored_folder.is_expanded)
|
|
1351
|
-
monitored_folder.is_expanded = True
|
|
1352
|
-
logger.info("Set monitored folder to expanded: %s", monitored_folder.is_expanded)
|
|
1353
|
-
|
|
1354
|
-
# Add all subdirectories of the expanded folder to monitored folders
|
|
1355
|
-
logger.info("Adding subdirectories to monitored for: %s", folder_path)
|
|
1356
|
-
await self._add_subdirectories_to_monitored(project_state, folder_path)
|
|
1357
|
-
|
|
1358
|
-
# Sync all dependent state (this will update items and watchdog)
|
|
1359
|
-
logger.info("Syncing all state with monitored folders")
|
|
1360
|
-
await self._sync_all_state_with_monitored_folders(project_state)
|
|
1361
|
-
|
|
1362
|
-
logger.info("expand_folder completed successfully")
|
|
1363
|
-
return True
|
|
1364
|
-
|
|
1365
|
-
async def collapse_folder(self, client_session_key: str, folder_path: str) -> bool:
|
|
1366
|
-
"""Collapse a folder."""
|
|
1367
|
-
if client_session_key not in self.projects:
|
|
1368
|
-
return False
|
|
1369
|
-
|
|
1370
|
-
project_state = self.projects[client_session_key]
|
|
1371
|
-
|
|
1372
|
-
# Update the monitored folder to collapsed state
|
|
1373
|
-
monitored_folder = self._find_monitored_folder(project_state, folder_path)
|
|
1374
|
-
if not monitored_folder:
|
|
1375
|
-
return False
|
|
1376
|
-
|
|
1377
|
-
monitored_folder.is_expanded = False
|
|
1378
|
-
|
|
1379
|
-
# Note: We keep monitoring collapsed folders for file changes
|
|
1380
|
-
# but don't stop watching them as we want to detect new files/folders
|
|
1381
|
-
|
|
1382
|
-
# Sync all dependent state (this will update items with correct expansion state)
|
|
1383
|
-
await self._sync_all_state_with_monitored_folders(project_state)
|
|
1384
|
-
|
|
1385
|
-
return True
|
|
1386
|
-
|
|
1387
|
-
def _find_item_by_path(self, items: List[FileItem], target_path: str) -> Optional[FileItem]:
|
|
1388
|
-
"""Find a file item by its path recursively."""
|
|
1389
|
-
for item in items:
|
|
1390
|
-
if item.path == target_path:
|
|
1391
|
-
return item
|
|
1392
|
-
if item.children:
|
|
1393
|
-
found = self._find_item_by_path(item.children, target_path)
|
|
1394
|
-
if found:
|
|
1395
|
-
return found
|
|
1396
|
-
return None
|
|
1397
|
-
|
|
1398
|
-
async def open_file(self, client_session_key: str, file_path: str, set_active: bool = True) -> bool:
|
|
1399
|
-
"""Open a file in a new tab with content loaded."""
|
|
1400
|
-
if client_session_key not in self.projects:
|
|
1401
|
-
return False
|
|
1402
|
-
|
|
1403
|
-
project_state = self.projects[client_session_key]
|
|
1404
|
-
|
|
1405
|
-
# Generate unique key for file tab
|
|
1406
|
-
tab_key = generate_tab_key('file', file_path)
|
|
1407
|
-
|
|
1408
|
-
# Check if file is already open
|
|
1409
|
-
if tab_key in project_state.open_tabs:
|
|
1410
|
-
existing_tab = project_state.open_tabs[tab_key]
|
|
1411
|
-
if set_active:
|
|
1412
|
-
project_state.active_tab = existing_tab
|
|
1413
|
-
self._write_debug_state()
|
|
1414
|
-
return True
|
|
1415
|
-
|
|
1416
|
-
# Create new file tab using tab factory
|
|
1417
|
-
from .tab_factory import get_tab_factory
|
|
1418
|
-
tab_factory = get_tab_factory()
|
|
1419
|
-
|
|
1420
|
-
try:
|
|
1421
|
-
new_tab = await tab_factory.create_file_tab(file_path)
|
|
1422
|
-
project_state.open_tabs[tab_key] = new_tab
|
|
1423
|
-
if set_active:
|
|
1424
|
-
project_state.active_tab = new_tab
|
|
1425
|
-
|
|
1426
|
-
logger.info(f"Opened file tab: {file_path} (content loaded: {len(new_tab.content or '') > 0})")
|
|
1427
|
-
self._write_debug_state()
|
|
1428
|
-
return True
|
|
1429
|
-
except Exception as e:
|
|
1430
|
-
logger.error(f"Failed to create tab for file {file_path}: {e}")
|
|
1431
|
-
return False
|
|
1432
|
-
|
|
1433
|
-
async def close_tab(self, client_session_key: str, tab_id: str) -> bool:
|
|
1434
|
-
"""Close a tab by tab ID."""
|
|
1435
|
-
if client_session_key not in self.projects:
|
|
1436
|
-
return False
|
|
1437
|
-
|
|
1438
|
-
project_state = self.projects[client_session_key]
|
|
1439
|
-
|
|
1440
|
-
# Find and remove the tab by searching through the dictionary values
|
|
1441
|
-
tab_key_to_remove = None
|
|
1442
|
-
tab_to_remove = None
|
|
1443
|
-
for key, tab in project_state.open_tabs.items():
|
|
1444
|
-
if tab.tab_id == tab_id:
|
|
1445
|
-
tab_key_to_remove = key
|
|
1446
|
-
tab_to_remove = tab
|
|
1447
|
-
break
|
|
1448
|
-
|
|
1449
|
-
if not tab_to_remove:
|
|
1450
|
-
return False
|
|
1451
|
-
|
|
1452
|
-
del project_state.open_tabs[tab_key_to_remove]
|
|
1453
|
-
|
|
1454
|
-
# Clear active tab if it was the closed tab
|
|
1455
|
-
if project_state.active_tab and project_state.active_tab.tab_id == tab_id:
|
|
1456
|
-
# Set active tab to the last remaining tab, or None if no tabs left
|
|
1457
|
-
remaining_tabs = list(project_state.open_tabs.values())
|
|
1458
|
-
project_state.active_tab = remaining_tabs[-1] if remaining_tabs else None
|
|
1459
|
-
|
|
1460
|
-
self._write_debug_state()
|
|
1461
|
-
return True
|
|
1462
|
-
|
|
1463
|
-
async def set_active_tab(self, client_session_key: str, tab_id: Optional[str]) -> bool:
|
|
1464
|
-
"""Set the currently active tab."""
|
|
1465
|
-
if client_session_key not in self.projects:
|
|
1466
|
-
return False
|
|
1467
|
-
|
|
1468
|
-
project_state = self.projects[client_session_key]
|
|
1469
|
-
|
|
1470
|
-
if tab_id:
|
|
1471
|
-
# Find the tab by ID in the dictionary values
|
|
1472
|
-
tab = None
|
|
1473
|
-
for t in project_state.open_tabs.values():
|
|
1474
|
-
if t.tab_id == tab_id:
|
|
1475
|
-
tab = t
|
|
1476
|
-
break
|
|
1477
|
-
if not tab:
|
|
1478
|
-
return False
|
|
1479
|
-
project_state.active_tab = tab
|
|
1480
|
-
else:
|
|
1481
|
-
project_state.active_tab = None
|
|
1482
|
-
|
|
1483
|
-
self._write_debug_state()
|
|
1484
|
-
return True
|
|
1485
|
-
|
|
1486
|
-
async def open_diff_tab(self, client_session_key: str, file_path: str,
|
|
1487
|
-
from_ref: str, to_ref: str, from_hash: Optional[str] = None,
|
|
1488
|
-
to_hash: Optional[str] = None) -> bool:
|
|
1489
|
-
"""Open a diff tab comparing file versions at different git timeline points."""
|
|
1490
|
-
if client_session_key not in self.projects:
|
|
1491
|
-
return False
|
|
1492
|
-
|
|
1493
|
-
project_state = self.projects[client_session_key]
|
|
1494
|
-
git_manager = self.git_managers.get(client_session_key)
|
|
1495
|
-
|
|
1496
|
-
if not git_manager or not git_manager.is_git_repo:
|
|
1497
|
-
logger.error("Cannot create diff tab: not a git repository")
|
|
1498
|
-
return False
|
|
1499
|
-
|
|
1500
|
-
# Generate unique key for diff tab
|
|
1501
|
-
tab_key = generate_tab_key('diff', file_path,
|
|
1502
|
-
from_ref=from_ref, to_ref=to_ref,
|
|
1503
|
-
from_hash=from_hash, to_hash=to_hash)
|
|
1504
|
-
|
|
1505
|
-
# Check if this diff tab is already open
|
|
1506
|
-
if tab_key in project_state.open_tabs:
|
|
1507
|
-
existing_tab = project_state.open_tabs[tab_key]
|
|
1508
|
-
project_state.active_tab = existing_tab
|
|
1509
|
-
logger.info(f"Diff tab already exists, activating: {tab_key}")
|
|
1510
|
-
self._write_debug_state()
|
|
1511
|
-
return True
|
|
1512
|
-
|
|
1513
|
-
try:
|
|
1514
|
-
# Get content based on the reference type
|
|
1515
|
-
original_content = ""
|
|
1516
|
-
modified_content = ""
|
|
1517
|
-
|
|
1518
|
-
# Handle 'from' reference
|
|
1519
|
-
if from_ref == "head":
|
|
1520
|
-
original_content = git_manager.get_file_content_at_commit(file_path) or ""
|
|
1521
|
-
elif from_ref == "staged":
|
|
1522
|
-
original_content = git_manager.get_file_content_staged(file_path) or ""
|
|
1523
|
-
elif from_ref == "working":
|
|
1524
|
-
# Read current file content
|
|
1525
|
-
if os.path.exists(file_path):
|
|
1526
|
-
try:
|
|
1527
|
-
with open(file_path, 'r', encoding='utf-8') as f:
|
|
1528
|
-
original_content = f.read()
|
|
1529
|
-
except (OSError, UnicodeDecodeError) as e:
|
|
1530
|
-
logger.error("Error reading working file %s: %s", file_path, e)
|
|
1531
|
-
original_content = f"# Error reading file: {e}"
|
|
1532
|
-
elif from_ref == "commit" and from_hash:
|
|
1533
|
-
original_content = git_manager.get_file_content_at_commit(file_path, from_hash) or ""
|
|
1534
|
-
|
|
1535
|
-
# Handle 'to' reference
|
|
1536
|
-
if to_ref == "head":
|
|
1537
|
-
modified_content = git_manager.get_file_content_at_commit(file_path) or ""
|
|
1538
|
-
elif to_ref == "staged":
|
|
1539
|
-
modified_content = git_manager.get_file_content_staged(file_path) or ""
|
|
1540
|
-
elif to_ref == "working":
|
|
1541
|
-
# Read current file content
|
|
1542
|
-
if os.path.exists(file_path):
|
|
1543
|
-
try:
|
|
1544
|
-
with open(file_path, 'r', encoding='utf-8') as f:
|
|
1545
|
-
modified_content = f.read()
|
|
1546
|
-
except (OSError, UnicodeDecodeError) as e:
|
|
1547
|
-
logger.error("Error reading working file %s: %s", file_path, e)
|
|
1548
|
-
modified_content = f"# Error reading file: {e}"
|
|
1549
|
-
elif to_ref == "commit" and to_hash:
|
|
1550
|
-
modified_content = git_manager.get_file_content_at_commit(file_path, to_hash) or ""
|
|
1551
|
-
|
|
1552
|
-
# Create diff tab using tab factory
|
|
1553
|
-
from .tab_factory import get_tab_factory
|
|
1554
|
-
tab_factory = get_tab_factory()
|
|
1555
|
-
|
|
1556
|
-
# Compute diff details for the client
|
|
1557
|
-
diff_details = git_manager._compute_diff_details(original_content, modified_content)
|
|
1558
|
-
|
|
1559
|
-
# Generate HTML diff with syntax highlighting
|
|
1560
|
-
html_diff = git_manager._generate_html_diff(original_content, modified_content, file_path)
|
|
1561
|
-
|
|
1562
|
-
# Create a descriptive title for the diff
|
|
1563
|
-
title_parts = []
|
|
1564
|
-
if from_ref == "commit" and from_hash:
|
|
1565
|
-
title_parts.append(from_hash[:8])
|
|
1566
|
-
else:
|
|
1567
|
-
title_parts.append(from_ref)
|
|
1568
|
-
title_parts.append("→")
|
|
1569
|
-
if to_ref == "commit" and to_hash:
|
|
1570
|
-
title_parts.append(to_hash[:8])
|
|
1571
|
-
else:
|
|
1572
|
-
title_parts.append(to_ref)
|
|
1573
|
-
|
|
1574
|
-
diff_title = f"{os.path.basename(file_path)} ({' '.join(title_parts)})"
|
|
1575
|
-
|
|
1576
|
-
diff_tab = await tab_factory.create_diff_tab_with_title(
|
|
1577
|
-
file_path, original_content, modified_content, diff_title,
|
|
1578
|
-
diff_details=diff_details
|
|
1579
|
-
)
|
|
1580
|
-
|
|
1581
|
-
# Add metadata about the diff references
|
|
1582
|
-
diff_tab.metadata.update({
|
|
1583
|
-
'from_ref': from_ref,
|
|
1584
|
-
'to_ref': to_ref,
|
|
1585
|
-
'from_hash': from_hash,
|
|
1586
|
-
'to_hash': to_hash,
|
|
1587
|
-
'diff_timeline': True,
|
|
1588
|
-
'html_diff': html_diff
|
|
1589
|
-
})
|
|
1590
|
-
|
|
1591
|
-
project_state.open_tabs[tab_key] = diff_tab
|
|
1592
|
-
project_state.active_tab = diff_tab
|
|
1593
|
-
|
|
1594
|
-
logger.info(f"Created timeline diff tab for: {file_path} ({from_ref} → {to_ref})")
|
|
1595
|
-
self._write_debug_state()
|
|
1596
|
-
return True
|
|
1597
|
-
|
|
1598
|
-
except Exception as e:
|
|
1599
|
-
logger.error(f"Failed to create timeline diff tab for {file_path}: {e}")
|
|
1600
|
-
return False
|
|
1601
|
-
|
|
1602
|
-
async def _handle_file_change(self, event):
|
|
1603
|
-
"""Handle file system change events with debouncing."""
|
|
1604
|
-
logger.debug("Processing file change: %s - %s", event.event_type, event.src_path)
|
|
1605
|
-
|
|
1606
|
-
self._pending_changes.add(event.src_path)
|
|
1607
|
-
|
|
1608
|
-
# Cancel existing timer
|
|
1609
|
-
if self._change_debounce_timer:
|
|
1610
|
-
self._change_debounce_timer.cancel()
|
|
1611
|
-
|
|
1612
|
-
# Set new timer
|
|
1613
|
-
logger.debug("Starting debounce timer for file changes")
|
|
1614
|
-
self._change_debounce_timer = asyncio.create_task(self._process_pending_changes())
|
|
1615
|
-
|
|
1616
|
-
async def _process_pending_changes(self):
|
|
1617
|
-
"""Process pending file changes after debounce delay."""
|
|
1618
|
-
logger.info("Processing %d pending file changes after debounce", len(self._pending_changes))
|
|
1619
|
-
await asyncio.sleep(0.5) # Debounce delay
|
|
1620
|
-
|
|
1621
|
-
if not self._pending_changes:
|
|
1622
|
-
logger.debug("No pending changes to process")
|
|
1623
|
-
return
|
|
1624
|
-
|
|
1625
|
-
logger.debug("Pending changes: %s", list(self._pending_changes))
|
|
1626
|
-
|
|
1627
|
-
# Process changes for each affected project
|
|
1628
|
-
affected_projects = set()
|
|
1629
|
-
for change_path in self._pending_changes:
|
|
1630
|
-
logger.debug("Checking change path: %s", change_path)
|
|
1631
|
-
for client_session_key, project_state in self.projects.items():
|
|
1632
|
-
if change_path.startswith(project_state.project_folder_path):
|
|
1633
|
-
logger.debug("Change affects project: %s", client_session_key)
|
|
1634
|
-
affected_projects.add(client_session_key)
|
|
1635
|
-
|
|
1636
|
-
logger.info("Refreshing %d affected projects", len(affected_projects))
|
|
1637
|
-
|
|
1638
|
-
# Refresh affected projects
|
|
1639
|
-
for client_session_key in affected_projects:
|
|
1640
|
-
logger.debug("Refreshing project state: %s", client_session_key)
|
|
1641
|
-
await self._refresh_project_state(client_session_key)
|
|
1642
|
-
|
|
1643
|
-
self._pending_changes.clear()
|
|
1644
|
-
logger.debug("Finished processing file changes")
|
|
1645
|
-
|
|
1646
|
-
async def _refresh_project_state(self, client_session_key: str):
|
|
1647
|
-
"""Refresh project state after file changes."""
|
|
1648
|
-
if client_session_key not in self.projects:
|
|
1649
|
-
return
|
|
1650
|
-
|
|
1651
|
-
project_state = self.projects[client_session_key]
|
|
1652
|
-
git_manager = self.git_managers[client_session_key]
|
|
1653
|
-
|
|
1654
|
-
# Update Git status
|
|
1655
|
-
if git_manager:
|
|
1656
|
-
project_state.git_branch = git_manager.get_branch_name()
|
|
1657
|
-
project_state.git_status_summary = git_manager.get_status_summary()
|
|
1658
|
-
project_state.git_detailed_status = git_manager.get_detailed_status()
|
|
1659
|
-
|
|
1660
|
-
# Sync all dependent state (items, watchdog) - no automatic directory detection
|
|
1661
|
-
await self._sync_all_state_with_monitored_folders(project_state)
|
|
1662
|
-
|
|
1663
|
-
# Send update to clients
|
|
1664
|
-
await self._send_project_state_update(project_state)
|
|
1665
|
-
|
|
1666
|
-
async def _detect_and_add_new_directories(self, project_state: ProjectState):
|
|
1667
|
-
"""Detect new directories in monitored folders and add them to monitoring."""
|
|
1668
|
-
# For each currently monitored folder, check if new subdirectories appeared
|
|
1669
|
-
monitored_folder_paths = [mf.folder_path for mf in project_state.monitored_folders]
|
|
1670
|
-
|
|
1671
|
-
for folder_path in monitored_folder_paths:
|
|
1672
|
-
if os.path.exists(folder_path) and os.path.isdir(folder_path):
|
|
1673
|
-
await self._add_subdirectories_to_monitored(project_state, folder_path)
|
|
1674
|
-
|
|
1675
|
-
async def _reload_visible_structures(self, project_state: ProjectState):
|
|
1676
|
-
"""Reload all visible structures with flattened items."""
|
|
1677
|
-
await self._build_flattened_items_structure(project_state)
|
|
1678
|
-
|
|
1679
|
-
async def _send_project_state_update(self, project_state: ProjectState, server_project_id: str = None):
|
|
1680
|
-
"""Send project state update to the specific client session only."""
|
|
1681
|
-
# Create state signature for change detection
|
|
1682
|
-
current_state_signature = {
|
|
1683
|
-
"git_branch": project_state.git_branch,
|
|
1684
|
-
"git_status_summary": project_state.git_status_summary,
|
|
1685
|
-
"git_detailed_status": str(project_state.git_detailed_status) if project_state.git_detailed_status else None,
|
|
1686
|
-
"open_tabs": tuple((tab.tab_id, tab.tab_type, tab.title) for tab in project_state.open_tabs.values()),
|
|
1687
|
-
"active_tab": project_state.active_tab.tab_id if project_state.active_tab else None,
|
|
1688
|
-
"items_count": len(project_state.items),
|
|
1689
|
-
"monitored_folders": tuple((mf.folder_path, mf.is_expanded) for mf in sorted(project_state.monitored_folders, key=lambda x: x.folder_path))
|
|
1690
|
-
}
|
|
1691
|
-
|
|
1692
|
-
# Check if state has actually changed
|
|
1693
|
-
last_signature = getattr(project_state, '_last_sent_signature', None)
|
|
1694
|
-
if last_signature == current_state_signature:
|
|
1695
|
-
logger.debug("Project state unchanged, skipping update for client: %s", project_state.client_session_id)
|
|
1696
|
-
return
|
|
1697
|
-
|
|
1698
|
-
# State has changed, send update
|
|
1699
|
-
project_state._last_sent_signature = current_state_signature
|
|
1700
|
-
logger.info("Sending project state update to client: %s", project_state.client_session_id)
|
|
1701
|
-
|
|
1702
|
-
payload = {
|
|
1703
|
-
"event": "project_state_update",
|
|
1704
|
-
"project_id": server_project_id or project_state.client_session_key, # Use server ID if provided
|
|
1705
|
-
"project_folder_path": project_state.project_folder_path,
|
|
1706
|
-
"is_git_repo": project_state.is_git_repo,
|
|
1707
|
-
"git_branch": project_state.git_branch,
|
|
1708
|
-
"git_status_summary": project_state.git_status_summary,
|
|
1709
|
-
"git_detailed_status": asdict(project_state.git_detailed_status) if project_state.git_detailed_status else None,
|
|
1710
|
-
"open_tabs": [self._serialize_tab_info(tab) for tab in project_state.open_tabs.values()],
|
|
1711
|
-
"active_tab": self._serialize_tab_info(project_state.active_tab) if project_state.active_tab else None,
|
|
1712
|
-
"items": [self._serialize_file_item(item) for item in project_state.items],
|
|
1713
|
-
"timestamp": time.time(),
|
|
1714
|
-
"client_sessions": [project_state.client_session_id] # Target only this client session
|
|
1715
|
-
}
|
|
1716
|
-
|
|
1717
|
-
# Send via control channel with client session targeting
|
|
1718
|
-
await self.control_channel.send(payload)
|
|
1719
|
-
|
|
1720
|
-
def cleanup_project(self, client_session_key: str):
|
|
1721
|
-
"""Clean up project state and resources."""
|
|
1722
|
-
if client_session_key in self.projects:
|
|
1723
|
-
project_state = self.projects[client_session_key]
|
|
1724
|
-
|
|
1725
|
-
# Stop watching all monitored folders for this project
|
|
1726
|
-
for monitored_folder in project_state.monitored_folders:
|
|
1727
|
-
self.file_watcher.stop_watching(monitored_folder.folder_path)
|
|
1728
|
-
|
|
1729
|
-
# Stop watching .git directory if it was being monitored
|
|
1730
|
-
if project_state.is_git_repo:
|
|
1731
|
-
git_dir_path = os.path.join(project_state.project_folder_path, '.git')
|
|
1732
|
-
self.file_watcher.stop_watching(git_dir_path)
|
|
1733
|
-
|
|
1734
|
-
# Clean up managers
|
|
1735
|
-
self.git_managers.pop(client_session_key, None)
|
|
1736
|
-
self.projects.pop(client_session_key, None)
|
|
1737
|
-
|
|
1738
|
-
logger.info("Cleaned up project state: %s", client_session_key)
|
|
1739
|
-
self._write_debug_state()
|
|
1740
|
-
|
|
1741
|
-
def cleanup_projects_by_client_session(self, client_session_id: str):
|
|
1742
|
-
"""Clean up all project states for a specific client session."""
|
|
1743
|
-
logger.info("Cleaning up all project states for client session: %s", client_session_id)
|
|
1744
|
-
|
|
1745
|
-
# Find all project states that belong to this client session
|
|
1746
|
-
keys_to_remove = []
|
|
1747
|
-
for client_session_key in self.projects.keys():
|
|
1748
|
-
if client_session_key.startswith(client_session_id):
|
|
1749
|
-
keys_to_remove.append(client_session_key)
|
|
1750
|
-
|
|
1751
|
-
# Clean up each project state
|
|
1752
|
-
for client_session_key in keys_to_remove:
|
|
1753
|
-
self.cleanup_project(client_session_key)
|
|
1754
|
-
|
|
1755
|
-
logger.info("Cleaned up %d project states for client session: %s", len(keys_to_remove), client_session_id)
|
|
1756
|
-
|
|
1757
|
-
def cleanup_all_projects(self):
|
|
1758
|
-
"""Clean up all project states. Used for shutdown or reset."""
|
|
1759
|
-
logger.info("Cleaning up all project states")
|
|
1760
|
-
|
|
1761
|
-
keys_to_remove = list(self.projects.keys())
|
|
1762
|
-
for client_session_key in keys_to_remove:
|
|
1763
|
-
self.cleanup_project(client_session_key)
|
|
1764
|
-
|
|
1765
|
-
logger.info("Cleaned up %d project states", len(keys_to_remove))
|
|
1766
|
-
|
|
1767
|
-
|
|
1768
|
-
def generate_tab_key(tab_type: str, file_path: str, **kwargs) -> str:
|
|
1769
|
-
"""Generate a unique key for a tab.
|
|
1770
|
-
|
|
1771
|
-
Args:
|
|
1772
|
-
tab_type: Type of tab ('file', 'diff', 'untitled', etc.)
|
|
1773
|
-
file_path: Path to the file
|
|
1774
|
-
**kwargs: Additional parameters for diff tabs (from_ref, to_ref, from_hash, to_hash)
|
|
1775
|
-
|
|
1776
|
-
Returns:
|
|
1777
|
-
Unique string key for the tab
|
|
1778
|
-
"""
|
|
1779
|
-
import uuid
|
|
1780
|
-
|
|
1781
|
-
if tab_type == 'file':
|
|
1782
|
-
return file_path
|
|
1783
|
-
elif tab_type == 'diff':
|
|
1784
|
-
from_ref = kwargs.get('from_ref', '')
|
|
1785
|
-
to_ref = kwargs.get('to_ref', '')
|
|
1786
|
-
from_hash = kwargs.get('from_hash', '')
|
|
1787
|
-
to_hash = kwargs.get('to_hash', '')
|
|
1788
|
-
return f"diff:{file_path}:{from_ref}:{to_ref}:{from_hash}:{to_hash}"
|
|
1789
|
-
elif tab_type == 'untitled':
|
|
1790
|
-
# For untitled tabs, use the tab_id as the key since they don't have a file path
|
|
1791
|
-
return kwargs.get('tab_id', str(uuid.uuid4()))
|
|
1792
|
-
else:
|
|
1793
|
-
# For other tab types, use file_path if available, otherwise tab_id
|
|
1794
|
-
return file_path if file_path else kwargs.get('tab_id', str(uuid.uuid4()))
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
# Helper function for other handlers to get/create project state manager
|
|
1798
|
-
def _get_or_create_project_state_manager(context: Dict[str, Any], control_channel) -> 'ProjectStateManager':
|
|
1799
|
-
"""Get or create project state manager with debug setup."""
|
|
1800
|
-
logger.info("_get_or_create_project_state_manager called")
|
|
1801
|
-
logger.info("Context debug flag: %s", context.get("debug", False))
|
|
1802
|
-
|
|
1803
|
-
if "project_state_manager" not in context:
|
|
1804
|
-
logger.info("Creating new ProjectStateManager")
|
|
1805
|
-
manager = ProjectStateManager(control_channel, context)
|
|
1806
|
-
|
|
1807
|
-
# Set up debug mode if enabled
|
|
1808
|
-
if context.get("debug", False):
|
|
1809
|
-
debug_file_path = os.path.join(os.getcwd(), "project_state_debug.json")
|
|
1810
|
-
logger.info("Setting up debug mode with file: %s", debug_file_path)
|
|
1811
|
-
manager.set_debug_mode(True, debug_file_path)
|
|
1812
|
-
else:
|
|
1813
|
-
logger.info("Debug mode not enabled in context")
|
|
1814
|
-
|
|
1815
|
-
context["project_state_manager"] = manager
|
|
1816
|
-
logger.info("Created and stored new manager")
|
|
1817
|
-
return manager
|
|
1818
|
-
else:
|
|
1819
|
-
logger.info("Returning existing project state manager")
|
|
1820
|
-
return context["project_state_manager"]
|
|
1821
|
-
|
|
1822
|
-
|
|
1823
|
-
# Handler classes
|
|
1824
|
-
class ProjectStateFolderExpandHandler(AsyncHandler):
|
|
1825
|
-
"""Handler for expanding project folders."""
|
|
1826
|
-
|
|
1827
|
-
@property
|
|
1828
|
-
def command_name(self) -> str:
|
|
1829
|
-
return "project_state_folder_expand"
|
|
1830
|
-
|
|
1831
|
-
async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
|
|
1832
|
-
"""Expand a folder in project state."""
|
|
1833
|
-
logger.info("ProjectStateFolderExpandHandler.execute called with message: %s", message)
|
|
1834
|
-
|
|
1835
|
-
server_project_id = message.get("project_id") # Server-side UUID (for response)
|
|
1836
|
-
folder_path = message.get("folder_path")
|
|
1837
|
-
source_client_session = message.get("source_client_session") # This is our key
|
|
1838
|
-
|
|
1839
|
-
logger.info("Extracted server_project_id: %s, folder_path: %s, source_client_session: %s",
|
|
1840
|
-
server_project_id, folder_path, source_client_session)
|
|
1841
|
-
|
|
1842
|
-
if not server_project_id:
|
|
1843
|
-
raise ValueError("project_id is required")
|
|
1844
|
-
if not folder_path:
|
|
1845
|
-
raise ValueError("folder_path is required")
|
|
1846
|
-
if not source_client_session:
|
|
1847
|
-
raise ValueError("source_client_session is required")
|
|
1848
|
-
|
|
1849
|
-
logger.info("Getting project state manager...")
|
|
1850
|
-
manager = _get_or_create_project_state_manager(self.context, self.control_channel)
|
|
1851
|
-
logger.info("Got manager: %s", manager)
|
|
1852
|
-
|
|
1853
|
-
# Find project state using client session - we need to find which project state this client session has
|
|
1854
|
-
project_state_key = None
|
|
1855
|
-
for key in manager.projects.keys():
|
|
1856
|
-
if key.startswith(source_client_session):
|
|
1857
|
-
project_state_key = key
|
|
1858
|
-
break
|
|
1859
|
-
|
|
1860
|
-
if not project_state_key:
|
|
1861
|
-
logger.error("No project state found for client session: %s", source_client_session)
|
|
1862
|
-
response = {
|
|
1863
|
-
"event": "project_state_folder_expand_response",
|
|
1864
|
-
"project_id": server_project_id,
|
|
1865
|
-
"folder_path": folder_path,
|
|
1866
|
-
"success": False
|
|
1867
|
-
}
|
|
1868
|
-
logger.info("Returning response: %s", response)
|
|
1869
|
-
return response
|
|
1870
|
-
|
|
1871
|
-
logger.info("Found project state key: %s", project_state_key)
|
|
1872
|
-
|
|
1873
|
-
logger.info("Calling manager.expand_folder...")
|
|
1874
|
-
success = await manager.expand_folder(project_state_key, folder_path)
|
|
1875
|
-
logger.info("expand_folder returned: %s", success)
|
|
1876
|
-
|
|
1877
|
-
if success:
|
|
1878
|
-
# Send updated state
|
|
1879
|
-
logger.info("Sending project state update...")
|
|
1880
|
-
project_state = manager.projects[project_state_key]
|
|
1881
|
-
await manager._send_project_state_update(project_state, server_project_id)
|
|
1882
|
-
logger.info("Project state update sent")
|
|
1883
|
-
|
|
1884
|
-
response = {
|
|
1885
|
-
"event": "project_state_folder_expand_response",
|
|
1886
|
-
"project_id": server_project_id, # Return the server-side project ID
|
|
1887
|
-
"folder_path": folder_path,
|
|
1888
|
-
"success": success
|
|
1889
|
-
}
|
|
1890
|
-
|
|
1891
|
-
logger.info("Returning response: %s", response)
|
|
1892
|
-
return response
|
|
1893
|
-
|
|
1894
|
-
|
|
1895
|
-
class ProjectStateFolderCollapseHandler(AsyncHandler):
|
|
1896
|
-
"""Handler for collapsing project folders."""
|
|
1897
|
-
|
|
1898
|
-
@property
|
|
1899
|
-
def command_name(self) -> str:
|
|
1900
|
-
return "project_state_folder_collapse"
|
|
1901
|
-
|
|
1902
|
-
async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
|
|
1903
|
-
"""Collapse a folder in project state."""
|
|
1904
|
-
server_project_id = message.get("project_id") # Server-side UUID (for response)
|
|
1905
|
-
folder_path = message.get("folder_path")
|
|
1906
|
-
source_client_session = message.get("source_client_session") # This is our key
|
|
1907
|
-
|
|
1908
|
-
if not server_project_id:
|
|
1909
|
-
raise ValueError("project_id is required")
|
|
1910
|
-
if not folder_path:
|
|
1911
|
-
raise ValueError("folder_path is required")
|
|
1912
|
-
if not source_client_session:
|
|
1913
|
-
raise ValueError("source_client_session is required")
|
|
1914
|
-
|
|
1915
|
-
manager = _get_or_create_project_state_manager(self.context, self.control_channel)
|
|
1916
|
-
|
|
1917
|
-
# Find project state using client session
|
|
1918
|
-
project_state_key = None
|
|
1919
|
-
for key in manager.projects.keys():
|
|
1920
|
-
if key.startswith(source_client_session):
|
|
1921
|
-
project_state_key = key
|
|
1922
|
-
break
|
|
1923
|
-
|
|
1924
|
-
if not project_state_key:
|
|
1925
|
-
return {
|
|
1926
|
-
"event": "project_state_folder_collapse_response",
|
|
1927
|
-
"project_id": server_project_id,
|
|
1928
|
-
"folder_path": folder_path,
|
|
1929
|
-
"success": False
|
|
1930
|
-
}
|
|
1931
|
-
|
|
1932
|
-
success = await manager.collapse_folder(project_state_key, folder_path)
|
|
1933
|
-
|
|
1934
|
-
if success:
|
|
1935
|
-
# Send updated state
|
|
1936
|
-
project_state = manager.projects[project_state_key]
|
|
1937
|
-
await manager._send_project_state_update(project_state, server_project_id)
|
|
1938
|
-
|
|
1939
|
-
return {
|
|
1940
|
-
"event": "project_state_folder_collapse_response",
|
|
1941
|
-
"project_id": server_project_id, # Return the server-side project ID
|
|
1942
|
-
"folder_path": folder_path,
|
|
1943
|
-
"success": success
|
|
1944
|
-
}
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
class ProjectStateFileOpenHandler(AsyncHandler):
|
|
1948
|
-
"""Handler for opening files in project state."""
|
|
1949
|
-
|
|
1950
|
-
@property
|
|
1951
|
-
def command_name(self) -> str:
|
|
1952
|
-
return "project_state_file_open"
|
|
1953
|
-
|
|
1954
|
-
async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
|
|
1955
|
-
"""Open a file in project state."""
|
|
1956
|
-
server_project_id = message.get("project_id") # Server-side UUID (for response)
|
|
1957
|
-
file_path = message.get("file_path")
|
|
1958
|
-
source_client_session = message.get("source_client_session") # This is our key
|
|
1959
|
-
set_active = message.get("set_active", True)
|
|
1960
|
-
|
|
1961
|
-
if not server_project_id:
|
|
1962
|
-
raise ValueError("project_id is required")
|
|
1963
|
-
if not file_path:
|
|
1964
|
-
raise ValueError("file_path is required")
|
|
1965
|
-
if not source_client_session:
|
|
1966
|
-
raise ValueError("source_client_session is required")
|
|
1967
|
-
|
|
1968
|
-
manager = _get_or_create_project_state_manager(self.context, self.control_channel)
|
|
1969
|
-
|
|
1970
|
-
# Find project state using client session
|
|
1971
|
-
project_state_key = None
|
|
1972
|
-
for key in manager.projects.keys():
|
|
1973
|
-
if key.startswith(source_client_session):
|
|
1974
|
-
project_state_key = key
|
|
1975
|
-
break
|
|
1976
|
-
|
|
1977
|
-
if not project_state_key:
|
|
1978
|
-
return {
|
|
1979
|
-
"event": "project_state_file_open_response",
|
|
1980
|
-
"project_id": server_project_id,
|
|
1981
|
-
"file_path": file_path,
|
|
1982
|
-
"success": False,
|
|
1983
|
-
"set_active": set_active
|
|
1984
|
-
}
|
|
1985
|
-
|
|
1986
|
-
success = await manager.open_file(project_state_key, file_path, set_active)
|
|
1987
|
-
|
|
1988
|
-
if success:
|
|
1989
|
-
# Send updated state
|
|
1990
|
-
project_state = manager.projects[project_state_key]
|
|
1991
|
-
await manager._send_project_state_update(project_state, server_project_id)
|
|
1992
|
-
|
|
1993
|
-
return {
|
|
1994
|
-
"event": "project_state_file_open_response",
|
|
1995
|
-
"project_id": server_project_id, # Return the server-side project ID
|
|
1996
|
-
"file_path": file_path,
|
|
1997
|
-
"success": success,
|
|
1998
|
-
"set_active": set_active
|
|
1999
|
-
}
|
|
2000
|
-
|
|
2001
|
-
|
|
2002
|
-
class ProjectStateTabCloseHandler(AsyncHandler):
|
|
2003
|
-
"""Handler for closing tabs in project state."""
|
|
2004
|
-
|
|
2005
|
-
@property
|
|
2006
|
-
def command_name(self) -> str:
|
|
2007
|
-
return "project_state_tab_close"
|
|
2008
|
-
|
|
2009
|
-
async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
|
|
2010
|
-
"""Close a tab in project state."""
|
|
2011
|
-
server_project_id = message.get("project_id") # Server-side UUID (for response)
|
|
2012
|
-
tab_id = message.get("tab_id")
|
|
2013
|
-
source_client_session = message.get("source_client_session") # This is our key
|
|
2014
|
-
|
|
2015
|
-
if not server_project_id:
|
|
2016
|
-
raise ValueError("project_id is required")
|
|
2017
|
-
if not tab_id:
|
|
2018
|
-
raise ValueError("tab_id is required")
|
|
2019
|
-
if not source_client_session:
|
|
2020
|
-
raise ValueError("source_client_session is required")
|
|
2021
|
-
|
|
2022
|
-
manager = _get_or_create_project_state_manager(self.context, self.control_channel)
|
|
2023
|
-
|
|
2024
|
-
# Find project state using client session
|
|
2025
|
-
project_state_key = None
|
|
2026
|
-
for key in manager.projects.keys():
|
|
2027
|
-
if key.startswith(source_client_session):
|
|
2028
|
-
project_state_key = key
|
|
2029
|
-
break
|
|
2030
|
-
|
|
2031
|
-
if not project_state_key:
|
|
2032
|
-
return {
|
|
2033
|
-
"event": "project_state_tab_close_response",
|
|
2034
|
-
"project_id": server_project_id,
|
|
2035
|
-
"tab_id": tab_id,
|
|
2036
|
-
"success": False
|
|
2037
|
-
}
|
|
2038
|
-
|
|
2039
|
-
success = await manager.close_tab(project_state_key, tab_id)
|
|
2040
|
-
|
|
2041
|
-
if success:
|
|
2042
|
-
# Send updated state
|
|
2043
|
-
project_state = manager.projects[project_state_key]
|
|
2044
|
-
await manager._send_project_state_update(project_state, server_project_id)
|
|
2045
|
-
|
|
2046
|
-
return {
|
|
2047
|
-
"event": "project_state_tab_close_response",
|
|
2048
|
-
"project_id": server_project_id, # Return the server-side project ID
|
|
2049
|
-
"tab_id": tab_id,
|
|
2050
|
-
"success": success
|
|
2051
|
-
}
|
|
2052
|
-
|
|
2053
|
-
|
|
2054
|
-
class ProjectStateSetActiveTabHandler(AsyncHandler):
|
|
2055
|
-
"""Handler for setting active tab in project state."""
|
|
2056
|
-
|
|
2057
|
-
@property
|
|
2058
|
-
def command_name(self) -> str:
|
|
2059
|
-
return "project_state_set_active_tab"
|
|
2060
|
-
|
|
2061
|
-
async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
|
|
2062
|
-
"""Set active tab in project state."""
|
|
2063
|
-
server_project_id = message.get("project_id") # Server-side UUID (for response)
|
|
2064
|
-
tab_id = message.get("tab_id") # Can be None to clear active tab
|
|
2065
|
-
source_client_session = message.get("source_client_session") # This is our key
|
|
2066
|
-
|
|
2067
|
-
if not server_project_id:
|
|
2068
|
-
raise ValueError("project_id is required")
|
|
2069
|
-
if not source_client_session:
|
|
2070
|
-
raise ValueError("source_client_session is required")
|
|
2071
|
-
|
|
2072
|
-
manager = _get_or_create_project_state_manager(self.context, self.control_channel)
|
|
2073
|
-
|
|
2074
|
-
# Find project state using client session
|
|
2075
|
-
project_state_key = None
|
|
2076
|
-
for key in manager.projects.keys():
|
|
2077
|
-
if key.startswith(source_client_session):
|
|
2078
|
-
project_state_key = key
|
|
2079
|
-
break
|
|
2080
|
-
|
|
2081
|
-
if not project_state_key:
|
|
2082
|
-
return {
|
|
2083
|
-
"event": "project_state_set_active_tab_response",
|
|
2084
|
-
"project_id": server_project_id,
|
|
2085
|
-
"tab_id": tab_id,
|
|
2086
|
-
"success": False
|
|
2087
|
-
}
|
|
2088
|
-
|
|
2089
|
-
success = await manager.set_active_tab(project_state_key, tab_id)
|
|
2090
|
-
|
|
2091
|
-
if success:
|
|
2092
|
-
# Send updated state
|
|
2093
|
-
project_state = manager.projects[project_state_key]
|
|
2094
|
-
await manager._send_project_state_update(project_state, server_project_id)
|
|
2095
|
-
|
|
2096
|
-
return {
|
|
2097
|
-
"event": "project_state_set_active_tab_response",
|
|
2098
|
-
"project_id": server_project_id, # Return the server-side project ID
|
|
2099
|
-
"tab_id": tab_id,
|
|
2100
|
-
"success": success
|
|
2101
|
-
}
|
|
2102
|
-
|
|
2103
|
-
|
|
2104
|
-
class ProjectStateDiffOpenHandler(AsyncHandler):
|
|
2105
|
-
"""Handler for opening diff tabs based on git timeline references."""
|
|
2106
|
-
|
|
2107
|
-
@property
|
|
2108
|
-
def command_name(self) -> str:
|
|
2109
|
-
return "project_state_diff_open"
|
|
2110
|
-
|
|
2111
|
-
async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
|
|
2112
|
-
"""Open a diff tab comparing file versions at different git timeline points."""
|
|
2113
|
-
server_project_id = message.get("project_id") # Server-side UUID (for response)
|
|
2114
|
-
file_path = message.get("file_path")
|
|
2115
|
-
from_ref = message.get("from_ref") # 'head', 'staged', 'working', 'commit'
|
|
2116
|
-
to_ref = message.get("to_ref") # 'head', 'staged', 'working', 'commit'
|
|
2117
|
-
from_hash = message.get("from_hash") # Optional commit hash for from_ref='commit'
|
|
2118
|
-
to_hash = message.get("to_hash") # Optional commit hash for to_ref='commit'
|
|
2119
|
-
source_client_session = message.get("source_client_session") # This is our key
|
|
2120
|
-
|
|
2121
|
-
if not server_project_id:
|
|
2122
|
-
raise ValueError("project_id is required")
|
|
2123
|
-
if not file_path:
|
|
2124
|
-
raise ValueError("file_path is required")
|
|
2125
|
-
if not from_ref:
|
|
2126
|
-
raise ValueError("from_ref is required")
|
|
2127
|
-
if not to_ref:
|
|
2128
|
-
raise ValueError("to_ref is required")
|
|
2129
|
-
if not source_client_session:
|
|
2130
|
-
raise ValueError("source_client_session is required")
|
|
2131
|
-
|
|
2132
|
-
# Validate reference types
|
|
2133
|
-
valid_refs = {'head', 'staged', 'working', 'commit'}
|
|
2134
|
-
if from_ref not in valid_refs:
|
|
2135
|
-
raise ValueError(f"Invalid from_ref: {from_ref}. Must be one of {valid_refs}")
|
|
2136
|
-
if to_ref not in valid_refs:
|
|
2137
|
-
raise ValueError(f"Invalid to_ref: {to_ref}. Must be one of {valid_refs}")
|
|
2138
|
-
|
|
2139
|
-
# Validate commit hashes are provided when needed
|
|
2140
|
-
if from_ref == 'commit' and not from_hash:
|
|
2141
|
-
raise ValueError("from_hash is required when from_ref='commit'")
|
|
2142
|
-
if to_ref == 'commit' and not to_hash:
|
|
2143
|
-
raise ValueError("to_hash is required when to_ref='commit'")
|
|
2144
|
-
|
|
2145
|
-
manager = _get_or_create_project_state_manager(self.context, self.control_channel)
|
|
2146
|
-
|
|
2147
|
-
# Find project state using client session
|
|
2148
|
-
project_state_key = None
|
|
2149
|
-
for key in manager.projects.keys():
|
|
2150
|
-
if key.startswith(source_client_session):
|
|
2151
|
-
project_state_key = key
|
|
2152
|
-
break
|
|
2153
|
-
|
|
2154
|
-
if not project_state_key:
|
|
2155
|
-
return {
|
|
2156
|
-
"event": "project_state_diff_open_response",
|
|
2157
|
-
"project_id": server_project_id,
|
|
2158
|
-
"file_path": file_path,
|
|
2159
|
-
"from_ref": from_ref,
|
|
2160
|
-
"to_ref": to_ref,
|
|
2161
|
-
"success": False,
|
|
2162
|
-
"error": "Project state not found"
|
|
2163
|
-
}
|
|
2164
|
-
|
|
2165
|
-
success = await manager.open_diff_tab(
|
|
2166
|
-
project_state_key, file_path, from_ref, to_ref, from_hash, to_hash
|
|
2167
|
-
)
|
|
2168
|
-
|
|
2169
|
-
if success:
|
|
2170
|
-
# Send updated state
|
|
2171
|
-
project_state = manager.projects[project_state_key]
|
|
2172
|
-
await manager._send_project_state_update(project_state, server_project_id)
|
|
2173
|
-
|
|
2174
|
-
return {
|
|
2175
|
-
"event": "project_state_diff_open_response",
|
|
2176
|
-
"project_id": server_project_id, # Return the server-side project ID
|
|
2177
|
-
"file_path": file_path,
|
|
2178
|
-
"from_ref": from_ref,
|
|
2179
|
-
"to_ref": to_ref,
|
|
2180
|
-
"from_hash": from_hash,
|
|
2181
|
-
"to_hash": to_hash,
|
|
2182
|
-
"success": success
|
|
2183
|
-
}
|
|
2184
|
-
|
|
2185
|
-
|
|
1
|
+
"""Project state handlers - modular architecture.
|
|
2
|
+
|
|
3
|
+
This module serves as a compatibility layer that imports all the project state
|
|
4
|
+
handlers from the new modular structure. This ensures existing code continues
|
|
5
|
+
to work while providing access to the new architecture.
|
|
6
|
+
|
|
7
|
+
The original monolithic file has been broken down into a modular structure
|
|
8
|
+
located in the project_state/ subdirectory. All functionality, logging, and
|
|
9
|
+
documentation has been preserved while improving maintainability.
|
|
10
|
+
|
|
11
|
+
For detailed information about the new structure, see:
|
|
12
|
+
project_state/README.md
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
# Import everything from the modular structure for backward compatibility
|
|
16
|
+
from .project_state import *
|
|
17
|
+
|
|
18
|
+
# Ensure all handlers are available at module level for existing imports
|
|
19
|
+
from .project_state.handlers import (
|
|
20
|
+
ProjectStateFolderExpandHandler,
|
|
21
|
+
ProjectStateFolderCollapseHandler,
|
|
22
|
+
ProjectStateFileOpenHandler,
|
|
23
|
+
ProjectStateTabCloseHandler,
|
|
24
|
+
ProjectStateSetActiveTabHandler,
|
|
25
|
+
ProjectStateDiffOpenHandler,
|
|
26
|
+
ProjectStateDiffContentHandler,
|
|
27
|
+
ProjectStateGitStageHandler,
|
|
28
|
+
ProjectStateGitUnstageHandler,
|
|
29
|
+
ProjectStateGitRevertHandler,
|
|
30
|
+
ProjectStateGitCommitHandler,
|
|
31
|
+
handle_client_session_cleanup
|
|
32
|
+
)
|
|
33
|
+
|
|
34
|
+
from .project_state.manager import (
|
|
35
|
+
get_or_create_project_state_manager,
|
|
36
|
+
reset_global_project_state_manager,
|
|
37
|
+
debug_global_manager_state
|
|
38
|
+
)
|
|
39
|
+
|
|
40
|
+
from .project_state.utils import generate_tab_key
|
|
41
|
+
|
|
42
|
+
# Re-export with the old private function names for backward compatibility
|
|
43
|
+
_get_or_create_project_state_manager = get_or_create_project_state_manager
|
|
44
|
+
_reset_global_project_state_manager = reset_global_project_state_manager
|
|
45
|
+
_debug_global_manager_state = debug_global_manager_state
|