portacode 0.3.19.dev4__py3-none-any.whl → 1.4.11.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of portacode might be problematic. Click here for more details.
- portacode/_version.py +16 -3
- portacode/cli.py +143 -17
- portacode/connection/client.py +149 -10
- portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +824 -21
- portacode/connection/handlers/__init__.py +28 -1
- portacode/connection/handlers/base.py +78 -16
- portacode/connection/handlers/chunked_content.py +244 -0
- portacode/connection/handlers/diff_handlers.py +603 -0
- portacode/connection/handlers/file_handlers.py +902 -17
- portacode/connection/handlers/project_aware_file_handlers.py +226 -0
- portacode/connection/handlers/project_state/README.md +312 -0
- portacode/connection/handlers/project_state/__init__.py +92 -0
- portacode/connection/handlers/project_state/file_system_watcher.py +179 -0
- portacode/connection/handlers/project_state/git_manager.py +1502 -0
- portacode/connection/handlers/project_state/handlers.py +875 -0
- portacode/connection/handlers/project_state/manager.py +1331 -0
- portacode/connection/handlers/project_state/models.py +108 -0
- portacode/connection/handlers/project_state/utils.py +50 -0
- portacode/connection/handlers/project_state_handlers.py +45 -2185
- portacode/connection/handlers/proxmox_infra.py +361 -0
- portacode/connection/handlers/registry.py +15 -4
- portacode/connection/handlers/session.py +483 -32
- portacode/connection/handlers/system_handlers.py +147 -8
- portacode/connection/handlers/tab_factory.py +53 -46
- portacode/connection/handlers/terminal_handlers.py +21 -8
- portacode/connection/handlers/update_handler.py +61 -0
- portacode/connection/multiplex.py +60 -2
- portacode/connection/terminal.py +214 -24
- portacode/keypair.py +63 -1
- portacode/link_capture/__init__.py +38 -0
- portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
- portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
- portacode/link_capture/bin/elinks +3 -0
- portacode/link_capture/bin/gio-open +3 -0
- portacode/link_capture/bin/gnome-open +3 -0
- portacode/link_capture/bin/gvfs-open +3 -0
- portacode/link_capture/bin/kde-open +3 -0
- portacode/link_capture/bin/kfmclient +3 -0
- portacode/link_capture/bin/link_capture_exec.sh +11 -0
- portacode/link_capture/bin/link_capture_wrapper.py +75 -0
- portacode/link_capture/bin/links +3 -0
- portacode/link_capture/bin/links2 +3 -0
- portacode/link_capture/bin/lynx +3 -0
- portacode/link_capture/bin/mate-open +3 -0
- portacode/link_capture/bin/netsurf +3 -0
- portacode/link_capture/bin/sensible-browser +3 -0
- portacode/link_capture/bin/w3m +3 -0
- portacode/link_capture/bin/x-www-browser +3 -0
- portacode/link_capture/bin/xdg-open +3 -0
- portacode/logging_categories.py +140 -0
- portacode/pairing.py +103 -0
- portacode/static/js/test-ntp-clock.html +63 -0
- portacode/static/js/utils/ntp-clock.js +232 -0
- portacode/utils/NTP_ARCHITECTURE.md +136 -0
- portacode/utils/__init__.py +1 -0
- portacode/utils/diff_apply.py +456 -0
- portacode/utils/diff_renderer.py +371 -0
- portacode/utils/ntp_clock.py +65 -0
- portacode-1.4.11.dev1.dist-info/METADATA +298 -0
- portacode-1.4.11.dev1.dist-info/RECORD +97 -0
- {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info}/WHEEL +1 -1
- portacode-1.4.11.dev1.dist-info/top_level.txt +3 -0
- test_modules/README.md +296 -0
- test_modules/__init__.py +1 -0
- test_modules/test_device_online.py +44 -0
- test_modules/test_file_operations.py +743 -0
- test_modules/test_git_status_ui.py +370 -0
- test_modules/test_login_flow.py +50 -0
- test_modules/test_navigate_testing_folder.py +361 -0
- test_modules/test_play_store_screenshots.py +294 -0
- test_modules/test_terminal_buffer_performance.py +261 -0
- test_modules/test_terminal_interaction.py +80 -0
- test_modules/test_terminal_loading_race_condition.py +95 -0
- test_modules/test_terminal_start.py +56 -0
- testing_framework/.env.example +21 -0
- testing_framework/README.md +334 -0
- testing_framework/__init__.py +17 -0
- testing_framework/cli.py +326 -0
- testing_framework/core/__init__.py +1 -0
- testing_framework/core/base_test.py +336 -0
- testing_framework/core/cli_manager.py +177 -0
- testing_framework/core/hierarchical_runner.py +577 -0
- testing_framework/core/playwright_manager.py +520 -0
- testing_framework/core/runner.py +447 -0
- testing_framework/core/shared_cli_manager.py +234 -0
- testing_framework/core/test_discovery.py +112 -0
- testing_framework/requirements.txt +12 -0
- portacode-0.3.19.dev4.dist-info/METADATA +0 -241
- portacode-0.3.19.dev4.dist-info/RECORD +0 -30
- portacode-0.3.19.dev4.dist-info/top_level.txt +0 -1
- {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info}/entry_points.txt +0 -0
- {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,1331 @@
|
|
|
1
|
+
"""Main project state manager that orchestrates all project state operations.
|
|
2
|
+
|
|
3
|
+
This module contains the ProjectStateManager class which is the central coordinator
|
|
4
|
+
for all project state operations, including file system monitoring, git operations,
|
|
5
|
+
tab management, and state synchronization.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import asyncio
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
import os
|
|
12
|
+
import threading
|
|
13
|
+
import time
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from asyncio import Lock
|
|
16
|
+
from dataclasses import asdict
|
|
17
|
+
from typing import Any, Dict, List, Optional, Set
|
|
18
|
+
|
|
19
|
+
from .models import ProjectState, MonitoredFolder, FileItem, TabInfo
|
|
20
|
+
from .git_manager import GitManager
|
|
21
|
+
from .file_system_watcher import FileSystemWatcher
|
|
22
|
+
from ....logging_categories import get_categorized_logger, LogCategory
|
|
23
|
+
|
|
24
|
+
logger = get_categorized_logger(__name__)
|
|
25
|
+
|
|
26
|
+
# Global singleton instance
# Populated lazily elsewhere in the package; typed as a forward reference
# because the class is defined below.
_global_project_state_manager: Optional['ProjectStateManager'] = None
# threading.Lock (not asyncio): guards the debug-state file writes in
# _write_debug_state across threads/processes sharing the module.
_manager_lock = threading.Lock()
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class ProjectStateManager:
|
|
32
|
+
"""Manages project state for client sessions."""
|
|
33
|
+
|
|
34
|
+
def __init__(self, control_channel, context: Dict[str, Any]):
    """Create a manager bound to one control channel.

    Args:
        control_channel: Outbound channel used to push project-state updates.
        context: Handler context dict; the ``use_content_caching`` key
            toggles hash-only tab serialization (see _serialize_tab_info).
    """
    self.control_channel = control_channel
    self.context = context
    # Per-client-session project state, keyed by client_session_id.
    self.projects: Dict[str, ProjectState] = {}
    # Per-session alias onto the shared git managers below.
    self.git_managers: Dict[str, GitManager] = {}
    # project_folder_path -> {"manager": GitManager, "sessions": set of ids}
    self._shared_git_managers: Dict[str, Dict[str, Any]] = {}
    # asyncio lock guarding _shared_git_managers mutations.
    self._shared_git_lock: Lock = Lock()
    self.file_watcher = FileSystemWatcher(self)
    self.debug_mode = False
    self.debug_file_path: Optional[str] = None
    # Per-session asyncio locks (see _get_session_lock).
    self._session_locks: Dict[str, Lock] = {}

    # Content caching optimization
    self.use_content_caching = context.get("use_content_caching", False)

    # Debouncing for file changes
    self._change_debounce_timer: Optional[asyncio.Task] = None
    self._pending_changes: Set[str] = set()
    self._pending_change_sources: Dict[str, Dict[str, Any]] = {}
|
|
53
|
+
|
|
54
|
+
def set_debug_mode(self, enabled: bool, debug_file_path: Optional[str] = None):
    """Toggle debug-mode JSON snapshots.

    When enabled, later state changes are mirrored to *debug_file_path*
    as a JSON document by _write_debug_state.
    """
    self.debug_mode, self.debug_file_path = enabled, debug_file_path
    if not enabled:
        return
    logger.info("Project state debug mode enabled, output to: %s", debug_file_path)
|
|
60
|
+
|
|
61
|
+
def _get_session_lock(self, client_session_id: str) -> Lock:
    """Return the per-session asyncio lock, creating it on first use."""
    existing = self._session_locks.get(client_session_id)
    if existing is not None:
        return existing
    created = Lock()
    self._session_locks[client_session_id] = created
    return created
|
|
68
|
+
|
|
69
|
+
def _write_debug_state(self):
    """Write current state to debug JSON file (thread-safe).

    No-op unless debug mode is active and a target path is configured.
    The snapshot is written to a ``.tmp`` sibling and then renamed so
    readers never observe a partially written file.
    """
    if not self.debug_mode or not self.debug_file_path:
        return

    # Use a lock to prevent multiple instances from writing simultaneously
    with _manager_lock:
        try:
            debug_data = {
                "_instance_info": {
                    "pid": os.getpid(),
                    "timestamp": time.time(),
                    "project_count": len(self.projects)
                }
            }

            for project_id, state in self.projects.items():
                debug_data[project_id] = {
                    "project_folder_path": state.project_folder_path,
                    "is_git_repo": state.is_git_repo,
                    "git_branch": state.git_branch,
                    "git_status_summary": state.git_status_summary,
                    # Only asdict() genuine dataclasses; anything else maps to None / {}.
                    "git_detailed_status": asdict(state.git_detailed_status) if state.git_detailed_status and hasattr(state.git_detailed_status, '__dataclass_fields__') else None,
                    "open_tabs": [self._serialize_tab_info(tab) for tab in state.open_tabs.values()],
                    "active_tab": self._serialize_tab_info(state.active_tab) if state.active_tab else None,
                    "monitored_folders": [asdict(mf) if hasattr(mf, '__dataclass_fields__') else {} for mf in state.monitored_folders],
                    "items": [self._serialize_file_item(item) for item in state.items]
                }

            # Write atomically by writing to temp file first, then renaming
            temp_file_path = self.debug_file_path + ".tmp"
            with open(temp_file_path, 'w', encoding='utf-8') as f:
                # default=str stringifies any non-JSON-native values (deliberate here).
                json.dump(debug_data, f, indent=2, default=str)

            # Atomic rename
            os.rename(temp_file_path, self.debug_file_path)

            # Only log debug info occasionally to avoid spam
            if len(debug_data) > 1:  # >1 because we always have _instance_info
                logger.debug("Debug state updated: %d projects (PID: %s)", len(debug_data) - 1, os.getpid())

        except Exception as e:
            # Best-effort diagnostics: never let debug output break real work.
            logger.error("Error writing debug state: %s", e)
|
|
112
|
+
|
|
113
|
+
def _serialize_file_item(self, item: FileItem) -> Dict[str, Any]:
    """Recursively serialize a FileItem tree for JSON output."""
    is_dataclass_item = hasattr(item, '__dataclass_fields__')
    serialized = asdict(item) if is_dataclass_item else {}
    children = item.children
    if children:
        serialized["children"] = [self._serialize_file_item(child) for child in children]
    return serialized
|
|
119
|
+
|
|
120
|
+
def _serialize_tab_info(self, tab: TabInfo) -> Dict[str, Any]:
    """Serialize TabInfo for JSON output.

    When content caching is enabled, raw content fields and oversized diff
    metadata are stripped from the payload; the content-hash fields remain
    so the client can resolve content from its local cache.
    """
    if not hasattr(tab, '__dataclass_fields__'):
        return {}

    payload = asdict(tab)

    if self.use_content_caching:
        # Ship hashes only, never the raw content.
        for content_field in ('content', 'original_content', 'modified_content'):
            payload.pop(content_field, None)
        # content_hash / original_content_hash / modified_content_hash stay
        # in the payload for client-side cache lookup.

        meta = payload.get('metadata')
        if meta:
            # Drop HTML diff blobs that can reach megabytes in size.
            meta.pop('html_diff_versions', None)
            meta.pop('diff_details', None)

    return payload
|
|
144
|
+
|
|
145
|
+
async def _handle_shared_git_change(self, project_folder_path: str):
    """Fan a git-change notification out to every session on this project."""
    # Snapshot the session set under the lock, then refresh outside it so a
    # slow refresh cannot block other acquire/release operations.
    async with self._shared_git_lock:
        registration = self._shared_git_managers.get(project_folder_path)
        sessions = list(registration["sessions"]) if registration else []

    for session_id in sessions:
        await self._refresh_project_state(session_id, git_only=True, reason="git_monitor")
|
|
153
|
+
|
|
154
|
+
async def _acquire_shared_git_manager(self, project_folder_path: str, client_session_id: str) -> GitManager:
    """Get or create a shared git manager for a project path.

    Sessions viewing the same project share one GitManager; the entry's
    session set acts as a refcount consumed by _release_shared_git_manager.
    """
    async with self._shared_git_lock:
        entry = self._shared_git_managers.get(project_folder_path)

        if not entry:
            # Closure captures project_folder_path so change notifications
            # can be fanned back out to every registered session.
            async def git_change_callback():
                await self._handle_shared_git_change(project_folder_path)

            git_manager = GitManager(
                project_folder_path,
                change_callback=git_change_callback,
                owner_session_id=client_session_id,
            )
            entry = {"manager": git_manager, "sessions": set()}
            self._shared_git_managers[project_folder_path] = entry

        entry["sessions"].add(client_session_id)
        # Per-session alias so later lookups by session id need no path.
        self.git_managers[client_session_id] = entry["manager"]
        return entry["manager"]
|
|
174
|
+
|
|
175
|
+
async def _release_shared_git_manager(self, project_folder_path: Optional[str], client_session_id: str):
    """Release a session's reference to a shared git manager and clean it up if unused."""
    manager_to_cleanup: Optional[GitManager] = None

    async with self._shared_git_lock:
        # Always drop the per-session alias first.
        manager = self.git_managers.pop(client_session_id, None)

        if not project_folder_path:
            # No path known: the session's manager (if any) cannot be matched
            # to a shared entry, so dispose of it directly.
            manager_to_cleanup = manager
        else:
            entry = self._shared_git_managers.get(project_folder_path)
            if entry:
                entry["sessions"].discard(client_session_id)
                if not entry["sessions"]:
                    # Last session gone: retire the shared entry too.
                    self._shared_git_managers.pop(project_folder_path, None)
                    manager_to_cleanup = manager or entry["manager"]
            else:
                manager_to_cleanup = manager

    # cleanup() runs outside the lock so it cannot block other sessions.
    if manager_to_cleanup:
        manager_to_cleanup.cleanup()
|
|
196
|
+
|
|
197
|
+
async def initialize_project_state(self, client_session_id: str, project_folder_path: str) -> ProjectState:
    """Initialize project state for a client session.

    Idempotent per (session, path): returns the existing state when the
    session already points at the same folder. Switching folders tears
    down the previous state and git-manager reference first. The whole
    operation is serialized per session via _get_session_lock.
    """
    lock = self._get_session_lock(client_session_id)
    async with lock:
        existing_project = self.projects.get(client_session_id)
        if existing_project:
            if existing_project.project_folder_path == project_folder_path:
                logger.info("Returning existing project state for client session: %s", client_session_id)
                return existing_project
            logger.info("Client session %s switching projects from %s to %s",
                        client_session_id, existing_project.project_folder_path, project_folder_path)
            previous_path = self._cleanup_project_locked(client_session_id)
            await self._release_shared_git_manager(previous_path, client_session_id)

        logger.info("Initializing project state for client session: %s, folder: %s", client_session_id, project_folder_path)
        git_manager = await self._acquire_shared_git_manager(project_folder_path, client_session_id)

        # Git queries are blocking; run them in the default executor.
        loop = asyncio.get_event_loop()
        is_git_repo = git_manager.is_git_repo
        git_branch = await loop.run_in_executor(None, git_manager.get_branch_name)
        git_status_summary = await loop.run_in_executor(None, git_manager.get_status_summary)
        git_detailed_status = await loop.run_in_executor(None, git_manager.get_detailed_status)

        project_state = ProjectState(
            client_session_id=client_session_id,
            project_folder_path=project_folder_path,
            items=[],  # populated below by the monitored-folder sync
            is_git_repo=is_git_repo,
            git_branch=git_branch,
            git_status_summary=git_status_summary,
            git_detailed_status=git_detailed_status,
        )

        # Seed monitored folders, then build the items tree and watchers.
        await self._initialize_monitored_folders(project_state)
        await self._sync_all_state_with_monitored_folders(project_state)

        self.projects[client_session_id] = project_state
        self._write_debug_state()

        return project_state
|
|
237
|
+
|
|
238
|
+
def get_diagnostics(self) -> Dict[str, Any]:
    """Return aggregate stats for health monitoring."""
    per_session_git = {
        session_id: manager.get_diagnostics()
        for session_id, manager in self.git_managers.items()
    }

    watcher_stats = self.file_watcher.get_diagnostics() if self.file_watcher else {}

    return {
        "projects": len(self.projects),
        "git_managers": len(self.git_managers),
        "pending_file_changes": len(self._pending_changes),
        "watcher": watcher_stats,
        "git_sessions": per_session_git,
    }
|
|
253
|
+
|
|
254
|
+
async def _initialize_monitored_folders(self, project_state: ProjectState):
    """Seed monitored folders: project root expanded, direct subdirs collapsed."""
    root_path = project_state.project_folder_path
    project_state.monitored_folders.append(
        MonitoredFolder(folder_path=root_path, is_expanded=True)
    )

    # Register each immediate subdirectory as a collapsed monitored folder.
    try:
        with os.scandir(root_path) as dir_entries:
            for dir_entry in dir_entries:
                # Skip only .git; other dot-folders are allowed.
                if not dir_entry.is_dir() or dir_entry.name == '.git':
                    continue
                project_state.monitored_folders.append(
                    MonitoredFolder(folder_path=dir_entry.path, is_expanded=False)
                )
    except (OSError, PermissionError) as e:
        logger.error("Error scanning project root for subdirectories: %s", e)
|
|
271
|
+
|
|
272
|
+
async def _start_watching_monitored_folders(self, project_state: ProjectState):
    """Register every monitored folder with the file system watcher."""
    watch = self.file_watcher.start_watching
    for folder in project_state.monitored_folders:
        watch(folder.folder_path)
|
|
276
|
+
|
|
277
|
+
async def _sync_watchdog_with_monitored_folders(self, project_state: ProjectState):
    """Ensure watchdog is monitoring each monitored folder individually (non-recursive)."""
    # Watch each monitored folder individually to align with the monitored_folders structure
    for monitored_folder in project_state.monitored_folders:
        self.file_watcher.start_watching(monitored_folder.folder_path)

    # Intentionally avoid watching .git; Git status changes are polled separately
    if project_state.is_git_repo:
        git_dir_path = os.path.join(project_state.project_folder_path, '.git')
        # NOTE(review): LogCategory.GIT is passed ahead of the %s argument —
        # presumably the categorized logger consumes a category before
        # formatting; confirm against logging_categories.get_categorized_logger.
        logger.debug("🔍 [TRACE] Project is git repo, but skipping .git watcher registration: %s", LogCategory.GIT, git_dir_path)
    else:
        logger.debug("🔍 [TRACE] Project is NOT a git repo, no .git watcher needed", LogCategory.GIT)

    # Watchdog synchronized
|
|
291
|
+
|
|
292
|
+
async def _sync_all_state_with_monitored_folders(self, project_state: ProjectState):
    """Re-align all dependent state (watchdog, items) after monitored_folders changed."""
    # Step 1: bring the watchdog registrations in line.
    logger.debug("Syncing watchdog monitoring")
    await self._sync_watchdog_with_monitored_folders(project_state)

    # Step 2: rebuild the flattened items tree from the monitored folders.
    logger.debug("Rebuilding items structure")
    await self._build_flattened_items_structure(project_state)

    # Step 3: mirror the new state to the debug file when enabled.
    self._write_debug_state()
    logger.debug("_sync_all_state_with_monitored_folders completed")
|
|
308
|
+
|
|
309
|
+
async def _add_subdirectories_to_monitored(self, project_state: ProjectState, parent_folder_path: str):
    """Add all subdirectories of a folder to monitored_folders if not already present, and remove deleted ones.

    Mutates project_state.monitored_folders only; callers are expected to
    run _sync_all_state_with_monitored_folders afterwards.
    """
    # logger.info("_add_subdirectories_to_monitored called for: %s", parent_folder_path)
    try:
        existing_paths = {mf.folder_path for mf in project_state.monitored_folders}
        # logger.info("Existing monitored paths: %s", existing_paths)
        added_any = False
        removed_any = False

        # First, clean up any monitored folders that no longer exist
        to_remove = []
        for monitored_folder in project_state.monitored_folders:
            # Don't remove the root project folder, only subdirectories
            if monitored_folder.folder_path != project_state.project_folder_path:
                if not os.path.exists(monitored_folder.folder_path):
                    logger.info("Removing deleted monitored folder: %s", monitored_folder.folder_path)
                    to_remove.append(monitored_folder)
                    removed_any = True

        # Remove after iteration so the list is never mutated mid-loop.
        for folder_to_remove in to_remove:
            project_state.monitored_folders.remove(folder_to_remove)

        # Then, add new subdirectories
        with os.scandir(parent_folder_path) as entries:
            for entry in entries:
                if entry.is_dir() and entry.name != '.git':  # Only exclude .git, allow other dot folders
                    # logger.info("Found subdirectory: %s", entry.path)
                    if entry.path not in existing_paths:
                        logger.info("Adding new monitored folder: %s", entry.path)
                        new_monitored = MonitoredFolder(folder_path=entry.path, is_expanded=False)
                        project_state.monitored_folders.append(new_monitored)
                        added_any = True
                    else:
                        # logger.info("Subdirectory already monitored: %s", entry.path)
                        pass

        # logger.info("Added any new folders: %s, Removed any deleted folders: %s", added_any, removed_any)
        # Note: sync will be handled by the caller, no need to sync here

    except (OSError, PermissionError) as e:
        logger.error("Error scanning folder %s for subdirectories: %s", parent_folder_path, e)
|
|
350
|
+
|
|
351
|
+
def _find_monitored_folder(self, project_state: ProjectState, folder_path: str) -> Optional[MonitoredFolder]:
    """Return the MonitoredFolder whose path matches *folder_path*, or None."""
    return next(
        (mf for mf in project_state.monitored_folders if mf.folder_path == folder_path),
        None,
    )
|
|
357
|
+
|
|
358
|
+
async def _load_directory_items(self, project_state: ProjectState, directory_path: str, is_root: bool = False, parent_item: Optional[FileItem] = None):
    """Load directory items with Git metadata.

    Scans *directory_path* one level deep, building FileItem entries with
    per-file git status (queried one path at a time — see
    _load_directory_items_list_sync for the batched variant). The sorted
    result is stored on project_state.items when is_root, otherwise on
    parent_item.children.
    """
    git_manager = self.git_managers.get(project_state.client_session_id)

    try:
        items = []

        # Use os.scandir for better performance
        with os.scandir(directory_path) as entries:
            for entry in entries:
                try:
                    # Skip .git metadata regardless of whether it's a dir or file (worktrees create files)
                    if entry.name == '.git':
                        continue

                    stat_info = entry.stat()
                    is_hidden = entry.name.startswith('.')

                    # Get Git status if available
                    git_info = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
                    if git_manager:
                        git_info = git_manager.get_file_status(entry.path)

                    # Check if this directory is expanded and loaded
                    is_expanded = False
                    is_loaded = True  # Files are always loaded; for directories, will be set based on monitored_folders

                    file_item = FileItem(
                        name=entry.name,
                        path=entry.path,
                        is_directory=entry.is_dir(),
                        parent_path=directory_path,
                        size=stat_info.st_size if entry.is_file() else None,
                        modified_time=stat_info.st_mtime,
                        is_git_tracked=git_info["is_tracked"],
                        git_status=git_info["status"],
                        is_staged=git_info["is_staged"],
                        is_hidden=is_hidden,
                        is_ignored=git_info["is_ignored"],
                        is_expanded=is_expanded,
                        is_loaded=is_loaded
                    )

                    items.append(file_item)

                except (OSError, PermissionError) as e:
                    # Unreadable entries are skipped, not fatal.
                    logger.debug("Error reading entry %s: %s", entry.path, e)
                    continue

        # Sort items: directories first, then files, both alphabetically
        items.sort(key=lambda x: (not x.is_directory, x.name.lower()))

        if is_root:
            project_state.items = items
        elif parent_item:
            parent_item.children = items
            # Don't set is_loaded here - it's set in _build_flattened_items_structure based on monitored_folders

    except (OSError, PermissionError) as e:
        logger.error("Error loading directory %s: %s", directory_path, e)
|
|
418
|
+
|
|
419
|
+
async def _build_flattened_items_structure(self, project_state: ProjectState):
    """Build a flattened items structure including ALL items from ALL monitored folders.

    Runs in three timed phases:
      1. scan every monitored folder collecting child paths;
      2. one batched git-status query for all collected paths;
      3. per-folder item construction in the executor using the pre-fetched
         status map, followed by dedup (by path) and a stable sort.
    """
    import time
    func_start = time.time()

    all_items = []

    # Create sets for quick lookup
    expanded_paths = {mf.folder_path for mf in project_state.monitored_folders if mf.is_expanded}
    monitored_paths = {mf.folder_path for mf in project_state.monitored_folders}

    # OPTIMIZATION: Collect all file paths first, then batch git operations
    batch_git_start = time.time()
    all_file_paths = []
    folder_to_paths = {}  # monitored_folder_path -> list of child paths

    # First pass: scan all directories to collect file paths
    for monitored_folder in project_state.monitored_folders:
        try:
            child_paths = []
            with os.scandir(monitored_folder.folder_path) as entries:
                for entry in entries:
                    if entry.name == '.git':
                        continue
                    child_paths.append(entry.path)
                    all_file_paths.append(entry.path)
            folder_to_paths[monitored_folder.folder_path] = child_paths
        except (OSError, PermissionError) as e:
            logger.error("Error scanning folder %s: %s", monitored_folder.folder_path, e)
            folder_to_paths[monitored_folder.folder_path] = []

    # BATCH GIT OPERATION: Get status for ALL files at once
    git_manager = self.git_managers.get(project_state.client_session_id)
    git_status_map = {}
    if git_manager and all_file_paths:
        loop = asyncio.get_event_loop()
        git_status_map = await loop.run_in_executor(
            None,
            git_manager.get_file_status_batch,
            all_file_paths
        )

    batch_git_duration = time.time() - batch_git_start
    logger.info("⏱️ Batch git operations for %d files took %.4f seconds", len(all_file_paths), batch_git_duration)

    # Second pass: load items using pre-fetched git status
    load_items_start = time.time()
    loop = asyncio.get_event_loop()
    for monitored_folder in project_state.monitored_folders:
        # Load direct children of this monitored folder (run in executor to avoid blocking)
        children = await loop.run_in_executor(
            None,
            self._load_directory_items_list_sync,
            monitored_folder.folder_path,
            monitored_folder.folder_path,
            git_status_map  # Pass pre-fetched git status
        )

        # Set correct expansion and loading states for each child
        for child in children:
            if child.is_directory:
                # Set is_expanded based on expanded_paths
                child.is_expanded = child.path in expanded_paths
                # Set is_loaded based on monitored_paths (content loaded = in monitored folders)
                child.is_loaded = child.path in monitored_paths
            else:
                # Files are always loaded
                child.is_loaded = True
            all_items.append(child)

    load_items_duration = time.time() - load_items_start
    logger.info("⏱️ Loading items took %.4f seconds", load_items_duration)

    # Remove duplicates (items might be loaded multiple times due to nested monitoring)
    dedup_start = time.time()
    items_dict = {}
    for item in all_items:
        items_dict[item.path] = item

    dedup_duration = time.time() - dedup_start
    logger.info("⏱️ Deduplication took %.4f seconds", dedup_duration)

    # Convert back to list and sort for consistent ordering
    sort_start = time.time()
    project_state.items = list(items_dict.values())
    project_state.items.sort(key=lambda x: (x.parent_path, not x.is_directory, x.name.lower()))
    sort_duration = time.time() - sort_start
    logger.info("⏱️ Sorting took %.4f seconds", sort_duration)

    func_duration = time.time() - func_start
    logger.info("⏱️ _build_flattened_items_structure TOTAL: %.4f seconds (batch_git=%.4f, load=%.4f)",
                func_duration, batch_git_duration, load_items_duration)
|
|
511
|
+
|
|
512
|
+
def _load_directory_items_list_sync(self, directory_path: str, parent_path: str,
                                    git_status_map: Optional[Dict[str, Dict[str, Any]]] = None) -> List[FileItem]:
    """Load directory items and return as a list with parent_path (synchronous version for executor).

    Unlike _load_directory_items, this never queries git per file: status
    comes solely from the optional pre-fetched map.

    Args:
        directory_path: Directory to scan
        parent_path: Parent path for items
        git_status_map: Optional pre-fetched git status map (path -> status_dict)
    """
    items = []

    try:
        with os.scandir(directory_path) as entries:
            for entry in entries:
                try:
                    # Skip .git metadata regardless of whether it's a dir or file (worktrees create files)
                    if entry.name == '.git':
                        continue

                    stat_info = entry.stat()
                    is_hidden = entry.name.startswith('.')

                    # Get Git status from pre-fetched map or use default
                    git_info = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
                    if git_status_map and entry.path in git_status_map:
                        git_info = git_status_map[entry.path]

                    file_item = FileItem(
                        name=entry.name,
                        path=entry.path,
                        is_directory=entry.is_dir(),
                        parent_path=parent_path,
                        size=stat_info.st_size if entry.is_file() else None,
                        modified_time=stat_info.st_mtime,
                        is_git_tracked=git_info["is_tracked"],
                        git_status=git_info["status"],
                        is_staged=git_info["is_staged"],
                        is_hidden=is_hidden,
                        is_ignored=git_info["is_ignored"],
                        is_expanded=False,
                        is_loaded=True  # Will be set correctly in _build_flattened_items_structure
                    )

                    items.append(file_item)

                except (OSError, PermissionError) as e:
                    # Unreadable entries are skipped, not fatal.
                    logger.debug("Error reading entry %s: %s", entry.path, e)
                    continue

        # Sort items: directories first, then files, both alphabetically
        items.sort(key=lambda x: (not x.is_directory, x.name.lower()))

    except (OSError, PermissionError) as e:
        logger.error("Error loading directory %s: %s", directory_path, e)

    return items
|
|
568
|
+
|
|
569
|
+
async def expand_folder(self, client_session_id: str, folder_path: str) -> bool:
    """Mark a monitored folder as expanded and load its contents.

    Returns True on success, False when the client session or the folder
    is not known to the manager.
    """
    logger.info("expand_folder called: client_session_id=%s, folder_path=%s", client_session_id, folder_path)

    state = self.projects.get(client_session_id)
    if state is None:
        logger.error("Project state not found for client session: %s", client_session_id)
        return False

    logger.info("Found project state. Current monitored_folders count: %d", len(state.monitored_folders))

    # Debug: dump every monitored folder with its expansion flag.
    for index, folder in enumerate(state.monitored_folders):
        logger.info("Monitored folder %d: path=%s, is_expanded=%s", index, folder.folder_path, folder.is_expanded)

    # Flip the requested folder to the expanded state.
    target = self._find_monitored_folder(state, folder_path)
    if not target:
        logger.error("Monitored folder not found for path: %s", folder_path)
        return False

    logger.info("Found monitored folder: %s, current is_expanded: %s", target.folder_path, target.is_expanded)
    target.is_expanded = True
    logger.info("Set monitored folder to expanded: %s", target.is_expanded)

    # Newly visible subdirectories must be brought under monitoring too.
    logger.info("Adding subdirectories to monitored for: %s", folder_path)
    await self._add_subdirectories_to_monitored(state, folder_path)

    # Rebuild items and watcher registrations from the monitored-folder set.
    logger.info("Syncing all state with monitored folders")
    await self._sync_all_state_with_monitored_folders(state)

    logger.info("expand_folder completed successfully")
    return True
|
|
604
|
+
|
|
605
|
+
async def collapse_folder(self, client_session_id: str, folder_path: str) -> bool:
    """Mark a monitored folder as collapsed without dropping its watchers."""
    state = self.projects.get(client_session_id)
    if state is None:
        return False

    target = self._find_monitored_folder(state, folder_path)
    if not target:
        return False

    target.is_expanded = False

    # Collapsed folders deliberately stay monitored: we still want to detect
    # new files/folders appearing inside them.
    # Rebuild dependent state so items carry the correct expansion flags.
    await self._sync_all_state_with_monitored_folders(state)

    return True
|
|
626
|
+
|
|
627
|
+
def _find_item_by_path(self, items: List[FileItem], target_path: str) -> Optional[FileItem]:
    """Depth-first search for the item whose path equals *target_path*.

    Visits each item before its children, siblings left-to-right — the same
    order as the recursive formulation — using an explicit stack.
    """
    stack = list(reversed(items))
    while stack:
        candidate = stack.pop()
        if candidate.path == target_path:
            return candidate
        if candidate.children:
            # Reversed so the first child is popped (visited) first.
            stack.extend(reversed(candidate.children))
    return None
|
|
637
|
+
|
|
638
|
+
async def open_file(self, client_session_id: str, file_path: str, set_active: bool = True) -> bool:
    """Open *file_path* in a tab with its content loaded.

    If the file is already open, the existing tab is reused (and optionally
    activated) instead of creating a duplicate.

    Args:
        client_session_id: Session whose project state receives the tab.
        file_path: Absolute path of the file to open.
        set_active: When True, the (new or existing) tab becomes the active tab.

    Returns:
        True on success, False when the session is unknown or tab creation fails.
    """
    if client_session_id not in self.projects:
        return False

    project_state = self.projects[client_session_id]

    # Tab keys are deterministic per (kind, path), which is what dedupes re-opens.
    from .utils import generate_tab_key
    tab_key = generate_tab_key('file', file_path)

    # Reuse an existing tab for this file if present.
    if tab_key in project_state.open_tabs:
        existing_tab = project_state.open_tabs[tab_key]
        if set_active:
            project_state.active_tab = existing_tab
        # Debug-state persistence is best-effort; a failure here must not make
        # an otherwise-successful open report False (the create path below
        # already treats this call as non-critical — keep both consistent).
        try:
            self._write_debug_state()
        except Exception as debug_e:
            logger.warning(f"Debug state write failed (non-critical): {debug_e}")
        return True

    # Create new file tab using the shared tab factory.
    from ..tab_factory import get_tab_factory
    tab_factory = get_tab_factory()

    try:
        logger.info(f"About to create tab for file: {file_path}")
        new_tab = await tab_factory.create_file_tab(file_path)
        logger.info(f"Tab created successfully, adding to project state")
        project_state.open_tabs[tab_key] = new_tab
        if set_active:
            project_state.active_tab = new_tab

        logger.info(f"Opened file tab: {file_path} (content loaded: {len(new_tab.content or '') > 0})")
        try:
            self._write_debug_state()
        except Exception as debug_e:
            logger.warning(f"Debug state write failed (non-critical): {debug_e}")
        return True
    except Exception as e:
        logger.error(f"Failed to create tab for file {file_path}: {e}")
        import traceback
        logger.error(f"Full traceback: {traceback.format_exc()}")
        return False
|
|
680
|
+
|
|
681
|
+
async def close_tab(self, client_session_id: str, tab_id: str) -> bool:
    """Close the tab identified by *tab_id*; returns False if it is unknown."""
    project_state = self.projects.get(client_session_id)
    if project_state is None:
        return False

    # Locate the dictionary key owning this tab id.
    match = next(
        ((key, tab) for key, tab in project_state.open_tabs.items() if tab.tab_id == tab_id),
        None,
    )
    if not match or not match[1]:
        return False

    tab_key_to_remove = match[0]
    del project_state.open_tabs[tab_key_to_remove]

    # If the closed tab was active, fall back to the last remaining tab
    # (or None when no tabs are left).
    active = project_state.active_tab
    if active and active.tab_id == tab_id:
        remaining = list(project_state.open_tabs.values())
        project_state.active_tab = remaining[-1] if remaining else None

    return True
|
|
709
|
+
|
|
710
|
+
async def set_active_tab(self, client_session_id: str, tab_id: Optional[str]) -> bool:
    """Make *tab_id* the active tab; a falsy tab_id clears the selection."""
    project_state = self.projects.get(client_session_id)
    if project_state is None:
        return False

    if not tab_id:
        # Explicit deselection.
        project_state.active_tab = None
        return True

    selected = next(
        (t for t in project_state.open_tabs.values() if t.tab_id == tab_id),
        None,
    )
    if not selected:
        return False
    project_state.active_tab = selected
    return True
|
|
731
|
+
|
|
732
|
+
async def open_diff_tab(self, client_session_id: str, file_path: str,
                       from_ref: str, to_ref: str, from_hash: Optional[str] = None,
                       to_hash: Optional[str] = None) -> bool:
    """Open a diff tab comparing file versions at different git timeline points.

    Args:
        client_session_id: Session whose project state receives the tab.
        file_path: Path of the file being diffed.
        from_ref / to_ref: One of "head", "staged", "working", or "commit".
        from_hash / to_hash: Commit hashes, used only when the matching ref
            is "commit".

    Returns:
        True when the tab was created or an existing one re-activated;
        False when the session is unknown, the project is not a git repo,
        or tab creation fails.
    """
    if client_session_id not in self.projects:
        return False

    project_state = self.projects[client_session_id]
    git_manager = self.git_managers.get(client_session_id)

    # Diffs are only meaningful inside a git repository.
    if not git_manager or not git_manager.is_git_repo:
        logger.error("Cannot create diff tab: not a git repository")
        return False

    # Generate unique key for diff tab (keyed on path + both refs/hashes,
    # so the same comparison dedupes to one tab).
    from .utils import generate_tab_key
    tab_key = generate_tab_key('diff', file_path,
                              from_ref=from_ref, to_ref=to_ref,
                              from_hash=from_hash, to_hash=to_hash)

    # Check if this diff tab is already open — reuse and activate it.
    if tab_key in project_state.open_tabs:
        existing_tab = project_state.open_tabs[tab_key]
        project_state.active_tab = existing_tab
        logger.info(f"Diff tab already exists, activating: {tab_key}")
        self._write_debug_state()
        return True

    try:
        # Get content based on the reference type.
        original_content = ""
        modified_content = ""

        # Handle 'from' reference. NOTE(review): an unrecognized ref (or
        # "commit" without a hash) silently falls through, leaving "" —
        # presumably intentional (diff against empty), but confirm.
        if from_ref == "head":
            original_content = git_manager.get_file_content_at_commit(file_path) or ""
        elif from_ref == "staged":
            original_content = git_manager.get_file_content_staged(file_path) or ""
        elif from_ref == "working":
            # Read current file content from disk.
            if os.path.exists(file_path):
                try:
                    with open(file_path, 'r', encoding='utf-8') as f:
                        original_content = f.read()
                except (OSError, UnicodeDecodeError) as e:
                    logger.error("Error reading working file %s: %s", file_path, e)
                    original_content = f"# Error reading file: {e}"
        elif from_ref == "commit" and from_hash:
            original_content = git_manager.get_file_content_at_commit(file_path, from_hash) or ""

        # Handle 'to' reference (mirrors the 'from' handling above).
        if to_ref == "head":
            modified_content = git_manager.get_file_content_at_commit(file_path) or ""
        elif to_ref == "staged":
            modified_content = git_manager.get_file_content_staged(file_path) or ""
        elif to_ref == "working":
            # Read current file content from disk.
            if os.path.exists(file_path):
                try:
                    with open(file_path, 'r', encoding='utf-8') as f:
                        modified_content = f.read()
                except (OSError, UnicodeDecodeError) as e:
                    logger.error("Error reading working file %s: %s", file_path, e)
                    modified_content = f"# Error reading file: {e}"
        elif to_ref == "commit" and to_hash:
            modified_content = git_manager.get_file_content_at_commit(file_path, to_hash) or ""

        # Create diff tab using tab factory.
        from ..tab_factory import get_tab_factory
        tab_factory = get_tab_factory()

        # Compute diff details for the client.
        diff_details = git_manager._compute_diff_details(original_content, modified_content)

        # Generate HTML diff with syntax highlighting (both minimal and full context).
        # Re-enable with improved performance and on-demand generation.
        html_diff_versions = None
        try:
            import time
            diff_start_time = time.time()

            # Skip HTML diff for very large files to prevent connection issues.
            original_size = len(original_content)
            modified_size = len(modified_content)
            if original_size > 1000000 or modified_size > 1000000:  # 1MB limit
                logger.warning(f"Skipping HTML diff generation for large file {file_path} ({original_size}+{modified_size} bytes)")
                html_diff_versions = None
            else:
                logger.info(f"Starting HTML diff generation for {file_path} ({original_size}+{modified_size} bytes)")
                html_diff_versions = git_manager._generate_html_diff(original_content, modified_content, file_path)
                diff_end_time = time.time()
                logger.info(f"HTML diff generation completed for {file_path} in {diff_end_time - diff_start_time:.2f}s")
        except Exception as e:
            logger.error(f"Error generating HTML diff for {file_path}: {e}")
            import traceback
            logger.error(f"Diff generation traceback: {traceback.format_exc()}")
            # Continue without HTML diff - fallback to basic diff will be used.

        # Create a descriptive title for the diff, e.g. "file.py (abc12345 → working)".
        title_parts = []
        if from_ref == "commit" and from_hash:
            title_parts.append(from_hash[:8])
        else:
            title_parts.append(from_ref)
        title_parts.append("→")
        if to_ref == "commit" and to_hash:
            title_parts.append(to_hash[:8])
        else:
            title_parts.append(to_ref)

        diff_title = f"{os.path.basename(file_path)} ({' '.join(title_parts)})"

        diff_tab = await tab_factory.create_diff_tab_with_title(
            file_path, original_content, modified_content, diff_title,
            diff_details=diff_details
        )

        # Add metadata about the diff references so the client can re-request.
        metadata_update = {
            'from_ref': from_ref,
            'to_ref': to_ref,
            'from_hash': from_hash,
            'to_hash': to_hash,
            'diff_timeline': True
        }

        # Only add HTML diff versions if they were successfully generated.
        if html_diff_versions:
            metadata_update['html_diff_versions'] = html_diff_versions

        diff_tab.metadata.update(metadata_update)

        project_state.open_tabs[tab_key] = diff_tab
        project_state.active_tab = diff_tab

        logger.info(f"Created timeline diff tab for: {file_path} ({from_ref} → {to_ref})")
        self._write_debug_state()
        return True

    except Exception as e:
        logger.error(f"Failed to create timeline diff tab for {file_path}: {e}")
        return False
|
|
874
|
+
|
|
875
|
+
async def _handle_file_change(self, event):
    """Handle a file system change event with debouncing.

    Records the event in the pending-change set and (re)starts a 0.5s
    debounce timer so a burst of events collapses into one call to
    _process_pending_changes().

    Args:
        event: Watcher event exposing ``event_type`` and ``src_path``.
    """
    # FIX: these category-tagged log calls previously passed LogCategory as an
    # extra argument with no matching placeholder, which makes stdlib logging
    # print "--- Logging error ---" tracebacks. A [%s] slot carries it now.
    logger.debug("🔍 [TRACE] [%s] _handle_file_change called: %s - %s", LogCategory.FILE_SYSTEM, event.event_type, event.src_path)
    # Events under .git are tracked separately so git-only batches can take a
    # cheaper refresh path downstream.
    is_git_event = ".git" in Path(event.src_path).parts
    if is_git_event:
        logger.info("File watcher event from .git [%s]: %s %s", LogCategory.FILE_SYSTEM, event.event_type, event.src_path)
    else:
        logger.debug("File watcher event [%s]: %s %s", LogCategory.FILE_SYSTEM, event.event_type, event.src_path)

    self._pending_changes.add(event.src_path)
    self._pending_change_sources[event.src_path] = {
        "event_type": event.event_type,
        "is_git_event": is_git_event,
        "timestamp": time.time(),
    }
    logger.debug("🔍 [TRACE] [%s] Added to pending changes: %s (total pending: %d)", LogCategory.FILE_SYSTEM, event.src_path, len(self._pending_changes))

    # Cancel existing timer so the debounce window restarts on every event.
    if self._change_debounce_timer and not self._change_debounce_timer.done():
        logger.debug("🔍 [TRACE] Cancelling existing debounce timer")
        self._change_debounce_timer.cancel()

    # Set new timer with proper exception handling.
    async def debounced_process():
        try:
            logger.debug("🔍 [TRACE] Starting debounce delay (0.5s)...")
            await asyncio.sleep(0.5)  # Debounce delay
            logger.debug("🔍 [TRACE] Debounce delay complete, processing pending changes...")
            await self._process_pending_changes()
        except asyncio.CancelledError:
            # Expected whenever a newer event restarts the window.
            logger.debug("🔍 [TRACE] Debounce timer cancelled")
        except Exception as e:
            logger.error("🔍 [TRACE] ❌ Error in debounced file processing: %s", e)

    logger.debug("🔍 [TRACE] Creating new debounce timer task...")
    self._change_debounce_timer = asyncio.create_task(debounced_process())
|
|
911
|
+
|
|
912
|
+
async def _process_pending_changes(self):
    """Process the batched pending file changes.

    Splits pending paths into git-metadata and workspace events, maps each
    path onto the project(s) whose root contains it, and triggers a refresh
    per affected project (git-only when every path came from .git). Clears
    both pending collections when done.
    """
    logger.debug("🔍 [TRACE] _process_pending_changes called")

    if not self._pending_changes:
        logger.debug("🔍 [TRACE] No pending changes to process")
        return

    logger.debug("🔍 [TRACE] Processing %d pending file changes: %s", len(self._pending_changes), list(self._pending_changes))
    git_events = [path for path in self._pending_changes if self._pending_change_sources.get(path, {}).get("is_git_event")]
    # Set gives O(1) membership below instead of an O(n) list scan per path.
    git_event_set = set(git_events)
    workspace_events = [path for path in self._pending_changes if path not in git_event_set]
    # FIX: the format string previously had one fewer placeholder than
    # arguments (LogCategory was unformatted), breaking stdlib logging.
    logger.info(
        "Pending change summary [%s]: total=%d git_events=%d workspace_events=%d sample_git=%s",
        LogCategory.FILE_SYSTEM,
        len(self._pending_changes),
        len(git_events),
        len(workspace_events),
        git_events[:3],
    )

    # Determine which projects each change path falls under.
    affected_projects = set()
    logger.debug("🔍 [TRACE] Checking %d active projects for affected paths", len(self.projects))

    for change_path in self._pending_changes:
        logger.debug("🔍 [TRACE] Checking change path: %s", change_path)
        for client_session_id, project_state in self.projects.items():
            logger.debug("🔍 [TRACE] Comparing with project path: %s (session: %s)",
                        project_state.project_folder_path, client_session_id)
            if change_path.startswith(project_state.project_folder_path):
                logger.debug("🔍 [TRACE] ✅ Change affects project session: %s", client_session_id)
                affected_projects.add(client_session_id)
            else:
                logger.debug("🔍 [TRACE] ❌ Change does NOT affect project session: %s", client_session_id)

    if affected_projects:
        logger.debug("🔍 [TRACE] Found %d affected projects: %s", len(affected_projects), list(affected_projects))
    else:
        logger.debug("🔍 [TRACE] ❌ No affected projects to refresh")

    # Refresh affected projects.
    for client_session_id in affected_projects:
        project_state = self.projects.get(client_session_id)
        if not project_state:
            continue
        project_paths = [
            path for path in self._pending_changes
            if path.startswith(project_state.project_folder_path)
        ]
        git_paths = [
            path for path in project_paths
            if self._pending_change_sources.get(path, {}).get("is_git_event")
        ]
        # A batch consisting solely of .git events allows a cheaper refresh.
        is_git_only_batch = bool(project_paths) and len(project_paths) == len(git_paths)
        # FIX: same placeholder/argument mismatch as above — LogCategory now
        # has its own [%s] slot.
        logger.info(
            "[%s] Refreshing project %s due to pending changes: total_paths=%d git_paths=%d git_only_batch=%s sample_paths=%s",
            LogCategory.FILE_SYSTEM,
            client_session_id,
            len(project_paths),
            len(git_paths),
            is_git_only_batch,
            project_paths[:3],
        )
        await self._refresh_project_state(
            client_session_id,
            git_only=is_git_only_batch,
            reason="filesystem_watch_git_only" if is_git_only_batch else "filesystem_watch",
        )

    self._pending_changes.clear()
    self._pending_change_sources.clear()
    logger.debug("🔍 [TRACE] ✅ Finished processing file changes")
|
|
984
|
+
|
|
985
|
+
async def _refresh_project_state(self, client_session_id: str, git_only: bool = False, reason: str = "unknown"):
    """Refresh project state after file changes.

    Args:
        client_session_id: The client session ID.
        git_only: If True, only git status changed (skip filesystem operations like
                 detecting new directories and syncing file state). Use this for
                 git operations (stage, unstage, revert) to avoid unnecessary work.
        reason: Free-form label describing what triggered the refresh (logging only).
    """
    logger.debug("🔍 [TRACE] _refresh_project_state called for session: %s (git_only=%s, reason=%s)",
                client_session_id, git_only, reason)

    if client_session_id not in self.projects:
        logger.debug("🔍 [TRACE] ❌ Session not found in projects: %s", client_session_id)
        return

    project_state = self.projects[client_session_id]
    # FIX: previously this indexed self.git_managers[...] directly, which
    # raises KeyError when the manager is missing and made the method's own
    # "No git manager found" branch unreachable; .get() restores it. The
    # repo-created/deleted check is also moved under the guard because it
    # dereferences git_manager.
    git_manager = self.git_managers.get(client_session_id)
    logger.debug("🔍 [TRACE] Found project state and git manager for session: %s", client_session_id)

    if git_manager:
        # Check if git repo status changed (created or deleted).
        git_dir_path = os.path.join(project_state.project_folder_path, '.git')
        git_dir_exists = os.path.exists(git_dir_path)

        if not git_manager.is_git_repo and git_dir_exists:
            # Git repo was created.
            logger.debug("🔍 [TRACE] Git repo detected, reinitializing git manager for session: %s", client_session_id)
            git_manager.reinitialize()
        elif git_manager.is_git_repo and not git_dir_exists:
            # Git repo was deleted.
            logger.debug("🔍 [TRACE] Git repo removed, updating git manager for session: %s", client_session_id)
            git_manager.repo = None
            git_manager.is_git_repo = False

        # Update Git status.
        logger.debug("🔍 [TRACE] Updating git status for session: %s", client_session_id)
        old_branch = project_state.git_branch
        old_status_summary = project_state.git_status_summary
        old_is_git_repo = project_state.is_git_repo

        # Update all git state atomically - single source of truth.
        project_state.is_git_repo = git_manager.is_git_repo
        project_state.git_branch = git_manager.get_branch_name()
        project_state.git_status_summary = git_manager.get_status_summary()
        project_state.git_detailed_status = git_manager.get_detailed_status()

        logger.debug("🔍 [TRACE] Git status updated - is_git_repo: %s->%s, branch: %s->%s, summary: %s->%s",
                    old_is_git_repo, project_state.is_git_repo,
                    old_branch, project_state.git_branch,
                    old_status_summary, project_state.git_status_summary)
    else:
        logger.debug("🔍 [TRACE] ❌ No git manager found for session: %s", client_session_id)

    # For git-only operations, skip scanning for new directories
    # but still sync items to update git attributes for UI.
    if not git_only:
        # Detect and add new directories in expanded folders before syncing.
        logger.debug("🔍 [TRACE] Detecting and adding new directories...")
        await self._detect_and_add_new_directories(project_state)
    else:
        logger.debug("🔍 [TRACE] Skipping directory detection (git_only=True)")

    # Always sync state to update git attributes on items (needed for UI updates).
    logger.debug("🔍 [TRACE] Syncing all state with monitored folders...")
    await self._sync_all_state_with_monitored_folders(project_state)

    # Send update to clients.
    logger.debug("🔍 [TRACE] About to send project state update...")
    await self._send_project_state_update(project_state)
|
|
1055
|
+
|
|
1056
|
+
async def _detect_and_add_new_directories(self, project_state: ProjectState):
    """Detect new directories in EXPANDED monitored folders and add them to monitoring."""
    # Only expanded folders are scanned; collapsed ones keep their watchers
    # but gain no new children here.
    expanded_folder_paths = [
        folder.folder_path
        for folder in project_state.monitored_folders
        if folder.is_expanded
    ]
    logger.debug("🔍 [TRACE] Checking %d expanded folders for new subdirectories: %s",
                len(expanded_folder_paths), expanded_folder_paths)

    for candidate in expanded_folder_paths:
        # isdir() is False for missing paths, so this covers exists-and-isdir.
        if not os.path.isdir(candidate):
            continue
        logger.debug("🔍 [TRACE] Checking expanded folder for new subdirectories: %s", candidate)
        await self._add_subdirectories_to_monitored(project_state, candidate)
|
|
1067
|
+
|
|
1068
|
+
async def _reload_visible_structures(self, project_state: ProjectState):
    """Reload all visible structures with flattened items.

    Thin delegate: _build_flattened_items_structure rebuilds the flattened
    item list from the current monitored-folder state.
    """
    await self._build_flattened_items_structure(project_state)
|
|
1071
|
+
|
|
1072
|
+
async def _send_project_state_update(self, project_state: ProjectState, server_project_id: Optional[str] = None):
    """Send project state update to the specific client session only.

    Builds a change-detection signature first and skips the send when nothing
    relevant changed since the last update. Large payloads are measured and
    logged before being pushed over the control channel.

    Args:
        project_state: The state snapshot to serialize and send.
        server_project_id: Optional server-side project id to report instead
            of the client session id.
    """
    logger.debug("🔍 [TRACE] _send_project_state_update called for session: %s", project_state.client_session_id)

    # Create state signature for change detection (cheap, hashable summary of
    # everything the client-visible payload depends on).
    current_state_signature = {
        "git_branch": project_state.git_branch,
        "git_status_summary": project_state.git_status_summary,
        "git_detailed_status": str(project_state.git_detailed_status) if project_state.git_detailed_status else None,
        "open_tabs": tuple((tab.tab_id, tab.tab_type, tab.title) for tab in project_state.open_tabs.values()),
        "active_tab": project_state.active_tab.tab_id if project_state.active_tab else None,
        "items_count": len(project_state.items),
        "monitored_folders": tuple((mf.folder_path, mf.is_expanded) for mf in sorted(project_state.monitored_folders, key=lambda x: x.folder_path))
    }

    logger.debug("🔍 [TRACE] Current state signature: %s", current_state_signature)

    # Check if state has actually changed since the last send.
    last_signature = getattr(project_state, '_last_sent_signature', None)
    logger.debug("🔍 [TRACE] Last sent signature: %s", last_signature)

    if last_signature == current_state_signature:
        logger.debug("🔍 [TRACE] ❌ Project state unchanged, skipping update for client: %s", project_state.client_session_id)
        return

    # State has changed, send update.
    project_state._last_sent_signature = current_state_signature
    logger.debug("🔍 [TRACE] ✅ State has changed, preparing to send update to client: %s", project_state.client_session_id)

    # NOTE(review): in the git_detailed_status entry below, the warning branch
    # also fires when the value is simply None (e.g. non-git projects), not
    # only when it is an unexpected non-dataclass — likely noisier than
    # intended; confirm before changing.
    payload = {
        "event": "project_state_update",
        "project_id": server_project_id or project_state.client_session_id,  # Use server ID if provided
        "project_folder_path": project_state.project_folder_path,
        "is_git_repo": project_state.is_git_repo,
        "git_branch": project_state.git_branch,
        "git_status_summary": project_state.git_status_summary,
        "git_detailed_status": asdict(project_state.git_detailed_status) if project_state.git_detailed_status and hasattr(project_state.git_detailed_status, '__dataclass_fields__') else (logger.warning(f"git_detailed_status is not a dataclass: {type(project_state.git_detailed_status)} - {project_state.git_detailed_status}") or None),
        "open_tabs": [self._serialize_tab_info(tab) for tab in project_state.open_tabs.values()],
        "active_tab": self._serialize_tab_info(project_state.active_tab) if project_state.active_tab else None,
        "items": [self._serialize_file_item(item) for item in project_state.items],
        "timestamp": time.time(),
        "client_sessions": [project_state.client_session_id]  # Target only this client session
    }

    # Log payload size analysis before sending.
    try:
        import json
        payload_json = json.dumps(payload)
        payload_size_kb = len(payload_json.encode('utf-8')) / 1024

        if payload_size_kb > 100:  # Log for large project state updates
            logger.warning("📦 Large project_state_update: %.1f KB for client %s",
                          payload_size_kb, project_state.client_session_id)

            # Analyze which parts are large.
            large_components = []
            for key, value in payload.items():
                if key in ['open_tabs', 'active_tab', 'items', 'git_detailed_status']:
                    component_size = len(json.dumps(value).encode('utf-8')) / 1024
                    if component_size > 10:  # Components > 10KB
                        large_components.append(f"{key}: {component_size:.1f}KB")

            if large_components:
                logger.warning("📦 Large components in project_state_update: %s", ", ".join(large_components))

            # Special analysis for active_tab which often contains HTML diff.
            if payload.get('active_tab') and isinstance(payload['active_tab'], dict):
                active_tab = payload['active_tab']
                tab_type = active_tab.get('tab_type', 'unknown')
                if tab_type == 'diff' and active_tab.get('metadata'):
                    metadata = active_tab['metadata']
                    if 'html_diff_versions' in metadata:
                        html_diff_size = len(json.dumps(metadata['html_diff_versions']).encode('utf-8')) / 1024
                        logger.warning("📦 HTML diff in active_tab: %.1f KB (tab_type: %s)", html_diff_size, tab_type)

        elif payload_size_kb > 50:
            logger.info("📦 Medium project_state_update: %.1f KB for client %s",
                       payload_size_kb, project_state.client_session_id)

    except Exception as e:
        # Size analysis is diagnostics only — never block the actual send.
        logger.warning("Failed to analyze payload size: %s", e)

    # Send via control channel with client session targeting.
    logger.debug("🔍 [TRACE] About to send payload via control channel...")
    try:
        await self.control_channel.send(payload)
        logger.debug("🔍 [TRACE] ✅ Successfully sent project_state_update to client: %s", project_state.client_session_id)
    except Exception as e:
        logger.error("🔍 [TRACE] ❌ Failed to send project_state_update: %s", e)
|
|
1161
|
+
|
|
1162
|
+
async def cleanup_project(self, client_session_id: str):
    """Release all resources tied to a client session's project state."""
    # Serialize cleanup against other operations on the same session.
    async with self._get_session_lock(client_session_id):
        folder_path = self._cleanup_project_locked(client_session_id)
        await self._release_shared_git_manager(folder_path, client_session_id)
|
|
1168
|
+
|
|
1169
|
+
def _cleanup_project_locked(self, client_session_id: str) -> Optional[str]:
    """Internal helper to release resources associated with a project state.

    Caller must hold the per-session lock.

    Args:
        client_session_id: Session whose project state should be torn down.

    Returns:
        The project folder path (the caller uses it to release the shared
        git manager), or None when no project state is registered.
    """
    project_state = self.projects.get(client_session_id)
    project_folder_path = project_state.project_folder_path if project_state else None

    if project_state:
        # Cancel debounce timer to prevent pending refreshes running after cleanup
        if self._change_debounce_timer and not self._change_debounce_timer.done():
            try:
                self._change_debounce_timer.cancel()
            except Exception:
                pass

        # Remove pending file change events related to this project.
        # FIX: compare against the folder path plus a path separator so a
        # project at "/foo/bar" does not also claim events under a sibling
        # folder such as "/foo/barbaz" (the previous bare startswith()
        # matched those too).
        if self._pending_changes:
            folder = project_state.project_folder_path
            folder_prefix = folder.rstrip(os.sep) + os.sep
            removed = [
                path for path in list(self._pending_changes)
                if path == folder or path.startswith(folder_prefix)
            ]
            for path in removed:
                self._pending_changes.discard(path)
                self._pending_change_sources.pop(path, None)
            if removed:
                logger.debug("Removed %d pending change paths for session %s during cleanup", len(removed), client_session_id)

        # Stop watching all monitored folders for this project
        for monitored_folder in project_state.monitored_folders:
            self.file_watcher.stop_watching(monitored_folder.folder_path)

        # Stop watching .git directory if it was being monitored
        if project_state.is_git_repo:
            git_dir_path = os.path.join(project_state.project_folder_path, '.git')
            self.file_watcher.stop_watching(git_dir_path)

        self.projects.pop(client_session_id, None)
        logger.info("Cleaned up project state: %s", client_session_id)
    else:
        logger.info("No project state found for client session: %s during cleanup", client_session_id)

    # Clean up associated git manager even if project state was not registered
    # (actual cleanup occurs when shared git manager refcount drops to zero)
    self._write_debug_state()
    return project_folder_path
|
1209
|
+
async def cleanup_projects_by_client_session(self, client_session_id: str):
    """Clean up project state for a specific client session when explicitly notified of disconnection."""
    logger.info("Explicitly cleaning up project state for disconnected client session: %s", client_session_id)

    # With the new design, each client session has only one project
    if client_session_id not in self.projects:
        logger.info("No project state found for client session: %s", client_session_id)
        return

    await self.cleanup_project(client_session_id)
    logger.info("Cleaned up project state for client session: %s", client_session_id)
|
1220
|
+
async def cleanup_all_projects(self):
    """Clean up all project states. Used for shutdown or reset."""
    logger.info("Cleaning up all project states")

    # Snapshot the keys: cleanup_project mutates self.projects as we go.
    session_ids = list(self.projects.keys())
    for session_id in session_ids:
        await self.cleanup_project(session_id)

    logger.info("Cleaned up %d project states", len(session_ids))
|
|
1230
|
+
async def refresh_project_state_for_file_change(self, file_path: str):
    """Public method to trigger project state refresh for a specific file change."""
    logger.info(f"Manual refresh triggered for file change: {file_path}")

    changed = Path(file_path)
    # Scan registered projects; refresh the first one whose folder contains
    # the changed file, then stop.
    for client_session_id, project_state in self.projects.items():
        try:
            # relative_to raises ValueError when the file is outside the tree
            changed.relative_to(Path(project_state.project_folder_path))
        except ValueError:
            continue
        logger.info(f"Refreshing project state for session {client_session_id} after file change: {file_path}")
        await self._refresh_project_state(client_session_id, reason="manual_file_change")
        break
|
|
1249
|
+
async def cleanup_orphaned_project_states(self, current_client_sessions: List[str]):
    """Clean up project states that don't match any current client session."""
    live_sessions = set(current_client_sessions)
    orphaned_keys = [sid for sid in list(self.projects.keys()) if sid not in live_sessions]

    if not orphaned_keys:
        logger.debug("No orphaned project states found")
        return

    logger.info("Found %d orphaned project states, cleaning up: %s", len(orphaned_keys), orphaned_keys)
    for session_id in orphaned_keys:
        await self.cleanup_project(session_id)
    logger.info("Cleaned up %d orphaned project states", len(orphaned_keys))
|
|
1267
|
+
# Helper function for other handlers to get/create project state manager
|
|
1268
|
+
def get_or_create_project_state_manager(context: Dict[str, Any], control_channel) -> 'ProjectStateManager':
    """Get or create project state manager with debug setup (SINGLETON PATTERN)."""
    global _global_project_state_manager

    logger.info("get_or_create_project_state_manager called")
    logger.info("Context debug flag: %s", context.get("debug", False))

    with _manager_lock:
        # Fast path: reuse the existing singleton.
        if _global_project_state_manager is not None:
            logger.info("Returning existing GLOBAL project state manager (PID: %s)", os.getpid())
            # Update the control channel reference in case it changed
            _global_project_state_manager.control_channel = control_channel

            # Log active project states for debugging
            if _global_project_state_manager.projects:
                logger.debug("Active project states: %s", list(_global_project_state_manager.projects.keys()))
            else:
                logger.debug("No active project states in global manager")

            return _global_project_state_manager

        # Slow path: first caller constructs the singleton.
        logger.info("Creating new GLOBAL ProjectStateManager (singleton)")
        manager = ProjectStateManager(control_channel, context)

        # Set up debug mode if enabled
        if context.get("debug", False):
            debug_file_path = os.path.join(os.getcwd(), "project_state_debug.json")
            logger.info("Setting up debug mode with file: %s", debug_file_path)
            manager.set_debug_mode(True, debug_file_path)
        else:
            logger.info("Debug mode not enabled in context")

        _global_project_state_manager = manager
        logger.info("Created and stored new GLOBAL manager (PID: %s)", os.getpid())
        return manager
|
1305
|
+
def get_global_project_state_manager() -> Optional['ProjectStateManager']:
    """Return the current global project state manager if it exists."""
    with _manager_lock:
        manager = _global_project_state_manager
    return manager
|
|
1311
|
+
def reset_global_project_state_manager():
    """Reset the global project state manager (for testing/cleanup)."""
    global _global_project_state_manager
    with _manager_lock:
        if not _global_project_state_manager:
            logger.debug("Global project state manager already None")
            return
        logger.info("Resetting global project state manager")
        _global_project_state_manager = None
|
|
1322
|
+
def debug_global_manager_state():
    """Debug function to log the current state of the global manager."""
    global _global_project_state_manager
    with _manager_lock:
        manager = _global_project_state_manager
        if not manager:
            logger.info("No global ProjectStateManager exists (PID: %s)", os.getpid())
            return
        logger.info("Global ProjectStateManager exists (PID: %s)", os.getpid())
        logger.info("Active project states: %s", list(manager.projects.keys()))
        logger.info("Total project states: %d", len(manager.projects))