portacode 0.3.22-py3-none-any.whl → 0.3.24-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- portacode/_version.py +16 -3
- portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +188 -16
- portacode/connection/handlers/__init__.py +4 -0
- portacode/connection/handlers/base.py +9 -5
- portacode/connection/handlers/chunked_content.py +244 -0
- portacode/connection/handlers/file_handlers.py +68 -2
- portacode/connection/handlers/project_aware_file_handlers.py +143 -1
- portacode/connection/handlers/project_state/git_manager.py +326 -66
- portacode/connection/handlers/project_state/handlers.py +307 -31
- portacode/connection/handlers/project_state/manager.py +44 -1
- portacode/connection/handlers/project_state/models.py +7 -0
- portacode/connection/handlers/project_state/utils.py +17 -1
- portacode/connection/handlers/project_state_handlers.py +1 -0
- portacode/connection/handlers/tab_factory.py +60 -7
- portacode/connection/terminal.py +13 -7
- {portacode-0.3.22.dist-info → portacode-0.3.24.dist-info}/METADATA +14 -3
- {portacode-0.3.22.dist-info → portacode-0.3.24.dist-info}/RECORD +25 -24
- {portacode-0.3.22.dist-info → portacode-0.3.24.dist-info}/WHEEL +1 -1
- test_modules/test_git_status_ui.py +24 -66
- testing_framework/core/playwright_manager.py +23 -0
- testing_framework/core/runner.py +10 -2
- testing_framework/core/test_discovery.py +7 -3
- {portacode-0.3.22.dist-info → portacode-0.3.24.dist-info}/entry_points.txt +0 -0
- {portacode-0.3.22.dist-info → portacode-0.3.24.dist-info/licenses}/LICENSE +0 -0
- {portacode-0.3.22.dist-info → portacode-0.3.24.dist-info}/top_level.txt +0 -0
@@ -9,6 +9,7 @@ import logging
 from typing import Any, Dict, List

 from ..base import AsyncHandler
+from ..chunked_content import create_chunked_response
 from .manager import get_or_create_project_state_manager

 logger = logging.getLogger(__name__)
@@ -364,20 +365,33 @@ class ProjectStateGitStageHandler(AsyncHandler):
         return "project_state_git_stage"

     async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
-        """Stage
+        """Stage file(s) in git for a project. Supports both single file and bulk operations."""
         server_project_id = message.get("project_id")
-        file_path = message.get("file_path")
+        file_path = message.get("file_path")  # Single file (backward compatibility)
+        file_paths = message.get("file_paths")  # Multiple files (bulk operation)
+        stage_all = message.get("stage_all", False)  # Stage all changes
         source_client_session = message.get("source_client_session")

         if not server_project_id:
             raise ValueError("project_id is required")
-        if not file_path:
-            raise ValueError("file_path is required")
         if not source_client_session:
             raise ValueError("source_client_session is required")

-
-
+        # Determine operation mode
+        if stage_all:
+            operation_desc = "staging all changes"
+            file_paths_to_stage = []
+        elif file_paths:
+            operation_desc = f"staging {len(file_paths)} files"
+            file_paths_to_stage = file_paths
+        elif file_path:
+            operation_desc = f"staging file {file_path}"
+            file_paths_to_stage = [file_path]
+        else:
+            raise ValueError("Either file_path, file_paths, or stage_all must be provided")
+
+        logger.info("%s for project %s (client session: %s)",
+                    operation_desc.capitalize(), server_project_id, source_client_session)

         # Get the project state manager
         manager = get_or_create_project_state_manager(self.context, self.control_channel)
@@ -387,19 +401,34 @@ class ProjectStateGitStageHandler(AsyncHandler):
         if not git_manager:
             raise ValueError("No git repository found for this project")

-        #
-
+        # Perform the staging operation
+        if stage_all:
+            success = git_manager.stage_all_changes()
+        elif len(file_paths_to_stage) == 1:
+            success = git_manager.stage_file(file_paths_to_stage[0])
+        else:
+            success = git_manager.stage_files(file_paths_to_stage)

         if success:
             # Refresh entire project state to ensure consistency
             await manager._refresh_project_state(source_client_session)

-
+        # Build response
+        response = {
             "event": "project_state_git_stage_response",
             "project_id": server_project_id,
-            "file_path": file_path,
             "success": success
         }
+
+        # Include appropriate file information in response for backward compatibility
+        if file_path:
+            response["file_path"] = file_path
+        if file_paths:
+            response["file_paths"] = file_paths
+        if stage_all:
+            response["stage_all"] = True
+
+        return response


 class ProjectStateGitUnstageHandler(AsyncHandler):
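Based on the fields the updated stage handler reads, a client can now send any of three request shapes; the response echoes whichever selector was used. The sketch below is illustrative only: the field names come from the handler above, but the command/envelope key (`cmd`) is an assumption and is not defined in this diff. The same pattern applies to the unstage and revert handlers that follow.

# Illustrative payloads for the updated project_state_git_stage command.
# "cmd" as the command field name is an assumption; the other keys are
# the ones read by the handler above.
single_file = {
    "cmd": "project_state_git_stage",            # assumed envelope field
    "project_id": "server-project-id",
    "file_path": "src/app.py",                   # single file (backward compatible)
    "source_client_session": "client-session-1",
}

bulk = {
    "cmd": "project_state_git_stage",
    "project_id": "server-project-id",
    "file_paths": ["src/app.py", "README.md"],   # bulk operation
    "source_client_session": "client-session-1",
}

everything = {
    "cmd": "project_state_git_stage",
    "project_id": "server-project-id",
    "stage_all": True,                           # stage all changes
    "source_client_session": "client-session-1",
}

# Expected response shape (per the handler above):
# {"event": "project_state_git_stage_response", "project_id": "...",
#  "file_paths": ["src/app.py", "README.md"], "success": True}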
@@ -410,20 +439,33 @@ class ProjectStateGitUnstageHandler(AsyncHandler):
         return "project_state_git_unstage"

     async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
-        """Unstage
+        """Unstage file(s) in git for a project. Supports both single file and bulk operations."""
         server_project_id = message.get("project_id")
-        file_path = message.get("file_path")
+        file_path = message.get("file_path")  # Single file (backward compatibility)
+        file_paths = message.get("file_paths")  # Multiple files (bulk operation)
+        unstage_all = message.get("unstage_all", False)  # Unstage all changes
         source_client_session = message.get("source_client_session")

         if not server_project_id:
             raise ValueError("project_id is required")
-        if not file_path:
-            raise ValueError("file_path is required")
         if not source_client_session:
             raise ValueError("source_client_session is required")

-
-
+        # Determine operation mode
+        if unstage_all:
+            operation_desc = "unstaging all changes"
+            file_paths_to_unstage = []
+        elif file_paths:
+            operation_desc = f"unstaging {len(file_paths)} files"
+            file_paths_to_unstage = file_paths
+        elif file_path:
+            operation_desc = f"unstaging file {file_path}"
+            file_paths_to_unstage = [file_path]
+        else:
+            raise ValueError("Either file_path, file_paths, or unstage_all must be provided")
+
+        logger.info("%s for project %s (client session: %s)",
+                    operation_desc.capitalize(), server_project_id, source_client_session)

         # Get the project state manager
         manager = get_or_create_project_state_manager(self.context, self.control_channel)
@@ -433,19 +475,34 @@ class ProjectStateGitUnstageHandler(AsyncHandler):
         if not git_manager:
             raise ValueError("No git repository found for this project")

-        #
-
+        # Perform the unstaging operation
+        if unstage_all:
+            success = git_manager.unstage_all_changes()
+        elif len(file_paths_to_unstage) == 1:
+            success = git_manager.unstage_file(file_paths_to_unstage[0])
+        else:
+            success = git_manager.unstage_files(file_paths_to_unstage)

         if success:
             # Refresh entire project state to ensure consistency
             await manager._refresh_project_state(source_client_session)

-
+        # Build response
+        response = {
             "event": "project_state_git_unstage_response",
             "project_id": server_project_id,
-            "file_path": file_path,
             "success": success
         }
+
+        # Include appropriate file information in response for backward compatibility
+        if file_path:
+            response["file_path"] = file_path
+        if file_paths:
+            response["file_paths"] = file_paths
+        if unstage_all:
+            response["unstage_all"] = True
+
+        return response


 class ProjectStateGitRevertHandler(AsyncHandler):
@@ -456,20 +513,33 @@ class ProjectStateGitRevertHandler(AsyncHandler):
         return "project_state_git_revert"

     async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
-        """Revert
+        """Revert file(s) in git for a project. Supports both single file and bulk operations."""
         server_project_id = message.get("project_id")
-        file_path = message.get("file_path")
+        file_path = message.get("file_path")  # Single file (backward compatibility)
+        file_paths = message.get("file_paths")  # Multiple files (bulk operation)
+        revert_all = message.get("revert_all", False)  # Revert all changes
         source_client_session = message.get("source_client_session")

         if not server_project_id:
             raise ValueError("project_id is required")
-        if not file_path:
-            raise ValueError("file_path is required")
         if not source_client_session:
             raise ValueError("source_client_session is required")

-
-
+        # Determine operation mode
+        if revert_all:
+            operation_desc = "reverting all changes"
+            file_paths_to_revert = []
+        elif file_paths:
+            operation_desc = f"reverting {len(file_paths)} files"
+            file_paths_to_revert = file_paths
+        elif file_path:
+            operation_desc = f"reverting file {file_path}"
+            file_paths_to_revert = [file_path]
+        else:
+            raise ValueError("Either file_path, file_paths, or revert_all must be provided")
+
+        logger.info("%s for project %s (client session: %s)",
+                    operation_desc.capitalize(), server_project_id, source_client_session)

         # Get the project state manager
         manager = get_or_create_project_state_manager(self.context, self.control_channel)
@@ -479,19 +549,34 @@ class ProjectStateGitRevertHandler(AsyncHandler):
         if not git_manager:
             raise ValueError("No git repository found for this project")

-        #
-
+        # Perform the revert operation
+        if revert_all:
+            success = git_manager.revert_all_changes()
+        elif len(file_paths_to_revert) == 1:
+            success = git_manager.revert_file(file_paths_to_revert[0])
+        else:
+            success = git_manager.revert_files(file_paths_to_revert)

         if success:
             # Refresh entire project state to ensure consistency
             await manager._refresh_project_state(source_client_session)

-
+        # Build response
+        response = {
             "event": "project_state_git_revert_response",
             "project_id": server_project_id,
-            "file_path": file_path,
             "success": success
         }
+
+        # Include appropriate file information in response for backward compatibility
+        if file_path:
+            response["file_path"] = file_path
+        if file_paths:
+            response["file_paths"] = file_paths
+        if revert_all:
+            response["revert_all"] = True
+
+        return response


 class ProjectStateGitCommitHandler(AsyncHandler):
@@ -579,4 +664,195 @@ async def handle_client_session_cleanup(handler, payload: Dict[str, Any], source
         "event": "client_session_cleanup_response",
         "client_session_id": client_session_id,
         "success": True
-    }
+    }
+
+
+class ProjectStateDiffContentHandler(AsyncHandler):
+    """Handler for requesting specific diff content for diff tabs."""
+
+    @property
+    def command_name(self) -> str:
+        return "project_state_diff_content_request"
+
+    async def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
+        """Request specific content for a diff tab (original, modified, or html_diff)."""
+        server_project_id = message.get("project_id")
+        file_path = message.get("file_path")
+        from_ref = message.get("from_ref")
+        to_ref = message.get("to_ref")
+        from_hash = message.get("from_hash")
+        to_hash = message.get("to_hash")
+        content_type = message.get("content_type")  # 'original', 'modified', 'html_diff'
+        request_id = message.get("request_id")
+        source_client_session = message.get("source_client_session")
+
+        # Validate required fields
+        if not server_project_id:
+            raise ValueError("project_id is required")
+        if not file_path:
+            raise ValueError("file_path is required")
+        if not from_ref:
+            raise ValueError("from_ref is required")
+        if not to_ref:
+            raise ValueError("to_ref is required")
+        if not content_type:
+            raise ValueError("content_type is required")
+        if not request_id:
+            raise ValueError("request_id is required")
+        if not source_client_session:
+            raise ValueError("source_client_session is required")
+
+        # Validate content_type
+        valid_content_types = ["original", "modified", "html_diff", "all"]
+        if content_type not in valid_content_types:
+            raise ValueError(f"content_type must be one of: {valid_content_types}")
+
+        # Get the project state manager
+        manager = get_or_create_project_state_manager(self.context, self.control_channel)
+
+        # Get the project state for this client session
+        if source_client_session not in manager.projects:
+            raise ValueError(f"No project state found for client session: {source_client_session}")
+
+        project_state = manager.projects[source_client_session]
+
+        try:
+            # Find the diff tab with matching parameters
+            matching_tab = None
+            for tab in project_state.open_tabs.values():
+                if tab.tab_type == "diff" and tab.file_path == file_path:
+                    # Get diff parameters from metadata
+                    tab_metadata = getattr(tab, 'metadata', {}) or {}
+                    tab_from_ref = tab_metadata.get('from_ref')
+                    tab_to_ref = tab_metadata.get('to_ref')
+                    tab_from_hash = tab_metadata.get('from_hash')
+                    tab_to_hash = tab_metadata.get('to_hash')
+
+                    if (tab_from_ref == from_ref and
+                        tab_to_ref == to_ref and
+                        tab_from_hash == from_hash and
+                        tab_to_hash == to_hash):
+                        matching_tab = tab
+                        break
+
+            if not matching_tab:
+                # Debug information
+                logger.error(f"No diff tab found for file_path={file_path}, from_ref={from_ref}, to_ref={to_ref}")
+                logger.error(f"Available diff tabs: {[(tab.file_path, getattr(tab, 'metadata', {})) for tab in project_state.open_tabs.values() if tab.tab_type == 'diff']}")
+                raise ValueError(f"No diff tab found matching the specified parameters: file_path={file_path}, from_ref={from_ref}, to_ref={to_ref}")
+
+            # Get the requested content based on type
+            content = None
+            if content_type == "original":
+                content = matching_tab.original_content
+            elif content_type == "modified":
+                content = matching_tab.modified_content
+            elif content_type == "html_diff":
+                # For html_diff, we need to get the HTML diff versions from metadata
+                html_diff_versions = getattr(matching_tab, 'metadata', {}).get('html_diff_versions')
+                if html_diff_versions:
+                    import json
+                    content = json.dumps(html_diff_versions)
+            elif content_type == "all":
+                # Return all content types as a JSON object
+                html_diff_versions = getattr(matching_tab, 'metadata', {}).get('html_diff_versions')
+                import json
+                content = json.dumps({
+                    "original_content": matching_tab.original_content,
+                    "modified_content": matching_tab.modified_content,
+                    "html_diff_versions": html_diff_versions
+                })
+
+            # If content is None or incomplete for "all", regenerate if needed
+            if content is None or (content_type == "all" and not all([matching_tab.original_content, matching_tab.modified_content])):
+                if content_type in ["original", "modified", "all"]:
+                    # Re-generate the diff content if needed
+                    await manager.create_diff_tab(
+                        source_client_session,
+                        file_path,
+                        from_ref,
+                        to_ref,
+                        from_hash,
+                        to_hash,
+                        activate=False  # Don't activate, just ensure content is loaded
+                    )
+
+                    # Try to get content again after regeneration (use same matching logic)
+                    updated_tab = None
+                    for tab in project_state.open_tabs.values():
+                        if tab.tab_type == "diff" and tab.file_path == file_path:
+                            tab_metadata = getattr(tab, 'metadata', {}) or {}
+                            if (tab_metadata.get('from_ref') == from_ref and
+                                tab_metadata.get('to_ref') == to_ref and
+                                tab_metadata.get('from_hash') == from_hash and
+                                tab_metadata.get('to_hash') == to_hash):
+                                updated_tab = tab
+                                break
+
+                    if updated_tab:
+                        if content_type == "original":
+                            content = updated_tab.original_content
+                        elif content_type == "modified":
+                            content = updated_tab.modified_content
+                        elif content_type == "html_diff":
+                            html_diff_versions = getattr(updated_tab, 'metadata', {}).get('html_diff_versions')
+                            if html_diff_versions:
+                                import json
+                                content = json.dumps(html_diff_versions)
+                        elif content_type == "all":
+                            html_diff_versions = getattr(updated_tab, 'metadata', {}).get('html_diff_versions')
+                            import json
+                            content = json.dumps({
+                                "original_content": updated_tab.original_content,
+                                "modified_content": updated_tab.modified_content,
+                                "html_diff_versions": html_diff_versions
+                            })
+
+            success = content is not None
+            base_response = {
+                "event": "project_state_diff_content_response",
+                "project_id": server_project_id,
+                "file_path": file_path,
+                "from_ref": from_ref,
+                "to_ref": to_ref,
+                "content_type": content_type,
+                "request_id": request_id,
+                "success": success
+            }
+
+            if from_hash:
+                base_response["from_hash"] = from_hash
+            if to_hash:
+                base_response["to_hash"] = to_hash
+
+            if success:
+                # Create chunked responses for large content
+                responses = create_chunked_response(base_response, "content", content)
+
+                # Send all responses
+                for response in responses:
+                    await self.send_response(response, project_id=server_project_id)
+
+                logger.info(f"Sent diff content response in {len(responses)} chunk(s) for {content_type} content")
+            else:
+                base_response["error"] = f"Failed to load {content_type} content for diff"
+                base_response["chunked"] = False
+                await self.send_response(base_response, project_id=server_project_id)
+
+            return  # AsyncHandler doesn't return responses, it sends them
+
+        except Exception as e:
+            logger.error("Error processing diff content request: %s", e)
+            error_response = {
+                "event": "project_state_diff_content_response",
+                "project_id": server_project_id,
+                "file_path": file_path,
+                "from_ref": from_ref,
+                "to_ref": to_ref,
+                "content_type": content_type,
+                "request_id": request_id,
+                "success": False,
+                "error": str(e),
+                "chunked": False
+            }
+            await self.send_response(error_response, project_id=server_project_id)
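The handler above splits large diff payloads with create_chunked_response from the new chunked_content module. Only the call signature and the "chunked" flag are visible in this diff; the sketch below is a hedged illustration of what such a helper could do, assuming a size threshold and chunk-index/total-chunks metadata fields — these names are assumptions, not the actual implementation in chunked_content.py.

# Hedged sketch of a chunking helper with the signature used above.
# MAX_CHUNK_SIZE, "chunk_index", and "total_chunks" are assumptions.
from typing import Any, Dict, List

MAX_CHUNK_SIZE = 64 * 1024  # assumed per-message budget for the payload field

def create_chunked_response(base_response: Dict[str, Any],
                            field_name: str,
                            content: str) -> List[Dict[str, Any]]:
    """Split a large string field across several copies of base_response."""
    if content is None or len(content) <= MAX_CHUNK_SIZE:
        response = dict(base_response)
        response[field_name] = content
        response["chunked"] = False          # flag also set by the handler above
        return [response]

    chunks = [content[i:i + MAX_CHUNK_SIZE]
              for i in range(0, len(content), MAX_CHUNK_SIZE)]
    responses = []
    for index, chunk in enumerate(chunks):
        response = dict(base_response)
        response[field_name] = chunk
        response["chunked"] = True
        response["chunk_index"] = index        # assumed field name
        response["total_chunks"] = len(chunks)  # assumed field name
        responses.append(response)
    return responses

A receiving client would then concatenate the field values of messages sharing the same request_id, in chunk order, before parsing the content.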
@@ -38,6 +38,9 @@ class ProjectStateManager:
         self.debug_mode = False
         self.debug_file_path: Optional[str] = None

+        # Content caching optimization
+        self.use_content_caching = context.get("use_content_caching", False)
+
         # Debouncing for file changes
         self._change_debounce_timer: Optional[asyncio.Task] = None
         self._pending_changes: Set[str] = set()
@@ -102,7 +105,28 @@ class ProjectStateManager:

     def _serialize_tab_info(self, tab: TabInfo) -> Dict[str, Any]:
         """Serialize TabInfo for JSON output."""
-
+        if not hasattr(tab, '__dataclass_fields__'):
+            return {}
+
+        tab_dict = asdict(tab)
+
+        # If content caching is enabled, exclude content fields to reduce payload size
+        if self.use_content_caching:
+            # Only include hashes, not the actual content
+            tab_dict.pop('content', None)
+            tab_dict.pop('original_content', None)
+            tab_dict.pop('modified_content', None)
+            # Keep the hashes for client-side cache lookup
+            # content_hash, original_content_hash, modified_content_hash remain
+
+            # Also exclude large metadata for diff tabs
+            if tab_dict.get('metadata'):
+                metadata = tab_dict['metadata']
+                # Remove massive HTML diff content that can be megabytes
+                metadata.pop('html_diff_versions', None)
+                metadata.pop('diff_details', None)
+
+        return tab_dict

     async def initialize_project_state(self, client_session_id: str, project_folder_path: str) -> ProjectState:
         """Initialize project state for a client session."""
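With content caching on, the serialized tab drops the raw content and keeps only the hash fields added to TabInfo, so the client can decide from the hash whether it already holds the content. A rough before/after illustration, with made-up values (the digest is abbreviated as a placeholder; only the key names come from this diff):

# Illustrative output of _serialize_tab_info for the same tab.
full_payload = {
    "tab_type": "file",
    "file_path": "src/app.py",
    "content": "print('hello')\n",                # sent when caching is off
    "content_hash": "sha256:<64-hex-digest>",     # placeholder value
    "is_dirty": False,
}

cached_payload = {
    "tab_type": "file",
    "file_path": "src/app.py",
    # "content", "original_content", "modified_content" removed;
    # the client resolves them from its cache by hash
    "content_hash": "sha256:<64-hex-digest>",
    "is_dirty": False,
}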
@@ -1005,6 +1029,25 @@ class ProjectStateManager:

         logger.info("Cleaned up %d project states", len(client_session_ids))

+    async def refresh_project_state_for_file_change(self, file_path: str):
+        """Public method to trigger project state refresh for a specific file change."""
+        logger.info(f"Manual refresh triggered for file change: {file_path}")
+
+        # Find project states that include this file path
+        for client_session_id, project_state in self.projects.items():
+            project_folder = Path(project_state.project_folder_path)
+
+            # Check if the file is within this project
+            try:
+                Path(file_path).relative_to(project_folder)
+                # File is within this project, trigger refresh
+                logger.info(f"Refreshing project state for session {client_session_id} after file change: {file_path}")
+                await self._refresh_project_state(client_session_id)
+                break
+            except ValueError:
+                # File is not within this project
+                continue
+
     def cleanup_orphaned_project_states(self, current_client_sessions: List[str]):
         """Clean up project states that don't match any current client session."""
         current_sessions_set = set(current_client_sessions)
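The new method is the first public entry point for externally triggered refreshes: it matches the changed path against each project folder with Path.relative_to() and refreshes the first project that contains it. A minimal usage sketch, assuming the caller already holds a manager instance and runs on the event loop (the watcher wiring itself is not part of this diff):

# Hedged usage sketch for refresh_project_state_for_file_change.
import asyncio

async def on_external_file_change(manager, changed_path: str) -> None:
    # Delegate to the manager; it finds the owning project and refreshes it.
    await manager.refresh_project_state_for_file_change(changed_path)

# e.g. from a file-watcher callback:
# asyncio.create_task(on_external_file_change(manager, "/work/project/src/app.py"))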
@@ -19,6 +19,13 @@ class TabInfo:
     content: Optional[str] = None  # Text content or base64 for media
     original_content: Optional[str] = None  # For diff view
     modified_content: Optional[str] = None  # For diff view
+
+    # Content hash fields for caching optimization
+    content_hash: Optional[str] = None  # SHA-256 hash of content
+    original_content_hash: Optional[str] = None  # SHA-256 hash of original_content for diffs
+    modified_content_hash: Optional[str] = None  # SHA-256 hash of modified_content for diffs
+    html_diff_hash: Optional[str] = None  # SHA-256 hash of html_diff_versions JSON
+
     is_dirty: bool = False  # Has unsaved changes
     mime_type: Optional[str] = None  # For media files
     encoding: Optional[str] = None  # Content encoding (base64, utf-8, etc.)
@@ -4,6 +4,7 @@ This module contains shared utility functions used across the project state
 management system, including tab key generation and other helper functions.
 """

+import hashlib
 import uuid


@@ -31,4 +32,19 @@ def generate_tab_key(tab_type: str, file_path: str, **kwargs) -> str:
         return kwargs.get('tab_id', str(uuid.uuid4()))
     else:
         # For other tab types, use file_path if available, otherwise tab_id
-        return file_path if file_path else kwargs.get('tab_id', str(uuid.uuid4()))
+        return file_path if file_path else kwargs.get('tab_id', str(uuid.uuid4()))
+
+
+def generate_content_hash(content: str) -> str:
+    """Generate SHA-256 hash of content for caching.
+
+    Args:
+        content: The string content to hash
+
+    Returns:
+        SHA-256 hash prefixed with 'sha256:'
+    """
+    if content is None:
+        return None
+
+    return "sha256:" + hashlib.sha256(content.encode('utf-8')).hexdigest()
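Because generate_content_hash is deterministic, a client can compare the hash advertised in project state against hashes of content it already holds and skip re-requesting unchanged files. A small usage sketch (the cache dict is illustrative, not part of the package):

# Usage sketch for generate_content_hash.
content = "print('hello')\n"
content_hash = generate_content_hash(content)   # "sha256:" + 64 hex digits

client_cache = {content_hash: content}          # illustrative client-side cache

def need_content(advertised_hash: str) -> bool:
    """True if the client must fetch the content because it is not cached."""
    return advertised_hash not in client_cache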
@@ -23,6 +23,7 @@ from .project_state.handlers import (
     ProjectStateTabCloseHandler,
     ProjectStateSetActiveTabHandler,
     ProjectStateDiffOpenHandler,
+    ProjectStateDiffContentHandler,
     ProjectStateGitStageHandler,
     ProjectStateGitUnstageHandler,
     ProjectStateGitRevertHandler,