aline-ai 0.2.0-py3-none-any.whl → 0.2.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/METADATA +1 -1
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/RECORD +10 -10
- realign/__init__.py +1 -1
- realign/commands/init.py +8 -2
- realign/hooks.py +97 -9
- realign/mcp_watcher.py +43 -3
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/WHEEL +0 -0
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/top_level.txt +0 -0
{aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/RECORD
CHANGED

@@ -1,25 +1,25 @@
-aline_ai-0.2.
-realign/__init__.py,sha256=
+aline_ai-0.2.2.dist-info/licenses/LICENSE,sha256=H8wTqV5IF1oHw_HbBtS1PSDU8G_q81yblEIL_JfV8Vo,1077
+realign/__init__.py,sha256=f1FalM6Y9iEIqHcRYlPxgxYsNHpJAHkmJtFz4WPRUeU,68
 realign/claude_detector.py,sha256=NLxI0zJWcqNxNha9jAy9AslTMwHKakCc9yPGdkrbiFE,3028
 realign/cli.py,sha256=bkwS329jMDEkrUEihXRN2DDyeTKE6HbAysoDxxskZ8g,941
 realign/codex_detector.py,sha256=RI3JbZgebrhoqpRfTBMfclYCAISN7hZAHVW3bgftJpU,4428
 realign/config.py,sha256=jarinbr0mA6e5DmgY19b_VpMnxk6SOYTwyvB9luq0ww,7207
 realign/file_lock.py,sha256=-9c3tMdMj_ZxmasK5y6hV9Gfo6KDsSO3Q7PXiTBhsu4,3369
-realign/hooks.py,sha256=
+realign/hooks.py,sha256=80x7kF7t_D2D0TWM4whCU2Id3ItgkPXeREKkz1jfL_c,48751
 realign/logging_config.py,sha256=KvkKktF-bkUu031y9vgUoHpsbnOw7ud25jhpzliNZwA,4929
 realign/mcp_server.py,sha256=dntFatMpozI80K5hHrIiQ9sviC6ARKTP89goULhi1T4,16477
-realign/mcp_watcher.py,sha256=
+realign/mcp_watcher.py,sha256=ffYOXDLuf9T6Kab3CdGNAOY3DBlAbjZrVrSjM5RdYGU,26828
 realign/redactor.py,sha256=uZvLKKGrRGJm-qM8S4XJyJK6i0CSSby_wbKiay7VGJw,8148
 realign/commands/__init__.py,sha256=GG6IMw6fUBQAXGJDFJvOOQgv6pkiRSfMh8z3AYXTyRM,31
 realign/commands/auto_commit.py,sha256=jgjAYZHqN34NmQkncZg3Vtwsl3MyAlsvucxEBwUj7ko,7450
 realign/commands/commit.py,sha256=mlwrv5nfTRY17WlcAdiJKKGh5uM7dGvT7sMxhdbsfkw,12605
 realign/commands/config.py,sha256=iiu7usqw00djKZja5bx0iDH8DB0vU2maUPMkXLdgXwI,6609
-realign/commands/init.py,sha256=
+realign/commands/init.py,sha256=52WkcgdTdsvYXYOuTQ0zR1QF6QOWsjWOuu0f8vLhbL4,13452
 realign/commands/search.py,sha256=xTWuX0lpjQPX8cen0ewl-BNF0FeWgjMwN06bdeesED8,18770
 realign/commands/session_utils.py,sha256=L1DwZIGCOBirp6tkAswACJEeDa6i9aAAfsialAs4rRY,864
 realign/commands/show.py,sha256=A9LvhOBcY6_HoI76irPB2rBOSgdftBuX2uZiO8IwNoU,16338
-aline_ai-0.2.
-aline_ai-0.2.
-aline_ai-0.2.
-aline_ai-0.2.
-aline_ai-0.2.
+aline_ai-0.2.2.dist-info/METADATA,sha256=Eck_4ZFdzNnAcomzVJ5aoK4XgwPKpBmh8NiAzJfN2RE,1398
+aline_ai-0.2.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aline_ai-0.2.2.dist-info/entry_points.txt,sha256=h-NocHDzSueXfsepHTIdRPNQzhNZQPAztJfldd-mQTE,202
+aline_ai-0.2.2.dist-info/top_level.txt,sha256=yIL3s2xv9nf1GwD5n71Aq_JEIV4AfzCIDNKBzewuRm4,8
+aline_ai-0.2.2.dist-info/RECORD,,
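
For reference when reading the hunk above: each RECORD entry is a CSV row of the form path,hash,size, where the hash is "sha256=" followed by the URL-safe, unpadded base64 SHA-256 digest of the file, and the RECORD file lists itself with empty hash and size. A minimal verification sketch under those assumptions (the site-packages path in the usage comment is illustrative):

import base64
import hashlib
from pathlib import Path

def verify_record_entry(line: str, root: Path) -> bool:
    # Check one RECORD row ("path,sha256=<digest>,<size>") against the installed file.
    path, hash_spec, size = line.rstrip("\n").split(",")
    if not hash_spec:  # the RECORD file lists itself with empty hash/size
        return True
    algo, _, expected = hash_spec.partition("=")
    data = (root / path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.new(algo, data).digest()).rstrip(b"=").decode()
    return digest == expected and len(data) == int(size)

# e.g. verify_record_entry("realign/__init__.py,sha256=f1FalM6Y9iEIqHcRYlPxgxYsNHpJAHkmJtFz4WPRUeU,68", Path("/path/to/site-packages"))
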
realign/__init__.py
CHANGED
realign/commands/init.py
CHANGED
@@ -119,10 +119,16 @@ def init_repository(
     hook_path.chmod(0o755)
     result["hooks_created"].append(f"prepare-commit-msg ({action})")
 
-    # Create .gitignore for sessions
+    # Create .gitignore for sessions and metadata
     gitignore_path = realign_dir / ".gitignore"
     if not gitignore_path.exists():
-
+        gitignore_content = (
+            "# Uncomment to ignore session files\n"
+            "# sessions/\n\n"
+            "# Ignore metadata files (used internally to prevent duplicate processing)\n"
+            ".metadata/\n"
+        )
+        gitignore_path.write_text(gitignore_content, encoding="utf-8")
 
     # Backup and set core.hooksPath
     backup_file = realign_dir / "backup_hook_config.yaml"
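
After this change, running the init command on a repository without an existing .realign/.gitignore writes a file with exactly the content assembled above, i.e. sessions stay tracked by default and the new .metadata/ cache is ignored:

# Uncomment to ignore session files
# sessions/

# Ignore metadata files (used internally to prevent duplicate processing)
.metadata/
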
realign/hooks.py
CHANGED
@@ -311,6 +311,10 @@ def find_latest_session(history_path: Path, explicit_path: Optional[str] = None)
     """
     Find the most recent session file.
 
+    Filters out Claude Code agent sessions (agent-*.jsonl) since they are
+    sub-tasks of main sessions and their results are already incorporated
+    into the main session files.
+
     Args:
         history_path: Path to history directory or a specific session file (for Codex)
         explicit_path: Explicit path to a session file (overrides history_path)

@@ -337,7 +341,13 @@ find_latest_session(history_path: Path, explicit_path: Optional[str] = None)
     # Otherwise, search directory for session files
     session_files = []
     for pattern in ["*.json", "*.jsonl"]:
-
+        for file in history_path.glob(pattern):
+            # Filter out Claude Code agent sessions (agent-*.jsonl)
+            # These are sub-tasks whose results are already in main sessions
+            if file.name.startswith("agent-"):
+                logger.debug(f"Skipping agent session: {file.name}")
+                continue
+            session_files.append(file)
 
     if not session_files:
         return None

@@ -689,12 +699,12 @@ def copy_session_to_repo(
     repo_root: Path,
     user: str,
     config: Optional[ReAlignConfig] = None
-) -> Tuple[Path, str, bool]:
+) -> Tuple[Path, str, bool, int]:
     """
     Copy session file to repository .realign/sessions/ directory.
     Optionally redacts sensitive information if configured.
     If the source filename is in UUID format, renames it to include username for better identification.
-    Returns (absolute_path, relative_path, was_redacted).
+    Returns (absolute_path, relative_path, was_redacted, content_size).
     """
     logger.info(f"Copying session to repo: {session_file.name}")
     logger.debug(f"Source: {session_file}, Repo root: {repo_root}, User: {user}")

@@ -746,7 +756,12 @@ def copy_session_to_repo(
     temp_path.rename(dest_path)
     rel_path = dest_path.relative_to(repo_root)
     logger.warning(f"Copied session with fallback (no agent detection): {rel_path}")
-
+    # Get file size for the fallback case
+    try:
+        fallback_size = dest_path.stat().st_size
+    except Exception:
+        fallback_size = 0
+    return dest_path, str(rel_path), False, fallback_size
 
     # Detect agent type from session content
     agent_type = "unknown"

@@ -843,9 +858,68 @@ def copy_session_to_repo(
     shutil.copy2(session_file, dest_path)
     logger.warning("Fallback to simple copy")
 
-    # Return both absolute and relative paths, plus redaction status
+    # Return both absolute and relative paths, plus redaction status and content size
     rel_path = dest_path.relative_to(repo_root)
-
+    content_size = len(content)
+    return dest_path, str(rel_path), was_redacted, content_size
+
+
+def save_session_metadata(repo_root: Path, session_relpath: str, content_size: int):
+    """
+    Save metadata about a processed session to avoid reprocessing.
+
+    Args:
+        repo_root: Path to repository root
+        session_relpath: Relative path to session file
+        content_size: Size of session content when processed
+    """
+    metadata_dir = repo_root / ".realign" / ".metadata"
+    metadata_dir.mkdir(parents=True, exist_ok=True)
+
+    # Use session filename as metadata key
+    session_name = Path(session_relpath).name
+    metadata_file = metadata_dir / f"{session_name}.meta"
+
+    metadata = {
+        "processed_at": time.time(),
+        "content_size": content_size,
+        "session_relpath": session_relpath,
+    }
+
+    try:
+        with open(metadata_file, 'w', encoding='utf-8') as f:
+            json.dump(metadata, f)
+        logger.debug(f"Saved metadata for {session_relpath}: {content_size} bytes")
+    except Exception as e:
+        logger.warning(f"Failed to save metadata for {session_relpath}: {e}")
+
+
+def get_session_metadata(repo_root: Path, session_relpath: str) -> Optional[Dict[str, Any]]:
+    """
+    Get metadata about a previously processed session.
+
+    Args:
+        repo_root: Path to repository root
+        session_relpath: Relative path to session file
+
+    Returns:
+        Metadata dictionary or None if not found
+    """
+    metadata_dir = repo_root / ".realign" / ".metadata"
+    session_name = Path(session_relpath).name
+    metadata_file = metadata_dir / f"{session_name}.meta"
+
+    if not metadata_file.exists():
+        return None
+
+    try:
+        with open(metadata_file, 'r', encoding='utf-8') as f:
+            metadata = json.load(f)
+        logger.debug(f"Loaded metadata for {session_relpath}: {metadata.get('content_size')} bytes")
+        return metadata
+    except Exception as e:
+        logger.warning(f"Failed to load metadata for {session_relpath}: {e}")
+        return None
 
 
 def process_sessions(
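
With the two helpers above in place, every processed session gets a small JSON sidecar under .realign/.metadata/, keyed by the session's filename. An illustrative sidecar (values made up, not captured from a real run) would be:

.realign/.metadata/<session-file-name>.meta
{"processed_at": 1717171717.0, "content_size": 48123, "session_relpath": ".realign/sessions/<session-file-name>"}

This .metadata/ directory is exactly what the new .gitignore entry added in realign/commands/init.py keeps out of version control.
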
@@ -915,13 +989,15 @@ def process_sessions(
 
     # Copy all sessions to repo (with optional redaction)
     session_relpaths = []
+    session_metadata_map = {} # Map session_relpath -> content_size
     any_redacted = False
     for session_file in session_files:
         try:
-            _, session_relpath, was_redacted = copy_session_to_repo(
+            _, session_relpath, was_redacted, content_size = copy_session_to_repo(
                 session_file, repo_root, user, config
             )
             session_relpaths.append(session_relpath)
+            session_metadata_map[session_relpath] = content_size
             if was_redacted:
                 any_redacted = True
         except Exception as e:

@@ -941,8 +1017,13 @@ def process_sessions(
 
     logger.info(f"Copied {len(session_relpaths)} session(s): {session_relpaths}")
 
-    # If pre-commit mode,
+    # If pre-commit mode, save metadata and return session paths (summary will be generated later)
     if pre_commit_mode:
+        # Save metadata for each session to prevent reprocessing
+        for session_relpath, content_size in session_metadata_map.items():
+            save_session_metadata(repo_root, session_relpath, content_size)
+            logger.debug(f"Saved metadata for {session_relpath} in pre-commit")
+
         elapsed = time.time() - start_time
         logger.info(f"======== Hook completed: {hook_type} in {elapsed:.2f}s ========")
         return {

@@ -974,7 +1055,10 @@ def process_sessions(
     summary_model_label: Optional[str] = None
 
     for session_relpath in session_relpaths:
-        # Extract NEW content using git diff
+        # Extract NEW content using git diff (compares staged content with HEAD)
+        # This correctly detects new content even if the file hasn't grown since pre-commit
+        # (which happens in auto-commit scenarios where the AI has finished responding)
+        current_size = session_metadata_map.get(session_relpath, 0)
         new_content = get_new_content_from_git_diff(repo_root, session_relpath)
 
         if not new_content or not new_content.strip():

@@ -1020,6 +1104,10 @@ def process_sessions(
         })
         legacy_summary_chunks.append(f"[{agent_name}] {summary_text}")
 
+        # Update metadata after successfully generating summary
+        save_session_metadata(repo_root, session_relpath, current_size)
+        logger.debug(f"Updated metadata for {session_relpath} in prepare-commit-msg")
+
     # Combine all summaries
     if summary_entries:
         if summary_model_label is None:
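
Taken together, the hunks above suggest this flow in 0.2.2: the pre-commit hook records each copied session's content size via save_session_metadata, prepare-commit-msg refreshes that record after a summary is generated, and the actual "is there new content?" decision still goes through get_new_content_from_git_diff. A hypothetical size-based check built on the new helper (should_skip_session is not part of the package) could look like:

from pathlib import Path

from realign.hooks import get_session_metadata  # added in 0.2.2

def should_skip_session(repo_root: Path, session_relpath: str, current_size: int) -> bool:
    # True if the session file has not grown since it was last recorded in .realign/.metadata/.
    meta = get_session_metadata(repo_root, session_relpath)  # None if no .meta sidecar exists yet
    return meta is not None and current_size <= int(meta.get("content_size", 0))
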
realign/mcp_watcher.py
CHANGED
@@ -14,6 +14,10 @@ from datetime import datetime
 
 from .config import ReAlignConfig
 from .hooks import find_all_active_sessions
+from .logging_config import setup_logger
+
+# Initialize logger for watcher
+logger = setup_logger('realign.mcp_watcher', 'mcp_watcher.log')
 
 
 # Session type detection

@@ -38,17 +42,24 @@ class DialogueWatcher:
     async def start(self):
         """Start watching session files."""
         if not self.config.mcp_auto_commit:
+            logger.info("Auto-commit disabled in config")
             print("[MCP Watcher] Auto-commit disabled in config", file=sys.stderr)
             return
 
         self.running = True
+        logger.info("Started watching for dialogue completion")
+        logger.info(f"Mode: Per-request (triggers at end of each AI response)")
+        logger.info(f"Supports: Claude Code & Codex (auto-detected)")
+        logger.info(f"Debounce: {self.debounce_delay}s, Cooldown: {self.min_commit_interval}s")
         print("[MCP Watcher] Started watching for dialogue completion", file=sys.stderr)
         print(f"[MCP Watcher] Mode: Per-request (triggers at end of each AI response)", file=sys.stderr)
         print(f"[MCP Watcher] Supports: Claude Code & Codex (auto-detected)", file=sys.stderr)
         print(f"[MCP Watcher] Debounce: {self.debounce_delay}s, Cooldown: {self.min_commit_interval}s", file=sys.stderr)
         if self.project_path:
+            logger.info(f"Monitoring project: {self.project_path}")
             print(f"[MCP Watcher] Monitoring project: {self.project_path}", file=sys.stderr)
         else:
+            logger.info("Project path unknown, falling back to multi-project scan")
             print("[MCP Watcher] Project path unknown, falling back to multi-project scan", file=sys.stderr)
 
         # Initialize baseline sizes and stop_reason counts

@@ -61,6 +72,7 @@ class DialogueWatcher:
                 await self.check_for_changes()
                 await asyncio.sleep(0.5) # Check every 0.5 seconds for responsiveness
             except Exception as e:
+                logger.error(f"Error in check loop: {e}", exc_info=True)
                 print(f"[MCP Watcher] Error: {e}", file=sys.stderr)
                 await asyncio.sleep(1.0)
 

@@ -69,6 +81,7 @@ class DialogueWatcher:
         self.running = False
         if self.pending_commit_task:
             self.pending_commit_task.cancel()
+        logger.info("Watcher stopped")
         print("[MCP Watcher] Stopped", file=sys.stderr)
 
     def _get_session_sizes(self) -> Dict[str, int]:

@@ -82,7 +95,9 @@ class DialogueWatcher:
             for session_file in session_files:
                 if session_file.exists():
                     sizes[str(session_file)] = session_file.stat().st_size
+            logger.debug(f"Tracked {len(sizes)} session file(s)")
         except Exception as e:
+            logger.error(f"Error getting session sizes: {e}", exc_info=True)
             print(f"[MCP Watcher] Error getting session sizes: {e}", file=sys.stderr)
         return sizes
 

@@ -239,8 +254,11 @@ class DialogueWatcher:
         """
         Count complete dialogue turns for Claude Code sessions.
 
-        Uses stop_reason
+        Uses stop_reason in ('end_turn', 'tool_use') as the marker, with message ID deduplication
         to handle Claude Code's incremental writes (thinking first, then full content).
+
+        Note: In Claude Code, most responses have stop_reason='tool_use' because the AI
+        frequently uses tools. We count both 'end_turn' and 'tool_use' as complete turns.
         """
         unique_message_ids = set()
         try:

@@ -252,8 +270,11 @@ class DialogueWatcher:
                 try:
                     data = json.loads(line)
                     message = data.get("message", {})
-
-
+                    stop_reason = message.get("stop_reason")
+                    # Count both end_turn and tool_use as complete turns
+                    # tool_use indicates the AI used tools (most common in Claude Code)
+                    # end_turn indicates a simple response without tools
+                    if stop_reason in ("end_turn", "tool_use"):
                         # Deduplicate by message ID to handle incremental writes
                         msg_id = message.get("id")
                         if msg_id:

@@ -264,6 +285,7 @@ class DialogueWatcher:
                 except json.JSONDecodeError:
                     continue
         except Exception as e:
+            logger.error(f"Error counting Claude turns in {session_file}: {e}", exc_info=True)
            print(f"[MCP Watcher] Error counting Claude turns in {session_file}: {e}", file=sys.stderr)
         return len(unique_message_ids)
 

@@ -308,10 +330,13 @@ class DialogueWatcher:
                 old_size = self.last_session_sizes.get(path, 0)
                 if size > old_size:
                     changed_files.append(Path(path))
+                    logger.debug(f"Session file changed: {Path(path).name} ({old_size} -> {size} bytes)")
 
             if changed_files:
+                logger.info(f"Detected changes in {len(changed_files)} session file(s), scheduling commit check")
                 # File changed - cancel any pending commit and schedule a new one
                 if self.pending_commit_task:
+                    logger.debug("Cancelling pending commit task")
                     self.pending_commit_task.cancel()
 
                 # Wait for debounce period to ensure the turn is complete

@@ -323,6 +348,7 @@ class DialogueWatcher:
             self.last_session_sizes = current_sizes
 
         except Exception as e:
+            logger.error(f"Error checking for changes: {e}", exc_info=True)
             print(f"[MCP Watcher] Error checking for changes: {e}", file=sys.stderr)
 
     async def _debounced_commit(self, changed_files: list):

@@ -337,6 +363,7 @@ class DialogueWatcher:
             for session_file in changed_files:
                 if await self._check_if_turn_complete(session_file):
                     session_to_commit = session_file
+                    logger.info(f"Complete turn detected in {session_file.name}")
                     print(f"[MCP Watcher] Complete turn detected in {session_file.name}", file=sys.stderr)
                     break
 

@@ -344,6 +371,7 @@ class DialogueWatcher:
             # Extract project path from the session file
             project_path = self._extract_project_path(session_to_commit)
             if not project_path:
+                logger.warning(f"Could not determine project path for {session_to_commit.name}, skipping commit")
                 print(f"[MCP Watcher] Could not determine project path for {session_to_commit.name}, skipping commit", file=sys.stderr)
                 return
 

@@ -354,10 +382,12 @@ class DialogueWatcher:
             if last_commit_time:
                 time_since_last = current_time - last_commit_time
                 if time_since_last < self.min_commit_interval:
+                    logger.info(f"Skipping commit for {project_path.name} (cooldown: {time_since_last:.1f}s < {self.min_commit_interval}s)")
                     print(f"[MCP Watcher] Skipping commit for {project_path.name} (cooldown: {time_since_last:.1f}s < {self.min_commit_interval}s)", file=sys.stderr)
                     return
 
             # Perform commit for this project
+            logger.info(f"Triggering commit for {project_path.name}")
             await self._do_commit(project_path, session_to_commit)
 
         except asyncio.CancelledError:

@@ -390,6 +420,7 @@ class DialogueWatcher:
 
             # Commit after each complete assistant response (1 new turn)
             if new_turns >= 1:
+                logger.info(f"Detected {new_turns} new turn(s) in {session_file.name} ({session_type})")
                 print(f"[MCP Watcher] Detected {new_turns} new turn(s) in {session_file.name} ({session_type})", file=sys.stderr)
                 # Update baseline immediately to avoid double-counting
                 self.last_stop_reason_counts[session_path] = current_count

@@ -398,6 +429,7 @@ class DialogueWatcher:
             return False
 
         except Exception as e:
+            logger.error(f"Error checking turn completion: {e}", exc_info=True)
             print(f"[MCP Watcher] Error checking turn completion: {e}", file=sys.stderr)
             return False
 

@@ -423,12 +455,16 @@ class DialogueWatcher:
             )
 
             if result:
+                logger.info(f"✓ Committed to {project_path.name}: {message}")
                 print(f"[MCP Watcher] ✓ Committed to {project_path.name}: {message}", file=sys.stderr)
                 # Update last commit time for this project
                 self.last_commit_times[str(project_path)] = time.time()
                 # Baseline counts already updated in _check_if_turn_complete()
+            else:
+                logger.warning(f"Commit failed for {project_path.name}")
 
         except Exception as e:
+            logger.error(f"Error during commit for {project_path}: {e}", exc_info=True)
             print(f"[MCP Watcher] Error during commit for {project_path}: {e}", file=sys.stderr)
 
     def _run_realign_commit(self, project_path: Path, message: str) -> bool:

@@ -562,11 +598,15 @@ async def start_watcher():
     global _watcher
 
     if _watcher and _watcher.running:
+        logger.info("Watcher already running, skipping start")
        print("[MCP Watcher] Already running", file=sys.stderr)
         return
 
+    logger.info("Initializing global watcher instance")
     _watcher = DialogueWatcher()
+    logger.info("Starting watcher task")
     asyncio.create_task(_watcher.start())
+    logger.info("Watcher task created")
 
 
 async def stop_watcher():
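
To make the new detection rule easier to read outside the diff, here is a simplified, self-contained sketch of the turn-counting logic shown above; it mirrors the hunks but is not the DialogueWatcher implementation itself:

import json
from pathlib import Path

def count_complete_claude_turns(session_file: Path) -> int:
    # A turn is counted once per unique assistant message id whose stop_reason is
    # either "end_turn" or "tool_use" (deduplicating Claude Code's incremental writes).
    seen_ids = set()
    for line in session_file.read_text(encoding="utf-8").splitlines():
        try:
            message = json.loads(line).get("message", {})
        except (json.JSONDecodeError, AttributeError):
            continue
        if message.get("stop_reason") in ("end_turn", "tool_use"):
            msg_id = message.get("id")
            if msg_id:
                seen_ids.add(msg_id)
    return len(seen_ids)
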
Files without changes:
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/WHEEL
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/entry_points.txt
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/licenses/LICENSE
- {aline_ai-0.2.0.dist-info → aline_ai-0.2.2.dist-info}/top_level.txt