pomera-ai-commander 1.2.1 → 1.2.3

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1,239 @@
+"""
+Diff Utilities Module
+
+Reusable diff generation functions for Find & Replace preview,
+MCP tools, and other components that need text comparison.
+
+This module is UI-independent and can be used by both tkinter widgets
+and CLI/MCP tools.
+"""
+
+import difflib
+import re
+from typing import List, Tuple, Optional, NamedTuple
+from dataclasses import dataclass
+
+
+@dataclass
+class DiffResult:
+    """Result of a diff comparison operation."""
+    original_text: str
+    modified_text: str
+    unified_diff: str
+    replacements: int
+    lines_affected: int
+    similarity_score: float # 0-100
+
+
+@dataclass
+class FindReplacePreview:
+    """Preview of a find/replace operation before execution."""
+    original_text: str
+    modified_text: str
+    unified_diff: str
+    match_count: int
+    lines_affected: int
+    match_positions: List[Tuple[int, int]] # List of (start, end) character positions
+
+
+def generate_unified_diff(
+    original: str,
+    modified: str,
+    context_lines: int = 3,
+    original_label: str = "Original",
+    modified_label: str = "Modified"
+) -> str:
+    """
+    Generate a unified diff between two texts.
+
+    Args:
+        original: Original text
+        modified: Modified text
+        context_lines: Number of context lines around changes
+        original_label: Label for original text (shown as --- label)
+        modified_label: Label for modified text (shown as +++ label)
+
+    Returns:
+        Unified diff as string
+    """
+    original_lines = original.splitlines(keepends=True)
+    modified_lines = modified.splitlines(keepends=True)
+
+    # Ensure last lines end with newline for proper diff
+    if original_lines and not original_lines[-1].endswith('\n'):
+        original_lines[-1] += '\n'
+    if modified_lines and not modified_lines[-1].endswith('\n'):
+        modified_lines[-1] += '\n'
+
+    diff = difflib.unified_diff(
+        original_lines,
+        modified_lines,
+        fromfile=original_label,
+        tofile=modified_label,
+        n=context_lines
+    )
+
+    return ''.join(diff)
+
+
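As a quick illustration of the helper above — a sketch only, assuming the module is importable as core.diff_utils (the path used by the MCP tool later in this diff):

    from core.diff_utils import generate_unified_diff

    before = "alpha\nbeta\ngamma\n"
    after = "alpha\nBETA\ngamma\n"

    # Prints a standard unified diff: "--- Original", "+++ Modified", then hunks
    # with 3 lines of context by default.
    print(generate_unified_diff(before, after))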
+def generate_find_replace_preview(
+    text: str,
+    find_pattern: str,
+    replace_pattern: str,
+    use_regex: bool = True,
+    case_sensitive: bool = True,
+    context_lines: int = 2
+) -> FindReplacePreview:
+    """
+    Generate a preview of find/replace operation with unified diff.
+
+    Args:
+        text: Input text to process
+        find_pattern: Pattern to find (regex or literal)
+        replace_pattern: Replacement string
+        use_regex: Whether find_pattern is a regex
+        case_sensitive: Whether to match case sensitively
+        context_lines: Lines of context in diff output
+
+    Returns:
+        FindReplacePreview with diff and match information
+
+    Raises:
+        re.error: If regex pattern is invalid
+    """
+    if not find_pattern:
+        return FindReplacePreview(
+            original_text=text,
+            modified_text=text,
+            unified_diff="",
+            match_count=0,
+            lines_affected=0,
+            match_positions=[]
+        )
+
+    # Compile the pattern
+    flags = 0 if case_sensitive else re.IGNORECASE
+    if use_regex:
+        pattern = re.compile(find_pattern, flags)
+    else:
+        pattern = re.compile(re.escape(find_pattern), flags)
+
+    # Find all matches and their positions
+    matches = list(pattern.finditer(text))
+    match_positions = [(m.start(), m.end()) for m in matches]
+
+    # Perform the replacement
+    modified_text = pattern.sub(replace_pattern, text)
+
+    # Calculate lines affected
+    original_lines = set()
+    pos = 0
+    line_num = 1
+    for char in text:
+        for match_start, match_end in match_positions:
+            if match_start <= pos < match_end:
+                original_lines.add(line_num)
+        if char == '\n':
+            line_num += 1
+        pos += 1
+
+    # Generate diff
+    match_info = f"({len(matches)} match{'es' if len(matches) != 1 else ''})"
+    unified_diff = generate_unified_diff(
+        text,
+        modified_text,
+        context_lines=context_lines,
+        original_label=f"Original {match_info}",
+        modified_label="Modified"
+    )
+
+    return FindReplacePreview(
+        original_text=text,
+        modified_text=modified_text,
+        unified_diff=unified_diff,
+        match_count=len(matches),
+        lines_affected=len(original_lines),
+        match_positions=match_positions
+    )
+
+
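A minimal usage sketch for the preview helper above (again assuming core.diff_utils is importable; the fields follow the FindReplacePreview dataclass defined earlier):

    from core.diff_utils import generate_find_replace_preview

    text = "color = 'red'\ncolour = 'blue'\n"
    preview = generate_find_replace_preview(text, r"colou?r", "shade")

    print(preview.match_count)     # 2
    print(preview.lines_affected)  # 2
    print(preview.unified_diff)    # labelled "Original (2 matches)" vs "Modified"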
+def compute_similarity_score(original: str, modified: str) -> float:
+    """
+    Compute similarity score between two texts (0-100).
+
+    Args:
+        original: Original text
+        modified: Modified text
+
+    Returns:
+        Similarity percentage (0-100)
+    """
+    if not original and not modified:
+        return 100.0
+    if not original or not modified:
+        return 0.0
+
+    matcher = difflib.SequenceMatcher(None, original, modified, autojunk=False)
+    return matcher.ratio() * 100
+
+
+def generate_compact_diff(
+    original: str,
+    modified: str,
+    max_lines: int = 20
+) -> str:
+    """
+    Generate a compact diff suitable for CLI/token-limited contexts.
+    Shows only changed lines without full context.
+
+    Args:
+        original: Original text
+        modified: Modified text
+        max_lines: Maximum lines to show in diff
+
+    Returns:
+        Compact diff string
+    """
+    original_lines = original.splitlines()
+    modified_lines = modified.splitlines()
+
+    matcher = difflib.SequenceMatcher(None, original_lines, modified_lines, autojunk=False)
+
+    output_lines = []
+    line_count = 0
+
+    for tag, i1, i2, j1, j2 in matcher.get_opcodes():
+        if line_count >= max_lines:
+            output_lines.append(f"... ({max_lines}+ changes, truncated)")
+            break
+
+        if tag == 'equal':
+            continue
+        elif tag == 'delete':
+            for i in range(i1, i2):
+                if line_count >= max_lines:
+                    break
+                output_lines.append(f"-{i + 1}: {original_lines[i]}")
+                line_count += 1
+        elif tag == 'insert':
+            for j in range(j1, j2):
+                if line_count >= max_lines:
+                    break
+                output_lines.append(f"+{j + 1}: {modified_lines[j]}")
+                line_count += 1
+        elif tag == 'replace':
+            for i in range(i1, i2):
+                if line_count >= max_lines:
+                    break
+                output_lines.append(f"-{i + 1}: {original_lines[i]}")
+                line_count += 1
+            for j in range(j1, j2):
+                if line_count >= max_lines:
+                    break
+                output_lines.append(f"+{j + 1}: {modified_lines[j]}")
+                line_count += 1
+
+    if not output_lines:
+        return "No differences found."
+
+    return '\n'.join(output_lines)
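The last two helpers pair naturally: compute_similarity_score gives a coarse 0-100 measure of how much a replacement changed, and generate_compact_diff keeps the output short for token-limited callers. A sketch, under the same core.diff_utils import assumption:

    from core.diff_utils import compute_similarity_score, generate_compact_diff

    original = "one\ntwo\nthree\n"
    modified = "one\n2\nthree\nfour\n"

    # Character-level similarity as a percentage.
    print(f"{compute_similarity_score(original, modified):.1f}%")

    # Only the changed lines, capped at max_lines:
    #   -2: two
    #   +2: 2
    #   +4: four
    print(generate_compact_diff(original, modified, max_lines=20))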
@@ -96,6 +96,14 @@ class EfficientLineNumbers(tk.Frame):
         # Focus events for optimization
         self.text.bind("<FocusIn>", self._on_focus_in)
         self.text.bind("<FocusOut>", self._on_focus_out)
+
+        # Paste events - insert undo separator after paste to separate from subsequent typing
+        self.text.bind("<<Paste>>", self._on_paste)
+        self.text.bind("<Control-v>", self._on_paste)
+        self.text.bind("<Control-V>", self._on_paste)
+        # Also handle Shift+Insert (alternative paste)
+        self.text.bind("<Shift-Insert>", self._on_paste)
+
 
     def _setup_scrollbar_sync(self):
         """Setup proper scrollbar synchronization."""
@@ -161,6 +169,28 @@ class EfficientLineNumbers(tk.Frame):
         """Handle focus out - can reduce update frequency."""
         pass # Could implement reduced update frequency when not focused
 
+    def _on_paste(self, event=None):
+        """
+        Handle paste operations - insert undo separator after paste.
+
+        This ensures that paste operations are separate from subsequent typing
+        in the undo history, so Ctrl+Z undoes them independently.
+        """
+        # Let the paste happen first, then insert a separator
+        def insert_undo_separator():
+            try:
+                # Insert undo separator to mark this as a separate operation
+                self.text.edit_separator()
+            except Exception:
+                pass # Ignore if undo is not enabled
+
+        # Schedule the separator insertion after the paste completes
+        self.after(10, insert_undo_separator)
+        # Also schedule line number update
+        self._schedule_line_number_update()
+        # Don't return "break" - let the default paste handling occur
+
+
     def _on_key_press(self, event=None):
         """Handle key press events - immediate update for Enter key."""
         if event and event.keysym in ['Return', 'BackSpace', 'Delete']:
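The point of the new bindings and _on_paste handler is to make a paste one undo step, separated from whatever is typed afterwards. A stripped-down sketch of the same pattern on a plain tkinter Text widget (illustrating the technique, not the package's class):

    import tkinter as tk

    root = tk.Tk()
    text = tk.Text(root, undo=True, autoseparators=True)
    text.pack()

    def on_paste(event=None):
        # Let the default <<Paste>> handler run first, then close the undo group
        # so Ctrl+Z reverts the paste independently of later typing.
        text.after(10, text.edit_separator)
        # No "break" return value, so the default paste behaviour still runs.

    text.bind("<<Paste>>", on_paste)
    text.bind("<Shift-Insert>", on_paste)

    root.mainloop()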
@@ -0,0 +1,334 @@
+"""
+Find & Replace Diff MCP Tool
+
+Provides regex find/replace with diff preview and automatic backup to Notes.
+Designed for AI agent workflows requiring verification and rollback capability.
+
+Operations:
+- validate: Check regex syntax before use
+- preview: Show unified diff of proposed changes
+- execute: Perform replacement with automatic backup to Notes
+- recall: Retrieve previous operation state for rollback
+"""
+
+import re
+import json
+import logging
+from typing import Dict, Any, Optional, List, Tuple
+from dataclasses import dataclass, asdict
+from datetime import datetime
+
+# Import diff utilities
+try:
+    from core.diff_utils import generate_find_replace_preview, generate_compact_diff, FindReplacePreview
+    DIFF_UTILS_AVAILABLE = True
+except ImportError:
+    DIFF_UTILS_AVAILABLE = False
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class FindReplaceOperation:
+    """Represents a find/replace operation for storage in Notes."""
+    find_pattern: str
+    replace_pattern: str
+    flags: List[str]
+    original_text: str
+    modified_text: str
+    match_count: int
+    timestamp: str
+
+    def to_json(self) -> str:
+        return json.dumps(asdict(self), ensure_ascii=False)
+
+    @classmethod
+    def from_json(cls, json_str: str) -> 'FindReplaceOperation':
+        data = json.loads(json_str)
+        return cls(**data)
+
+
+def validate_regex(pattern: str, flags: List[str] = None) -> Dict[str, Any]:
+    """
+    Validate a regex pattern.
+
+    Args:
+        pattern: Regex pattern string
+        flags: Optional list of flag characters ('i', 'm', 's', 'x')
+
+    Returns:
+        Dict with validation result
+    """
+    if not pattern:
+        return {"valid": True, "pattern": "", "groups": 0, "flags_applied": []}
+
+    try:
+        # Convert flag characters to re flags
+        re_flags = 0
+        flags_applied = []
+        if flags:
+            flag_map = {
+                'i': (re.IGNORECASE, 'IGNORECASE'),
+                'm': (re.MULTILINE, 'MULTILINE'),
+                's': (re.DOTALL, 'DOTALL'),
+                'x': (re.VERBOSE, 'VERBOSE')
+            }
+            for f in flags:
+                if f.lower() in flag_map:
+                    re_flags |= flag_map[f.lower()][0]
+                    flags_applied.append(flag_map[f.lower()][1])
+
+        compiled = re.compile(pattern, re_flags)
+        return {
+            "valid": True,
+            "pattern": pattern,
+            "groups": compiled.groups,
+            "flags_applied": flags_applied
+        }
+    except re.error as e:
+        suggestion = _get_regex_suggestion(str(e))
+        return {
+            "valid": False,
+            "pattern": pattern,
+            "error": str(e),
+            "suggestion": suggestion
+        }
+
+
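For a rough sense of the result shapes, with the functions of this module in scope (the module path is not shown in this diff):

    ok = validate_regex(r"(?P<word>\w+)\s+\1", flags=['i'])
    # -> {"valid": True, "pattern": "(?P<word>\\w+)\\s+\\1",
    #     "groups": 1, "flags_applied": ["IGNORECASE"]}

    bad = validate_regex(r"(unclosed")
    # -> {"valid": False, "error": "missing ), unterminated subpattern ...",
    #     "suggestion": "Check for unmatched parentheses, brackets, or quotes"}
    # (exact error wording depends on the Python version)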
+def preview_replace(
+    text: str,
+    find_pattern: str,
+    replace_pattern: str,
+    flags: List[str] = None,
+    context_lines: int = 2,
+    max_diff_lines: int = 50
+) -> Dict[str, Any]:
+    """
+    Generate a preview of find/replace operation with compact diff.
+
+    Args:
+        text: Input text to process
+        find_pattern: Regex pattern to find
+        replace_pattern: Replacement string
+        flags: Optional regex flags
+        context_lines: Lines of context in diff
+        max_diff_lines: Maximum diff lines to return (token efficiency)
+
+    Returns:
+        Dict with preview information
+    """
+    # Validate first
+    validation = validate_regex(find_pattern, flags)
+    if not validation["valid"]:
+        return {"success": False, "error": validation["error"], "suggestion": validation.get("suggestion", "")}
+
+    # Build regex flags
+    re_flags = 0
+    if flags:
+        flag_map = {'i': re.IGNORECASE, 'm': re.MULTILINE, 's': re.DOTALL, 'x': re.VERBOSE}
+        for f in flags:
+            if f.lower() in flag_map:
+                re_flags |= flag_map[f.lower()]
+
+    try:
+        pattern = re.compile(find_pattern, re_flags)
+        matches = list(pattern.finditer(text))
+
+        if not matches:
+            return {
+                "success": True,
+                "match_count": 0,
+                "diff": "No matches found.",
+                "lines_affected": 0
+            }
+
+        # Perform replacement
+        modified_text = pattern.sub(replace_pattern, text)
+
+        # Generate compact diff (token-efficient)
+        diff = generate_compact_diff(text, modified_text, max_lines=max_diff_lines) if DIFF_UTILS_AVAILABLE else _basic_diff(text, modified_text)
+
+        # Count affected lines
+        lines_affected = len(set(text[:m.start()].count('\n') + 1 for m in matches))
+
+        return {
+            "success": True,
+            "match_count": len(matches),
+            "lines_affected": lines_affected,
+            "diff": diff
+        }
+    except Exception as e:
+        return {"success": False, "error": str(e)}
+
+
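preview_replace is the read-only step an agent would run before committing a change; a typical call might look like this (sketch, same scope assumption as above):

    text = "TODO: fix\nDONE: ship\nTODO: test\n"
    result = preview_replace(text, r"^TODO", "LATER", flags=['m'])
    # flags=['m'] enables MULTILINE so '^' matches at every line start:
    #   result["match_count"] == 2, result["lines_affected"] == 2,
    #   result["diff"] lists only the changed lines as "-N: ..." / "+N: ..." pairs.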
+def execute_replace(
+    text: str,
+    find_pattern: str,
+    replace_pattern: str,
+    flags: List[str] = None,
+    save_to_notes: bool = True,
+    notes_handler = None
+) -> Dict[str, Any]:
+    """
+    Execute find/replace with optional backup to Notes.
+
+    Args:
+        text: Input text to process
+        find_pattern: Regex pattern to find
+        replace_pattern: Replacement string
+        flags: Optional regex flags
+        save_to_notes: Whether to save operation to Notes for rollback
+        notes_handler: Function to save to notes (called as notes_handler(title, input_content, output_content))
+
+    Returns:
+        Dict with execution result including note_id if saved
+    """
+    # Validate first
+    validation = validate_regex(find_pattern, flags)
+    if not validation["valid"]:
+        return {"success": False, "error": validation["error"]}
+
+    # Build regex flags
+    re_flags = 0
+    if flags:
+        flag_map = {'i': re.IGNORECASE, 'm': re.MULTILINE, 's': re.DOTALL, 'x': re.VERBOSE}
+        for f in flags:
+            if f.lower() in flag_map:
+                re_flags |= flag_map[f.lower()]
+
+    try:
+        pattern = re.compile(find_pattern, re_flags)
+        matches = list(pattern.finditer(text))
+
+        if not matches:
+            return {
+                "success": True,
+                "replacements": 0,
+                "modified_text": text,
+                "note_id": None
+            }
+
+        # Perform replacement
+        modified_text = pattern.sub(replace_pattern, text)
+
+        # Count affected lines
+        lines_affected = len(set(text[:m.start()].count('\n') + 1 for m in matches))
+
+        result = {
+            "success": True,
+            "replacements": len(matches),
+            "lines_affected": lines_affected,
+            "modified_text": modified_text,
+            "note_id": None
+        }
+
+        # Save to notes if requested
+        if save_to_notes and notes_handler:
+            try:
+                timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+                title = f"FindReplace/{timestamp}"
+
+                # Create operation record
+                operation = FindReplaceOperation(
+                    find_pattern=find_pattern,
+                    replace_pattern=replace_pattern,
+                    flags=flags or [],
+                    original_text=text,
+                    modified_text=modified_text,
+                    match_count=len(matches),
+                    timestamp=timestamp
+                )
+
+                note_id = notes_handler(
+                    title=title,
+                    input_content=text,
+                    output_content=operation.to_json()
+                )
+                result["note_id"] = note_id
+                result["note_title"] = title
+            except Exception as e:
+                logger.warning(f"Failed to save operation to notes: {e}")
+                result["note_error"] = str(e)
+
+        return result
+    except Exception as e:
+        return {"success": False, "error": str(e)}
+
+
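execute_replace only needs a callable for the Notes backup, so any storage can stand in. A minimal in-memory example (fake_notes_handler and _notes are hypothetical stand-ins for illustration, not package APIs):

    _notes = {}

    def fake_notes_handler(title, input_content, output_content):
        # Hypothetical stand-in for the real Notes backend: store and return an id.
        note_id = len(_notes) + 1
        _notes[note_id] = {"title": title,
                           "input_content": input_content,
                           "output_content": output_content}
        return note_id

    result = execute_replace("foo bar foo", r"foo", "baz",
                             notes_handler=fake_notes_handler)
    # result["replacements"] == 2, result["modified_text"] == "baz bar baz",
    # and result["note_id"] points at the stored FindReplaceOperation JSON.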
+def recall_operation(note_id: int, notes_getter = None) -> Dict[str, Any]:
+    """
+    Recall a previous find/replace operation from Notes.
+
+    Args:
+        note_id: ID of the note to recall
+        notes_getter: Function to get note by ID (returns dict with 'output_content')
+
+    Returns:
+        Dict with recalled operation details
+    """
+    if not notes_getter:
+        return {"success": False, "error": "Notes getter not available"}
+
+    try:
+        note = notes_getter(note_id)
+        if not note:
+            return {"success": False, "error": f"Note {note_id} not found"}
+
+        # Parse the operation from output_content
+        operation = FindReplaceOperation.from_json(note.get('output_content', '{}'))
+
+        return {
+            "success": True,
+            "note_id": note_id,
+            "title": note.get('title', ''),
+            "find_pattern": operation.find_pattern,
+            "replace_pattern": operation.replace_pattern,
+            "flags": operation.flags,
+            "original_text": operation.original_text,
+            "modified_text": operation.modified_text,
+            "match_count": operation.match_count,
+            "timestamp": operation.timestamp
+        }
+    except Exception as e:
+        return {"success": False, "error": str(e)}
+
+
+def _get_regex_suggestion(error_msg: str) -> str:
+    """Get helpful suggestion for common regex errors."""
+    suggestions = {
+        "unterminated": "Check for unmatched parentheses, brackets, or quotes",
+        "unbalanced": "Ensure all opening ( [ { have matching closing ) ] }",
+        "nothing to repeat": "Quantifiers like * + ? need something before them",
+        "bad escape": "Use double backslash \\\\ or raw string r'' for special chars",
+        "look-behind": "Python look-behind requires fixed-width pattern",
+        "bad character range": "Check character ranges like [a-z], ensure start < end"
+    }
+
+    error_lower = error_msg.lower()
+    for key, suggestion in suggestions.items():
+        if key in error_lower:
+            return suggestion
+
+    return "Check regex syntax - see Python re module documentation"
+
+
+def _basic_diff(original: str, modified: str) -> str:
+    """Basic diff when diff_utils not available."""
+    orig_lines = original.splitlines()
+    mod_lines = modified.splitlines()
+
+    output = []
+    for i, (o, m) in enumerate(zip(orig_lines, mod_lines)):
+        if o != m:
+            output.append(f"-{i+1}: {o}")
+            output.append(f"+{i+1}: {m}")
+
+    # Handle length differences
+    if len(mod_lines) > len(orig_lines):
+        for i in range(len(orig_lines), len(mod_lines)):
+            output.append(f"+{i+1}: {mod_lines[i]}")
+    elif len(orig_lines) > len(mod_lines):
+        for i in range(len(mod_lines), len(orig_lines)):
+            output.append(f"-{i+1}: {orig_lines[i]}")
+
+    return '\n'.join(output) if output else "No differences"
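Putting the pieces together, the workflow the module is built around is validate → preview → execute → recall. Continuing the in-memory sketch above (fake_notes_handler and _notes are the hypothetical stand-ins defined there):

    def fake_notes_getter(note_id):
        # Hypothetical lookup returning the dict shape recall_operation expects.
        return _notes.get(note_id)

    if validate_regex(r"foo")["valid"]:
        print(preview_replace("foo bar foo", r"foo", "baz")["diff"])
        done = execute_replace("foo bar foo", r"foo", "baz",
                               notes_handler=fake_notes_handler)
        recalled = recall_operation(done["note_id"], notes_getter=fake_notes_getter)
        # recalled["original_text"] holds the pre-change text for rollback.
        assert recalled["original_text"] == "foo bar foo"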