pomera-ai-commander 0.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191) hide show
  1. package/LICENSE +21 -21
  2. package/README.md +105 -680
  3. package/bin/pomera-ai-commander.js +62 -62
  4. package/core/__init__.py +65 -65
  5. package/core/app_context.py +482 -482
  6. package/core/async_text_processor.py +421 -421
  7. package/core/backup_manager.py +655 -655
  8. package/core/backup_recovery_manager.py +1033 -1033
  9. package/core/content_hash_cache.py +508 -508
  10. package/core/context_menu.py +313 -313
  11. package/core/data_validator.py +1066 -1066
  12. package/core/database_connection_manager.py +744 -744
  13. package/core/database_curl_settings_manager.py +608 -608
  14. package/core/database_promera_ai_settings_manager.py +446 -446
  15. package/core/database_schema.py +411 -411
  16. package/core/database_schema_manager.py +395 -395
  17. package/core/database_settings_manager.py +1507 -1507
  18. package/core/database_settings_manager_interface.py +456 -456
  19. package/core/dialog_manager.py +734 -734
  20. package/core/efficient_line_numbers.py +510 -510
  21. package/core/error_handler.py +746 -746
  22. package/core/error_service.py +431 -431
  23. package/core/event_consolidator.py +511 -511
  24. package/core/mcp/__init__.py +43 -43
  25. package/core/mcp/protocol.py +288 -288
  26. package/core/mcp/schema.py +251 -251
  27. package/core/mcp/server_stdio.py +299 -299
  28. package/core/mcp/tool_registry.py +2372 -2345
  29. package/core/memory_efficient_text_widget.py +711 -711
  30. package/core/migration_manager.py +914 -914
  31. package/core/migration_test_suite.py +1085 -1085
  32. package/core/migration_validator.py +1143 -1143
  33. package/core/optimized_find_replace.py +714 -714
  34. package/core/optimized_pattern_engine.py +424 -424
  35. package/core/optimized_search_highlighter.py +552 -552
  36. package/core/performance_monitor.py +674 -674
  37. package/core/persistence_manager.py +712 -712
  38. package/core/progressive_stats_calculator.py +632 -632
  39. package/core/regex_pattern_cache.py +529 -529
  40. package/core/regex_pattern_library.py +350 -350
  41. package/core/search_operation_manager.py +434 -434
  42. package/core/settings_defaults_registry.py +1087 -1087
  43. package/core/settings_integrity_validator.py +1111 -1111
  44. package/core/settings_serializer.py +557 -557
  45. package/core/settings_validator.py +1823 -1823
  46. package/core/smart_stats_calculator.py +709 -709
  47. package/core/statistics_update_manager.py +619 -619
  48. package/core/stats_config_manager.py +858 -858
  49. package/core/streaming_text_handler.py +723 -723
  50. package/core/task_scheduler.py +596 -596
  51. package/core/update_pattern_library.py +168 -168
  52. package/core/visibility_monitor.py +596 -596
  53. package/core/widget_cache.py +498 -498
  54. package/mcp.json +51 -61
  55. package/package.json +61 -57
  56. package/pomera.py +7482 -7482
  57. package/pomera_mcp_server.py +183 -144
  58. package/requirements.txt +32 -0
  59. package/tools/__init__.py +4 -4
  60. package/tools/ai_tools.py +2891 -2891
  61. package/tools/ascii_art_generator.py +352 -352
  62. package/tools/base64_tools.py +183 -183
  63. package/tools/base_tool.py +511 -511
  64. package/tools/case_tool.py +308 -308
  65. package/tools/column_tools.py +395 -395
  66. package/tools/cron_tool.py +884 -884
  67. package/tools/curl_history.py +600 -600
  68. package/tools/curl_processor.py +1207 -1207
  69. package/tools/curl_settings.py +502 -502
  70. package/tools/curl_tool.py +5467 -5467
  71. package/tools/diff_viewer.py +1071 -1071
  72. package/tools/email_extraction_tool.py +248 -248
  73. package/tools/email_header_analyzer.py +425 -425
  74. package/tools/extraction_tools.py +250 -250
  75. package/tools/find_replace.py +1750 -1750
  76. package/tools/folder_file_reporter.py +1463 -1463
  77. package/tools/folder_file_reporter_adapter.py +480 -480
  78. package/tools/generator_tools.py +1216 -1216
  79. package/tools/hash_generator.py +255 -255
  80. package/tools/html_tool.py +656 -656
  81. package/tools/jsonxml_tool.py +729 -729
  82. package/tools/line_tools.py +419 -419
  83. package/tools/markdown_tools.py +561 -561
  84. package/tools/mcp_widget.py +1417 -1417
  85. package/tools/notes_widget.py +973 -973
  86. package/tools/number_base_converter.py +372 -372
  87. package/tools/regex_extractor.py +571 -571
  88. package/tools/slug_generator.py +310 -310
  89. package/tools/sorter_tools.py +458 -458
  90. package/tools/string_escape_tool.py +392 -392
  91. package/tools/text_statistics_tool.py +365 -365
  92. package/tools/text_wrapper.py +430 -430
  93. package/tools/timestamp_converter.py +421 -421
  94. package/tools/tool_loader.py +710 -710
  95. package/tools/translator_tools.py +522 -522
  96. package/tools/url_link_extractor.py +261 -261
  97. package/tools/url_parser.py +204 -204
  98. package/tools/whitespace_tools.py +355 -355
  99. package/tools/word_frequency_counter.py +146 -146
  100. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  101. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  102. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  103. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  104. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  105. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  106. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  107. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  108. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  109. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  110. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  111. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  112. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  113. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  114. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  115. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  116. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  117. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  118. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  119. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  120. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  121. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  122. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  123. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  124. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  125. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  126. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  127. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  128. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  129. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  130. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  131. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  132. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  133. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  134. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  135. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  136. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  137. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  138. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  139. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  140. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  141. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  142. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  143. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  144. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  145. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  146. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  147. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  148. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  151. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  152. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  153. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  154. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  155. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  156. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  157. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  158. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  159. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  160. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  161. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  162. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  163. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  164. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  165. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  166. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  167. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  168. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  169. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  170. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  171. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  172. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  173. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  174. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  175. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  176. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  177. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  178. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  179. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  180. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  181. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  182. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  183. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  184. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  185. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  186. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  187. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  188. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  189. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  190. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  191. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
@@ -1,530 +1,530 @@
1
- """
2
- Intelligent regex pattern caching system for Promera AI Commander.
3
- Provides efficient caching and compilation of regex patterns for find/replace operations.
4
- """
5
-
6
- import re
7
- import time
8
- import threading
9
- import hashlib
10
- from typing import Dict, List, Optional, Any, Tuple, Pattern, Union
11
- from dataclasses import dataclass, field
12
- from collections import OrderedDict
13
- import weakref
14
-
15
- @dataclass
16
- class PatternCacheEntry:
17
- """Cache entry for compiled regex patterns."""
18
- pattern: Pattern[str]
19
- pattern_string: str
20
- flags: int
21
- access_count: int = 0
22
- last_access: float = field(default_factory=time.time)
23
- compilation_time_ms: float = 0.0
24
- success_count: int = 0
25
- error_count: int = 0
26
-
27
- @property
28
- def age_seconds(self) -> float:
29
- """Age of the cache entry in seconds."""
30
- return time.time() - self.last_access
31
-
32
- @property
33
- def success_rate(self) -> float:
34
- """Success rate of pattern usage."""
35
- total_uses = self.success_count + self.error_count
36
- return self.success_count / max(total_uses, 1)
37
-
38
- @dataclass
39
- class SearchResult:
40
- """Result of a search operation."""
41
- matches: List[re.Match]
42
- pattern_hash: str
43
- search_time_ms: float
44
- text_length: int
45
- match_count: int
46
-
47
- @property
48
- def match_positions(self) -> List[Tuple[int, int]]:
49
- """Get list of (start, end) positions for all matches."""
50
- return [(match.start(), match.end()) for match in self.matches]
51
-
52
- class RegexPatternCache:
53
- """
54
- Intelligent regex pattern cache with compilation optimization and usage tracking.
55
- """
56
-
57
- def __init__(self, cache_size_limit: int = 500):
58
- self.cache_size_limit = cache_size_limit
59
- self.pattern_cache: OrderedDict[str, PatternCacheEntry] = OrderedDict()
60
- self.cache_lock = threading.RLock()
61
-
62
-
63
-
64
- # Common pattern optimizations
65
- self.pattern_optimizations = {
66
- # Simple text search optimizations
67
- 'simple_text': {
68
- 'detect': lambda p: not any(c in p for c in r'.*+?^${}[]|()\\'),
69
- 'optimize': lambda p: re.escape(p)
70
- },
71
- # Word boundary optimizations
72
- 'word_search': {
73
- 'detect': lambda p: p.isalnum() and ' ' not in p,
74
- 'optimize': lambda p: r'\b' + re.escape(p) + r'\b'
75
- }
76
- }
77
-
78
- def get_compiled_pattern(self,
79
- pattern_string: str,
80
- flags: int = 0,
81
- pattern_type: str = "regex") -> Optional[Pattern[str]]:
82
- """
83
- Get a compiled regex pattern with caching.
84
-
85
- Args:
86
- pattern_string: The regex pattern string
87
- flags: Regex flags (re.IGNORECASE, etc.)
88
- pattern_type: Type of pattern ("regex", "text", "wildcard")
89
-
90
- Returns:
91
- Compiled regex pattern or None if compilation failed
92
- """
93
- # Generate cache key
94
- cache_key = self._generate_cache_key(pattern_string, flags, pattern_type)
95
-
96
- # Check cache first
97
- with self.cache_lock:
98
- if cache_key in self.pattern_cache:
99
- entry = self.pattern_cache[cache_key]
100
- entry.access_count += 1
101
- entry.last_access = time.time()
102
-
103
- # Move to end (LRU)
104
- self.pattern_cache.move_to_end(cache_key)
105
-
106
- return entry.pattern
107
-
108
- # Cache miss - compile pattern
109
- start_time = time.time()
110
-
111
- try:
112
- # Apply optimizations based on pattern type
113
- optimized_pattern = self._optimize_pattern(pattern_string, pattern_type)
114
-
115
- # Compile the pattern
116
- compiled_pattern = re.compile(optimized_pattern, flags)
117
- compilation_time = (time.time() - start_time) * 1000
118
-
119
- # Create cache entry
120
- entry = PatternCacheEntry(
121
- pattern=compiled_pattern,
122
- pattern_string=pattern_string,
123
- flags=flags,
124
- access_count=1,
125
- compilation_time_ms=compilation_time,
126
- success_count=1
127
- )
128
-
129
-
130
-
131
- # Cache the compiled pattern
132
- self._cache_pattern(cache_key, entry)
133
-
134
- return compiled_pattern
135
-
136
- except re.error as e:
137
- # Pattern compilation failed
138
- # Cache the error to avoid repeated compilation attempts
139
- error_entry = PatternCacheEntry(
140
- pattern=None,
141
- pattern_string=pattern_string,
142
- flags=flags,
143
- access_count=1,
144
- compilation_time_ms=(time.time() - start_time) * 1000,
145
- error_count=1
146
- )
147
- self._cache_pattern(cache_key, error_entry)
148
-
149
- return None
150
-
151
- def search_with_cache(self,
152
- pattern_string: str,
153
- text: str,
154
- flags: int = 0,
155
- pattern_type: str = "regex") -> SearchResult:
156
- """
157
- Perform a search operation with pattern caching.
158
-
159
- Args:
160
- pattern_string: The regex pattern string
161
- text: Text to search in
162
- flags: Regex flags
163
- pattern_type: Type of pattern
164
-
165
- Returns:
166
- SearchResult with matches and performance info
167
- """
168
- start_time = time.time()
169
- pattern_hash = self._generate_cache_key(pattern_string, flags, pattern_type)
170
-
171
- # Get compiled pattern
172
- compiled_pattern = self.get_compiled_pattern(pattern_string, flags, pattern_type)
173
-
174
- if compiled_pattern is None:
175
- # Pattern compilation failed
176
- return SearchResult(
177
- matches=[],
178
- pattern_hash=pattern_hash,
179
- search_time_ms=(time.time() - start_time) * 1000,
180
- text_length=len(text),
181
- match_count=0
182
- )
183
-
184
- # Perform search
185
- try:
186
- matches = list(compiled_pattern.finditer(text))
187
- search_time = (time.time() - start_time) * 1000
188
-
189
- # Update pattern success count
190
- with self.cache_lock:
191
- if pattern_hash in self.pattern_cache:
192
- self.pattern_cache[pattern_hash].success_count += 1
193
-
194
-
195
-
196
- return SearchResult(
197
- matches=matches,
198
- pattern_hash=pattern_hash,
199
- search_time_ms=search_time,
200
- text_length=len(text),
201
- match_count=len(matches)
202
- )
203
-
204
- except Exception as e:
205
- # Search operation failed
206
- search_time = (time.time() - start_time) * 1000
207
-
208
- # Update pattern error count
209
- with self.cache_lock:
210
- if pattern_hash in self.pattern_cache:
211
- self.pattern_cache[pattern_hash].error_count += 1
212
-
213
- return SearchResult(
214
- matches=[],
215
- pattern_hash=pattern_hash,
216
- search_time_ms=search_time,
217
- text_length=len(text),
218
- match_count=0
219
- )
220
-
221
- def replace_with_cache(self,
222
- pattern_string: str,
223
- replacement: str,
224
- text: str,
225
- flags: int = 0,
226
- pattern_type: str = "regex",
227
- count: int = 0) -> Tuple[str, int]:
228
- """
229
- Perform a replace operation with pattern caching.
230
-
231
- Args:
232
- pattern_string: The regex pattern string
233
- replacement: Replacement string
234
- text: Text to perform replacement on
235
- flags: Regex flags
236
- pattern_type: Type of pattern
237
- count: Maximum number of replacements (0 = all)
238
-
239
- Returns:
240
- Tuple of (modified_text, replacement_count)
241
- """
242
- compiled_pattern = self.get_compiled_pattern(pattern_string, flags, pattern_type)
243
-
244
- if compiled_pattern is None:
245
- return text, 0
246
-
247
- try:
248
- if count == 0:
249
- modified_text = compiled_pattern.sub(replacement, text)
250
- # Count replacements by comparing with original
251
- replacement_count = len(compiled_pattern.findall(text))
252
- else:
253
- modified_text = compiled_pattern.sub(replacement, text, count=count)
254
- replacement_count = min(count, len(compiled_pattern.findall(text)))
255
-
256
- # Update pattern success count
257
- pattern_hash = self._generate_cache_key(pattern_string, flags, pattern_type)
258
- with self.cache_lock:
259
- if pattern_hash in self.pattern_cache:
260
- self.pattern_cache[pattern_hash].success_count += 1
261
-
262
- return modified_text, replacement_count
263
-
264
- except Exception as e:
265
- # Replace operation failed
266
- pattern_hash = self._generate_cache_key(pattern_string, flags, pattern_type)
267
- with self.cache_lock:
268
- if pattern_hash in self.pattern_cache:
269
- self.pattern_cache[pattern_hash].error_count += 1
270
-
271
- return text, 0
272
-
273
- def _generate_cache_key(self, pattern_string: str, flags: int, pattern_type: str) -> str:
274
- """Generate a cache key for the pattern."""
275
- key_data = f"{pattern_string}_{flags}_{pattern_type}"
276
- return hashlib.md5(key_data.encode('utf-8')).hexdigest()[:16]
277
-
278
- def _optimize_pattern(self, pattern_string: str, pattern_type: str) -> str:
279
- """Apply optimizations to the pattern based on its type."""
280
- if pattern_type == "text":
281
- # Simple text search - escape special characters
282
- return re.escape(pattern_string)
283
- elif pattern_type == "wildcard":
284
- # Convert wildcard pattern to regex
285
- escaped = re.escape(pattern_string)
286
- # Replace escaped wildcards with regex equivalents
287
- escaped = escaped.replace(r'\*', '.*').replace(r'\?', '.')
288
- return escaped
289
- elif pattern_type == "regex":
290
- # Apply common regex optimizations
291
- for opt_name, opt_config in self.pattern_optimizations.items():
292
- if opt_config['detect'](pattern_string):
293
- return opt_config['optimize'](pattern_string)
294
- return pattern_string
295
- else:
296
- return pattern_string
297
-
298
- def _cache_pattern(self, cache_key: str, entry: PatternCacheEntry):
299
- """Cache a compiled pattern with intelligent cache management."""
300
- with self.cache_lock:
301
- # Check if cache is full
302
- if len(self.pattern_cache) >= self.cache_size_limit:
303
- # Remove least recently used entry
304
- self.pattern_cache.popitem(last=False)
305
-
306
- # Add new entry
307
- self.pattern_cache[cache_key] = entry
308
-
309
- def get_cache_stats(self) -> Dict[str, Any]:
310
- """Get cache statistics."""
311
- with self.cache_lock:
312
- cache_size = len(self.pattern_cache)
313
-
314
- return {
315
- 'cache_size': cache_size,
316
- 'cache_size_limit': self.cache_size_limit
317
- }
318
-
319
- def get_pattern_stats(self) -> List[Dict[str, Any]]:
320
- """Get statistics for individual patterns."""
321
- with self.cache_lock:
322
- stats = []
323
- for cache_key, entry in self.pattern_cache.items():
324
- stats.append({
325
- 'pattern': entry.pattern_string[:50] + ('...' if len(entry.pattern_string) > 50 else ''),
326
- 'access_count': entry.access_count,
327
- 'success_rate': entry.success_rate,
328
- 'age_seconds': entry.age_seconds,
329
- 'compilation_time_ms': entry.compilation_time_ms
330
- })
331
- return sorted(stats, key=lambda x: x['access_count'], reverse=True)
332
-
333
- def clear_cache(self):
334
- """Clear all cached patterns."""
335
- with self.cache_lock:
336
- self.pattern_cache.clear()
337
-
338
- def clear_old_patterns(self, max_age_seconds: float = 3600):
339
- """Clear patterns older than specified age."""
340
- with self.cache_lock:
341
- current_time = time.time()
342
- keys_to_remove = []
343
-
344
- for cache_key, entry in self.pattern_cache.items():
345
- if entry.age_seconds > max_age_seconds:
346
- keys_to_remove.append(cache_key)
347
-
348
- for key in keys_to_remove:
349
- self.pattern_cache.pop(key, None)
350
-
351
- def optimize_cache_size(self, target_cache_size: int = 500):
352
- """Optimize cache size based on usage patterns."""
353
- stats = self.get_cache_stats()
354
-
355
- if stats['cache_size'] < target_cache_size and self.cache_size_limit < 1000:
356
- # Increase cache size if current size is below target
357
- self.cache_size_limit = min(1000, int(self.cache_size_limit * 1.2))
358
- elif stats['cache_size'] > target_cache_size and self.cache_size_limit > 50:
359
- # Decrease cache size if current size is above target
360
- self.cache_size_limit = max(50, int(self.cache_size_limit * 0.9))
361
-
362
- class FindReplaceCache:
363
- """
364
- Specialized cache for find/replace operations with result caching.
365
- """
366
-
367
- def __init__(self, pattern_cache: RegexPatternCache):
368
- self.pattern_cache = pattern_cache
369
- self.result_cache: Dict[str, Any] = {}
370
- self.cache_lock = threading.RLock()
371
- self.max_result_cache_size = 100
372
-
373
- def find_with_cache(self,
374
- find_text: str,
375
- content: str,
376
- options: Dict[str, Any]) -> Dict[str, Any]:
377
- """
378
- Perform find operation with comprehensive caching.
379
-
380
- Args:
381
- find_text: Text to find
382
- content: Content to search in
383
- options: Search options (case_sensitive, whole_words, etc.)
384
-
385
- Returns:
386
- Dictionary with search results and metadata
387
- """
388
- # Generate cache key for the entire operation
389
- operation_key = self._generate_operation_key(find_text, content, options, "find")
390
-
391
- with self.cache_lock:
392
- if operation_key in self.result_cache:
393
- cached_result = self.result_cache[operation_key]
394
- cached_result['cache_hit'] = True
395
- return cached_result
396
-
397
- # Determine pattern type and flags
398
- pattern_type, flags = self._parse_options(options)
399
-
400
- # Perform search
401
- search_result = self.pattern_cache.search_with_cache(
402
- find_text, content, flags, pattern_type
403
- )
404
-
405
- # Create result dictionary
406
- result = {
407
- 'matches': search_result.matches,
408
- 'match_count': search_result.match_count,
409
- 'match_positions': search_result.match_positions,
410
- 'search_time_ms': search_result.search_time_ms,
411
- 'pattern_hash': search_result.pattern_hash,
412
- 'cache_hit': False
413
- }
414
-
415
- # Cache the result
416
- self._cache_result(operation_key, result)
417
-
418
- return result
419
-
420
- def replace_with_cache(self,
421
- find_text: str,
422
- replace_text: str,
423
- content: str,
424
- options: Dict[str, Any]) -> Dict[str, Any]:
425
- """
426
- Perform replace operation with caching.
427
-
428
- Args:
429
- find_text: Text to find
430
- replace_text: Replacement text
431
- content: Content to perform replacement on
432
- options: Replace options
433
-
434
- Returns:
435
- Dictionary with replacement results and metadata
436
- """
437
- # Generate cache key
438
- operation_key = self._generate_operation_key(
439
- f"{find_text}→{replace_text}", content, options, "replace"
440
- )
441
-
442
- with self.cache_lock:
443
- if operation_key in self.result_cache:
444
- cached_result = self.result_cache[operation_key]
445
- cached_result['cache_hit'] = True
446
- return cached_result
447
-
448
- # Determine pattern type and flags
449
- pattern_type, flags = self._parse_options(options)
450
-
451
- # Perform replacement
452
- modified_text, replacement_count = self.pattern_cache.replace_with_cache(
453
- find_text, replace_text, content, flags, pattern_type
454
- )
455
-
456
- # Create result dictionary
457
- result = {
458
- 'modified_text': modified_text,
459
- 'replacement_count': replacement_count,
460
- 'original_length': len(content),
461
- 'modified_length': len(modified_text),
462
- 'cache_hit': False
463
- }
464
-
465
- # Cache the result
466
- self._cache_result(operation_key, result)
467
-
468
- return result
469
-
470
- def _generate_operation_key(self,
471
- operation_text: str,
472
- content: str,
473
- options: Dict[str, Any],
474
- operation_type: str) -> str:
475
- """Generate cache key for find/replace operations."""
476
- # Use content hash instead of full content for efficiency
477
- content_hash = hashlib.md5(content.encode('utf-8')).hexdigest()[:16]
478
- options_str = str(sorted(options.items()))
479
- key_data = f"{operation_type}_{operation_text}_{content_hash}_{options_str}"
480
- return hashlib.md5(key_data.encode('utf-8')).hexdigest()[:16]
481
-
482
- def _parse_options(self, options: Dict[str, Any]) -> Tuple[str, int]:
483
- """Parse options to determine pattern type and regex flags."""
484
- pattern_type = "regex" if options.get("mode") == "Regex" else "text"
485
- flags = 0
486
-
487
- option_name = options.get("option", "ignore_case")
488
-
489
- if option_name == "ignore_case":
490
- flags |= re.IGNORECASE
491
- elif option_name == "wildcards":
492
- pattern_type = "wildcard"
493
- flags |= re.IGNORECASE
494
-
495
- return pattern_type, flags
496
-
497
- def _cache_result(self, operation_key: str, result: Dict[str, Any]):
498
- """Cache operation result with size management."""
499
- with self.cache_lock:
500
- if len(self.result_cache) >= self.max_result_cache_size:
501
- # Remove oldest entry (simple FIFO)
502
- oldest_key = next(iter(self.result_cache))
503
- self.result_cache.pop(oldest_key)
504
-
505
- self.result_cache[operation_key] = result
506
-
507
- def clear_cache(self):
508
- """Clear all cached results."""
509
- with self.cache_lock:
510
- self.result_cache.clear()
511
-
512
- # Global instances
513
- _global_regex_cache = None
514
- _global_find_replace_cache = None
515
-
516
- def get_regex_pattern_cache() -> RegexPatternCache:
517
- """Get the global regex pattern cache instance."""
518
- global _global_regex_cache
519
- if _global_regex_cache is None:
520
- _global_regex_cache = RegexPatternCache()
521
- return _global_regex_cache
522
-
523
- def get_find_replace_cache() -> FindReplaceCache:
524
- """Get the global find/replace cache instance."""
525
- global _global_find_replace_cache, _global_regex_cache
526
- if _global_find_replace_cache is None:
527
- if _global_regex_cache is None:
528
- _global_regex_cache = RegexPatternCache()
529
- _global_find_replace_cache = FindReplaceCache(_global_regex_cache)
1
+ """
2
+ Intelligent regex pattern caching system for Promera AI Commander.
3
+ Provides efficient caching and compilation of regex patterns for find/replace operations.
4
+ """
5
+
6
+ import re
7
+ import time
8
+ import threading
9
+ import hashlib
10
+ from typing import Dict, List, Optional, Any, Tuple, Pattern, Union
11
+ from dataclasses import dataclass, field
12
+ from collections import OrderedDict
13
+ import weakref
14
+
15
+ @dataclass
16
+ class PatternCacheEntry:
17
+ """Cache entry for compiled regex patterns."""
18
+ pattern: Pattern[str]
19
+ pattern_string: str
20
+ flags: int
21
+ access_count: int = 0
22
+ last_access: float = field(default_factory=time.time)
23
+ compilation_time_ms: float = 0.0
24
+ success_count: int = 0
25
+ error_count: int = 0
26
+
27
+ @property
28
+ def age_seconds(self) -> float:
29
+ """Age of the cache entry in seconds."""
30
+ return time.time() - self.last_access
31
+
32
+ @property
33
+ def success_rate(self) -> float:
34
+ """Success rate of pattern usage."""
35
+ total_uses = self.success_count + self.error_count
36
+ return self.success_count / max(total_uses, 1)
37
+
38
+ @dataclass
39
+ class SearchResult:
40
+ """Result of a search operation."""
41
+ matches: List[re.Match]
42
+ pattern_hash: str
43
+ search_time_ms: float
44
+ text_length: int
45
+ match_count: int
46
+
47
+ @property
48
+ def match_positions(self) -> List[Tuple[int, int]]:
49
+ """Get list of (start, end) positions for all matches."""
50
+ return [(match.start(), match.end()) for match in self.matches]
51
+
52
class RegexPatternCache:
    """
    LRU cache of compiled regular-expression patterns with usage tracking.

    Entries are keyed by (pattern string, flags, pattern type).  Compilation
    failures are cached too (entry.pattern is None) so a repeatedly submitted
    broken pattern does not pay the compilation cost on every call.  All
    cache state is guarded by an RLock, so one instance may be shared across
    threads.
    """

    def __init__(self, cache_size_limit: int = 500):
        """
        Args:
            cache_size_limit: Maximum number of cached entries; the least
                recently used entry is evicted once the limit is reached.
        """
        self.cache_size_limit = cache_size_limit
        # OrderedDict gives O(1) LRU bookkeeping via move_to_end()/popitem().
        self.pattern_cache: "OrderedDict[str, PatternCacheEntry]" = OrderedDict()
        self.cache_lock = threading.RLock()

        # Pre-compilation rewrites applied to "regex"-type patterns.
        # NOTE(review): 'word_search' is unreachable -- any pattern that
        # passes its isalnum() test contains no regex metacharacters and is
        # therefore already claimed by 'simple_text', which is checked first.
        # Kept so the attribute's shape stays compatible for existing callers.
        self.pattern_optimizations = {
            # Patterns without metacharacters are literal text searches.
            'simple_text': {
                'detect': lambda p: not any(c in p for c in r'.*+?^${}[]|()\\'),
                'optimize': lambda p: re.escape(p)
            },
            # Single alphanumeric words get word-boundary anchoring.
            'word_search': {
                'detect': lambda p: p.isalnum() and ' ' not in p,
                'optimize': lambda p: r'\b' + re.escape(p) + r'\b'
            }
        }

    def get_compiled_pattern(self,
                             pattern_string: str,
                             flags: int = 0,
                             pattern_type: str = "regex") -> Optional[Pattern[str]]:
        """
        Return a compiled regex for ``pattern_string``, using the cache.

        Args:
            pattern_string: The pattern text.
            flags: Regex flags (re.IGNORECASE, etc.).
            pattern_type: "regex", "text" or "wildcard" (see _optimize_pattern).

        Returns:
            The compiled pattern, or None if compilation failed.  Failures
            are cached, so a cached failure is also answered with None.
        """
        cache_key = self._generate_cache_key(pattern_string, flags, pattern_type)

        with self.cache_lock:
            entry = self.pattern_cache.get(cache_key)
            if entry is not None:
                entry.access_count += 1
                entry.last_access = time.time()
                self.pattern_cache.move_to_end(cache_key)  # refresh LRU order
                return entry.pattern

        # Cache miss -- compile outside the lock (compilation can be slow).
        start_time = time.time()
        try:
            optimized = self._optimize_pattern(pattern_string, pattern_type)
            compiled = re.compile(optimized, flags)
        except re.error:
            # Cache the failure to avoid recompiling a known-bad pattern.
            self._cache_pattern(cache_key, PatternCacheEntry(
                pattern=None,
                pattern_string=pattern_string,
                flags=flags,
                access_count=1,
                compilation_time_ms=(time.time() - start_time) * 1000,
                error_count=1
            ))
            return None

        self._cache_pattern(cache_key, PatternCacheEntry(
            pattern=compiled,
            pattern_string=pattern_string,
            flags=flags,
            access_count=1,
            compilation_time_ms=(time.time() - start_time) * 1000,
            success_count=1
        ))
        return compiled

    def search_with_cache(self,
                          pattern_string: str,
                          text: str,
                          flags: int = 0,
                          pattern_type: str = "regex") -> "SearchResult":
        """
        Run ``finditer`` over ``text`` using a cached compiled pattern.

        Args:
            pattern_string: The pattern text.
            text: Text to search in.
            flags: Regex flags.
            pattern_type: Type of pattern ("regex", "text", "wildcard").

        Returns:
            SearchResult with the matches and timing metadata.  A failed
            compile or search yields an empty result rather than raising.
        """
        start_time = time.time()
        pattern_hash = self._generate_cache_key(pattern_string, flags, pattern_type)
        compiled = self.get_compiled_pattern(pattern_string, flags, pattern_type)

        if compiled is None:
            # Compilation failed (possibly a cached failure): empty result.
            return SearchResult(
                matches=[],
                pattern_hash=pattern_hash,
                search_time_ms=(time.time() - start_time) * 1000,
                text_length=len(text),
                match_count=0
            )

        try:
            matches = list(compiled.finditer(text))
        except Exception:
            # Matching itself failed (rare); record it and return empty.
            self._bump_counter(pattern_hash, 'error_count')
            return SearchResult(
                matches=[],
                pattern_hash=pattern_hash,
                search_time_ms=(time.time() - start_time) * 1000,
                text_length=len(text),
                match_count=0
            )

        self._bump_counter(pattern_hash, 'success_count')
        return SearchResult(
            matches=matches,
            pattern_hash=pattern_hash,
            search_time_ms=(time.time() - start_time) * 1000,
            text_length=len(text),
            match_count=len(matches)
        )

    def replace_with_cache(self,
                           pattern_string: str,
                           replacement: str,
                           text: str,
                           flags: int = 0,
                           pattern_type: str = "regex",
                           count: int = 0) -> Tuple[str, int]:
        """
        Perform a regex substitution using a cached compiled pattern.

        Args:
            pattern_string: The pattern text.
            replacement: Replacement string.
            text: Text to perform the replacement on.
            flags: Regex flags.
            pattern_type: Type of pattern.
            count: Maximum number of replacements (0 = all).

        Returns:
            Tuple of (modified_text, replacement_count).  On compile or
            substitution failure the original text is returned with count 0.
        """
        compiled = self.get_compiled_pattern(pattern_string, flags, pattern_type)
        if compiled is None:
            return text, 0

        pattern_hash = self._generate_cache_key(pattern_string, flags, pattern_type)
        try:
            # subn substitutes and counts in a single pass; the original used
            # sub() plus a second findall() scan just to obtain the count.
            modified_text, replacement_count = compiled.subn(replacement, text, count=count)
        except Exception:
            self._bump_counter(pattern_hash, 'error_count')
            return text, 0

        self._bump_counter(pattern_hash, 'success_count')
        return modified_text, replacement_count

    def _bump_counter(self, cache_key: str, counter: str) -> None:
        """Increment a per-pattern statistics counter if the entry exists."""
        with self.cache_lock:
            entry = self.pattern_cache.get(cache_key)
            if entry is not None:
                setattr(entry, counter, getattr(entry, counter) + 1)

    def _generate_cache_key(self, pattern_string: str, flags: int, pattern_type: str) -> str:
        """Derive a short stable cache key (MD5 used non-cryptographically)."""
        key_data = f"{pattern_string}_{flags}_{pattern_type}"
        return hashlib.md5(key_data.encode('utf-8')).hexdigest()[:16]

    def _optimize_pattern(self, pattern_string: str, pattern_type: str) -> str:
        """
        Rewrite the pattern according to its declared type.

        "text": escape everything (literal search).
        "wildcard": escape, then map ``*`` -> ``.*`` and ``?`` -> ``.``.
        "regex": apply the first matching rewrite from
            ``pattern_optimizations``; otherwise pass through unchanged.
        Any other type passes through unchanged.
        """
        if pattern_type == "text":
            return re.escape(pattern_string)
        if pattern_type == "wildcard":
            escaped = re.escape(pattern_string)
            return escaped.replace(r'\*', '.*').replace(r'\?', '.')
        if pattern_type == "regex":
            for opt_config in self.pattern_optimizations.values():
                if opt_config['detect'](pattern_string):
                    return opt_config['optimize'](pattern_string)
        return pattern_string

    def _cache_pattern(self, cache_key: str, entry: "PatternCacheEntry"):
        """Insert an entry, evicting the least recently used item when full."""
        with self.cache_lock:
            if len(self.pattern_cache) >= self.cache_size_limit:
                self.pattern_cache.popitem(last=False)  # evict LRU end
            self.pattern_cache[cache_key] = entry

    def get_cache_stats(self) -> Dict[str, Any]:
        """Return the current cache size and its configured limit."""
        with self.cache_lock:
            return {
                'cache_size': len(self.pattern_cache),
                'cache_size_limit': self.cache_size_limit
            }

    def get_pattern_stats(self) -> List[Dict[str, Any]]:
        """Return per-pattern usage stats, most-accessed first."""
        with self.cache_lock:
            stats = []
            for entry in self.pattern_cache.values():
                pattern_preview = entry.pattern_string[:50]
                if len(entry.pattern_string) > 50:
                    pattern_preview += '...'
                stats.append({
                    'pattern': pattern_preview,
                    'access_count': entry.access_count,
                    'success_rate': entry.success_rate,
                    'age_seconds': entry.age_seconds,
                    'compilation_time_ms': entry.compilation_time_ms
                })
            return sorted(stats, key=lambda x: x['access_count'], reverse=True)

    def clear_cache(self):
        """Remove every cached pattern."""
        with self.cache_lock:
            self.pattern_cache.clear()

    def clear_old_patterns(self, max_age_seconds: float = 3600):
        """Remove entries that have been idle longer than ``max_age_seconds``."""
        with self.cache_lock:
            stale_keys = [key for key, entry in self.pattern_cache.items()
                          if entry.age_seconds > max_age_seconds]
            for key in stale_keys:
                self.pattern_cache.pop(key, None)

    def optimize_cache_size(self, target_cache_size: int = 500):
        """
        Adjust cache_size_limit toward usage, bounded to [50, 1000].

        NOTE(review): the adjustment direction looks inverted (the limit
        grows while the cache is under-filled and shrinks when over-filled);
        preserved as-is to avoid changing tuning behavior -- confirm the
        intent with the author.
        """
        stats = self.get_cache_stats()
        if stats['cache_size'] < target_cache_size and self.cache_size_limit < 1000:
            self.cache_size_limit = min(1000, int(self.cache_size_limit * 1.2))
        elif stats['cache_size'] > target_cache_size and self.cache_size_limit > 50:
            self.cache_size_limit = max(50, int(self.cache_size_limit * 0.9))
361
+
362
class FindReplaceCache:
    """
    Result-level cache for find/replace operations, layered on a
    RegexPatternCache.

    Whole-operation results are memoized under a key built from the
    operation text, an MD5 digest of the content, and the options dict,
    so identical requests against unchanged content are answered without
    re-searching.
    """

    def __init__(self, pattern_cache: "RegexPatternCache"):
        """
        Args:
            pattern_cache: Shared compiled-pattern cache used for the
                underlying search/replace work.
        """
        self.pattern_cache = pattern_cache
        self.result_cache: Dict[str, Any] = {}
        self.cache_lock = threading.RLock()
        self.max_result_cache_size = 100  # simple FIFO bound

    def find_with_cache(self,
                        find_text: str,
                        content: str,
                        options: Dict[str, Any]) -> Dict[str, Any]:
        """
        Find ``find_text`` in ``content``, memoizing the whole result.

        Args:
            find_text: Text or pattern to find.
            content: Content to search in.
            options: Search options ("mode", "option"; see _parse_options).

        Returns:
            Dict with 'matches', 'match_count', 'match_positions',
            'search_time_ms', 'pattern_hash' and 'cache_hit'.
        """
        operation_key = self._generate_operation_key(find_text, content, options, "find")

        with self.cache_lock:
            cached = self.result_cache.get(operation_key)
            if cached is not None:
                # Hand back a shallow copy so callers cannot mutate the
                # cached dict (the original returned the cached object
                # itself, letting caller-side edits corrupt the cache).
                hit = dict(cached)
                hit['cache_hit'] = True
                return hit

        pattern_type, flags = self._parse_options(options)
        search_result = self.pattern_cache.search_with_cache(
            find_text, content, flags, pattern_type
        )

        result = {
            'matches': search_result.matches,
            'match_count': search_result.match_count,
            'match_positions': search_result.match_positions,
            'search_time_ms': search_result.search_time_ms,
            'pattern_hash': search_result.pattern_hash,
            'cache_hit': False
        }
        self._cache_result(operation_key, result)
        return result

    def replace_with_cache(self,
                           find_text: str,
                           replace_text: str,
                           content: str,
                           options: Dict[str, Any]) -> Dict[str, Any]:
        """
        Replace ``find_text`` with ``replace_text`` in ``content``, memoized.

        Args:
            find_text: Text or pattern to find.
            replace_text: Replacement text.
            content: Content to perform the replacement on.
            options: Replace options (same shape as find options).

        Returns:
            Dict with 'modified_text', 'replacement_count',
            'original_length', 'modified_length' and 'cache_hit'.
        """
        operation_key = self._generate_operation_key(
            f"{find_text}→{replace_text}", content, options, "replace"
        )

        with self.cache_lock:
            cached = self.result_cache.get(operation_key)
            if cached is not None:
                # Defensive copy; see find_with_cache.
                hit = dict(cached)
                hit['cache_hit'] = True
                return hit

        pattern_type, flags = self._parse_options(options)
        modified_text, replacement_count = self.pattern_cache.replace_with_cache(
            find_text, replace_text, content, flags, pattern_type
        )

        result = {
            'modified_text': modified_text,
            'replacement_count': replacement_count,
            'original_length': len(content),
            'modified_length': len(modified_text),
            'cache_hit': False
        }
        self._cache_result(operation_key, result)
        return result

    def _generate_operation_key(self,
                                operation_text: str,
                                content: str,
                                options: Dict[str, Any],
                                operation_type: str) -> str:
        """
        Build a compact memo key for one operation.

        The content participates as an MD5 digest so large documents do not
        bloat the key (MD5 used non-cryptographically).
        """
        content_hash = hashlib.md5(content.encode('utf-8')).hexdigest()[:16]
        options_str = str(sorted(options.items()))
        key_data = f"{operation_type}_{operation_text}_{content_hash}_{options_str}"
        return hashlib.md5(key_data.encode('utf-8')).hexdigest()[:16]

    def _parse_options(self, options: Dict[str, Any]) -> Tuple[str, int]:
        """
        Map UI-style options onto (pattern_type, regex flags).

        "mode" == "Regex" selects regex patterns, anything else literal text;
        "option" defaults to "ignore_case", and "wildcards" switches the
        pattern type and also ignores case.
        """
        pattern_type = "regex" if options.get("mode") == "Regex" else "text"
        flags = 0

        option_name = options.get("option", "ignore_case")
        if option_name == "ignore_case":
            flags |= re.IGNORECASE
        elif option_name == "wildcards":
            pattern_type = "wildcard"
            flags |= re.IGNORECASE

        return pattern_type, flags

    def _cache_result(self, operation_key: str, result: Dict[str, Any]):
        """Store a defensive copy, evicting the oldest entry when full (FIFO)."""
        with self.cache_lock:
            if len(self.result_cache) >= self.max_result_cache_size:
                self.result_cache.pop(next(iter(self.result_cache)))
            # Copy so later caller-side mutation of `result` cannot corrupt
            # the cached value.
            self.result_cache[operation_key] = dict(result)

    def clear_cache(self):
        """Drop all memoized results."""
        with self.cache_lock:
            self.result_cache.clear()
511
+
512
# Module-level singletons, created lazily on first use by the accessor
# functions below.
_global_regex_cache = None
_global_find_replace_cache = None
516
def get_regex_pattern_cache() -> RegexPatternCache:
    """Return the process-wide RegexPatternCache, creating it on first use."""
    global _global_regex_cache
    cache = _global_regex_cache
    if cache is None:
        cache = RegexPatternCache()
        _global_regex_cache = cache
    return cache
522
+
523
def get_find_replace_cache() -> FindReplaceCache:
    """
    Return the process-wide FindReplaceCache, creating it on first use.

    Delegates to get_regex_pattern_cache() for the underlying pattern cache
    instead of duplicating its lazy-initialization logic (the original
    re-implemented the None-check on _global_regex_cache inline).
    """
    global _global_find_replace_cache
    if _global_find_replace_cache is None:
        _global_find_replace_cache = FindReplaceCache(get_regex_pattern_cache())
    return _global_find_replace_cache