pomera-ai-commander 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (192) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +680 -0
  3. package/bin/pomera-ai-commander.js +62 -0
  4. package/core/__init__.py +66 -0
  5. package/core/__pycache__/__init__.cpython-313.pyc +0 -0
  6. package/core/__pycache__/app_context.cpython-313.pyc +0 -0
  7. package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
  8. package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
  9. package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
  10. package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
  11. package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
  12. package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
  13. package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
  14. package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
  15. package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
  16. package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
  17. package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
  18. package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
  19. package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
  20. package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
  21. package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
  22. package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
  23. package/core/__pycache__/error_service.cpython-313.pyc +0 -0
  24. package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
  25. package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
  26. package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
  27. package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
  28. package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
  29. package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
  30. package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
  31. package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
  32. package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
  33. package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
  34. package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
  35. package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
  36. package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
  37. package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
  38. package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
  39. package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
  40. package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
  41. package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
  42. package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
  43. package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
  44. package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
  45. package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
  46. package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
  47. package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
  48. package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
  49. package/core/app_context.py +482 -0
  50. package/core/async_text_processor.py +422 -0
  51. package/core/backup_manager.py +656 -0
  52. package/core/backup_recovery_manager.py +1034 -0
  53. package/core/content_hash_cache.py +509 -0
  54. package/core/context_menu.py +313 -0
  55. package/core/data_validator.py +1067 -0
  56. package/core/database_connection_manager.py +745 -0
  57. package/core/database_curl_settings_manager.py +609 -0
  58. package/core/database_promera_ai_settings_manager.py +447 -0
  59. package/core/database_schema.py +412 -0
  60. package/core/database_schema_manager.py +396 -0
  61. package/core/database_settings_manager.py +1508 -0
  62. package/core/database_settings_manager_interface.py +457 -0
  63. package/core/dialog_manager.py +735 -0
  64. package/core/efficient_line_numbers.py +511 -0
  65. package/core/error_handler.py +747 -0
  66. package/core/error_service.py +431 -0
  67. package/core/event_consolidator.py +512 -0
  68. package/core/mcp/__init__.py +43 -0
  69. package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
  70. package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
  71. package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
  72. package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
  73. package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
  74. package/core/mcp/protocol.py +288 -0
  75. package/core/mcp/schema.py +251 -0
  76. package/core/mcp/server_stdio.py +299 -0
  77. package/core/mcp/tool_registry.py +2345 -0
  78. package/core/memory_efficient_text_widget.py +712 -0
  79. package/core/migration_manager.py +915 -0
  80. package/core/migration_test_suite.py +1086 -0
  81. package/core/migration_validator.py +1144 -0
  82. package/core/optimized_find_replace.py +715 -0
  83. package/core/optimized_pattern_engine.py +424 -0
  84. package/core/optimized_search_highlighter.py +553 -0
  85. package/core/performance_monitor.py +675 -0
  86. package/core/persistence_manager.py +713 -0
  87. package/core/progressive_stats_calculator.py +632 -0
  88. package/core/regex_pattern_cache.py +530 -0
  89. package/core/regex_pattern_library.py +351 -0
  90. package/core/search_operation_manager.py +435 -0
  91. package/core/settings_defaults_registry.py +1087 -0
  92. package/core/settings_integrity_validator.py +1112 -0
  93. package/core/settings_serializer.py +558 -0
  94. package/core/settings_validator.py +1824 -0
  95. package/core/smart_stats_calculator.py +710 -0
  96. package/core/statistics_update_manager.py +619 -0
  97. package/core/stats_config_manager.py +858 -0
  98. package/core/streaming_text_handler.py +723 -0
  99. package/core/task_scheduler.py +596 -0
  100. package/core/update_pattern_library.py +169 -0
  101. package/core/visibility_monitor.py +596 -0
  102. package/core/widget_cache.py +498 -0
  103. package/mcp.json +61 -0
  104. package/package.json +57 -0
  105. package/pomera.py +7483 -0
  106. package/pomera_mcp_server.py +144 -0
  107. package/tools/__init__.py +5 -0
  108. package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  109. package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
  110. package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
  111. package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
  112. package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
  113. package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
  114. package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
  115. package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
  116. package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
  117. package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
  118. package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
  119. package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
  120. package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
  121. package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
  122. package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
  123. package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
  124. package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
  125. package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
  126. package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
  127. package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
  128. package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
  129. package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
  130. package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
  131. package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
  132. package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
  133. package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
  134. package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
  135. package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
  136. package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
  137. package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
  138. package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
  139. package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
  140. package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
  141. package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
  142. package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
  143. package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
  144. package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
  145. package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
  146. package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
  147. package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
  148. package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
  149. package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
  150. package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
  151. package/tools/ai_tools.py +2892 -0
  152. package/tools/ascii_art_generator.py +353 -0
  153. package/tools/base64_tools.py +184 -0
  154. package/tools/base_tool.py +511 -0
  155. package/tools/case_tool.py +309 -0
  156. package/tools/column_tools.py +396 -0
  157. package/tools/cron_tool.py +885 -0
  158. package/tools/curl_history.py +601 -0
  159. package/tools/curl_processor.py +1208 -0
  160. package/tools/curl_settings.py +503 -0
  161. package/tools/curl_tool.py +5467 -0
  162. package/tools/diff_viewer.py +1072 -0
  163. package/tools/email_extraction_tool.py +249 -0
  164. package/tools/email_header_analyzer.py +426 -0
  165. package/tools/extraction_tools.py +250 -0
  166. package/tools/find_replace.py +1751 -0
  167. package/tools/folder_file_reporter.py +1463 -0
  168. package/tools/folder_file_reporter_adapter.py +480 -0
  169. package/tools/generator_tools.py +1217 -0
  170. package/tools/hash_generator.py +256 -0
  171. package/tools/html_tool.py +657 -0
  172. package/tools/huggingface_helper.py +449 -0
  173. package/tools/jsonxml_tool.py +730 -0
  174. package/tools/line_tools.py +419 -0
  175. package/tools/list_comparator.py +720 -0
  176. package/tools/markdown_tools.py +562 -0
  177. package/tools/mcp_widget.py +1417 -0
  178. package/tools/notes_widget.py +973 -0
  179. package/tools/number_base_converter.py +373 -0
  180. package/tools/regex_extractor.py +572 -0
  181. package/tools/slug_generator.py +311 -0
  182. package/tools/sorter_tools.py +459 -0
  183. package/tools/string_escape_tool.py +393 -0
  184. package/tools/text_statistics_tool.py +366 -0
  185. package/tools/text_wrapper.py +431 -0
  186. package/tools/timestamp_converter.py +422 -0
  187. package/tools/tool_loader.py +710 -0
  188. package/tools/translator_tools.py +523 -0
  189. package/tools/url_link_extractor.py +262 -0
  190. package/tools/url_parser.py +205 -0
  191. package/tools/whitespace_tools.py +356 -0
  192. package/tools/word_frequency_counter.py +147 -0
@@ -0,0 +1,422 @@
1
+ """
2
+ Asynchronous text processing framework for Promera AI Commander.
3
+ Handles heavy text operations in background threads to prevent UI freezing.
4
+ """
5
+
6
+ import threading
7
+ import time
8
+ import queue
9
+ import hashlib
10
+ from concurrent.futures import ThreadPoolExecutor, Future, as_completed
11
+ from dataclasses import dataclass
12
+ from typing import Dict, List, Optional, Callable, Any, Tuple
13
+ from enum import Enum
14
+ import logging
15
+
16
class ProcessingMode(Enum):
    """How a piece of content should be processed, chosen by its size."""

    SYNC = "sync"        # small content: run inline on the calling thread
    ASYNC = "async"      # medium content: one background task
    CHUNKED = "chunked"  # large content: split into chunks and recombine
21
+
22
@dataclass
class TextProcessingContext:
    """Context information for text processing operations."""
    content: str
    content_hash: str               # MD5 hex digest of the UTF-8 encoded content
    size_bytes: int                 # UTF-8 encoded size
    line_count: int                 # number of '\n' characters in the content
    processing_mode: ProcessingMode
    chunk_size: int = 50000
    tool_name: str = ""
    callback_id: str = ""

    @classmethod
    def from_content(cls, content: str, tool_name: str = "", callback_id: str = ""):
        """Build a context from raw text, selecting a processing mode by size."""
        encoded = content.encode('utf-8')
        n_bytes = len(encoded)

        # Size thresholds: <10KB runs inline, <100KB runs as one background
        # task, anything larger is chunked.
        if n_bytes < 10000:
            mode = ProcessingMode.SYNC
        elif n_bytes < 100000:
            mode = ProcessingMode.ASYNC
        else:
            mode = ProcessingMode.CHUNKED

        return cls(
            content=content,
            content_hash=hashlib.md5(encoded).hexdigest(),
            size_bytes=n_bytes,
            line_count=content.count('\n'),
            processing_mode=mode,
            tool_name=tool_name,
            callback_id=callback_id,
        )

    @property
    def requires_async_processing(self) -> bool:
        """True when the content is large enough to need a background task."""
        return self.processing_mode in (ProcessingMode.ASYNC, ProcessingMode.CHUNKED)
63
+
64
@dataclass
class ProcessingResult:
    """Outcome of one text processing operation."""
    success: bool                                    # True when the processor ran without raising
    result: str                                      # processed text; "" on failure
    error_message: Optional[str] = None              # str(exception) when success is False
    processing_time_ms: float = 0.0                  # wall-clock duration of the operation
    chunks_processed: int = 1                        # number of chunks handled (1 for non-chunked)
    context: Optional[TextProcessingContext] = None  # originating context, if still available
73
+
74
class AsyncTextProcessor:
    """Asynchronous text processor with background threading and chunking support.

    Work is submitted to a small ThreadPoolExecutor so heavy text operations
    do not block the UI thread.  Content flagged as CHUNKED is split at word
    boundaries, processed piece by piece (with optional progress reporting),
    and recombined; everything else runs as a single background operation.
    Completion results are delivered to the caller-supplied callback on a
    worker thread.
    """

    def __init__(self, max_workers: int = 2, logger: Optional[logging.Logger] = None):
        """
        Args:
            max_workers: Size of the background thread pool.
            logger: Logger for diagnostics; defaults to this module's logger.
        """
        self.max_workers = max_workers
        self.logger = logger or logging.getLogger(__name__)

        # Thread pool for async operations.
        self.executor = ThreadPoolExecutor(
            max_workers=max_workers, thread_name_prefix="AsyncTextProcessor"
        )

        # Per-task bookkeeping, all keyed by task_id.
        self.active_tasks: Dict[str, Future] = {}
        self.task_callbacks: Dict[str, Callable] = {}
        self.task_contexts: Dict[str, TextProcessingContext] = {}
        self.progress_callbacks: Dict[str, Callable] = {}

        # Once set, process_text_async refuses new work.
        self._shutdown = False

        self.logger.info(f"AsyncTextProcessor initialized with {max_workers} workers")

    def process_text_async(self,
                          context: TextProcessingContext,
                          processor_func: Callable[[str], str],
                          callback: Callable[[ProcessingResult], None],
                          progress_callback: Optional[Callable[[int, int], None]] = None) -> str:
        """
        Process text asynchronously with callback when complete.

        Args:
            context: Text processing context
            processor_func: Function to process the text
            callback: Callback function for when processing is complete
            progress_callback: Optional callback for progress updates (current, total)

        Returns:
            Task ID for tracking/cancellation

        Raises:
            RuntimeError: If the processor has been shut down.
        """
        if self._shutdown:
            raise RuntimeError("AsyncTextProcessor is shut down")

        # Microsecond timestamp keeps IDs unique across rapid submissions.
        task_id = f"{context.tool_name}_{context.callback_id}_{int(time.time() * 1000000)}"

        # Register bookkeeping before submitting so the done-callback always
        # finds its entries, even if the task finishes immediately.
        self.task_callbacks[task_id] = callback
        self.task_contexts[task_id] = context
        if progress_callback:
            self.progress_callbacks[task_id] = progress_callback

        if context.processing_mode == ProcessingMode.CHUNKED:
            future = self.executor.submit(self._process_chunked, context, processor_func, task_id)
        else:
            future = self.executor.submit(self._process_single, context, processor_func, task_id)

        self.active_tasks[task_id] = future
        future.add_done_callback(lambda f: self._on_task_complete(task_id, f))

        self.logger.debug(f"Started async processing: {task_id} ({context.processing_mode.value})")
        return task_id

    def _process_single(self, context: TextProcessingContext, processor_func: Callable, task_id: str) -> ProcessingResult:
        """Run processor_func over the whole content in one call (worker thread)."""
        start_time = time.time()

        try:
            result = processor_func(context.content)
            return ProcessingResult(
                success=True,
                result=result,
                processing_time_ms=(time.time() - start_time) * 1000,
                chunks_processed=1,
                context=context
            )
        except Exception as e:
            self.logger.error(f"Error in single processing {task_id}: {e}")
            return ProcessingResult(
                success=False,
                result="",
                error_message=str(e),
                processing_time_ms=(time.time() - start_time) * 1000,
                context=context
            )

    def _process_chunked(self, context: TextProcessingContext, processor_func: Callable, task_id: str) -> ProcessingResult:
        """Split large content into chunks, process each, and recombine (worker thread)."""
        start_time = time.time()

        try:
            chunks = self.chunk_large_text(context.content, context.chunk_size)
            processed_chunks = []

            for i, chunk in enumerate(chunks):
                # Bail out promptly if the caller cancelled the task.
                if self._is_task_cancelled(task_id):
                    return ProcessingResult(
                        success=False,
                        result="",
                        error_message="Task cancelled",
                        processing_time_ms=(time.time() - start_time) * 1000,
                        context=context
                    )

                processed_chunks.append(processor_func(chunk))

                # Report progress; a broken callback must not kill the task.
                progress = self.progress_callbacks.get(task_id)
                if progress is not None:
                    try:
                        progress(i + 1, len(chunks))
                    except Exception as e:
                        self.logger.warning(f"Progress callback error: {e}")

            result = self._combine_chunks(processed_chunks, context.tool_name)
            return ProcessingResult(
                success=True,
                result=result,
                processing_time_ms=(time.time() - start_time) * 1000,
                chunks_processed=len(chunks),
                context=context
            )

        except Exception as e:
            self.logger.error(f"Error in chunked processing {task_id}: {e}")
            return ProcessingResult(
                success=False,
                result="",
                error_message=str(e),
                processing_time_ms=(time.time() - start_time) * 1000,
                context=context
            )

    def chunk_large_text(self, text: str, chunk_size: int = 50000) -> List[str]:
        """
        Break large text into processable chunks.

        Scans backwards up to 100 characters from each cut point looking for
        whitespace, so words are not split mid-chunk when avoidable.  The
        concatenation of the returned chunks always equals ``text``.

        Args:
            text: The text to split.
            chunk_size: Maximum characters per chunk; must be >= 1.

        Returns:
            List of chunks covering all of ``text``.

        Raises:
            ValueError: If chunk_size is less than 1 (previously this caused
                an infinite loop appending empty chunks).
        """
        if chunk_size < 1:
            raise ValueError(f"chunk_size must be >= 1, got {chunk_size}")

        if len(text) <= chunk_size:
            return [text]

        chunks = []
        start = 0

        while start < len(text):
            end = start + chunk_size

            if end >= len(text):
                # Final (possibly short) chunk.
                chunks.append(text[start:])
                break

            # Prefer a whitespace break point within the trailing 100 chars;
            # fall back to a hard cut at `end`.
            break_point = end
            for i in range(end, max(start, end - 100), -1):
                if text[i] in ' \n\t':
                    break_point = i
                    break

            chunks.append(text[start:break_point])
            start = break_point

        return chunks

    def _combine_chunks(self, chunks: List[str], tool_name: str) -> str:
        """Combine processed chunks back into a single result."""
        if tool_name in ["Number Sorter", "Alphabetical Sorter"]:
            # Sorter output is line-oriented: join chunks with newlines and
            # drop blank lines introduced at chunk seams.
            # NOTE(review): chunks were processed independently, so the
            # combined result is NOT globally re-sorted here — confirm that
            # per-chunk ordering is acceptable for very large inputs.
            combined = '\n'.join(chunks)
            lines = [line for line in combined.splitlines() if line.strip()]
            return '\n'.join(lines)
        else:
            # For most tools simple concatenation preserves the text stream.
            return ''.join(chunks)

    def _cleanup_task(self, task_id: str):
        """Drop all bookkeeping for a finished or cancelled task."""
        self.active_tasks.pop(task_id, None)
        self.task_callbacks.pop(task_id, None)
        self.task_contexts.pop(task_id, None)
        self.progress_callbacks.pop(task_id, None)

    def _on_task_complete(self, task_id: str, future: Future):
        """Deliver the result to the registered callback, then clean up.

        Runs on a worker thread via Future.add_done_callback.
        """
        try:
            # future.result() re-raises worker exceptions and raises
            # CancelledError for cancelled tasks; both land in the except
            # branch below and are reported as failed results.
            result = future.result()
            callback = self.task_callbacks.get(task_id)

            if callback:
                try:
                    callback(result)
                except Exception as e:
                    self.logger.error(f"Callback error for task {task_id}: {e}")

            self.logger.debug(f"Completed async processing: {task_id} "
                             f"({result.processing_time_ms:.1f}ms, "
                             f"{result.chunks_processed} chunks)")

        except Exception as e:
            self.logger.error(f"Task completion error for {task_id}: {e}")

            result = ProcessingResult(
                success=False,
                result="",
                error_message=str(e),
                context=self.task_contexts.get(task_id)
            )

            callback = self.task_callbacks.get(task_id)
            if callback:
                try:
                    callback(result)
                except Exception as callback_error:
                    self.logger.error(f"Callback error for failed task {task_id}: {callback_error}")

        finally:
            self._cleanup_task(task_id)

    def cancel_processing(self, task_id: str) -> bool:
        """
        Cancel an ongoing processing operation.

        Args:
            task_id: ID of the task to cancel

        Returns:
            True if task was cancelled, False if not found or already complete
        """
        future = self.active_tasks.get(task_id)
        if future is None:
            return False

        # Future.cancel() only succeeds while the task is still queued.
        cancelled = future.cancel()
        if cancelled:
            self.logger.info(f"Cancelled task: {task_id}")
            self._cleanup_task(task_id)

        return cancelled

    def _is_task_cancelled(self, task_id: str) -> bool:
        """Check whether a task's future has been cancelled.

        NOTE(review): Future.cancel() fails once a task is running, so a
        chunked task already in progress will not observe cancellation here;
        the mid-chunk check in _process_chunked only fires for tasks
        cancelled before they started.
        """
        future = self.active_tasks.get(task_id)
        return future.cancelled() if future is not None else False

    def cancel_all_tasks(self):
        """Cancel all active tasks and return the number actually cancelled."""
        # Snapshot the keys: cancel_processing mutates active_tasks.
        task_ids = list(self.active_tasks.keys())
        cancelled_count = sum(1 for task_id in task_ids if self.cancel_processing(task_id))

        self.logger.info(f"Cancelled {cancelled_count} tasks")
        return cancelled_count

    def get_active_task_count(self) -> int:
        """Get the number of currently active tasks."""
        return len(self.active_tasks)

    def get_active_task_info(self) -> Dict[str, Dict[str, Any]]:
        """Get a snapshot of per-task metadata for all active tasks."""
        info = {}

        for task_id, future in self.active_tasks.items():
            context = self.task_contexts.get(task_id)
            info[task_id] = {
                'tool_name': context.tool_name if context else 'unknown',
                'content_size': context.size_bytes if context else 0,
                'processing_mode': context.processing_mode.value if context else 'unknown',
                'is_done': future.done(),
                'is_cancelled': future.cancelled()
            }

        return info

    def wait_for_completion(self, timeout: Optional[float] = None) -> bool:
        """
        Wait for all active tasks to complete.

        Args:
            timeout: Maximum time to wait in seconds

        Returns:
            True if all tasks completed, False if timeout occurred
        """
        # as_completed raises concurrent.futures.TimeoutError, which is only
        # an alias of the builtin TimeoutError from Python 3.11 onward.
        # Import it explicitly so the timeout is caught on older versions too.
        from concurrent.futures import TimeoutError as FuturesTimeoutError

        if not self.active_tasks:
            return True

        try:
            futures = list(self.active_tasks.values())
            for _ in as_completed(futures, timeout=timeout):
                pass  # Just wait for completion
            return True
        except FuturesTimeoutError:
            return False

    def shutdown(self, wait: bool = True, timeout: Optional[float] = None):
        """
        Shutdown the async processor.

        Args:
            wait: Whether to wait for active tasks to complete
            timeout: Maximum time to wait for shutdown
        """
        self._shutdown = True

        if wait:
            self.wait_for_completion(timeout)
        else:
            self.cancel_all_tasks()

        self.executor.shutdown(wait=wait)
        self.logger.info("AsyncTextProcessor shut down")
406
+
407
# Lazily-created, process-wide AsyncTextProcessor shared by all callers.
_global_async_processor = None

def get_async_text_processor() -> AsyncTextProcessor:
    """Return the shared async text processor, creating it on first use."""
    global _global_async_processor
    if _global_async_processor is None:
        _global_async_processor = AsyncTextProcessor()
    return _global_async_processor

def shutdown_async_processor():
    """Shut down and discard the shared async processor, if one exists."""
    global _global_async_processor
    if _global_async_processor is not None:
        _global_async_processor.shutdown()
        _global_async_processor = None