pomera-ai-commander 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +680 -0
- package/bin/pomera-ai-commander.js +62 -0
- package/core/__init__.py +66 -0
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/app_context.py +482 -0
- package/core/async_text_processor.py +422 -0
- package/core/backup_manager.py +656 -0
- package/core/backup_recovery_manager.py +1034 -0
- package/core/content_hash_cache.py +509 -0
- package/core/context_menu.py +313 -0
- package/core/data_validator.py +1067 -0
- package/core/database_connection_manager.py +745 -0
- package/core/database_curl_settings_manager.py +609 -0
- package/core/database_promera_ai_settings_manager.py +447 -0
- package/core/database_schema.py +412 -0
- package/core/database_schema_manager.py +396 -0
- package/core/database_settings_manager.py +1508 -0
- package/core/database_settings_manager_interface.py +457 -0
- package/core/dialog_manager.py +735 -0
- package/core/efficient_line_numbers.py +511 -0
- package/core/error_handler.py +747 -0
- package/core/error_service.py +431 -0
- package/core/event_consolidator.py +512 -0
- package/core/mcp/__init__.py +43 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/core/mcp/protocol.py +288 -0
- package/core/mcp/schema.py +251 -0
- package/core/mcp/server_stdio.py +299 -0
- package/core/mcp/tool_registry.py +2345 -0
- package/core/memory_efficient_text_widget.py +712 -0
- package/core/migration_manager.py +915 -0
- package/core/migration_test_suite.py +1086 -0
- package/core/migration_validator.py +1144 -0
- package/core/optimized_find_replace.py +715 -0
- package/core/optimized_pattern_engine.py +424 -0
- package/core/optimized_search_highlighter.py +553 -0
- package/core/performance_monitor.py +675 -0
- package/core/persistence_manager.py +713 -0
- package/core/progressive_stats_calculator.py +632 -0
- package/core/regex_pattern_cache.py +530 -0
- package/core/regex_pattern_library.py +351 -0
- package/core/search_operation_manager.py +435 -0
- package/core/settings_defaults_registry.py +1087 -0
- package/core/settings_integrity_validator.py +1112 -0
- package/core/settings_serializer.py +558 -0
- package/core/settings_validator.py +1824 -0
- package/core/smart_stats_calculator.py +710 -0
- package/core/statistics_update_manager.py +619 -0
- package/core/stats_config_manager.py +858 -0
- package/core/streaming_text_handler.py +723 -0
- package/core/task_scheduler.py +596 -0
- package/core/update_pattern_library.py +169 -0
- package/core/visibility_monitor.py +596 -0
- package/core/widget_cache.py +498 -0
- package/mcp.json +61 -0
- package/package.json +57 -0
- package/pomera.py +7483 -0
- package/pomera_mcp_server.py +144 -0
- package/tools/__init__.py +5 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
- package/tools/ai_tools.py +2892 -0
- package/tools/ascii_art_generator.py +353 -0
- package/tools/base64_tools.py +184 -0
- package/tools/base_tool.py +511 -0
- package/tools/case_tool.py +309 -0
- package/tools/column_tools.py +396 -0
- package/tools/cron_tool.py +885 -0
- package/tools/curl_history.py +601 -0
- package/tools/curl_processor.py +1208 -0
- package/tools/curl_settings.py +503 -0
- package/tools/curl_tool.py +5467 -0
- package/tools/diff_viewer.py +1072 -0
- package/tools/email_extraction_tool.py +249 -0
- package/tools/email_header_analyzer.py +426 -0
- package/tools/extraction_tools.py +250 -0
- package/tools/find_replace.py +1751 -0
- package/tools/folder_file_reporter.py +1463 -0
- package/tools/folder_file_reporter_adapter.py +480 -0
- package/tools/generator_tools.py +1217 -0
- package/tools/hash_generator.py +256 -0
- package/tools/html_tool.py +657 -0
- package/tools/huggingface_helper.py +449 -0
- package/tools/jsonxml_tool.py +730 -0
- package/tools/line_tools.py +419 -0
- package/tools/list_comparator.py +720 -0
- package/tools/markdown_tools.py +562 -0
- package/tools/mcp_widget.py +1417 -0
- package/tools/notes_widget.py +973 -0
- package/tools/number_base_converter.py +373 -0
- package/tools/regex_extractor.py +572 -0
- package/tools/slug_generator.py +311 -0
- package/tools/sorter_tools.py +459 -0
- package/tools/string_escape_tool.py +393 -0
- package/tools/text_statistics_tool.py +366 -0
- package/tools/text_wrapper.py +431 -0
- package/tools/timestamp_converter.py +422 -0
- package/tools/tool_loader.py +710 -0
- package/tools/translator_tools.py +523 -0
- package/tools/url_link_extractor.py +262 -0
- package/tools/url_parser.py +205 -0
- package/tools/whitespace_tools.py +356 -0
- package/tools/word_frequency_counter.py +147 -0
package/core/streaming_text_handler.py
@@ -0,0 +1,723 @@
+"""
+Streaming Text Handler Module
+
+Provides efficient handling of streaming text content, particularly for AI responses.
+Implements progressive text insertion with minimal UI blocking and diff-based updates.
+
+Key Components:
+- StreamingTextHandler: Handles progressive text insertion for streaming AI responses
+- IncrementalTextUpdater: Uses diff algorithm for efficient large text updates
+"""
+
+import tkinter as tk
+from typing import Optional, Callable, List, Tuple, Any
+from dataclasses import dataclass, field
+from enum import Enum
+import difflib
+import time
+import threading
+from queue import Queue, Empty
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class StreamState(Enum):
+    """State of the streaming handler."""
+    IDLE = "idle"
+    STREAMING = "streaming"
+    PAUSED = "paused"
+    COMPLETED = "completed"
+    ERROR = "error"
+
+
+@dataclass
+class StreamConfig:
+    """Configuration for streaming behavior."""
+    chunk_delay_ms: int = 10  # Delay between chunk insertions
+    batch_size: int = 5  # Number of chunks to batch before UI update
+    max_buffer_size: int = 1000  # Maximum chunks to buffer
+    auto_scroll: bool = True  # Auto-scroll to end during streaming
+    highlight_new_text: bool = False  # Temporarily highlight new text
+    highlight_duration_ms: int = 500  # Duration of highlight
+    use_threading: bool = True  # Use background thread for processing
+
+
+@dataclass
+class StreamMetrics:
+    """Metrics for streaming performance."""
+    total_chunks: int = 0
+    total_characters: int = 0
+    start_time: float = 0.0
+    end_time: float = 0.0
+    ui_updates: int = 0
+
+    @property
+    def duration(self) -> float:
+        """Get streaming duration in seconds."""
+        if self.end_time > 0:
+            return self.end_time - self.start_time
+        elif self.start_time > 0:
+            return time.time() - self.start_time
+        return 0.0
+
+    @property
+    def chars_per_second(self) -> float:
+        """Get characters per second rate."""
+        duration = self.duration
+        if duration > 0:
+            return self.total_characters / duration
+        return 0.0
+
+
+class StreamingTextHandler:
+    """
+    Handles progressive text insertion for streaming content.
+
+    Designed for AI response streaming where text arrives in chunks
+    and needs to be displayed progressively without blocking the UI.
+
+    Usage:
+        handler = StreamingTextHandler(text_widget)
+        handler.start_stream()
+        for chunk in ai_response_stream:
+            handler.add_chunk(chunk)
+        handler.end_stream()
+    """
+
+    def __init__(
+        self,
+        text_widget: tk.Text,
+        config: Optional[StreamConfig] = None,
+        on_progress: Optional[Callable[[int, int], None]] = None,
+        on_complete: Optional[Callable[[StreamMetrics], None]] = None,
+        on_error: Optional[Callable[[Exception], None]] = None
+    ):
+        """
+        Initialize the streaming text handler.
+
+        Args:
+            text_widget: The Tkinter Text widget to insert text into
+            config: Configuration for streaming behavior
+            on_progress: Callback for progress updates (chars_received, total_chars)
+            on_complete: Callback when streaming completes
+            on_error: Callback when an error occurs
+        """
+        self.text_widget = text_widget
+        self.config = config or StreamConfig()
+        self.on_progress = on_progress
+        self.on_complete = on_complete
+        self.on_error = on_error
+
+        self._state = StreamState.IDLE
+        self._metrics = StreamMetrics()
+        self._chunk_queue: Queue = Queue(maxsize=self.config.max_buffer_size)
+        self._buffer: List[str] = []
+        self._insert_position: str = "end"
+        self._stream_tag = "streaming_text"
+        self._processing_thread: Optional[threading.Thread] = None
+        self._stop_event = threading.Event()
+        self._lock = threading.Lock()
+
+        # Configure text tag for highlighting new text
+        if self.config.highlight_new_text:
+            self.text_widget.tag_configure(
+                self._stream_tag,
+                background="#e6f3ff"  # Light blue highlight
+            )
+
+    @property
+    def state(self) -> StreamState:
+        """Get current streaming state."""
+        return self._state
+
+    @property
+    def metrics(self) -> StreamMetrics:
+        """Get current streaming metrics."""
+        return self._metrics
+
+    @property
+    def is_streaming(self) -> bool:
+        """Check if currently streaming."""
+        return self._state == StreamState.STREAMING
+
+    def start_stream(
+        self,
+        clear_existing: bool = True,
+        insert_position: str = "end"
+    ) -> bool:
+        """
+        Start a new streaming session.
+
+        Args:
+            clear_existing: Whether to clear existing text in widget
+            insert_position: Position to insert text ("end" or index)
+
+        Returns:
+            True if stream started successfully
+        """
+        if self._state == StreamState.STREAMING:
+            logger.warning("Stream already in progress")
+            return False
+
+        try:
+            with self._lock:
+                # Reset state
+                self._state = StreamState.STREAMING
+                self._metrics = StreamMetrics()
+                self._metrics.start_time = time.time()
+                self._buffer.clear()
+                self._insert_position = insert_position
+                self._stop_event.clear()
+
+                # Clear the queue
+                while not self._chunk_queue.empty():
+                    try:
+                        self._chunk_queue.get_nowait()
+                    except Empty:
+                        break
+
+                # Clear existing text if requested
+                if clear_existing:
+                    self.text_widget.config(state=tk.NORMAL)
+                    self.text_widget.delete("1.0", tk.END)
+
+                # Start processing thread if configured
+                if self.config.use_threading:
+                    self._processing_thread = threading.Thread(
+                        target=self._process_queue,
+                        daemon=True
+                    )
+                    self._processing_thread.start()
+
+            logger.debug("Stream started")
+            return True
+
+        except Exception as e:
+            self._state = StreamState.ERROR
+            logger.error(f"Failed to start stream: {e}")
+            if self.on_error:
+                self.on_error(e)
+            return False
+
+    def add_chunk(self, chunk: str) -> bool:
+        """
+        Add a text chunk to the stream.
+
+        Args:
+            chunk: Text chunk to add
+
+        Returns:
+            True if chunk was added successfully
+        """
+        if self._state != StreamState.STREAMING:
+            logger.warning(f"Cannot add chunk in state: {self._state}")
+            return False
+
+        if not chunk:
+            return True
+
+        try:
+            self._metrics.total_chunks += 1
+            self._metrics.total_characters += len(chunk)
+
+            if self.config.use_threading:
+                # Add to queue for background processing
+                self._chunk_queue.put(chunk, timeout=1.0)
+            else:
+                # Direct insertion
+                self._insert_chunk(chunk)
+
+            # Progress callback
+            if self.on_progress:
+                self.on_progress(
+                    self._metrics.total_characters,
+                    -1  # Unknown total
+                )
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Failed to add chunk: {e}")
+            if self.on_error:
+                self.on_error(e)
+            return False
+
+    def end_stream(self) -> StreamMetrics:
+        """
+        End the streaming session.
+
+        Returns:
+            Final streaming metrics
+        """
+        if self._state != StreamState.STREAMING:
+            logger.warning(f"Cannot end stream in state: {self._state}")
+            return self._metrics
+
+        try:
+            with self._lock:
+                self._stop_event.set()
+
+                # Wait for processing thread to finish
+                if self._processing_thread and self._processing_thread.is_alive():
+                    self._processing_thread.join(timeout=2.0)
+
+                # Process any remaining chunks
+                self._flush_buffer()
+
+                # Finalize
+                self._metrics.end_time = time.time()
+                self._state = StreamState.COMPLETED
+
+                # Remove highlight tag if used
+                if self.config.highlight_new_text:
+                    self.text_widget.tag_remove(
+                        self._stream_tag,
+                        "1.0",
+                        tk.END
+                    )
+
+            logger.debug(
+                f"Stream completed: {self._metrics.total_characters} chars "
+                f"in {self._metrics.duration:.2f}s"
+            )
+
+            # Completion callback
+            if self.on_complete:
+                self.on_complete(self._metrics)
+
+            return self._metrics
+
+        except Exception as e:
+            self._state = StreamState.ERROR
+            logger.error(f"Failed to end stream: {e}")
+            if self.on_error:
+                self.on_error(e)
+            return self._metrics
+
+    def pause_stream(self) -> bool:
+        """Pause the streaming."""
+        if self._state == StreamState.STREAMING:
+            self._state = StreamState.PAUSED
+            return True
+        return False
+
+    def resume_stream(self) -> bool:
+        """Resume a paused stream."""
+        if self._state == StreamState.PAUSED:
+            self._state = StreamState.STREAMING
+            return True
+        return False
+
+    def cancel_stream(self) -> None:
+        """Cancel the current stream."""
+        self._stop_event.set()
+        self._state = StreamState.IDLE
+        self._metrics.end_time = time.time()
+
+        # Clear queue
+        while not self._chunk_queue.empty():
+            try:
+                self._chunk_queue.get_nowait()
+            except Empty:
+                break
+
+    def _process_queue(self) -> None:
+        """Background thread for processing chunk queue."""
+        while not self._stop_event.is_set():
+            try:
+                # Collect batch of chunks
+                batch = []
+                for _ in range(self.config.batch_size):
+                    try:
+                        chunk = self._chunk_queue.get(timeout=0.05)
+                        batch.append(chunk)
+                    except Empty:
+                        break
+
+                if batch:
+                    # Schedule UI update on main thread
+                    combined = "".join(batch)
+                    self.text_widget.after(0, self._insert_chunk, combined)
+
+            except Exception as e:
+                logger.error(f"Error processing queue: {e}")
+                break
+
+    def _insert_chunk(self, chunk: str) -> None:
+        """Insert a chunk into the text widget."""
+        try:
+            self.text_widget.config(state=tk.NORMAL)
+
+            # Get current end position for highlighting
+            start_index = self.text_widget.index(tk.END + "-1c")
+
+            # Insert text
+            self.text_widget.insert(self._insert_position, chunk)
+            self._metrics.ui_updates += 1
+
+            # Apply highlight if configured
+            if self.config.highlight_new_text:
+                end_index = self.text_widget.index(tk.END + "-1c")
+                self.text_widget.tag_add(
+                    self._stream_tag,
+                    start_index,
+                    end_index
+                )
+                # Schedule highlight removal
+                self.text_widget.after(
+                    self.config.highlight_duration_ms,
+                    lambda: self._remove_highlight(start_index, end_index)
+                )
+
+            # Auto-scroll if configured
+            if self.config.auto_scroll:
+                self.text_widget.see(tk.END)
+
+        except tk.TclError as e:
+            logger.error(f"Tkinter error inserting chunk: {e}")
+
+    def _remove_highlight(self, start: str, end: str) -> None:
+        """Remove highlight from a text range."""
+        try:
+            self.text_widget.tag_remove(self._stream_tag, start, end)
+        except tk.TclError:
+            pass  # Widget may have been destroyed
+
+    def _flush_buffer(self) -> None:
+        """Flush any remaining buffered chunks."""
+        while not self._chunk_queue.empty():
+            try:
+                chunk = self._chunk_queue.get_nowait()
+                self._insert_chunk(chunk)
+            except Empty:
+                break
+
+
+class IncrementalTextUpdater:
+    """
+    Efficient text updater using diff algorithm.
+
+    Instead of replacing all text, computes the minimal set of changes
+    needed to transform the current text to the new text.
+
+    Usage:
+        updater = IncrementalTextUpdater(text_widget)
+        updater.update_text(new_text)
+    """
+
+    def __init__(
+        self,
+        text_widget: tk.Text,
+        min_diff_ratio: float = 0.3,
+        on_update: Optional[Callable[[int, int], None]] = None
+    ):
+        """
+        Initialize the incremental text updater.
+
+        Args:
+            text_widget: The Tkinter Text widget to update
+            min_diff_ratio: Minimum similarity ratio to use diff (0-1)
+                Below this, full replacement is used
+            on_update: Callback after update (insertions, deletions)
+        """
+        self.text_widget = text_widget
+        self.min_diff_ratio = min_diff_ratio
+        self.on_update = on_update
+        self._lock = threading.Lock()
+
+    def update_text(
+        self,
+        new_text: str,
+        preserve_cursor: bool = True,
+        preserve_scroll: bool = True
+    ) -> Tuple[int, int]:
+        """
+        Update text widget content efficiently using diff.
+
+        Args:
+            new_text: The new text content
+            preserve_cursor: Try to preserve cursor position
+            preserve_scroll: Try to preserve scroll position
+
+        Returns:
+            Tuple of (insertions, deletions) count
+        """
+        with self._lock:
+            try:
+                self.text_widget.config(state=tk.NORMAL)
+
+                # Get current state
+                current_text = self.text_widget.get("1.0", tk.END + "-1c")
+                cursor_pos = self.text_widget.index(tk.INSERT) if preserve_cursor else None
+                scroll_pos = self.text_widget.yview() if preserve_scroll else None
+
+                # Check if diff is worthwhile
+                if not current_text:
+                    # Empty widget, just insert
+                    self.text_widget.insert("1.0", new_text)
+                    return (1, 0)
+
+                # Calculate similarity
+                matcher = difflib.SequenceMatcher(None, current_text, new_text)
+                ratio = matcher.ratio()
+
+                if ratio < self.min_diff_ratio:
+                    # Too different, do full replacement
+                    self.text_widget.delete("1.0", tk.END)
+                    self.text_widget.insert("1.0", new_text)
+                    insertions, deletions = 1, 1
+                else:
+                    # Apply incremental changes
+                    insertions, deletions = self._apply_diff(
+                        current_text, new_text, matcher
+                    )
+
+                # Restore cursor position
+                if cursor_pos and preserve_cursor:
+                    try:
+                        self.text_widget.mark_set(tk.INSERT, cursor_pos)
+                    except tk.TclError:
+                        pass
+
+                # Restore scroll position
+                if scroll_pos and preserve_scroll:
+                    try:
+                        self.text_widget.yview_moveto(scroll_pos[0])
+                    except tk.TclError:
+                        pass
+
+                # Callback
+                if self.on_update:
+                    self.on_update(insertions, deletions)
+
+                return (insertions, deletions)
+
+            except Exception as e:
+                logger.error(f"Error updating text: {e}")
+                # Fallback to full replacement
+                self.text_widget.delete("1.0", tk.END)
+                self.text_widget.insert("1.0", new_text)
+                return (1, 1)
+
+    def _apply_diff(
+        self,
+        old_text: str,
+        new_text: str,
+        matcher: difflib.SequenceMatcher
+    ) -> Tuple[int, int]:
+        """
+        Apply diff operations to transform old text to new text.
+
+        Returns:
+            Tuple of (insertions, deletions) count
+        """
+        insertions = 0
+        deletions = 0
+
+        # Get opcodes and process in reverse order to maintain indices
+        opcodes = list(matcher.get_opcodes())
+
+        for tag, i1, i2, j1, j2 in reversed(opcodes):
+            if tag == 'equal':
+                continue
+
+            # Convert character indices to Tkinter indices
+            start_idx = self._char_to_index(i1)
+            end_idx = self._char_to_index(i2)
+
+            if tag == 'replace':
+                # Delete old text and insert new
+                self.text_widget.delete(start_idx, end_idx)
+                self.text_widget.insert(start_idx, new_text[j1:j2])
+                insertions += 1
+                deletions += 1
+
+            elif tag == 'delete':
+                # Delete text
+                self.text_widget.delete(start_idx, end_idx)
+                deletions += 1
+
+            elif tag == 'insert':
+                # Insert new text
+                self.text_widget.insert(start_idx, new_text[j1:j2])
+                insertions += 1
+
+        return (insertions, deletions)
+
+    def _char_to_index(self, char_pos: int) -> str:
+        """Convert character position to Tkinter text index."""
+        # Get text up to position to count lines
+        text = self.text_widget.get("1.0", tk.END + "-1c")
+
+        if char_pos >= len(text):
+            return tk.END
+
+        # Count newlines to get line number
+        line = text[:char_pos].count('\n') + 1
+
+        # Get column (position within line)
+        last_newline = text.rfind('\n', 0, char_pos)
+        if last_newline == -1:
+            col = char_pos
+        else:
+            col = char_pos - last_newline - 1
+
+        return f"{line}.{col}"
+
+    def get_diff_preview(
+        self,
+        new_text: str,
+        context_lines: int = 3
+    ) -> str:
+        """
+        Get a preview of changes without applying them.
+
+        Args:
+            new_text: The new text to compare
+            context_lines: Number of context lines in diff
+
+        Returns:
+            Unified diff string
+        """
+        current_text = self.text_widget.get("1.0", tk.END + "-1c")
+
+        diff = difflib.unified_diff(
+            current_text.splitlines(keepends=True),
+            new_text.splitlines(keepends=True),
+            fromfile='current',
+            tofile='new',
+            n=context_lines
+        )
+
+        return ''.join(diff)
+
+
+class StreamingTextManager:
+    """
+    High-level manager for streaming text operations.
+
+    Combines StreamingTextHandler and IncrementalTextUpdater
+    for comprehensive text handling.
+    """
+
+    def __init__(
+        self,
+        text_widget: tk.Text,
+        stream_config: Optional[StreamConfig] = None
+    ):
+        """
+        Initialize the streaming text manager.
+
+        Args:
+            text_widget: The Tkinter Text widget to manage
+            stream_config: Configuration for streaming
+        """
+        self.text_widget = text_widget
+        self.stream_handler = StreamingTextHandler(
+            text_widget,
+            config=stream_config
+        )
+        self.incremental_updater = IncrementalTextUpdater(text_widget)
+        self._accumulated_text = ""
+
+    def start_streaming(
+        self,
+        clear_existing: bool = True,
+        on_progress: Optional[Callable[[int, int], None]] = None,
+        on_complete: Optional[Callable[[StreamMetrics], None]] = None
+    ) -> bool:
+        """
+        Start a streaming session.
+
+        Args:
+            clear_existing: Clear existing text
+            on_progress: Progress callback
+            on_complete: Completion callback
+
+        Returns:
+            True if started successfully
+        """
+        self._accumulated_text = ""
+        self.stream_handler.on_progress = on_progress
+        self.stream_handler.on_complete = on_complete
+        return self.stream_handler.start_stream(clear_existing)
+
+    def add_stream_chunk(self, chunk: str) -> bool:
+        """Add a chunk to the stream."""
+        self._accumulated_text += chunk
+        return self.stream_handler.add_chunk(chunk)
+
+    def end_streaming(self) -> StreamMetrics:
+        """End the streaming session."""
+        return self.stream_handler.end_stream()
+
+    def get_accumulated_text(self) -> str:
+        """Get all text accumulated during streaming."""
+        return self._accumulated_text
+
+    def update_text_incrementally(
+        self,
+        new_text: str,
+        preserve_state: bool = True
+    ) -> Tuple[int, int]:
+        """
+        Update text using incremental diff.
+
+        Args:
+            new_text: New text content
+            preserve_state: Preserve cursor and scroll
+
+        Returns:
+            Tuple of (insertions, deletions)
+        """
+        return self.incremental_updater.update_text(
+            new_text,
+            preserve_cursor=preserve_state,
+            preserve_scroll=preserve_state
+        )
+
+    @property
+    def is_streaming(self) -> bool:
+        """Check if currently streaming."""
+        return self.stream_handler.is_streaming
+
+    def cancel(self) -> None:
+        """Cancel any ongoing operation."""
+        self.stream_handler.cancel_stream()
+
+
+# Convenience function for simple streaming
+def stream_text_to_widget(
+    text_widget: tk.Text,
+    text_generator,
+    clear_existing: bool = True,
+    on_complete: Optional[Callable[[StreamMetrics], None]] = None
+) -> StreamMetrics:
+    """
+    Stream text from a generator to a widget.
+
+    Args:
+        text_widget: Target text widget
+        text_generator: Generator yielding text chunks
+        clear_existing: Clear existing text first
+        on_complete: Callback when complete
+
+    Returns:
+        Streaming metrics
+    """
+    handler = StreamingTextHandler(
+        text_widget,
+        on_complete=on_complete
+    )
+
+    handler.start_stream(clear_existing)
+
+    try:
+        for chunk in text_generator:
+            if not handler.add_chunk(chunk):
+                break
+    except Exception as e:
+        logger.error(f"Error during streaming: {e}")
+
+    return handler.end_stream()