chat-console 0.4.9-py3-none-any.whl → 0.4.91-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- app/__init__.py +1 -1
- app/console_chat.py +168 -30
- {chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/METADATA +1 -1
- {chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/RECORD +8 -8
- {chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/WHEEL +0 -0
- {chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/entry_points.txt +0 -0
- {chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/licenses/LICENSE +0 -0
- {chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/top_level.txt +0 -0
app/__init__.py
CHANGED
app/console_chat.py
CHANGED
@@ -425,33 +425,64 @@ class ConsoleUI:
         return lines
 
     def draw_input_area(self, current_input: str = "", prompt: str = "Type your message") -> List[str]:
-        """Draw the input area with
+        """Draw the input area with enhanced multi-line support and indicators"""
         chars = self.get_border_chars()
         lines = []
 
-        # Input prompt with mode indicator
+        # Input prompt with mode indicator and multi-line status
         mode_indicator = "📝" if self.input_mode == "text" else "⚡"
         mode_text = "TEXT" if self.input_mode == "text" else "MENU"
-
-
+
+        # Multi-line indicator
+        if self.multi_line_input:
+            ml_indicator = f"{self.theme['accent']}[MULTI-LINE: {len(self.multi_line_input)} lines]{self.theme['reset']}"
+            prompt_with_mode = f"{mode_indicator} {ml_indicator} (Ctrl+D to send, Esc to cancel)"
+        else:
+            prompt_with_mode = f"{mode_indicator} {prompt} ({mode_text} mode - Tab to switch, Ctrl+J for multi-line)"
+
+        prompt_line = chars['vertical'] + f" {prompt_with_mode}".ljust(self.width - 2) + chars['vertical']
         lines.append(prompt_line)
 
-        # Input field
+        # Input field(s)
         if self.input_mode == "text":
-
-
-
-
+            if self.multi_line_input:
+                # Show multi-line input with line numbers
+                for i, line_content in enumerate(self.multi_line_input[-3:]):  # Show last 3 lines
+                    line_num = len(self.multi_line_input) - 3 + i + 1 if len(self.multi_line_input) > 3 else i + 1
+                    if len(line_content) > self.width - 12:
+                        display_content = line_content[:self.width - 15] + "..."
+                    else:
+                        display_content = line_content
+
+                    if i == len(self.multi_line_input[-3:]) - 1:  # Current line
+                        input_line = chars['vertical'] + f" {self.theme['primary']}{line_num:2d}>{self.theme['reset']} {display_content}".ljust(self.width - 2) + chars['vertical']
+                    else:
+                        input_line = chars['vertical'] + f" {self.theme['muted']}{line_num:2d}:{self.theme['reset']} {display_content}".ljust(self.width - 2) + chars['vertical']
+                    lines.append(input_line)
+
+                # Show line count if more than 3 lines
+                if len(self.multi_line_input) > 3:
+                    more_line = chars['vertical'] + f" {self.theme['muted']}... ({len(self.multi_line_input)} total lines){self.theme['reset']}".ljust(self.width - 2) + chars['vertical']
+                    lines.append(more_line)
+            else:
+                # Single line input
+                input_content = current_input
+                if len(input_content) > self.width - 6:
+                    input_content = input_content[-(self.width - 9):] + "..."
+                input_line = chars['vertical'] + f" {self.theme['primary']}>{self.theme['reset']} {input_content}".ljust(self.width - 2) + chars['vertical']
+                lines.append(input_line)
         else:
             # Menu mode - show available hotkeys
-            menu_help = "n)ew h)istory s)ettings m)odels q)uit"
+            menu_help = f"{self.theme['secondary']}n{self.theme['reset']})ew {self.theme['secondary']}h{self.theme['reset']})istory {self.theme['secondary']}s{self.theme['reset']})ettings {self.theme['secondary']}m{self.theme['reset']})odels {self.theme['secondary']}q{self.theme['reset']})uit"
             input_line = chars['vertical'] + f" {menu_help}".ljust(self.width - 2) + chars['vertical']
-
-            lines.append(input_line)
+            lines.append(input_line)
 
         # Show generating indicator if needed
         if self.generating:
-
+            elapsed = int(time.time() - self.start_time) if hasattr(self, 'start_time') else 0
+            user_message = getattr(self, '_current_user_message', "")
+            phrase = self._get_dynamic_loading_phrase(user_message)
+            status_line = chars['vertical'] + f" {self.theme['accent']}● {phrase}... ({elapsed}s){self.theme['reset']}".ljust(self.width - 2) + chars['vertical']
             lines.append(status_line)
 
         return lines
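Note: the new multi-line branch only ever draws the last three lines of the buffer, numbers them against the full buffer, and truncates long lines to fit the frame. A minimal standalone sketch of that windowing logic (hypothetical names, not part of the package):

# Sketch only: mirrors the last-3-lines window used by draw_input_area above.
def render_multiline_preview(buffer: list[str], width: int = 40) -> list[str]:
    rendered = []
    visible = buffer[-3:]  # only the last three lines are shown
    for i, line in enumerate(visible):
        line_num = len(buffer) - len(visible) + i + 1  # absolute position in the buffer
        content = line if len(line) <= width - 12 else line[:width - 15] + "..."
        marker = ">" if i == len(visible) - 1 else ":"  # ">" marks the current line
        rendered.append(f" {line_num:2d}{marker} {content}")
    if len(buffer) > 3:
        rendered.append(f" ... ({len(buffer)} total lines)")
    return rendered

if __name__ == "__main__":
    for row in render_multiline_preview(["def f():", "    x = 1", "    y = 2", "    return x + y"]):
        print(row)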
@@ -517,7 +548,9 @@ class ConsoleUI:
             else:
                 display_prompt = prompt
 
-
+            # Only redraw screen if not currently generating to avoid interference
+            if not self.generating:
+                self.draw_screen(current_input, display_prompt)
 
             # Get character input with escape sequence handling
             char = self._get_char_with_escape_sequences()
@@ -603,10 +636,15 @@ class ConsoleUI:
                         current_input = "\n".join(self.multi_line_input)
                     else:
                         current_input = current_input[:-1]
-                elif char == '\x0a':  #
+                elif char == '\x0a':  # Ctrl+J - start/continue multi-line
                     if not self.multi_line_input:
+                        # Start multi-line mode
                         self.multi_line_input = [current_input, ""]
                         current_input = "\n".join(self.multi_line_input)
+                    else:
+                        # Add new line in multi-line mode
+                        self.multi_line_input.append("")
+                        current_input = "\n".join(self.multi_line_input)
                 elif ord(char) >= 32:
                     # Printable character
                     if self.multi_line_input:
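For context, Ctrl+J shows up as its own key because the input loop reads raw characters: with the terminal in raw mode, Enter arrives as '\r' while Ctrl+J arrives as '\n' (0x0a). A rough sketch of that distinction on a POSIX terminal (illustrative only; the package's actual reader is _get_char_with_escape_sequences):

# Sketch only: read one raw key and tell Enter apart from Ctrl+J.
import sys, termios, tty

def read_key() -> str:
    fd = sys.stdin.fileno()
    old = termios.tcgetattr(fd)
    try:
        tty.setraw(fd)  # raw mode: Enter -> '\r', Ctrl+J -> '\n'
        return sys.stdin.read(1)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old)

if __name__ == "__main__":
    ch = read_key()
    if ch == "\r":
        print("Enter: send the message")
    elif ch == "\n":
        print("Ctrl+J: add a line in multi-line mode")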
@@ -655,30 +693,125 @@ class ConsoleUI:
         finally:
             termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
 
-    def
-        """
+    def _get_context_aware_loading_phrases(self, user_message: str) -> List[str]:
+        """Generate context-aware loading phrases based on user input"""
+        message_lower = user_message.lower()
+
+        # Code-related keywords
+        if any(keyword in message_lower for keyword in [
+            'code', 'function', 'debug', 'error', 'bug', 'script', 'program',
+            'algorithm', 'python', 'javascript', 'java', 'c++', 'html', 'css',
+            'sql', 'git', 'api', 'database', 'framework', 'library'
+        ]):
+            return [
+                "Analyzing your code", "Reviewing logic", "Debugging the issue",
+                "Examining patterns", "Processing syntax", "Evaluating approach",
+                "Formulating solution", "Optimizing structure"
+            ]
+
+        # Writing/creative keywords
+        elif any(keyword in message_lower for keyword in [
+            'write', 'essay', 'story', 'article', 'blog', 'creative', 'poem',
+            'letter', 'email', 'content', 'draft', 'narrative', 'description'
+        ]):
+            return [
+                "Crafting your text", "Shaping ideas", "Weaving words",
+                "Building narrative", "Polishing prose", "Structuring content",
+                "Refining language", "Creating flow"
+            ]
+
+        # Analysis/research keywords
+        elif any(keyword in message_lower for keyword in [
+            'analyze', 'research', 'study', 'explain', 'compare', 'evaluate',
+            'assess', 'investigate', 'examine', 'understand', 'interpret'
+        ]):
+            return [
+                "Analyzing information", "Processing data", "Examining details",
+                "Connecting insights", "Evaluating evidence", "Synthesizing findings",
+                "Drawing conclusions", "Structuring analysis"
+            ]
+
+        # Math/calculation keywords
+        elif any(keyword in message_lower for keyword in [
+            'calculate', 'math', 'solve', 'equation', 'formula', 'statistics',
+            'probability', 'geometry', 'algebra', 'number', 'compute'
+        ]):
+            return [
+                "Calculating result", "Processing numbers", "Solving equation",
+                "Working through math", "Computing values", "Analyzing formula",
+                "Checking calculations", "Verifying solution"
+            ]
+
+        # Question/help keywords
+        elif any(keyword in message_lower for keyword in [
+            'how', 'what', 'why', 'when', 'where', 'help', 'assist', 'guide',
+            'explain', 'show', 'teach', 'learn', 'understand'
+        ]):
+            return [
+                "Processing your question", "Gathering information", "Organizing thoughts",
+                "Preparing explanation", "Structuring response", "Connecting concepts",
+                "Clarifying details", "Formulating answer"
+            ]
+
+        # Default generic phrases
+        else:
+            return self.loading_phrases
+
+    def _get_dynamic_loading_phrase(self, user_message: str = "") -> str:
+        """Get current loading phrase with context-awareness and cycling"""
         elapsed = time.time() - self.start_time
+
+        # Get context-aware phrases if user message provided
+        if user_message and hasattr(self, '_current_context_phrases'):
+            phrases = self._current_context_phrases
+        elif user_message:
+            phrases = self._get_context_aware_loading_phrases(user_message)
+            self._current_context_phrases = phrases  # Cache for this generation
+        else:
+            phrases = self.loading_phrases
+
         # Change phrase every 2 seconds
-        phrase_index = int(elapsed // 2) % len(
-        return
+        phrase_index = int(elapsed // 2) % len(phrases)
+        return phrases[phrase_index]
 
     def _update_streaming_display(self, content: str):
-        """Update display
+        """Update display with real-time streaming content and context-aware status"""
         if not self.generating:
             return
 
         # Show dynamic loading indicator with cycling phrases
         elapsed = int(time.time() - self.start_time)
-
-
-
-
-
-
-
-
-
-
+        user_message = getattr(self, '_current_user_message', "")
+        phrase = self._get_dynamic_loading_phrase(user_message)
+
+        # Create a streaming preview of content (first/last parts)
+        preview = ""
+        if content:
+            if len(content) <= 100:
+                preview = content.replace('\n', ' ')[:50]
+            else:
+                # Show first 30 chars + ... + last 20 chars
+                start = content[:30].replace('\n', ' ')
+                end = content[-20:].replace('\n', ' ')
+                preview = f"{start}...{end}"
+
+        # Use cursor positioning to update multiple lines at bottom
+        print(f"\033[s", end="")  # Save cursor position
+
+        # Update streaming content area (second to last line)
+        if content:
+            print(f"\033[{self.height-1};1H", end="")  # Move to second-to-last row
+            print(f"\033[K", end="")  # Clear line
+            content_line = f"{self.theme['text']}► {preview}{self.theme['reset']}"
+            print(content_line[:self.width-2], end="", flush=True)
+
+        # Update status line (bottom)
+        print(f"\033[{self.height};1H", end="")  # Move to bottom row
+        print(f"\033[K", end="")  # Clear line
+        status_line = f"{self.theme['accent']}● {phrase}... {self.theme['muted']}({elapsed}s) - {len(content)} chars{self.theme['reset']}"
+        print(status_line, end="", flush=True)
+
+        print(f"\033[u", end="", flush=True)  # Restore cursor position
 
     async def create_new_conversation(self):
         """Create a new conversation"""
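The streaming update path leans on a few standard ANSI escape codes: ESC[s and ESC[u save and restore the cursor, ESC[<row>;1H jumps to a row, and ESC[K clears it. A tiny demo of the same idea, assuming a roughly 24-row ANSI terminal (names are illustrative):

# Sketch only: redraw a status line at a fixed row without disturbing the cursor.
import sys
import time

SAVE, RESTORE, CLEAR_LINE = "\033[s", "\033[u", "\033[K"

def draw_status(row: int, text: str) -> None:
    sys.stdout.write(SAVE)                                # remember cursor position
    sys.stdout.write(f"\033[{row};1H{CLEAR_LINE}{text}")  # jump to row, clear it, write
    sys.stdout.write(RESTORE)                             # put the cursor back
    sys.stdout.flush()

if __name__ == "__main__":
    for s in range(3):
        draw_status(24, f"● Thinking... ({s}s)")
        time.sleep(1)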
@@ -722,6 +855,11 @@ class ConsoleUI:
         """Generate AI response with enhanced streaming display"""
         self.generating = True
         self.start_time = time.time()  # Reset timer for this generation
+        self._current_user_message = user_message  # Store for context-aware loading
+
+        # Clear any cached context phrases for new generation
+        if hasattr(self, '_current_context_phrases'):
+            delattr(self, '_current_context_phrases')
 
         try:
             # Add user message
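Taken together: generate_response stores the user message, _get_context_aware_loading_phrases picks a phrase list from its keywords, and _get_dynamic_loading_phrase walks that list every two seconds via int(elapsed // 2) % len(phrases). A stripped-down sketch of the cycling, with a hypothetical phrase list:

# Sketch only: phrase rotation on a 2-second cadence.
import time

phrases = ["Analyzing your code", "Reviewing logic", "Debugging the issue"]
start_time = time.time()

def current_phrase() -> str:
    elapsed = time.time() - start_time
    return phrases[int(elapsed // 2) % len(phrases)]  # advance every 2 s, wrap around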
{chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: chat-console
-Version: 0.4.9
+Version: 0.4.91
 Summary: A command-line interface for chatting with LLMs, storing chats and (future) rag interactions
 Home-page: https://github.com/wazacraftrfid/chat-console
 Author: Johnathan Greenaway
{chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
-app/__init__.py,sha256=
+app/__init__.py,sha256=PDATaB-vh7djdO7muLiV3GwDj4J-pDDL82c4BYxENgQ,131
 app/config.py,sha256=F-0hO3NT5kRJxZelGLxaeUmnwx8i0LPHzYtNftL6CwM,8468
-app/console_chat.py,sha256=
+app/console_chat.py,sha256=y43zFuX4WT08NNGsXcO6GhA8B0RRCvrfRIPY5-jcQVo,82326
 app/console_main.py,sha256=QNUiD9IPw2Spl-gpvrw1AzDqezMpmU3IPHlcK1LDOtI,1799
 app/console_utils.py,sha256=jdfZ3OZEAIJ4tlQNg1_8qrtUP1xdyMeWjdz9ahgzL1s,7032
 app/database.py,sha256=nt8CVuDpy6zw8mOYqDcfUmNw611t7Ln7pz22M0b6-MI,9967
@@ -20,9 +20,9 @@ app/ui/model_browser.py,sha256=pdblLVkdyVF0_Bo02bqbErGAtieyH-y6IfhMOPEqIso,71124
 app/ui/model_selector.py,sha256=3ykyDhzJU9KQg3XnOQbba5bhpqsSH1RwIYEGWVY37GQ,19407
 app/ui/search.py,sha256=b-m14kG3ovqW1-i0qDQ8KnAqFJbi5b1FLM9dOnbTyIs,9763
 app/ui/styles.py,sha256=MZ7J3D796IFYsDQs-RFxSLGHhcQqsaXym9wv3LYMI5k,9284
-chat_console-0.4.
-chat_console-0.4.
-chat_console-0.4.
-chat_console-0.4.
-chat_console-0.4.
-chat_console-0.4.
+chat_console-0.4.91.dist-info/licenses/LICENSE,sha256=srHZ3fvcAuZY1LHxE7P6XWju2njRCHyK6h_ftEbzxSE,1057
+chat_console-0.4.91.dist-info/METADATA,sha256=cLGZ7zT7V1cXivqPsUbILtC6aYfgBdrWzkVPRIfHREg,3810
+chat_console-0.4.91.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+chat_console-0.4.91.dist-info/entry_points.txt,sha256=oy1j-LnmYOO0akBMP8Ijx0rZxxA7N3FSy82ZeCPNnqE,142
+chat_console-0.4.91.dist-info/top_level.txt,sha256=io9g7LCbfmTG1SFKgEOGXmCFB9uMP2H5lerm0HiHWQE,4
+chat_console-0.4.91.dist-info/RECORD,,
{chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/WHEEL
File without changes
{chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/entry_points.txt
File without changes
{chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/licenses/LICENSE
File without changes
{chat_console-0.4.9.dist-info → chat_console-0.4.91.dist-info}/top_level.txt
File without changes