ollama_chat 0.0.10 → 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +21 -0
- data/README.md +2 -1
- data/VERSION +1 -1
- data/lib/ollama_chat/chat.rb +25 -6
- data/lib/ollama_chat/dialog.rb +24 -0
- data/lib/ollama_chat/follow_chat.rb +34 -10
- data/lib/ollama_chat/information.rb +3 -1
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +1 -0
- data/lib/ollama_chat/version.rb +1 -1
- data/ollama_chat.gemspec +2 -2
- data/spec/ollama_chat/chat_spec.rb +1 -0
- data/spec/ollama_chat/follow_chat_spec.rb +5 -3
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: ae96668b9a38eb238d0d9352e4c6866f3a99c8e89796ca696a6c09acce776c32
|
4
|
+
data.tar.gz: 44171082a4e6c971a4cd3cf4a95ab89cf76388e13fc9638e140261a2702437c7
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 13ae7bdd0e3012e34341d989ace27240472c8e442f8dc9f49ec94b1e9729c71353f70089a80f066d34d65ad67dc3e52cffe0d2f873a1881a185dea88f63f413a
|
7
|
+
data.tar.gz: ae099dfe86b2888b5ed545b5c5f630eeccef4f4d52c000ffa1a5fbc2f36897705e31634e5c0810a962fecd27241645efdcedaa667d5eebcead72c26ca3448c05
|
data/CHANGES.md
CHANGED
@@ -1,5 +1,26 @@
|
|
1
1
|
# Changes
|
2
2
|
|
3
|
+
## 2025-06-01 v0.0.11
|
4
|
+
|
5
|
+
* **Think Mode Implementation**:
|
6
|
+
+ Introduced `@think_mode` attribute to read think mode setting from config
|
7
|
+
+ Implemented `remove_think_blocks` method to filter out thought blocks from chat messages sent to the LLM model.
|
8
|
+
+ Added conditional logic based on `@think_mode` value to handle different modes (`'display'`, `'omit'`, `'no_delete'`, `'only_delete'`)
|
9
|
+
|
10
|
+
* **'display'**: Displays thought blocks' tags as emojis.
|
11
|
+
* **'omit'**: Omits internal reasoning blocks and tags from the output entirely.
|
12
|
+
* **'no_delete'**: Sends the entire conversation, including all think tags, to the Large Language Model (LLM) for processing.
|
13
|
+
* **'only_delete'**: Removes the explicit indicators of thought processes only from the conversation sent to the LLM, but does not modify the output shown to the user.
|
14
|
+
* **User Interface Improvements**:
|
15
|
+
+ Added `/think_mode` command to help users understand think mode options
|
16
|
+
+ Updated session output to include current think mode
|
17
|
+
+ Added think mode chooser to OllamaChat::Dialog, allowing users to select their preferred think mode
|
18
|
+
* **Output Handling Enhancements**:
|
19
|
+
+ Improved markdown handling for think blocks in OllamaChat::FollowChat class
|
20
|
+
+ Modified output to print clear screen, move home, and user info before printing content
|
21
|
+
* **Configuration Updates**:
|
22
|
+
+ Added `think_mode` key with value `"display"` to `default_config.yml`
|
23
|
+
|
3
24
|
## 2025-05-28 v0.0.10
|
4
25
|
|
5
26
|
* Simplify and improve command handling logic.
|
data/README.md
CHANGED
@@ -121,7 +121,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
|
|
121
121
|
/markdown toggle markdown output
|
122
122
|
/stream toggle stream output
|
123
123
|
/location toggle location submission
|
124
|
-
/voice
|
124
|
+
/voice [change] toggle voice output or change the voice
|
125
125
|
/list [n] list the last n / all conversation exchanges
|
126
126
|
/clear [messages|links|history] clear the all messages, links, or the chat history (defaults to messages)
|
127
127
|
/clobber clear the conversation, links, and collection
|
@@ -133,6 +133,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
|
|
133
133
|
/info show information for current session
|
134
134
|
/config output current configuration ("/Users/flori/.config/ollama_chat/config.yml")
|
135
135
|
/document_policy pick a scan policy for document references
|
136
|
+
/think_mode pick a think mode for reasoning models (display, omit, only_delete, no_delete)
|
136
137
|
/import source import the source's content
|
137
138
|
/summarize [n] source summarize the source's content in n words
|
138
139
|
/embedding toggle embedding paused or not
|
data/VERSION
CHANGED
@@ -1 +1 @@
|
|
1
|
-
0.0.10
|
1
|
+
0.0.11
|
data/lib/ollama_chat/chat.rb
CHANGED
@@ -46,6 +46,7 @@ class OllamaChat::Chat
|
|
46
46
|
)
|
47
47
|
server_version
|
48
48
|
@document_policy = config.document_policy
|
49
|
+
@think_mode = config.think_mode
|
49
50
|
@model = choose_model(@opts[?m], config.model.name)
|
50
51
|
@model_options = Ollama::Options[config.model.options]
|
51
52
|
model_system = pull_model_unless_present(@model, @model_options)
|
@@ -153,7 +154,7 @@ class OllamaChat::Chat
|
|
153
154
|
when %r(^/regenerate$)
|
154
155
|
if content = messages.second_last&.content
|
155
156
|
content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
|
156
|
-
messages.drop(
|
157
|
+
messages.drop(1)
|
157
158
|
else
|
158
159
|
STDOUT.puts "Not enough messages in this conversation."
|
159
160
|
return :redo
|
@@ -246,7 +247,6 @@ class OllamaChat::Chat
|
|
246
247
|
fetch_source(url) { |url_io| embed_source(url_io, url) }
|
247
248
|
end
|
248
249
|
urls_summarized = urls.map { summarize(_1) }
|
249
|
-
query = $2.inspect
|
250
250
|
results = urls.zip(urls_summarized).
|
251
251
|
map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
|
252
252
|
config.prompts.web % { query:, results: }
|
@@ -387,11 +387,17 @@ class OllamaChat::Chat
|
|
387
387
|
messages:,
|
388
388
|
voice: (@current_voice if voice.on?)
|
389
389
|
)
|
390
|
+
messages_to_send =
|
391
|
+
if @think_mode == 'no_delete'
|
392
|
+
messages
|
393
|
+
else
|
394
|
+
remove_think_blocks(messages)
|
395
|
+
end
|
390
396
|
ollama.chat(
|
391
|
-
model:
|
392
|
-
messages
|
393
|
-
options:
|
394
|
-
stream:
|
397
|
+
model: @model,
|
398
|
+
messages: messages_to_send,
|
399
|
+
options: @model_options,
|
400
|
+
stream: stream.on?,
|
395
401
|
&handler
|
396
402
|
)
|
397
403
|
if embedding.on? && !records.empty?
|
@@ -419,6 +425,19 @@ class OllamaChat::Chat
|
|
419
425
|
|
420
426
|
private
|
421
427
|
|
428
|
+
def remove_think_blocks(messages)
|
429
|
+
new_messages = OllamaChat::MessageList.new(self)
|
430
|
+
messages.to_ary.each do |message|
|
431
|
+
thought_less_content = message.content.gsub(%r(<think(?:ing)?>.*?</think(?:ing)?>)im, '')
|
432
|
+
new_messages << Ollama::Message.new(
|
433
|
+
role: message.role,
|
434
|
+
content: thought_less_content,
|
435
|
+
images: message.images
|
436
|
+
)
|
437
|
+
end
|
438
|
+
new_messages
|
439
|
+
end
|
440
|
+
|
422
441
|
def setup_documents
|
423
442
|
if embedding.on?
|
424
443
|
@embedding_model = config.embedding.model.name
|
data/lib/ollama_chat/dialog.rb
CHANGED
@@ -57,6 +57,30 @@ module OllamaChat::Dialog
|
|
57
57
|
info
|
58
58
|
end
|
59
59
|
|
60
|
+
attr_accessor :think_mode
|
61
|
+
|
62
|
+
def choose_think_mode
|
63
|
+
modes = %w[ display omit only_delete no_delete ].sort
|
64
|
+
current = if modes.index(@think_mode)
|
65
|
+
@think_mode
|
66
|
+
elsif modes.index(config.think_mode)
|
67
|
+
config.think_mode
|
68
|
+
else
|
69
|
+
modes.first
|
70
|
+
end
|
71
|
+
modes.unshift('[EXIT]')
|
72
|
+
think_mode = OllamaChat::Utils::Chooser.choose(modes)
|
73
|
+
case think_mode
|
74
|
+
when nil, '[EXIT]'
|
75
|
+
STDOUT.puts "Exiting chooser."
|
76
|
+
think_mode = current
|
77
|
+
end
|
78
|
+
self.think_mode = think_mode
|
79
|
+
ensure
|
80
|
+
STDOUT.puts "Using think mode #{bold{@think_mode}}."
|
81
|
+
info
|
82
|
+
end
|
83
|
+
|
60
84
|
def change_system_prompt(default, system: nil)
|
61
85
|
selector = Regexp.new(system.to_s[1..-1].to_s)
|
62
86
|
prompts = config.system_prompts.attribute_names.compact.grep(selector)
|
@@ -20,17 +20,23 @@ class OllamaChat::FollowChat
|
|
20
20
|
@messages << Message.new(role: 'assistant', content: '')
|
21
21
|
@user = message_type(@messages.last.images) + " " +
|
22
22
|
bold { color(111) { 'assistant:' } }
|
23
|
-
@output.puts @user unless @chat.markdown.on?
|
24
23
|
end
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
24
|
+
@messages.last.content << response.message&.content
|
25
|
+
if content = @messages.last.content.full?
|
26
|
+
case @chat.think_mode
|
27
|
+
when 'display'
|
28
|
+
content = emphasize_think_block(content)
|
29
|
+
when 'omit'
|
30
|
+
content = omit_think_block(content)
|
31
|
+
when 'no_delete', 'only_delete'
|
32
|
+
content = quote_think_tags(content)
|
33
|
+
end
|
34
|
+
if @chat.markdown.on?
|
35
|
+
markdown_content = Kramdown::ANSI.parse(content)
|
36
|
+
@output.print clear_screen, move_home, @user, ?\n, markdown_content
|
37
|
+
else
|
38
|
+
@output.print clear_screen, move_home, @user, ?\n, content
|
39
|
+
end
|
34
40
|
end
|
35
41
|
@say.call(response)
|
36
42
|
end
|
@@ -57,4 +63,22 @@ class OllamaChat::FollowChat
|
|
57
63
|
Kramdown::ANSI::Width.wrap(stats_text, percentage: 90).gsub(/(?<!\A)^/, ' ')
|
58
64
|
}
|
59
65
|
end
|
66
|
+
|
67
|
+
private
|
68
|
+
|
69
|
+
def emphasize_think_block(content)
|
70
|
+
content.gsub(%r(<think(?:ing)?>)i, "\nš\n").gsub(%r(</think(?:ing)?>)i, "\nš¬\n")
|
71
|
+
end
|
72
|
+
|
73
|
+
def omit_think_block(content)
|
74
|
+
content.gsub(%r(<think(?:ing)?>.*?(</think(?:ing)?>|\z))im, '')
|
75
|
+
end
|
76
|
+
|
77
|
+
def quote_think_tags(content)
|
78
|
+
if @chat.markdown.on?
|
79
|
+
content.gsub(%r(<(think(?:ing)?)>)i, "\n\\<\\1\\>\n").gsub(%r(</(think(?:ing)?)>)i, "\n\\</\\1\\>\n")
|
80
|
+
else
|
81
|
+
content.gsub(%r(<(think(?:ing)?)>)i, "\n<\\1\>\n").gsub(%r(</(think(?:ing)?)>)i, "\n</\\1>\n")
|
82
|
+
end
|
83
|
+
end
|
60
84
|
end
|
@@ -48,6 +48,7 @@ module OllamaChat::Information
|
|
48
48
|
stream.show
|
49
49
|
location.show
|
50
50
|
STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
|
51
|
+
STDOUT.puts "Think mode is currently: #{bold{@think_mode}}"
|
51
52
|
STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
|
52
53
|
if @voice.on?
|
53
54
|
STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
|
@@ -75,12 +76,13 @@ module OllamaChat::Information
|
|
75
76
|
/info show information for current session
|
76
77
|
/config output current configuration (#{@ollama_chat_config.filename.to_s.inspect})
|
77
78
|
/document_policy pick a scan policy for document references
|
79
|
+
/think_mode pick a think mode for reasoning models
|
78
80
|
/import source import the source's content
|
79
81
|
/summarize [n] source summarize the source's content in n words
|
80
82
|
/embedding toggle embedding paused or not
|
81
83
|
/embed source embed the source's content
|
82
84
|
/web [n] query query web search & return n or 1 results
|
83
|
-
/links
|
85
|
+
/links [clear] display (or clear) links used in the chat
|
84
86
|
/save filename store conversation messages
|
85
87
|
/load filename load conversation messages
|
86
88
|
/quit to quit
|
data/lib/ollama_chat/version.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,9 +1,9 @@
|
|
1
1
|
# -*- encoding: utf-8 -*-
|
2
|
-
# stub: ollama_chat 0.0.10 ruby lib
|
2
|
+
# stub: ollama_chat 0.0.11 ruby lib
|
3
3
|
|
4
4
|
Gem::Specification.new do |s|
|
5
5
|
s.name = "ollama_chat".freeze
|
6
|
-
s.version = "0.0.10".freeze
|
6
|
+
s.version = "0.0.11".freeze
|
7
7
|
|
8
8
|
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
|
9
9
|
s.require_paths = ["lib".freeze]
|
@@ -8,7 +8,7 @@ RSpec.describe OllamaChat::FollowChat do
|
|
8
8
|
end
|
9
9
|
|
10
10
|
let :chat do
|
11
|
-
double('Chat', markdown: double(on?: false))
|
11
|
+
double('Chat', markdown: double(on?: false), think_mode: 'display')
|
12
12
|
end
|
13
13
|
|
14
14
|
let :follow_chat do
|
@@ -31,8 +31,10 @@ RSpec.describe OllamaChat::FollowChat do
|
|
31
31
|
it 'can follow without markdown' do
|
32
32
|
message = Ollama::Message.new(role: 'assistant', content: 'world')
|
33
33
|
response = double(message:, done: false)
|
34
|
-
expect(output).to receive(:
|
35
|
-
|
34
|
+
expect(output).to receive(:print).with(
|
35
|
+
"\e[2J", "\e[1;1H", "šØ \e[1m\e[38;5;111massistant:\e[0m\e[0m", "\n",
|
36
|
+
"world"
|
37
|
+
)
|
36
38
|
follow_chat.call(response)
|
37
39
|
response = double(
|
38
40
|
message: nil,
|