ollama_chat 0.0.9 → 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +31 -0
- data/README.md +2 -1
- data/VERSION +1 -1
- data/bin/ollama_chat_send +2 -1
- data/lib/ollama_chat/chat.rb +260 -209
- data/lib/ollama_chat/dialog.rb +24 -0
- data/lib/ollama_chat/follow_chat.rb +34 -10
- data/lib/ollama_chat/information.rb +3 -1
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +1 -0
- data/lib/ollama_chat/server_socket.rb +2 -2
- data/lib/ollama_chat/version.rb +1 -1
- data/ollama_chat.gemspec +2 -2
- data/spec/ollama_chat/chat_spec.rb +187 -11
- data/spec/ollama_chat/follow_chat_spec.rb +5 -3
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ae96668b9a38eb238d0d9352e4c6866f3a99c8e89796ca696a6c09acce776c32
+  data.tar.gz: 44171082a4e6c971a4cd3cf4a95ab89cf76388e13fc9638e140261a2702437c7
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 13ae7bdd0e3012e34341d989ace27240472c8e442f8dc9f49ec94b1e9729c71353f70089a80f066d34d65ad67dc3e52cffe0d2f873a1881a185dea88f63f413a
+  data.tar.gz: ae099dfe86b2888b5ed545b5c5f630eeccef4f4d52c000ffa1a5fbc2f36897705e31634e5c0810a962fecd27241645efdcedaa667d5eebcead72c26ca3448c05
data/CHANGES.md
CHANGED
@@ -1,5 +1,36 @@
 # Changes
 
+## 2025-06-01 v0.0.11
+
+* **Think Mode Implementation**:
+  + Introduced `@think_mode` attribute to read think mode setting from config
+  + Implemented `remove_think_blocks` method to filter out thought blocks from chat messages sent to the LLM model.
+  + Added conditional logic based on `@think_mode` value to handle different modes (`'display'`, `'omit'`, `'no_delete'`, `'only_delete'`)
+
+  * **'display'**: Displays thought blocks' tags as emojis.
+  * **'omit'**: Omit internal reasoning blocks and tags from the output entirely.
+  * **'no_delete'**: Sends the entire conversation, including all think tags, to the Large Language Model (LLM) for processing.
+  * **'only_delete'**: Removes the explicit indicators of thought processes only from the conversation sent to the LLM, but does not modify the output shown to the user.
+* **User Interface Improvements**:
+  + Added `/think_mode` command to help users understand think mode options
+  + Updated session output to include current think mode
+  + Added think mode chooser to OllamaChat::Dialog, allowing users to select their preferred think mode
+* **Output Handling Enhancements**:
+  + Improved markdown handling for think blocks in OllamaChat::FollowChat class
+  + Modified output to print clear screen, move home, and user info before printing content
+* **Configuration Updates**:
+  + Added `think_mode` key with value `"display"` to `default_config.yml`
+
+## 2025-05-28 v0.0.10
+
+* Simplify and improve command handling logic.
+* Update chat input handling to use a single `handle_input` method for all commands.
+* Add tests for various chat commands, including input handling, document
+  policy selection, summarization, and more.
+* Improve test coverage for `DocumentCache`, `Information`, and other modules.
+* Improved handling of commands, e.g. **don't** when sending via `ollama_chat_send` by default.
+* Added support for sending content to server socket with specific type.
+
 ## 2025-05-26 v0.0.9
 
 * Improved tag parsing in OllamaChat:
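The two stripping modes described above ('omit' for the displayed output, 'only_delete' for the conversation sent to the LLM) both come down to deleting `<think>`/`<thinking>` blocks with a case-insensitive, multi-line regular expression, as the `remove_think_blocks` and `omit_think_block` methods further down in this diff show. A minimal standalone sketch of that filtering step (the sample reply string is invented for illustration):

    # Sketch only: mirrors the substitution used by remove_think_blocks /
    # omit_think_block later in this diff.
    reply = "<think>The user wants a greeting.</think>Hello there!"

    # Drop complete (or unterminated) think blocks, tags included.
    filtered = reply.gsub(%r(<think(?:ing)?>.*?(</think(?:ing)?>|\z))im, '')

    puts filtered # => Hello there!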
data/README.md
CHANGED
@@ -121,7 +121,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
     /markdown                       toggle markdown output
     /stream                         toggle stream output
     /location                       toggle location submission
-    /voice
+    /voice [change]                 toggle voice output or change the voice
     /list [n]                       list the last n / all conversation exchanges
     /clear [messages|links|history] clear the all messages, links, or the chat history (defaults to messages)
     /clobber                        clear the conversation, links, and collection
@@ -133,6 +133,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
     /info                           show information for current session
     /config                         output current configuration ("/Users/flori/.config/ollama_chat/config.yml")
     /document_policy                pick a scan policy for document references
+    /think_mode                     pick a think mode for reasoning models (display, omit, only_delete, no_delete)
     /import source                  import the source's content
     /summarize [n] source           summarize the source's content in n words
     /embedding                      toggle embedding paused or not
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.9
+0.0.11
data/bin/ollama_chat_send
CHANGED
@@ -3,7 +3,8 @@
 require 'ollama_chat'
 
 begin
-  OllamaChat::ServerSocket.send_to_server_socket(STDIN.read)
+  type = (ARGV.shift || 'socket_input').to_sym
+  OllamaChat::ServerSocket.send_to_server_socket(STDIN.read, type:)
 rescue => e
   warn "Caught #{e.class}: #{e}"
   exit 1
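With this change the first command-line argument of `ollama_chat_send` selects the message type that is wrapped into the socket payload, falling back to `socket_input` when no argument is given. For example, `echo "Hello" | ollama_chat_send` keeps the old behaviour, while something like `echo "Hello" | ollama_chat_send terminal_input` would tag the piped content as `terminal_input`, which the chat loop in `chat.rb` below then runs through its normal `handle_input` command handling instead of treating it as plain socket input. The `terminal_input` invocation is illustrative; `socket_input` and `terminal_input` are the only type symbols that appear elsewhere in this diff.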
data/lib/ollama_chat/chat.rb
CHANGED
@@ -46,6 +46,7 @@ class OllamaChat::Chat
     )
     server_version
     @document_policy = config.document_policy
+    @think_mode = config.think_mode
     @model = choose_model(@opts[?m], config.model.name)
     @model_options = Ollama::Options[config.model.options]
     model_system = pull_model_unless_present(@model, @model_options)
@@ -106,229 +107,260 @@ class OllamaChat::Chat
 
   private
 
+  def handle_input(content)
+    case content
+    when %r(^/copy$)
+      copy_to_clipboard
+      :next
+    when %r(^/paste$)
+      paste_from_input
+    when %r(^/markdown$)
+      markdown.toggle
+      :next
+    when %r(^/stream$)
+      stream.toggle
+      :next
+    when %r(^/location$)
+      location.toggle
+      :next
+    when %r(^/voice(?:\s+(change))?$)
+      if $1 == 'change'
+        change_voice
+      else
+        voice.toggle
+      end
+      :next
+    when %r(^/list(?:\s+(\d*))?$)
+      last = 2 * $1.to_i if $1
+      messages.list_conversation(last)
+      :next
+    when %r(^/clear(?:\s+(messages|links|history|all))?$)
+      clean($1)
+      :next
+    when %r(^/clobber$)
+      clean('all')
+      :next
+    when %r(^/drop(?:\s+(\d*))?$)
+      messages.drop($1)
+      messages.list_conversation(2)
+      :next
+    when %r(^/model$)
+      @model = choose_model('', @model)
+      :next
+    when %r(^/system$)
+      change_system_prompt(@system)
+      info
+      :next
+    when %r(^/regenerate$)
+      if content = messages.second_last&.content
+        content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
+        messages.drop(1)
+      else
+        STDOUT.puts "Not enough messages in this conversation."
+        return :redo
+      end
+      @parse_content = false
+      content
+    when %r(^/collection(?:\s+(clear|change))?$)
+      case $1 || 'change'
+      when 'clear'
+        loop do
+          tags = @documents.tags.add('[EXIT]').add('[ALL]')
+          tag = OllamaChat::Utils::Chooser.choose(tags, prompt: 'Clear? %s')
+          case tag
+          when nil, '[EXIT]'
+            STDOUT.puts "Exiting chooser."
+            break
+          when '[ALL]'
+            if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
+              @documents.clear
+              STDOUT.puts "Cleared collection #{bold{@documents.collection}}."
+              break
+            else
+              STDOUT.puts 'Cancelled.'
+              sleep 3
+            end
+          when /./
+            @documents.clear(tags: [ tag ])
+            STDOUT.puts "Cleared tag #{tag} from collection #{bold{@documents.collection}}."
+            sleep 3
+          end
+        end
+      when 'change'
+        choose_collection(@documents.collection)
+      end
+      :next
+    when %r(^/info$)
+      info
+      :next
+    when %r(^/document_policy$)
+      choose_document_policy
+      :next
+    when %r(^/import\s+(.+))
+      @parse_content = false
+      import($1) or :next
+    when %r(^/summarize\s+(?:(\d+)\s+)?(.+))
+      @parse_content = false
+      summarize($2, words: $1) or :next
+    when %r(^/embedding$)
+      embedding_paused.toggle(show: false)
+      embedding.show
+      :next
+    when %r(^/embed\s+(.+))
+      @parse_content = false
+      embed($1) or :next
+    when %r(^/web\s+(?:(\d+)\s+)?(.+))
+      @parse_content = false
+      web($1, $2)
+    when %r(^/save\s+(.+)$)
+      messages.save_conversation($1)
+      STDOUT.puts "Saved conversation to #$1."
+      :next
+    when %r(^/links(?:\s+(clear))?$)
+      manage_links($1)
+      :next
+    when %r(^/load\s+(.+)$)
+      messages.load_conversation($1)
+      if messages.size > 1
+        messages.list_conversation(2)
+      end
+      STDOUT.puts "Loaded conversation from #$1."
+      :next
+    when %r(^/config$)
+      display_config
+      :next
+    when %r(^/quit$), nil
+      STDOUT.puts "Goodbye."
+      :return
+    when %r(^/)
+      display_chat_help
+      :next
+    when /\A\s*\z/
+      STDOUT.puts "Type /quit to quit."
+      :next
+    end
+  end
+
+  def web(count, query)
+    urls = search_web(query, count.to_i) or return :next
+    urls.each do |url|
+      fetch_source(url) { |url_io| embed_source(url_io, url) }
+    end
+    urls_summarized = urls.map { summarize(_1) }
+    results = urls.zip(urls_summarized).
+      map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
+    config.prompts.web % { query:, results: }
+  end
+
+  def manage_links(command)
+    case command
+    when 'clear'
+      loop do
+        links_options = links.dup.add('[EXIT]').add('[ALL]')
+        link = OllamaChat::Utils::Chooser.choose(links_options, prompt: 'Clear? %s')
+        case link
+        when nil, '[EXIT]'
+          STDOUT.puts "Exiting chooser."
+          break
+        when '[ALL]'
+          if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
+            links.clear
+            STDOUT.puts "Cleared all links in list."
+            break
+          else
+            STDOUT.puts 'Cancelled.'
+            sleep 3
+          end
+        when /./
+          links.delete(link)
+          STDOUT.puts "Cleared link from links in list."
+          sleep 3
+        end
+      end
+    when nil
+      if links.empty?
+        STDOUT.puts "List is empty."
+      else
+        Math.log10(links.size).ceil
+        format = "% #{}s. %s"
+        connect = -> link { hyperlink(link) { link } }
+        STDOUT.puts links.each_with_index.map { |x, i| format % [ i + 1, connect.(x) ] }
+      end
+    end
+  end
+
+  def clean(what)
+    what = 'messages' if what.nil?
+    case what
+    when 'messages'
+      messages.clear
+      STDOUT.puts "Cleared messages."
+    when 'links'
+      links.clear
+      STDOUT.puts "Cleared links."
+    when 'history'
+      clear_history
+      STDOUT.puts "Cleared history."
+    when 'all'
+      if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
+        messages.clear
+        @documents.clear
+        links.clear
+        clear_history
+        STDOUT.puts "Cleared messages and collection #{bold{@documents.collection}}."
+      else
+        STDOUT.puts 'Cancelled.'
+      end
+    end
+  end
+
+  def display_config
+    default_pager = ENV['PAGER'].full?
+    if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
+      fallback_pager << ' -r'
+    end
+    my_pager = default_pager || fallback_pager
+    rendered = config.to_s
+    Kramdown::ANSI::Pager.pager(
+      lines: rendered.count(?\n),
+      command: my_pager
+    ) do |output|
+      output.puts rendered
+    end
+  end
+
   def interact_with_user
     loop do
-      parse_content = true
-
+      @parse_content = true
+      type = :terminal_input
+      input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
 
       begin
         content = Reline.readline(input_prompt, true)&.chomp
       rescue Interrupt
         if message = server_socket_message
           self.server_socket_message = nil
+          type = message.fetch('type', 'socket_input').to_sym
          content = message['content']
         else
           raise
         end
       end
 
-
-
-
-
-
-        content = paste_from_input
-      when %r(^/markdown$)
-        markdown.toggle
-        next
-      when %r(^/stream$)
-        stream.toggle
-        next
-      when %r(^/location$)
-        location.toggle
-        next
-      when %r(^/voice(?:\s+(change))?$)
-        if $1 == 'change'
-          change_voice
-        else
-          voice.toggle
-        end
-        next
-      when %r(^/list(?:\s+(\d*))?$)
-        last = 2 * $1.to_i if $1
-        messages.list_conversation(last)
-        next
-      when %r(^/clear(?:\s+(messages|links|history))?$)
-        what = $1.nil? ? 'messages' : $1
-        case what
-        when 'messages'
-          messages.clear
-          STDOUT.puts "Cleared messages."
-        when 'links'
-          links.clear
-          STDOUT.puts "Cleared links."
-        when 'history'
-          clear_history
-          STDOUT.puts "Cleared history."
-        end
-        next
-      when %r(^/clobber$)
-        if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
-          messages.clear
-          @documents.clear
-          links.clear
-          clear_history
-          STDOUT.puts "Cleared messages and collection #{bold{@documents.collection}}."
-        else
-          STDOUT.puts 'Cancelled.'
-        end
-        next
-      when %r(^/drop(?:\s+(\d*))?$)
-        messages.drop($1)
-        messages.list_conversation(2)
-        next
-      when %r(^/model$)
-        @model = choose_model('', @model)
-        next
-      when %r(^/system$)
-        change_system_prompt(@system)
-        info
-        next
-      when %r(^/regenerate$)
-        if content = messages.second_last&.content
-          content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
-          messages.drop(2)
-        else
-          STDOUT.puts "Not enough messages in this conversation."
+      unless type == :socket_input
+        case next_action = handle_input(content)
+        when :next
+          next
+        when :redo
           redo
+        when :return
+          return
+        when String
+          content = next_action
         end
-        parse_content = false
-        content
-      when %r(^/collection(?:\s+(clear|change))?$)
-        case $1 || 'change'
-        when 'clear'
-          loop do
-            tags = @documents.tags.add('[EXIT]').add('[ALL]')
-            tag = OllamaChat::Utils::Chooser.choose(tags, prompt: 'Clear? %s')
-            case tag
-            when nil, '[EXIT]'
-              STDOUT.puts "Exiting chooser."
-              break
-            when '[ALL]'
-              if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
-                @documents.clear
-                STDOUT.puts "Cleared collection #{bold{@documents.collection}}."
-                break
-              else
-                STDOUT.puts 'Cancelled.'
-                sleep 3
-              end
-            when /./
-              @documents.clear(tags: [ tag ])
-              STDOUT.puts "Cleared tag #{tag} from collection #{bold{@documents.collection}}."
-              sleep 3
-            end
-          end
-        when 'change'
-          choose_collection(@documents.collection)
-        end
-        next
-      when %r(^/info$)
-        info
-        next
-      when %r(^/document_policy$)
-        choose_document_policy
-        next
-      when %r(^/import\s+(.+))
-        parse_content = false
-        content = import($1) or next
-      when %r(^/summarize\s+(?:(\d+)\s+)?(.+))
-        parse_content = false
-        content = summarize($2, words: $1) or next
-      when %r(^/embedding$)
-        embedding_paused.toggle(show: false)
-        embedding.show
-        next
-      when %r(^/embed\s+(.+))
-        parse_content = false
-        content = embed($1) or next
-      when %r(^/web\s+(?:(\d+)\s+)?(.+))
-        parse_content = false
-        urls = search_web($2, $1.to_i) or next
-        urls.each do |url|
-          fetch_source(url) { |url_io| embed_source(url_io, url) }
-        end
-        urls_summarized = urls.map { summarize(_1) }
-        query = $2.inspect
-        results = urls.zip(urls_summarized).
-          map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
-        content = config.prompts.web % { query:, results: }
-      when %r(^/save\s+(.+)$)
-        messages.save_conversation($1)
-        STDOUT.puts "Saved conversation to #$1."
-        next
-      when %r(^/links(?:\s+(clear))?$)
-        case $1
-        when 'clear'
-          loop do
-            links_options = links.dup.add('[EXIT]').add('[ALL]')
-            link = OllamaChat::Utils::Chooser.choose(links_options, prompt: 'Clear? %s')
-            case link
-            when nil, '[EXIT]'
-              STDOUT.puts "Exiting chooser."
-              break
-            when '[ALL]'
-              if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
-                links.clear
-                STDOUT.puts "Cleared all links in list."
-                break
-              else
-                STDOUT.puts 'Cancelled.'
-                sleep 3
-              end
-            when /./
-              links.delete(link)
-              STDOUT.puts "Cleared link from links in list."
-              sleep 3
-            end
-          end
-        when nil
-          if links.empty?
-            STDOUT.puts "List is empty."
-          else
-            Math.log10(links.size).ceil
-            format = "% #{}s. %s"
-            connect = -> link { hyperlink(link) { link } }
-            STDOUT.puts links.each_with_index.map { |x, i| format % [ i + 1, connect.(x) ] }
-          end
-        end
-        next
-      when %r(^/load\s+(.+)$)
-        messages.load_conversation($1)
-        if messages.size > 1
-          messages.list_conversation(2)
-        end
-        STDOUT.puts "Loaded conversation from #$1."
-        next
-      when %r(^/config$)
-        default_pager = ENV['PAGER'].full?
-        if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
-          fallback_pager << ' -r'
-        end
-        my_pager = default_pager || fallback_pager
-        rendered = config.to_s
-        Kramdown::ANSI::Pager.pager(
-          lines: rendered.count(?\n),
-          command: my_pager
-        ) do |output|
-          output.puts rendered
-        end
-        next
-      when %r(^/quit$)
-        STDOUT.puts "Goodbye."
-        return
-      when %r(^/)
-        display_chat_help
-        next
-      when ''
-        STDOUT.puts "Type /quit to quit."
-        next
-      when nil
-        STDOUT.puts "Goodbye."
-        return
       end
 
-      content, tags = if parse_content
+      content, tags = if @parse_content
                         parse_content(content, @images)
                       else
                         [ content, Documentrix::Utils::Tags.new(valid_tag: /\A#*([\w\]\[]+)/) ]
@@ -355,11 +387,17 @@ class OllamaChat::Chat
         messages:,
         voice: (@current_voice if voice.on?)
       )
+      messages_to_send =
+        if @think_mode == 'no_delete'
+          messages
+        else
+          remove_think_blocks(messages)
+        end
       ollama.chat(
-        model:
-        messages
-        options:
-        stream:
+        model: @model,
+        messages: messages_to_send,
+        options: @model_options,
+        stream: stream.on?,
        &handler
       )
       if embedding.on? && !records.empty?
@@ -387,6 +425,19 @@ class OllamaChat::Chat
 
   private
 
+  def remove_think_blocks(messages)
+    new_messages = OllamaChat::MessageList.new(self)
+    messages.to_ary.each do |message|
+      thought_less_content = message.content.gsub(%r(<think(?:ing)?>.*?</think(?:ing)?>)im, '')
+      new_messages << Ollama::Message.new(
+        role: message.role,
+        content: thought_less_content,
+        images: message.images
+      )
+    end
+    new_messages
+  end
+
   def setup_documents
     if embedding.on?
       @embedding_model = config.embedding.model.name
data/lib/ollama_chat/dialog.rb
CHANGED
@@ -57,6 +57,30 @@ module OllamaChat::Dialog
     info
   end
 
+  attr_accessor :think_mode
+
+  def choose_think_mode
+    modes = %w[ display omit only_delete no_delete ].sort
+    current = if modes.index(@think_mode)
+                @think_mode
+              elsif modes.index(config.think_mode)
+                config.think_mode
+              else
+                modes.first
+              end
+    modes.unshift('[EXIT]')
+    think_mode = OllamaChat::Utils::Chooser.choose(modes)
+    case think_mode
+    when nil, '[EXIT]'
+      STDOUT.puts "Exiting chooser."
+      think_mode = current
+    end
+    self.think_mode = think_mode
+  ensure
+    STDOUT.puts "Using think mode #{bold{@think_mode}}."
+    info
+  end
+
   def change_system_prompt(default, system: nil)
     selector = Regexp.new(system.to_s[1..-1].to_s)
     prompts = config.system_prompts.attribute_names.compact.grep(selector)
data/lib/ollama_chat/follow_chat.rb
CHANGED
@@ -20,17 +20,23 @@ class OllamaChat::FollowChat
       @messages << Message.new(role: 'assistant', content: '')
       @user = message_type(@messages.last.images) + " " +
         bold { color(111) { 'assistant:' } }
-      @output.puts @user unless @chat.markdown.on?
     end
-
-
-
-
-
-
-
-
-
+    @messages.last.content << response.message&.content
+    if content = @messages.last.content.full?
+      case @chat.think_mode
+      when 'display'
+        content = emphasize_think_block(content)
+      when 'omit'
+        content = omit_think_block(content)
+      when 'no_delete', 'only_delete'
+        content = quote_think_tags(content)
+      end
+      if @chat.markdown.on?
+        markdown_content = Kramdown::ANSI.parse(content)
+        @output.print clear_screen, move_home, @user, ?\n, markdown_content
+      else
+        @output.print clear_screen, move_home, @user, ?\n, content
+      end
     end
     @say.call(response)
   end
@@ -57,4 +63,22 @@ class OllamaChat::FollowChat
       Kramdown::ANSI::Width.wrap(stats_text, percentage: 90).gsub(/(?<!\A)^/, ' ')
     }
   end
+
+  private
+
+  def emphasize_think_block(content)
+    content.gsub(%r(<think(?:ing)?>)i, "\n💭\n").gsub(%r(</think(?:ing)?>)i, "\n💬\n")
+  end
+
+  def omit_think_block(content)
+    content.gsub(%r(<think(?:ing)?>.*?(</think(?:ing)?>|\z))im, '')
+  end
+
+  def quote_think_tags(content)
+    if @chat.markdown.on?
+      content.gsub(%r(<(think(?:ing)?)>)i, "\n\\<\\1\\>\n").gsub(%r(</(think(?:ing)?)>)i, "\n\\</\\1\\>\n")
+    else
+      content.gsub(%r(<(think(?:ing)?)>)i, "\n<\\1\>\n").gsub(%r(</(think(?:ing)?)>)i, "\n</\\1>\n")
+    end
+  end
 end
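As a quick illustration of the 'display' path, the emoji substitution above keeps the thought text but replaces the tags that surround it with visual markers (the sample string is invented):

    # Illustration only; same substitution as emphasize_think_block above.
    text  = "<think>check the config first</think>Use /think_mode to change this."
    shown = text.gsub(%r(<think(?:ing)?>)i, "\n💭\n").gsub(%r(</think(?:ing)?>)i, "\n💬\n")
    puts shown
    # prints (after a leading blank line):
    # 💭
    # check the config first
    # 💬
    # Use /think_mode to change this.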
data/lib/ollama_chat/information.rb
CHANGED
@@ -48,6 +48,7 @@ module OllamaChat::Information
     stream.show
     location.show
     STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
+    STDOUT.puts "Think mode is currently: #{bold{@think_mode}}"
     STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
     if @voice.on?
       STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
@@ -75,12 +76,13 @@ module OllamaChat::Information
       /info                 show information for current session
       /config               output current configuration (#{@ollama_chat_config.filename.to_s.inspect})
       /document_policy      pick a scan policy for document references
+      /think_mode           pick a think mode for reasoning models
       /import source        import the source's content
       /summarize [n] source summarize the source's content in n words
       /embedding            toggle embedding paused or not
       /embed source         embed the source's content
       /web [n] query        query web search & return n or 1 results
-      /links
+      /links [clear]        display (or clear) links used in the chat
       /save filename        store conversation messages
       /load filename        load conversation messages
       /quit                 to quit
data/lib/ollama_chat/server_socket.rb
CHANGED
@@ -8,9 +8,9 @@ module OllamaChat::ServerSocket
     File.join(runtime_dir, 'ollama_chat.sock')
   end
 
-  def send_to_server_socket(content)
+  def send_to_server_socket(content, type: :socket_input)
     FileUtils.mkdir_p runtime_dir
-    message = { content: }
+    message = { content:, type: }
     socket = UNIXSocket.new(server_socket_path)
     socket.puts JSON(message)
     socket.close
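A minimal sketch of how a caller could use the extended API from Ruby; the message text is invented, and `type:` defaults to `:socket_input` as shown above:

    require 'ollama_chat'

    # Deliver content to a running ollama_chat instance. With the default
    # :socket_input type the receiver treats it as plain input; other types
    # (e.g. :terminal_input) are routed through the normal command handling.
    OllamaChat::ServerSocket.send_to_server_socket(
      'Please summarize the last answer.',
      type: :socket_input
    )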
data/lib/ollama_chat/version.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.9 ruby lib
+# stub: ollama_chat 0.0.11 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.9".freeze
+  s.version = "0.0.11".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
data/spec/ollama_chat/chat_spec.rb
CHANGED
@@ -6,43 +6,211 @@ RSpec.describe OllamaChat::Chat do
   end
 
   let :chat do
-    OllamaChat::Chat.new
+    OllamaChat::Chat.new(argv: argv).expose
   end
 
-
+  describe 'instantiation' do
+    connect_to_ollama_server(instantiate: false)
 
-
-
+    it 'can be instantiated' do
+      expect(chat).to be_a described_class
+    end
+  end
+
+  describe 'handle_input' do
+    connect_to_ollama_server
+
+    it 'returns :next when input is "/copy"' do
+      expect(chat).to receive(:copy_to_clipboard)
+      expect(chat.handle_input("/copy")).to eq :next
+    end
+
+    it 'returns :next when input is "/paste"' do
+      expect(chat).to receive(:paste_from_input).and_return "pasted this"
+      expect(chat.handle_input("/paste")).to eq "pasted this"
+    end
+
+    it 'returns :next when input is "/markdown"' do
+      expect(chat.markdown).to receive(:toggle)
+      expect(chat.handle_input("/markdown")).to eq :next
+    end
+
+    it 'returns :next when input is "/stream"' do
+      expect(chat.stream).to receive(:toggle)
+      expect(chat.handle_input("/stream")).to eq :next
+    end
+
+    it 'returns :next when input is "/location"' do
+      expect(chat.location).to receive(:toggle)
+      expect(chat.handle_input("/location")).to eq :next
+    end
+
+    it 'returns :next when input is "/voice(?:\s+(change))? "' do
+      expect(chat.voice).to receive(:toggle)
+      expect(chat.handle_input("/voice")).to eq :next
+      expect(chat).to receive(:change_voice)
+      expect(chat.handle_input("/voice change")).to eq :next
+    end
+
+    it 'returns :next when input is "/list(?:\s+(\d*))? "' do
+      expect(chat.messages).to receive(:list_conversation).with(4)
+      expect(chat.handle_input("/list 2")).to eq :next
+    end
+
+    it 'returns :next when input is "/clear(messages|links|history|all)"' do
+      expect(chat).to receive(:clean).with('messages')
+      expect(chat.handle_input("/clear messages")).to eq :next
+      expect(chat).to receive(:clean).with('links')
+      expect(chat.handle_input("/clear links")).to eq :next
+      expect(chat).to receive(:clean).with('history')
+      expect(chat.handle_input("/clear history")).to eq :next
+      expect(chat).to receive(:clean).with('all')
+      expect(chat.handle_input("/clear all")).to eq :next
+    end
+
+    it 'returns :next when input is "/clobber"' do
+      expect(chat).to receive(:clean).with('all')
+      expect(chat.handle_input("/clobber")).to eq :next
+    end
+
+    it 'returns :next when input is "/drop(?:\s+(\d*))?"' do
+      expect(chat.messages).to receive(:drop).with(?2)
+      expect(chat.messages).to receive(:list_conversation).with(2)
+      expect(chat.handle_input("/drop 2")).to eq :next
+    end
+
+    it 'returns :next when input is "/model"' do
+      expect(chat).to receive(:choose_model).with('', 'llama3.1')
+      expect(chat.handle_input("/model")).to eq :next
+    end
+
+    it 'returns :next when input is "/system"' do
+      expect(chat).to receive(:change_system_prompt).with(nil)
+      expect(chat).to receive(:info)
+      expect(chat.handle_input("/system")).to eq :next
+    end
+
+    it 'returns :next when input is "/regenerate"' do
+      expect(STDOUT).to receive(:puts).with(/Not enough messages/)
+      expect(chat.handle_input("/regenerate")).to eq :redo
+    end
+
+    it 'returns :next when input is "/collection(clear|change)"' do
+      expect(OllamaChat::Utils::Chooser).to receive(:choose)
+      expect(STDOUT).to receive(:puts).with(/Exiting/)
+      expect(chat.handle_input("/collection clear")).to eq :next
+      expect(OllamaChat::Utils::Chooser).to receive(:choose)
+      expect(chat).to receive(:info)
+      expect(STDOUT).to receive(:puts).with(/./)
+      expect(chat.handle_input("/collection change")).to eq :next
+    end
+
+    it 'returns :next when input is "/info"' do
+      expect(chat).to receive(:info)
+      expect(chat.handle_input("/info")).to eq :next
+    end
+
+    it 'returns :next when input is "/document_policy"' do
+      expect(chat).to receive(:choose_document_policy)
+      expect(chat.handle_input("/document_policy")).to eq :next
+    end
+
+    it 'returns :next when input is "/import\s+(.+)"' do
+      expect(chat).to receive(:import).with('./some_file')
+      expect(chat.handle_input("/import ./some_file")).to eq :next
+    end
+
+    it 'returns :next when input is "/summarize\s+(?:(\d+)\s+)?(.+)"' do
+      expect(chat).to receive(:summarize).with('./some_file', words: '23')
+      expect(chat.handle_input("/summarize 23 ./some_file")).to eq :next
+    end
+
+    it 'returns :next when input is "/embedding"' do
+      expect(chat.embedding_paused).to receive(:toggle)
+      expect(chat.embedding).to receive(:show)
+      expect(chat.handle_input("/embedding")).to eq :next
+    end
+
+    it 'returns :next when input is "/embed\s+(.+)"' do
+      expect(chat).to receive(:embed).with('./some_file')
+      expect(chat.handle_input("/embed ./some_file")).to eq :next
+    end
+
+    it 'returns :next when input is "/web\s+(?:(\d+)\s+)?(.+)"' do
+      expect(chat).to receive(:web).with('23', 'query').and_return 'the response'
+      expect(chat.handle_input("/web 23 query")).to eq 'the response'
+    end
+
+    it 'returns :next when input is "/save\s+(.+)$"' do
+      expect(chat.messages).to receive(:save_conversation).with('./some_file')
+      expect(chat.handle_input("/save ./some_file")).to eq :next
+    end
+
+    it 'returns :next when input is "/links(?:\s+(clear))?$" ' do
+      expect(chat).to receive(:manage_links).with(nil)
+      expect(chat.handle_input("/links")).to eq :next
+      expect(chat).to receive(:manage_links).with('clear')
+      expect(chat.handle_input("/links clear")).to eq :next
+    end
+
+    it 'returns :next when input is "/load\s+(.+)$"' do
+      expect(chat.messages).to receive(:load_conversation).with('./some_file')
+      expect(chat.handle_input("/load ./some_file")).to eq :next
+    end
+
+    it 'returns :next when input is "/config"' do
+      expect(chat).to receive(:display_config)
+      expect(chat.handle_input("/config")).to eq :next
+    end
+
+    it 'returns :next when input is "/quit"' do
+      expect(STDOUT).to receive(:puts).with(/Goodbye/)
+      expect(chat.handle_input("/quit")).to eq :return
+    end
+
+    it 'returns :next when input is "/nixda"' do
+      expect(chat).to receive(:display_chat_help)
+      expect(chat.handle_input("/nixda")).to eq :next
+    end
+
+    it 'returns :next when input is " "' do
+      expect(STDOUT).to receive(:puts).with(/to quit/)
+      expect(chat.handle_input(" ")).to eq :next
+    end
   end
 
   describe 'chat history' do
+    connect_to_ollama_server(instantiate: false)
+
     it 'derives chat_history_filename' do
-      expect(chat.
+      expect(chat.chat_history_filename).to_not be_nil
     end
 
     it 'can save chat history' do
       expect(File).to receive(:secure_write).with(
-        chat.
+        chat.chat_history_filename,
         kind_of(String)
       )
-      chat.
+      chat.save_history
     end
 
     it 'can initialize chat history' do
-      expect(File).to receive(:exist?).with(chat.
+      expect(File).to receive(:exist?).with(chat.chat_history_filename).
         and_return true
-      expect(File).to receive(:open).with(chat.
-      chat.
+      expect(File).to receive(:open).with(chat.chat_history_filename, ?r)
+      chat.init_chat_history
     end
 
     it 'can clear history' do
       chat
       expect(Readline::HISTORY).to receive(:clear)
-      chat.
+      chat.clear_history
     end
   end
 
   context 'loading conversations' do
+    connect_to_ollama_server(instantiate: false)
+
     let :argv do
       %w[ -C test -c ] << asset('conversation.json')
     end
@@ -56,7 +224,10 @@ RSpec.describe OllamaChat::Chat do
   end
 
   describe OllamaChat::DocumentCache do
+    connect_to_ollama_server(instantiate: false)
+
     context 'with MemoryCache' do
+
       let :argv do
         %w[ -M ]
       end
@@ -77,6 +248,8 @@ RSpec.describe OllamaChat::Chat do
 
   describe Documentrix::Documents do
     context 'with documents' do
+      connect_to_ollama_server(instantiate: false)
+
       let :argv do
         %w[ -C test -D ] << asset('example.html')
       end
@@ -90,6 +263,8 @@ RSpec.describe OllamaChat::Chat do
   end
 
   describe OllamaChat::Information do
+    connect_to_ollama_server(instantiate: false)
+
     it 'has progname' do
      expect(chat.progname).to eq 'ollama_chat'
     end
@@ -122,6 +297,7 @@ RSpec.describe OllamaChat::Chat do
         Streaming|
         Location|
         Document\ policy|
+        Think\ mode|
         Currently\ selected\ search\ engine
       /x
     ).at_least(1)
data/spec/ollama_chat/follow_chat_spec.rb
CHANGED
@@ -8,7 +8,7 @@ RSpec.describe OllamaChat::FollowChat do
   end
 
   let :chat do
-    double('Chat', markdown: double(on?: false))
+    double('Chat', markdown: double(on?: false), think_mode: 'display')
   end
 
   let :follow_chat do
@@ -31,8 +31,10 @@ RSpec.describe OllamaChat::FollowChat do
   it 'can follow without markdown' do
     message = Ollama::Message.new(role: 'assistant', content: 'world')
     response = double(message:, done: false)
-    expect(output).to receive(:
-
+    expect(output).to receive(:print).with(
+      "\e[2J", "\e[1;1H", "📨 \e[1m\e[38;5;111massistant:\e[0m\e[0m", "\n",
+      "world"
+    )
     follow_chat.call(response)
     response = double(
       message: nil,