ollama-ruby 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/ollama_chat CHANGED
@@ -5,6 +5,8 @@ include Ollama
 require 'term/ansicolor'
 include Term::ANSIColor
 require 'tins'
+require 'tins/xt/full'
+require 'tins/xt/hash_union'
 include Tins::GO
 require 'reline'
 require 'reverse_markdown'
@@ -23,16 +25,23 @@ class OllamaChatConfig
   DEFAULT_CONFIG = <<~EOT
     ---
     url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
+    proxy: null # http://localhost:8080
     model:
       name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
       options:
         num_ctx: 8192
     prompts:
       system: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
+      embed: "This source was now embedded: %{source}"
       summarize: |
-        Generate an abstract summary of the content in this document:
+        Generate an abstract summary of the content in this document using
+        %{words} words:

-        %s
+        %{source_content}
+      web: |
+        Answer the the query %{query} using these sources and summaries:
+
+        %{results}
     voice: Samantha
     markdown: true
     embedding:
@@ -42,7 +51,7 @@ class OllamaChatConfig
         options: {}
       # Retrieval prompt template:
       prompt: 'Represent this sentence for searching relevant passages: %s'
-      collection: <%= ENV.fetch('OLLAMA_CHAT_COLLECTION', 'ollama_chat') %>
+      collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
       found_texts_size: 4096
       found_texts_count: null
       splitter:
@@ -50,8 +59,12 @@ class OllamaChatConfig
         chunk_size: 1024
     cache: Ollama::Documents::Cache::RedisBackedMemoryCache
     redis:
-      url: <%= ENV.fetch('REDIS_URL', 'null') %>
+      documents:
+        url: <%= ENV.fetch('REDIS_URL', 'null') %>
+      expiring:
+        url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
     debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
+    ssl_no_verify: []
   EOT

   def initialize(filename = nil)
@@ -112,8 +125,8 @@ class FollowChat
       end
       content = response.message&.content
       @messages.last.content << content
-      if @markdown and @messages.last.content.present?
-        markdown_content = Utils::ANSIMarkdown.parse(@messages.last.content)
+      if @markdown and content = @messages.last.content.full?
+        markdown_content = Utils::ANSIMarkdown.parse(content)
         @output.print clear_screen, move_home, @user, ?\n, markdown_content
       else
         @output.print content
@@ -217,32 +230,25 @@ def save_conversation(filename, messages)
 end

 def message_type(images)
-  if images.present?
-    ?📸
-  else
-    ?📨
-  end
+  images.present? ? ?📸 : ?📨
 end

-def list_conversation(messages, markdown)
-  messages.each do |m|
+def list_conversation(messages, last = nil)
+  last = (last || messages.size).clamp(0, messages.size)
+  messages[-last..-1].to_a.each do |m|
     role_color = case m.role
                  when 'user' then 172
                  when 'assistant' then 111
                  when 'system' then 213
                  else 210
                  end
-    content = if markdown && m.content.present?
-                Utils::ANSIMarkdown.parse(m.content)
-              else
-                m.content
-              end
+    content = m.content.full? { $markdown ? Utils::ANSIMarkdown.parse(_1) : _1 }
     message_text = message_type(m.images) + " "
     message_text += bold { color(role_color) { m.role } }
     message_text += ":\n#{content}"
-    if m.images.present?
-      message_text += "\nImages: " + italic { m.images.map(&:path) * ', ' }
-    end
+    m.images.full? { |images|
+      message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
+    }
     puts message_text
   end
 end
@@ -258,37 +264,37 @@ end

 def parse_rss(source_io)
   feed = RSS::Parser.parse(source_io, false, false)
-  title = <<~end
+  title = <<~EOT
     # #{feed&.channel&.title}

-  end
+  EOT
   feed.items.inject(title) do |text, item|
-    text << <<~end
+    text << <<~EOT
       ## [#{item&.title}](#{item&.link})

       updated on #{item&.pubDate}

       #{reverse_markdown(item&.description)}

-    end
+    EOT
   end
 end

 def parse_atom(source_io)
   feed = RSS::Parser.parse(source_io, false, false)
-  title = <<~end
+  title = <<~EOT
     # #{feed.title.content}

-  end
+  EOT
   feed.items.inject(title) do |text, item|
-    text << <<~end
+    text << <<~EOT
       ## [#{item&.title&.content}](#{item&.link&.href})

       updated on #{item&.updated&.content}

       #{reverse_markdown(item&.content&.content)}

-    end
+    EOT
   end
 end

@@ -325,38 +331,51 @@ def parse_source(source_io)
     reader = PDF::Reader.new(source_io)
     reader.pages.inject(+'') { |result, page| result << page.text }
   else
-    STDERR.puts "Cannot import #{source_io&.content_type} document."
+    STDERR.puts "Cannot embed #{source_io&.content_type} document."
     return
   end
 end

-def import_document(source_io, source)
+def embed_source(source_io, source)
   embedding_enabled? or return parse_source(source_io)
-  puts "Importing #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
+  puts "Embedding #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
   text = parse_source(source_io) or return
   text.downcase!
   splitter_config = $config.embedding.splitter
-  inputs = case splitter_config.name
-  when 'Character'
-    Ollama::Documents::Splitters::Character.new(
-      chunk_size: splitter_config.chunk_size,
-    ).split(text)
-  when 'RecursiveCharacter'
-    Ollama::Documents::Splitters::RecursiveCharacter.new(
-      chunk_size: splitter_config.chunk_size,
-    ).split(text)
-  when 'Semantic'
-    Ollama::Documents::Splitters::Semantic.new(
-      ollama:, model: $config.embedding.model.name,
-      chunk_size: splitter_config.chunk_size,
-    ).split(
-      text,
-      breakpoint: splitter_config.breakpoint.to_sym,
-      percentage: splitter_config.percentage?,
-      percentile: splitter_config.percentile?,
-    )
-  end
-  $documents.add(inputs, source: source.to_s)
+  inputs = nil
+  case splitter_config.name
+  when 'Character'
+    splitter = Ollama::Documents::Splitters::Character.new(
+      chunk_size: splitter_config.chunk_size,
+    )
+    inputs = splitter.split(text)
+  when 'RecursiveCharacter'
+    splitter = Ollama::Documents::Splitters::RecursiveCharacter.new(
+      chunk_size: splitter_config.chunk_size,
+    )
+    inputs = splitter.split(text)
+  when 'Semantic'
+    splitter = Ollama::Documents::Splitters::Semantic.new(
+      ollama:, model: $config.embedding.model.name,
+      chunk_size: splitter_config.chunk_size,
+    )
+    inputs = splitter.split(
+      text,
+      breakpoint: splitter_config.breakpoint.to_sym,
+      percentage: splitter_config.percentage?,
+      percentile: splitter_config.percentile?,
+    )
+    inputs = splitter.split(text)
+  end
+  inputs or return
+  source = source.to_s
+  if source.start_with?(?!)
+    source = Ollama::Utils::Width.truncate(
+      source[1..-1].gsub(/\W+/, ?_),
+      length: 10
+    )
+  end
+  $documents.add(inputs, source: source)
 end

 def add_image(images, source_io, source)
@@ -365,10 +384,27 @@ def add_image(images, source_io, source)
   (images << image).uniq!
 end

+def http_options(url)
+  options = {}
+  if ssl_no_verify = $config.ssl_no_verify?
+    hostname = URI.parse(url).hostname
+    options |= { ssl_verify_peer: !ssl_no_verify.include?(hostname) }
+  end
+  if proxy = $config.proxy?
+    options |= { proxy: }
+  end
+  options
+end
+
 def fetch_source(source, &block)
   case source
+  when %r(\A!(.*))
+    command = $1
+    Utils::Fetcher.execute(command) do |tmp|
+      block.(tmp)
+    end
   when %r(\Ahttps?://\S+)
-    Utils::Fetcher.get(source, debug: $config.debug) do |tmp|
+    Utils::Fetcher.get(source, debug: $config.debug, http_options: http_options(source)) do |tmp|
       block.(tmp)
     end
   when %r(\Afile://(?:(?:[.-]|[[:alnum:]])*)(/\S*)|([~.]?/\S*))
@@ -384,17 +420,45 @@ rescue => e
   STDERR.puts "Cannot add source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
 end

-def summarize(source)
+def import(source)
+  puts "Now importing #{source.to_s.inspect}."
+  fetch_source(source) do |source_io|
+    content = parse_source(source_io)
+    content.present? or return
+    source_io.rewind
+    content
+  end
+end
+
+def summarize(source, words: nil)
+  words = words.to_i
+  words < 1 and words = 100
   puts "Now summarizing #{source.to_s.inspect}."
   source_content =
     fetch_source(source) do |source_io|
       content = parse_source(source_io)
       content.present? or return
       source_io.rewind
-      import_document(source_io, source)
       content
     end
-  $config.prompts.summarize % source_content
+  $config.prompts.summarize % { source_content:, words: }
+end
+
+def embed(source)
+  if embedding_enabled?
+    puts "Now embedding #{source.to_s.inspect}."
+    fetch_source(source) do |source_io|
+      content = parse_source(source_io)
+      content.present? or return
+      source_io.rewind
+      embed_source(source_io, source)
+      content
+    end
+    $config.prompts.embed % { source: }
+  else
+    puts "Embedding is off, so I will just give a small summary of this source."
+    summarize(source)
+  end
 end

 def parse_content(content, images)
@@ -412,7 +476,7 @@ def parse_content(content, images)
       when 'image'
         add_image(images, source_io, source)
       when 'text', 'application'
-        import_document(source_io, source)
+        embed_source(source_io, source)
       else
         STDERR.puts(
           "Cannot fetch #{source.to_s.inspect} with content type "\
@@ -438,22 +502,28 @@ ensure
 end

 def choose_collection(default_collection)
-  collections = [ default_collection ] + $documents.collections.map(&:to_s)
-  collections = collections.uniq.sort
-  $documents.collection = collection =
-    Ollama::Utils::Chooser.choose(collections) || default_collection
+  collections = [ default_collection ] + $documents.collections
+  collections = collections.compact.map(&:to_s).uniq.sort
+  collections.unshift('[NEW]')
+  collection = Ollama::Utils::Chooser.choose(collections) || default_collection
+  if collection == '[NEW]'
+    print "Enter name of the new collection: "
+    collection = STDIN.gets.chomp
+  end
+  $documents.collection = collection
 ensure
   puts "Changing to collection #{bold{collection}}."
   collection_stats
 end

 def collection_stats
-  puts <<~end
+  puts <<~EOT
     Collection
       Name: #{bold{$documents.collection}}
+      Embedding model: #{bold{$embedding_model}}
       #Embeddings: #{$documents.size}
       Tags: #{$documents.tags}
-  end
+  EOT
 end

 def configure_cache
@@ -467,46 +537,78 @@ rescue => e
   Ollama::Documents::MemoryCache
 end

-def set_markdown(value)
-  if value
+def toggle_markdown
+  $markdown = !$markdown
+  show_markdown
+end
+
+def show_markdown
+  if $markdown
     puts "Using ANSI markdown to output content."
-    true
   else
     puts "Using plaintext for outputting content."
-    false
   end
+  $markdown
 end

-def clear_messages(messages)
-  messages.delete_if { _1.role != 'system' }
+def set_embedding(embedding)
+  $embedding_enabled = embedding
+  show_embedding
+end
+
+def show_embedding
+  puts "Embedding is #{embedding_enabled? ? "on" : "off"}."
+  $embedding_enabled
 end

 def embedding_enabled?
-  $config.embedding.enabled && !$opts[?E]
+  $embedding_enabled && !$embedding_paused
 end

-def display_chat_help
-  puts <<~end
-    /paste to paste content
-    /markdown toggle markdown output
-    /list list the messages of the conversation
-    /clear clear the conversation messages
-    /clobber clear conversation messages and collection
-    /pop [n] pop the last n exchanges, defaults to 1
-    /model change the model
-    /regenerate the last answer message
-    /collection clear [tag]|stats|change|new clear or show stats of current collection
-    /summarize source summarize the URL/file source's content
-    /web [n] query query web search & return n or 1 results
-    /save filename store conversation messages
-    /load filename load conversation messages
-    /quit to quit
-    /help to view this help
+def toggle_embedding_paused
+  $embedding_paused = !$embedding_paused
+  show_embedding
+end
+
+def info
+  puts "Current model is #{bold{$model}}."
+  collection_stats
+  if show_embedding
+    puts "Text splitter is #{bold{$config.embedding.splitter.name}}."
   end
+  puts "Documents database cache is #{$documents.nil? ? 'n/a' : $documents.cache.class}"
+  show_markdown
+end
+
+def clear_messages(messages)
+  messages.delete_if { _1.role != 'system' }
+end
+
+def display_chat_help
+  puts <<~EOT
+    /paste to paste content
+    /markdown toggle markdown output
+    /list [n] list the last n / all conversation exchanges
+    /clear clear the whole conversation
+    /clobber clear the conversation and collection
+    /pop [n] pop the last n exchanges, defaults to 1
+    /model change the model
+    /regenerate the last answer message
+    /collection clear [tag]|change clear or show stats of current collection
+    /import source import the source's content
+    /summarize [n] source summarize the source's content in n words
+    /embedding toggle embedding paused or not
+    /embed source embed the source's content
+    /web [n] query query web search & return n or 1 results
+    /save filename store conversation messages
+    /load filename load conversation messages
+    /quit to quit
+    /help to view this help
+  EOT
 end

 def usage
-  puts <<~end
+  puts <<~EOT
     #{File.basename($0)} [OPTIONS]

     -f CONFIG config file to read
@@ -521,7 +623,7 @@ def usage
     -v use voice output
     -h this help

-  end
+  EOT
   exit 0
 end

@@ -541,23 +643,29 @@ puts "Configuration read from #{config.filename.inspect} is:", $config
 base_url = $opts[?u] || $config.url
 $ollama = Client.new(base_url:, debug: $config.debug)

-model = choose_model($opts[?m], $config.model.name)
+$model = choose_model($opts[?m], $config.model.name)
 options = Options[$config.model.options]
-model_system = pull_model_unless_present(model, options)
+model_system = pull_model_unless_present($model, options)
 messages = []
+set_embedding($config.embedding.enabled && !$opts[?E])
+
+if voice = ($config.voice if $opts[?v])
+  puts "Using voice #{bold{voice}} to speak."
+end
+$markdown = $config.markdown

 if embedding_enabled?
-  embedding_model = $config.embedding.model.name
+  $embedding_model = $config.embedding.model.name
   embedding_model_options = Options[$config.embedding.model.options]
-  pull_model_unless_present(embedding_model, embedding_model_options)
+  pull_model_unless_present($embedding_model, embedding_model_options)
   collection = $opts[?C] || $config.embedding.collection
   $documents = Documents.new(
     ollama:,
-    model: $config.embedding.model.name,
+    model: $embedding_model,
     model_options: $config.embedding.model.options,
     collection:,
     cache: configure_cache,
-    redis_url: $config.redis.url?,
+    redis_url: $config.redis.documents.url?,
   )

   document_list = $opts[?D].to_a
@@ -578,31 +686,26 @@ if embedding_enabled?
     document_list.each_slice(25) do |docs|
       docs.each do |doc|
         fetch_source(doc) do |doc_io|
-          import_document(doc_io, doc)
+          embed_source(doc_io, doc)
         end
       end
     end
   end
   collection_stats
 else
-  $documents = Documents.new(ollama:, model:)
+  $documents = Tins::NULL
 end

-if voice = ($config.voice if $opts[?v])
-  puts "Using voice #{bold{voice}} to speak."
-end
-markdown = set_markdown($config.markdown)
-
 if $opts[?c]
   messages.concat load_conversation($opts[?c])
 else
   if system = Ollama::Utils::FileArgument.
       get_file_argument($opts[?s], default: $config.prompts.system? || model_system)
     messages << Message.new(role: 'system', content: system)
-    puts <<~end
+    puts <<~EOT
       Configured system prompt is:
       #{italic{Ollama::Utils::Width.wrap(system, percentage: 90)}}
-    end
+    EOT
   end
 end

@@ -618,14 +721,14 @@ loop do
   when %r(^/paste$)
     puts bold { "Paste your content and then press C-d!" }
     content = STDIN.read
-  when %r(^/quit$)
-    puts "Goodbye."
-    exit 0
   when %r(^/markdown$)
-    markdown = set_markdown(!markdown)
+    $markdown = toggle_markdown
     next
-  when %r(^/list$)
-    list_conversation(messages, markdown)
+  when %r(^/list(?:\s+(\d*))?$)
+    last = if $1
+      2 * $1.to_i
+    end
+    list_conversation(messages, last)
     next
   when %r(^/clear$)
     clear_messages(messages)
@@ -636,7 +739,7 @@ loop do
     $documents.clear
     puts "Cleared messages and collection."
     next
-  when %r(^/collection\s+(clear|stats|change|new)(?:\s+(.+))?$)
+  when %r(^/collection\s+(clear|change)(?:\s+(.+))?$)
     command, arg = $1, $2
     case command
     when 'clear'
@@ -648,24 +751,26 @@ loop do
         $documents.clear
         puts "Cleared collection #{bold{collection}}."
       end
-    when 'stats'
-      collection_stats
     when 'change'
       choose_collection(collection)
-    when 'new'
-      print "Enter name of the new collection: "
-      $documents.collection = collection = STDIN.gets.chomp
-      collection_stats
     end
     next
-  when %r(^/pop?(?:\s+(\d*))?$)
-    n = $1.to_i.clamp(1, Float::INFINITY)
-    r = messages.pop(2 * n)
-    m = r.size / 2
-    puts "Popped the last #{m} exchanges."
+  when %r(/info)
+    info
+    next
+  when %r(^/pop(?:\s+(\d*))?$)
+    if messages.size > 1
+      n = $1.to_i.clamp(1, Float::INFINITY)
+      r = messages.pop(2 * n)
+      m = r.size / 2
+      puts "Popped the last #{m} exchanges."
+    else
+      puts "No more exchanges you can pop."
+    end
+    list_conversation(messages, 2)
     next
   when %r(^/model$)
-    model = choose_model('', model)
+    $model = choose_model('', $model)
     next
   when %r(^/regenerate$)
     if content = messages[-2]&.content
@@ -677,23 +782,29 @@ loop do
     end
     parse_content = false
     content
-  when %r(^/summarize\s+(.+))
+  when %r(^/import\s+(.+))
+    parse_content = false
+    content = import($1) or next
+  when %r(^/summarize\s+(?:(\d+)\s+)?(.+))
     parse_content = false
-    content = summarize($1) or next
+    content = summarize($2, words: $1) or next
+  when %r(^/embedding$)
+    toggle_embedding_paused
+    next
+  when %r(^/embed\s+(.+))
+    parse_content = false
+    content = embed($1) or next
   when %r(^/web\s+(?:(\d+)\s+)?(.+))
     parse_content = false
     urls = search_web($2, $1.to_i)
     urls.each do |url|
-      fetch_source(url) do |url_io|
-        import_document(url_io, url)
-      end
+      fetch_source(url) { |url_io| embed_source(url_io, url) }
     end
     urls_summarized = urls.map { summarize(_1) }
-    content = <<~end
-      Answer the the query #{$2.inspect} using these sources and summaries:
-
-      #{urls.zip(urls_summarized).map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"}
-    end
+    query = $2.inspect
+    results = urls.zip(urls_summarized).
+      map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
+    content = $config.prompts.web % { query:, results: }
   when %r(^/save\s+(.+)$)
     save_conversation($1, messages)
     puts "Saved conversation to #$1."
@@ -702,6 +813,9 @@ loop do
     messages = load_conversation($1)
     puts "Loaded conversation from #$1."
     next
+  when %r(^/quit$)
+    puts "Goodbye."
+    exit 0
   when %r(^/)
     display_chat_help
     next
@@ -734,8 +848,8 @@ loop do
   end

   messages << Message.new(role: 'user', content:, images:)
-  handler = FollowChat.new(messages:, markdown:, voice:)
-  ollama.chat(model:, messages:, options:, stream: true, &handler)
+  handler = FollowChat.new(messages:, markdown: $markdown, voice:)
+  ollama.chat(model: $model, messages:, options:, stream: true, &handler)

   if embedding_enabled? && !records.empty?
     puts "", records.map { |record|