ollama-ruby 0.5.0 → 0.7.0

This diff shows the changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
data/bin/ollama_chat CHANGED
@@ -4,9 +4,6 @@ require 'ollama'
  include Ollama
  require 'term/ansicolor'
  include Term::ANSIColor
- require 'tins'
- require 'tins/xt/full'
- require 'tins/xt/hash_union'
  include Tins::GO
  require 'reline'
  require 'reverse_markdown'
@@ -17,6 +14,7 @@ require 'nokogiri'
  require 'rss'
  require 'pdf/reader'
  require 'csv'
+ require 'xdg'

  class OllamaChatConfig
  include ComplexConfig
@@ -30,8 +28,12 @@ class OllamaChatConfig
  name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
  options:
  num_ctx: 8192
+ location:
+ enabled: false
+ name: Berlin
+ decimal_degrees: [ 52.514127, 13.475211 ]
+ units: SI (International System of Units) # or USCS (United States Customary System)
  prompts:
- system: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
  embed: "This source was now embedded: %{source}"
  summarize: |
  Generate an abstract summary of the content in this document using
@@ -42,8 +44,14 @@ class OllamaChatConfig
  Answer the the query %{query} using these sources and summaries:

  %{results}
- voice: Samantha
+ system_prompts:
+ default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
+ voice:
+ enabled: false
+ default: Samantha
+ list: <%= `say -v ?`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
  markdown: true
+ stream: true
  embedding:
  enabled: true
  model:
@@ -57,14 +65,16 @@ class OllamaChatConfig
  splitter:
  name: RecursiveCharacter
  chunk_size: 1024
- cache: Ollama::Documents::Cache::RedisBackedMemoryCache
+ cache: Ollama::Documents::RedisBackedMemoryCache
  redis:
  documents:
  url: <%= ENV.fetch('REDIS_URL', 'null') %>
  expiring:
  url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
+ ex: 86400
  debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
  ssl_no_verify: []
+ copy: pbcopy
  EOT

  def initialize(filename = nil)
@@ -87,17 +97,11 @@ class OllamaChatConfig
  attr_reader :config

  def default_path
- File.join(config_dir_path, 'config.yml')
+ config_dir_path + 'config.yml'
  end

  def config_dir_path
- File.join(
- ENV.fetch(
- 'XDG_CONFIG_HOME',
- File.join(ENV.fetch('HOME'), '.config')
- ),
- 'ollama_chat'
- )
+ XDG.new.config_home + 'ollama_chat'
  end
  end

@@ -158,12 +162,125 @@ class FollowChat
  end
  end

+ module CheckSwitch
+ extend Tins::Concern
+
+ included do
+ alias_method :on?, :value
+ end
+
+ def off?
+ !on?
+ end
+
+ def show
+ puts @msg[value]
+ end
+ end
+
+ class Switch
+ def initialize(name, msg:, config: $config)
+ @value = [ false, true ].include?(config) ? config : !!config.send("#{name}?")
+ @msg = msg
+ end
+
+ attr_reader :value
+
+ def set(value, show: false)
+ @value = !!value
+ show && self.show
+ end
+
+ def toggle(show: true)
+ @value = !@value
+ show && self.show
+ end
+
+ include CheckSwitch
+ end
+
+ class CombinedSwitch
+ def initialize(value:, msg:)
+ @value = value
+ @msg = msg
+ end
+
+ def value
+ @value.()
+ end
+
+ include CheckSwitch
+ end
+
+ def setup_switches
+ $markdown = Switch.new(
+ :markdown,
+ msg: {
+ true => "Using #{italic{'ANSI'}} markdown to output content.",
+ false => "Using plaintext for outputting content.",
+ }
+ )
+
+ $stream = Switch.new(
+ :stream,
+ msg: {
+ true => "Streaming enabled.",
+ false => "Streaming disabled.",
+ }
+ )
+
+ $voice = Switch.new(
+ :stream,
+ msg: {
+ true => "Voice output enabled.",
+ false => "Voice output disabled.",
+ },
+ config: $config.voice
+ )
+
+ $embedding_enabled = Switch.new(
+ :embedding_enabled,
+ msg: {
+ true => "Embedding enabled.",
+ false => "Embedding disabled.",
+ }
+ )
+
+ $embedding_paused = Switch.new(
+ :embedding_paused,
+ msg: {
+ true => "Embedding paused.",
+ false => "Embedding resumed.",
+ }
+ )
+
+ $embedding = CombinedSwitch.new(
+ value: -> { $embedding_enabled.on? && $embedding_paused.off? },
+ msg: {
+ true => "Embedding is currently performed.",
+ false => "Embedding is currently not performed.",
+ }
+ )
+
+ $location = Switch.new(
+ :location,
+ msg: {
+ true => "Location and localtime enabled.",
+ false => "Location and localtime disabled.",
+ },
+ config: $config.location.enabled
+ )
+ end
+
  def search_web(query, n = nil)
+ if l = at_location
+ query += " #{at_location}"
+ end
  n = n.to_i
  n < 1 and n = 1
  query = URI.encode_uri_component(query)
  url = "https://www.duckduckgo.com/html/?q=#{query}"
- Ollama::Utils::Fetcher.new(debug: $config.debug).get(url) do |tmp|
+ Ollama::Utils::Fetcher.get(url, debug: $config.debug) do |tmp|
  result = []
  doc = Nokogiri::HTML(tmp)
  doc.css('.results_links').each do |link|
@@ -196,7 +313,7 @@ def pull_model_unless_present(model, options, retried = false)
  end
  }
  rescue Errors::NotFoundError
- puts "Model #{bold{model}} not found, attempting to pull it now…"
+ puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
  ollama.pull(name: model)
  if retried
  exit 1
@@ -205,7 +322,7 @@ rescue Errors::NotFoundError
  retry
  end
  rescue Errors::Error => e
- warn "Caught #{e.class}: #{e} => Exiting."
+ warn "Caught #{e.class} while pulling model: #{e} => Exiting."
  exit 1
  end

@@ -242,7 +359,7 @@ def list_conversation(messages, last = nil)
  when 'system' then 213
  else 210
  end
- content = m.content.full? { $markdown ? Utils::ANSIMarkdown.parse(_1) : _1 }
+ content = m.content.full? { $markdown.on? ? Utils::ANSIMarkdown.parse(_1) : _1 }
  message_text = message_type(m.images) + " "
  message_text += bold { color(role_color) { m.role } }
  message_text += ":\n#{content}"
@@ -298,6 +415,32 @@ def parse_atom(source_io)
  end
  end

+ def pdf_read(io)
+ reader = PDF::Reader.new(io)
+ reader.pages.inject(+'') { |result, page| result << page.text }
+ end
+
+ def ps_read(io)
+ gs = `which gs`.chomp
+ if gs.present?
+ Tempfile.create do |tmp|
+ IO.popen("#{gs} -q -sDEVICE=pdfwrite -sOutputFile=#{tmp.path} -", 'wb') do |gs_io|
+ until io.eof?
+ buffer = io.read(1 << 17)
+ IO.select(nil, [ gs_io ], nil)
+ gs_io.write buffer
+ end
+ gs_io.close
+ File.open(tmp.path, 'rb') do |pdf|
+ pdf_read(pdf)
+ end
+ end
+ end
+ else
+ STDERR.puts "Cannot convert #{io&.content_type} whith ghostscript, gs not in path."
+ end
+ end
+
  def parse_source(source_io)
  case source_io&.content_type
  when 'text/html'
@@ -319,17 +462,16 @@ def parse_source(source_io)
  result << "\n\n"
  end
  result
- when %r(\Atext/)
- source_io.read
  when 'application/rss+xml'
  parse_rss(source_io)
  when 'application/atom+xml'
  parse_atom(source_io)
- when 'application/json'
- source_io.read
+ when 'application/postscript'
+ ps_read(source_io)
  when 'application/pdf'
- reader = PDF::Reader.new(source_io)
- reader.pages.inject(+'') { |result, page| result << page.text }
+ pdf_read(source_io)
+ when %r(\Aapplication/(json|ld\+json|x-ruby|x-perl|x-gawk|x-python|x-javascript|x-c?sh|x-dosexec|x-shellscript|x-tex|x-latex|x-lyx|x-bibtex)), %r(\Atext/), nil
+ source_io.read
  else
  STDERR.puts "Cannot embed #{source_io&.content_type} document."
  return
@@ -337,7 +479,7 @@ def parse_source(source_io)
  end

  def embed_source(source_io, source)
- embedding_enabled? or return parse_source(source_io)
+ $embedding.on? or return parse_source(source_io)
  puts "Embedding #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
  text = parse_source(source_io) or return
  text.downcase!
@@ -375,7 +517,7 @@ def embed_source(source_io, source)
  length: 10
  )
  end
- $documents.add(inputs, source: source)
+ $documents.add(inputs, source:)
  end

  def add_image(images, source_io, source)
@@ -404,7 +546,12 @@ def fetch_source(source, &block)
  block.(tmp)
  end
  when %r(\Ahttps?://\S+)
- Utils::Fetcher.get(source, debug: $config.debug, http_options: http_options(source)) do |tmp|
+ Utils::Fetcher.get(
+ source,
+ cache: $cache,
+ debug: $config.debug,
+ http_options: http_options(source)
+ ) do |tmp|
  block.(tmp)
  end
  when %r(\Afile://(?:(?:[.-]|[[:alnum:]])*)(/\S*)|([~.]?/\S*))
@@ -417,7 +564,7 @@ def fetch_source(source, &block)
  raise "invalid source"
  end
  rescue => e
- STDERR.puts "Cannot add source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
+ STDERR.puts "Cannot fetch source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
  end

  def import(source)
@@ -445,7 +592,7 @@ def summarize(source, words: nil)
  end

  def embed(source)
- if embedding_enabled?
+ if $embedding.on?
  puts "Now embedding #{source.to_s.inspect}."
  fetch_source(source) do |source_io|
  content = parse_source(source_io)
468
615
  content.scan(%r([.~]?/\S+|https?://\S+|#\S+)).each do |source|
469
616
  case source
470
617
  when /\A#(\S+)/
471
- tags << $1
618
+ tags.add($1, source:)
472
619
  else
473
620
  source = source.sub(/(["')]|\*+)\z/, '')
474
621
  fetch_source(source) do |source_io|
@@ -490,29 +637,37 @@ def parse_content(content, images)
  return content, (tags unless tags.empty?)
  end

- def choose_model(cli_model, default_model)
+ def choose_model(cli_model, current_model)
  models = ollama.tags.models.map(&:name).sort
  model = if cli_model == ''
- Ollama::Utils::Chooser.choose(models) || default_model
+ Ollama::Utils::Chooser.choose(models) || current_model
  else
- cli_model || default_model
+ cli_model || current_model
  end
  ensure
  puts green { "Connecting to #{model}@#{ollama.base_url} now…" }
  end

- def choose_collection(default_collection)
- collections = [ default_collection ] + $documents.collections
+ def ask?(prompt:)
+ print prompt
+ STDIN.gets.chomp
+ end
+
+ def choose_collection(current_collection)
+ collections = [ current_collection ] + $documents.collections
  collections = collections.compact.map(&:to_s).uniq.sort
- collections.unshift('[NEW]')
- collection = Ollama::Utils::Chooser.choose(collections) || default_collection
- if collection == '[NEW]'
- print "Enter name of the new collection: "
- collection = STDIN.gets.chomp
+ collections.unshift('[EXIT]').unshift('[NEW]')
+ collection = Ollama::Utils::Chooser.choose(collections) || current_collection
+ case collection
+ when '[NEW]'
+ $documents.collection = ask?(prompt: "Enter name of the new collection: ")
+ when nil, '[EXIT]'
+ puts "Exiting chooser."
+ when /./
+ $documents.collection = collection
  end
- $documents.collection = collection
  ensure
- puts "Changing to collection #{bold{collection}}."
+ puts "Using collection #{bold{$documents.collection}}."
  collection_stats
  end

@@ -537,64 +692,101 @@ rescue => e
  Ollama::Documents::MemoryCache
  end

- def toggle_markdown
- $markdown = !$markdown
- show_markdown
- end
-
- def show_markdown
- if $markdown
- puts "Using ANSI markdown to output content."
- else
- puts "Using plaintext for outputting content."
- end
- $markdown
+ def show_system_prompt
+ puts <<~EOT
+ Configured system prompt is:
+ #{Ollama::Utils::ANSIMarkdown.parse($system.to_s).gsub(/\n+\z/, '').full? || 'n/a'}
+ EOT
  end

- def set_embedding(embedding)
- $embedding_enabled = embedding
- show_embedding
+ def at_location
+ if $location.on?
+ location_name = $config.location.name
+ location_decimal_degrees = $config.location.decimal_degrees * ', '
+ localtime = Time.now.iso8601
+ units = $config.location.units
+ $config.prompts.location % {
+ location_name:, location_decimal_degrees:, localtime:, units:,
+ }
+ end.to_s
  end

- def show_embedding
- puts "Embedding is #{embedding_enabled? ? "on" : "off"}."
- $embedding_enabled
+ def set_system_prompt(messages, system)
+ $system = system
+ messages.clear
+ messages << Message.new(role: 'system', content: system)
  end

- def embedding_enabled?
- $embedding_enabled && !$embedding_paused
+ def change_system_prompt(messages, default)
+ prompts = $config.system_prompts.attribute_names.compact
+ chosen = Ollama::Utils::Chooser.choose(prompts)
+ system = if chosen
+ $config.system_prompts.send(chosen)
+ else
+ default
+ end
+ set_system_prompt(messages, system)
  end

- def toggle_embedding_paused
- $embedding_paused = !$embedding_paused
- show_embedding
+ def change_voice
+ chosen = Ollama::Utils::Chooser.choose($config.voice.list)
+ $current_voice = chosen.full? || $config.voice.default
  end

  def info
  puts "Current model is #{bold{$model}}."
  collection_stats
- if show_embedding
+ $embedding.show
+ if $embedding.on?
  puts "Text splitter is #{bold{$config.embedding.splitter.name}}."
  end
- puts "Documents database cache is #{$documents.nil? ? 'n/a' : $documents.cache.class}"
- show_markdown
+ puts "Documents database cache is #{$documents.nil? ? 'n/a' : bold{$documents.cache.class}}"
+ $markdown.show
+ $stream.show
+ $location.show
+ if $voice.on?
+ puts "Using voice #{bold{$current_voice}} to speak."
+ end
+ show_system_prompt
  end

  def clear_messages(messages)
  messages.delete_if { _1.role != 'system' }
  end

+ def copy_to_clipboard(messages)
+ if message = messages.last and message.role == 'assistant'
+ copy = `which #{$config.copy}`.chomp
+ if copy.present?
+ IO.popen(copy, 'w') do |clipboard|
+ clipboard.write(message.content)
+ end
+ STDOUT.puts "The last response has been copied to the system clipboard."
+ else
+ STDERR.puts "#{$config.copy.inspect} command not found in system's path!"
+ end
+ else
+ STDERR.puts "No response available to copy to the system clipboard."
+ end
+ end
+
  def display_chat_help
  puts <<~EOT
+ /copy to copy last response to clipboard
  /paste to paste content
  /markdown toggle markdown output
+ /stream toggle stream output
+ /location toggle location submission
+ /voice( change) toggle voice output or change the voice
  /list [n] list the last n / all conversation exchanges
  /clear clear the whole conversation
  /clobber clear the conversation and collection
  /pop [n] pop the last n exchanges, defaults to 1
  /model change the model
+ /system change system prompt (clears conversation)
  /regenerate the last answer message
- /collection clear [tag]|change clear or show stats of current collection
+ /collection( clear|change) change (default) collection or clear
+ /info show information for current session
  /import source import the source's content
  /summarize [n] source summarize the source's content in n words
  /embedding toggle embedding paused or not
@@ -609,7 +801,7 @@ end

  def usage
  puts <<~EOT
- #{File.basename($0)} [OPTIONS]
+ Usage: #{File.basename($0)} [OPTIONS]

  -f CONFIG config file to read
  -u URL the ollama base url, OLLAMA_URL
@@ -620,25 +812,31 @@ def usage
  -D DOCUMENT load document and add to embeddings collection (multiple)
  -M use (empty) MemoryCache for this chat session
  -E disable embeddings for this chat session
- -v use voice output
+ -V display the current version number and quit
  -h this help

  EOT
  exit 0
  end

+ def version
+ puts "%s %s" % [ File.basename($0), Ollama::VERSION ]
+ exit 0
+ end
+
  def ollama
  $ollama
  end

- $opts = go 'f:u:m:s:c:C:D:MEvh'
+ $opts = go 'f:u:m:s:c:C:D:MEVh'

  config = OllamaChatConfig.new($opts[?f])
  $config = config.config

- $opts[?h] and usage
+ setup_switches

- puts "Configuration read from #{config.filename.inspect} is:", $config
+ $opts[?h] and usage
+ $opts[?V] and version

  base_url = $opts[?u] || $config.url
  $ollama = Client.new(base_url:, debug: $config.debug)
@@ -647,14 +845,21 @@ $model = choose_model($opts[?m], $config.model.name)
  options = Options[$config.model.options]
  model_system = pull_model_unless_present($model, options)
  messages = []
- set_embedding($config.embedding.enabled && !$opts[?E])
+ $embedding_enabled.set($config.embedding.enabled && !$opts[?E])

- if voice = ($config.voice if $opts[?v])
- puts "Using voice #{bold{voice}} to speak."
+ if $opts[?c]
+ messages.concat load_conversation($opts[?c])
+ else
+ default = $config.system_prompts.default? || model_system
+ if $opts[?s] == ??
+ change_system_prompt(messages, default)
+ else
+ system = Ollama::Utils::FileArgument.get_file_argument($opts[?s], default:)
+ system.present? and set_system_prompt(messages, system)
+ end
  end
- $markdown = $config.markdown

- if embedding_enabled?
+ if $embedding.on?
  $embedding_model = $config.embedding.model.name
  embedding_model_options = Options[$config.embedding.model.options]
  pull_model_unless_present($embedding_model, embedding_model_options)
@@ -666,6 +871,7 @@ if embedding_enabled?
  collection:,
  cache: configure_cache,
  redis_url: $config.redis.documents.url?,
+ debug: ENV['DEBUG'].to_i == 1,
  )

  document_list = $opts[?D].to_a
@@ -691,24 +897,22 @@ if embedding_enabled?
  end
  end
  end
- collection_stats
  else
  $documents = Tins::NULL
  end

- if $opts[?c]
- messages.concat load_conversation($opts[?c])
- else
- if system = Ollama::Utils::FileArgument.
- get_file_argument($opts[?s], default: $config.prompts.system? || model_system)
- messages << Message.new(role: 'system', content: system)
- puts <<~EOT
- Configured system prompt is:
- #{italic{Ollama::Utils::Width.wrap(system, percentage: 90)}}
- EOT
- end
+ if redis_expiring_url = $config.redis.expiring.url?
+ $cache = Ollama::Documents::RedisCache.new(
+ prefix: 'Expiring-',
+ url: redis_expiring_url,
+ ex: $config.redis.expiring.ex,
+ )
  end

+ $current_voice = $config.voice.default
+
+ puts "Configuration read from #{config.filename.inspect} is:", $config
+ info
  puts "\nType /help to display the chat help."

  images = []
@@ -718,11 +922,27 @@ loop do
  content = Reline.readline(input_prompt, true)&.chomp

  case content
+ when %r(^/copy$)
+ copy_to_clipboard(messages)
+ next
  when %r(^/paste$)
  puts bold { "Paste your content and then press C-d!" }
  content = STDIN.read
  when %r(^/markdown$)
- $markdown = toggle_markdown
+ $markdown.toggle
+ next
+ when %r(^/stream$)
+ $stream.toggle
+ next
+ when %r(^/location$)
+ $location.toggle
+ next
+ when %r(^/voice(?:\s+(change))?$)
+ if $1 == 'change'
+ change_voice
+ else
+ $voice.toggle
+ end
  next
  when %r(^/list(?:\s+(\d*))?$)
  last = if $1
@@ -735,29 +955,14 @@ loop do
  puts "Cleared messages."
  next
  when %r(^/clobber$)
- clear_messages(messages)
- $documents.clear
- puts "Cleared messages and collection."
- next
- when %r(^/collection\s+(clear|change)(?:\s+(.+))?$)
- command, arg = $1, $2
- case command
- when 'clear'
- tags = arg.present? ? arg.sub(/\A#*/, '') : nil
- if tags
- $documents.clear(tags:)
- puts "Cleared tag ##{tags} from collection #{bold{collection}}."
- else
- $documents.clear
- puts "Cleared collection #{bold{collection}}."
- end
- when 'change'
- choose_collection(collection)
+ if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
+ clear_messages(messages)
+ $documents.clear
+ puts "Cleared messages and collection #{bold{$documents.collection}}."
+ else
+ puts 'Cancelled.'
  end
  next
- when %r(/info)
- info
- next
  when %r(^/pop(?:\s+(\d*))?$)
  if messages.size > 1
  n = $1.to_i.clamp(1, Float::INFINITY)
@@ -772,6 +977,10 @@ loop do
  when %r(^/model$)
  $model = choose_model('', $model)
  next
+ when %r(^/system$)
+ change_system_prompt(messages, $system)
+ info
+ next
  when %r(^/regenerate$)
  if content = messages[-2]&.content
  content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
@@ -782,6 +991,38 @@ loop do
  end
  parse_content = false
  content
+ when %r(^/collection(?:\s+(clear|change))?$)
+ case $1 || 'change'
+ when 'clear'
+ loop do
+ tags = $documents.tags.add('[EXIT]').add('[ALL]')
+ tag = Ollama::Utils::Chooser.choose(tags, prompt: 'Clear? %s')
+ case tag
+ when nil, '[EXIT]'
+ puts "Exiting chooser."
+ break
+ when '[ALL]'
+ if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
+ $documents.clear
+ puts "Cleared collection #{bold{$documents.collection}}."
+ break
+ else
+ puts 'Cancelled.'
+ sleep 3
+ end
+ when /./
+ $documents.clear(tags: [ tag ])
+ puts "Cleared tag #{tag} from collection #{bold{$documents.collection}}."
+ sleep 3
+ end
+ end
+ when 'change'
+ choose_collection($documents.collection)
+ end
+ next
+ when %r(/info)
+ info
+ next
  when %r(^/import\s+(.+))
  parse_content = false
  content = import($1) or next
@@ -789,7 +1030,8 @@ loop do
  parse_content = false
  content = summarize($2, words: $1) or next
  when %r(^/embedding$)
- toggle_embedding_paused
+ $embedding_paused.toggle(show: false)
+ $embedding.show
  next
  when %r(^/embed\s+(.+))
  parse_content = false
@@ -828,12 +1070,12 @@ loop do
  end

  content, tags = if parse_content
- parse_content(content, images.clear)
+ parse_content(content, images)
  else
  [ content, Utils::Tags.new ]
  end

- if embedding_enabled? && content
+ if $embedding.on? && content
  records = $documents.find_where(
  content.downcase,
  tags:,
@@ -847,11 +1089,16 @@ loop do
  end
  end

- messages << Message.new(role: 'user', content:, images:)
- handler = FollowChat.new(messages:, markdown: $markdown, voice:)
- ollama.chat(model: $model, messages:, options:, stream: true, &handler)
+ if location = at_location.full?
+ content += " [#{location} – do not comment on this information, just consider it for eventual queries]"
+ end
+
+ messages << Message.new(role: 'user', content:, images: images.dup)
+ images.clear
+ handler = FollowChat.new(messages:, markdown: $markdown.on?, voice: ($current_voice if $voice.on?))
+ ollama.chat(model: $model, messages:, options:, stream: $stream.on?, &handler)

- if embedding_enabled? && !records.empty?
+ if $embedding.on? && !records.empty?
  puts "", records.map { |record|
  link = if record.source =~ %r(\Ahttps?://)
  record.source