ollama-ruby 0.4.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/ollama_chat CHANGED
@@ -5,6 +5,8 @@ include Ollama
5
5
  require 'term/ansicolor'
6
6
  include Term::ANSIColor
7
7
  require 'tins'
8
+ require 'tins/xt/full'
9
+ require 'tins/xt/hash_union'
8
10
  include Tins::GO
9
11
  require 'reline'
10
12
  require 'reverse_markdown'
@@ -15,6 +17,7 @@ require 'nokogiri'
15
17
  require 'rss'
16
18
  require 'pdf/reader'
17
19
  require 'csv'
20
+ require 'xdg'
18
21
 
19
22
  class OllamaChatConfig
20
23
  include ComplexConfig
@@ -23,18 +26,30 @@ class OllamaChatConfig
23
26
  DEFAULT_CONFIG = <<~EOT
24
27
  ---
25
28
  url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
29
+ proxy: null # http://localhost:8080
26
30
  model:
27
31
  name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
28
32
  options:
29
33
  num_ctx: 8192
30
34
  prompts:
31
- system: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
35
+ embed: "This source was now embedded: %{source}"
32
36
  summarize: |
33
- Generate an abstract summary of the content in this document:
34
-
35
- %s
36
- voice: Samantha
37
+ Generate an abstract summary of the content in this document using
38
+ %{words} words:
39
+
40
+ %{source_content}
41
+ web: |
42
  Answer the query %{query} using these sources and summaries:
43
+
44
+ %{results}
45
+ system_prompts:
46
+ default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
47
+ voice:
48
+ enabled: false
49
+ default: Samantha
50
+ list: <%= `say -v ?`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
37
51
  markdown: true
52
+ stream: true
38
53
  embedding:
39
54
  enabled: true
40
55
  model:
@@ -42,16 +57,22 @@ class OllamaChatConfig
42
57
  options: {}
43
58
  # Retrieval prompt template:
44
59
  prompt: 'Represent this sentence for searching relevant passages: %s'
45
- collection: <%= ENV.fetch('OLLAMA_CHAT_COLLECTION', 'ollama_chat') %>
60
+ collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
46
61
  found_texts_size: 4096
47
62
  found_texts_count: null
48
63
  splitter:
49
64
  name: RecursiveCharacter
50
65
  chunk_size: 1024
51
- cache: Ollama::Documents::Cache::RedisBackedMemoryCache
66
+ cache: Ollama::Documents::RedisBackedMemoryCache
52
67
  redis:
53
- url: <%= ENV.fetch('REDIS_URL', 'null') %>
68
+ documents:
69
+ url: <%= ENV.fetch('REDIS_URL', 'null') %>
70
+ expiring:
71
+ url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
72
+ ex: 86400
54
73
  debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
74
+ ssl_no_verify: []
75
+ copy: pbcopy
55
76
  EOT
56
77
 
57
78
  def initialize(filename = nil)
@@ -62,7 +83,7 @@ class OllamaChatConfig
62
83
  if @filename == default_path && !retried
63
84
  retried = true
64
85
  mkdir_p File.dirname(default_path)
65
- File.secure_write(default_path, DEFAULT_CONFIG)
86
+ File.secure_write(default_path.to_s, DEFAULT_CONFIG)
66
87
  retry
67
88
  else
68
89
  raise
@@ -74,17 +95,11 @@ class OllamaChatConfig
74
95
  attr_reader :config
75
96
 
76
97
  def default_path
77
- File.join(config_dir_path, 'config.yml')
98
+ config_dir_path + 'config.yml'
78
99
  end
79
100
 
80
101
  def config_dir_path
81
- File.join(
82
- ENV.fetch(
83
- 'XDG_CONFIG_HOME',
84
- File.join(ENV.fetch('HOME'), '.config')
85
- ),
86
- 'ollama_chat'
87
- )
102
+ XDG.new.config_home + 'ollama_chat'
88
103
  end
89
104
  end
90
105
 
@@ -112,8 +127,8 @@ class FollowChat
112
127
  end
113
128
  content = response.message&.content
114
129
  @messages.last.content << content
115
- if @markdown and @messages.last.content.present?
116
- markdown_content = Utils::ANSIMarkdown.parse(@messages.last.content)
130
+ if @markdown and content = @messages.last.content.full?
131
+ markdown_content = Utils::ANSIMarkdown.parse(content)
117
132
  @output.print clear_screen, move_home, @user, ?\n, markdown_content
118
133
  else
119
134
  @output.print content
@@ -145,12 +160,113 @@ class FollowChat
145
160
  end
146
161
  end
147
162
 
163
+ module CheckSwitch
164
+ extend Tins::Concern
165
+
166
+ included do
167
+ alias_method :on?, :value
168
+ end
169
+
170
+ def off?
171
+ !on?
172
+ end
173
+
174
+ def show
175
+ puts @msg[value]
176
+ end
177
+ end
178
+
179
+ class Switch
180
+ def initialize(name, msg:, config: $config)
181
+ @value = !!config.send("#{name}?")
182
+ @msg = msg
183
+ end
184
+
185
+ attr_reader :value
186
+
187
+ def set(value, show: false)
188
+ @value = !!value
189
+ show && self.show
190
+ end
191
+
192
+ def toggle(show: true)
193
+ @value = !@value
194
+ show && self.show
195
+ end
196
+
197
+ include CheckSwitch
198
+ end
199
+
200
+ class CombinedSwitch
201
+ def initialize(value:, msg:)
202
+ @value = value
203
+ @msg = msg
204
+ end
205
+
206
+ def value
207
+ @value.()
208
+ end
209
+
210
+ include CheckSwitch
211
+ end
212
+
213
+ def setup_switches
214
+ $markdown = Switch.new(
215
+ :markdown,
216
+ msg: {
217
+ true => "Using #{italic{'ANSI'}} markdown to output content.",
218
+ false => "Using plaintext for outputting content.",
219
+ }
220
+ )
221
+
222
+ $stream = Switch.new(
223
+ :stream,
224
+ msg: {
225
+ true => "Streaming enabled.",
226
+ false => "Streaming disabled.",
227
+ }
228
+ )
229
+
230
+ $voice = Switch.new(
231
+ :stream,
232
+ msg: {
233
+ true => "Voice output enabled.",
234
+ false => "Voice output disabled.",
235
+ },
236
+ config: $config.voice
237
+ )
238
+
239
+ $embedding_enabled = Switch.new(
240
+ :embedding_enabled,
241
+ msg: {
242
+ true => "Embedding enabled.",
243
+ false => "Embedding disabled.",
244
+ }
245
+ )
246
+
247
+ $embedding_paused = Switch.new(
248
+ :embedding_paused,
249
+ msg: {
250
+ true => "Embedding paused.",
251
+ false => "Embedding resumed.",
252
+ }
253
+ )
254
+
255
+ $embedding = CombinedSwitch.new(
256
+ value: -> { $embedding_enabled.on? && $embedding_paused.off? },
257
+ msg: {
258
+ true => "Embedding is currently performed.",
259
+ false => "Embedding is currently not performed.",
260
+ }
261
+ )
262
+ end
263
+
148
264
  def search_web(query, n = nil)
149
265
  n = n.to_i
150
266
  n < 1 and n = 1
151
267
  query = URI.encode_uri_component(query)
152
268
  url = "https://www.duckduckgo.com/html/?q=#{query}"
153
- Ollama::Utils::Fetcher.new(debug: $config.debug).get(url) do |tmp|
269
+ Ollama::Utils::Fetcher.get(url, debug: $config.debug) do |tmp|
154
270
  result = []
155
271
  doc = Nokogiri::HTML(tmp)
156
272
  doc.css('.results_links').each do |link|
@@ -183,7 +299,7 @@ def pull_model_unless_present(model, options, retried = false)
183
299
  end
184
300
  }
185
301
  rescue Errors::NotFoundError
186
- puts "Model #{bold{model}} not found, attempting to pull it now…"
302
+ puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
187
303
  ollama.pull(name: model)
188
304
  if retried
189
305
  exit 1
@@ -192,7 +308,7 @@ rescue Errors::NotFoundError
192
308
  retry
193
309
  end
194
310
  rescue Errors::Error => e
195
- warn "Caught #{e.class}: #{e} => Exiting."
311
+ warn "Caught #{e.class} while pulling model: #{e} => Exiting."
196
312
  exit 1
197
313
  end
198
314
 
@@ -217,32 +333,25 @@ def save_conversation(filename, messages)
217
333
  end
218
334
 
219
335
  def message_type(images)
220
- if images.present?
221
- ?📸
222
- else
223
- ?📨
224
- end
336
+ images.present? ? ?📸 : ?📨
225
337
  end
226
338
 
227
- def list_conversation(messages, markdown)
228
- messages.each do |m|
339
+ def list_conversation(messages, last = nil)
340
+ last = (last || messages.size).clamp(0, messages.size)
341
+ messages[-last..-1].to_a.each do |m|
229
342
  role_color = case m.role
230
343
  when 'user' then 172
231
344
  when 'assistant' then 111
232
345
  when 'system' then 213
233
346
  else 210
234
347
  end
235
- content = if markdown && m.content.present?
236
- Utils::ANSIMarkdown.parse(m.content)
237
- else
238
- m.content
239
- end
348
+ content = m.content.full? { $markdown.on? ? Utils::ANSIMarkdown.parse(_1) : _1 }
240
349
  message_text = message_type(m.images) + " "
241
350
  message_text += bold { color(role_color) { m.role } }
242
351
  message_text += ":\n#{content}"
243
- if m.images.present?
244
- message_text += "\nImages: " + italic { m.images.map(&:path) * ', ' }
245
- end
352
+ m.images.full? { |images|
353
+ message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
354
+ }
246
355
  puts message_text
247
356
  end
248
357
  end
@@ -258,37 +367,37 @@ end
258
367
 
259
368
  def parse_rss(source_io)
260
369
  feed = RSS::Parser.parse(source_io, false, false)
261
- title = <<~end
370
+ title = <<~EOT
262
371
  # #{feed&.channel&.title}
263
372
 
264
- end
373
+ EOT
265
374
  feed.items.inject(title) do |text, item|
266
- text << <<~end
375
+ text << <<~EOT
267
376
  ## [#{item&.title}](#{item&.link})
268
377
 
269
378
  updated on #{item&.pubDate}
270
379
 
271
380
  #{reverse_markdown(item&.description)}
272
381
 
273
- end
382
+ EOT
274
383
  end
275
384
  end
276
385
 
277
386
  def parse_atom(source_io)
278
387
  feed = RSS::Parser.parse(source_io, false, false)
279
- title = <<~end
388
+ title = <<~EOT
280
389
  # #{feed.title.content}
281
390
 
282
- end
391
+ EOT
283
392
  feed.items.inject(title) do |text, item|
284
- text << <<~end
393
+ text << <<~EOT
285
394
  ## [#{item&.title&.content}](#{item&.link&.href})
286
395
 
287
396
  updated on #{item&.updated&.content}
288
397
 
289
398
  #{reverse_markdown(item&.content&.content)}
290
399
 
291
- end
400
+ EOT
292
401
  end
293
402
  end
294
403
 
@@ -313,8 +422,6 @@ def parse_source(source_io)
313
422
  result << "\n\n"
314
423
  end
315
424
  result
316
- when %r(\Atext/)
317
- source_io.read
318
425
  when 'application/rss+xml'
319
426
  parse_rss(source_io)
320
427
  when 'application/atom+xml'
@@ -324,39 +431,54 @@ def parse_source(source_io)
324
431
  when 'application/pdf'
325
432
  reader = PDF::Reader.new(source_io)
326
433
  reader.pages.inject(+'') { |result, page| result << page.text }
434
+ when %r(\Atext/), nil
435
+ source_io.read
327
436
  else
328
- STDERR.puts "Cannot import #{source_io&.content_type} document."
437
+ STDERR.puts "Cannot embed #{source_io&.content_type} document."
329
438
  return
330
439
  end
331
440
  end
332
441
 
333
- def import_document(source_io, source)
334
- embedding_enabled? or return parse_source(source_io)
335
- puts "Importing #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
442
+ def embed_source(source_io, source)
443
+ $embedding.on? or return parse_source(source_io)
444
+ puts "Embedding #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
336
445
  text = parse_source(source_io) or return
337
446
  text.downcase!
338
447
  splitter_config = $config.embedding.splitter
339
- inputs = case splitter_config.name
340
- when 'Character'
341
- Ollama::Documents::Splitters::Character.new(
342
- chunk_size: splitter_config.chunk_size,
343
- ).split(text)
344
- when 'RecursiveCharacter'
345
- Ollama::Documents::Splitters::RecursiveCharacter.new(
346
- chunk_size: splitter_config.chunk_size,
347
- ).split(text)
348
- when 'Semantic'
349
- Ollama::Documents::Splitters::Semantic.new(
350
- ollama:, model: $config.embedding.model.name,
351
- chunk_size: splitter_config.chunk_size,
352
- ).split(
353
- text,
354
- breakpoint: splitter_config.breakpoint.to_sym,
355
- percentage: splitter_config.percentage?,
356
- percentile: splitter_config.percentile?,
357
- )
358
- end
359
- $documents.add(inputs, source: source.to_s)
448
+ inputs = nil
449
+ case splitter_config.name
450
+ when 'Character'
451
+ splitter = Ollama::Documents::Splitters::Character.new(
452
+ chunk_size: splitter_config.chunk_size,
453
+ )
454
+ inputs = splitter.split(text)
455
+ when 'RecursiveCharacter'
456
+ splitter = Ollama::Documents::Splitters::RecursiveCharacter.new(
457
+ chunk_size: splitter_config.chunk_size,
458
+ )
459
+ inputs = splitter.split(text)
460
+ when 'Semantic'
461
+ splitter = Ollama::Documents::Splitters::Semantic.new(
462
+ ollama:, model: $config.embedding.model.name,
463
+ chunk_size: splitter_config.chunk_size,
464
+ )
465
+ inputs = splitter.split(
466
+ text,
467
+ breakpoint: splitter_config.breakpoint.to_sym,
468
+ percentage: splitter_config.percentage?,
469
+ percentile: splitter_config.percentile?,
470
+ )
471
+ inputs = splitter.split(text)
472
+ end
473
+ inputs or return
474
+ source = source.to_s
475
+ if source.start_with?(?!)
476
+ source = Ollama::Utils::Width.truncate(
477
+ source[1..-1].gsub(/\W+/, ?_),
478
+ length: 10
479
+ )
480
+ end
481
+ $documents.add(inputs, source: source)
360
482
  end
361
483
 
362
484
  def add_image(images, source_io, source)
@@ -365,10 +487,32 @@ def add_image(images, source_io, source)
365
487
  (images << image).uniq!
366
488
  end
367
489
 
490
+ def http_options(url)
491
+ options = {}
492
+ if ssl_no_verify = $config.ssl_no_verify?
493
+ hostname = URI.parse(url).hostname
494
+ options |= { ssl_verify_peer: !ssl_no_verify.include?(hostname) }
495
+ end
496
+ if proxy = $config.proxy?
497
+ options |= { proxy: }
498
+ end
499
+ options
500
+ end
501
+
368
502
  def fetch_source(source, &block)
369
503
  case source
504
+ when %r(\A!(.*))
505
+ command = $1
506
+ Utils::Fetcher.execute(command) do |tmp|
507
+ block.(tmp)
508
+ end
370
509
  when %r(\Ahttps?://\S+)
371
- Utils::Fetcher.get(source, debug: $config.debug) do |tmp|
510
+ Utils::Fetcher.get(
511
+ source,
512
+ cache: $cache,
513
+ debug: $config.debug,
514
+ http_options: http_options(source)
515
+ ) do |tmp|
372
516
  block.(tmp)
373
517
  end
374
518
  when %r(\Afile://(?:(?:[.-]|[[:alnum:]])*)(/\S*)|([~.]?/\S*))
@@ -381,20 +525,48 @@ def fetch_source(source, &block)
381
525
  raise "invalid source"
382
526
  end
383
527
  rescue => e
384
- STDERR.puts "Cannot add source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
528
+ STDERR.puts "Cannot fetch source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
385
529
  end
386
530
 
387
- def summarize(source)
531
+ def import(source)
532
+ puts "Now importing #{source.to_s.inspect}."
533
+ fetch_source(source) do |source_io|
534
+ content = parse_source(source_io)
535
+ content.present? or return
536
+ source_io.rewind
537
+ content
538
+ end
539
+ end
540
+
541
+ def summarize(source, words: nil)
542
+ words = words.to_i
543
+ words < 1 and words = 100
388
544
  puts "Now summarizing #{source.to_s.inspect}."
389
545
  source_content =
390
546
  fetch_source(source) do |source_io|
391
547
  content = parse_source(source_io)
392
548
  content.present? or return
393
549
  source_io.rewind
394
- import_document(source_io, source)
395
550
  content
396
551
  end
397
- $config.prompts.summarize % source_content
552
+ $config.prompts.summarize % { source_content:, words: }
553
+ end
554
+
555
+ def embed(source)
556
+ if $embedding.on?
557
+ puts "Now embedding #{source.to_s.inspect}."
558
+ fetch_source(source) do |source_io|
559
+ content = parse_source(source_io)
560
+ content.present? or return
561
+ source_io.rewind
562
+ embed_source(source_io, source)
563
+ content
564
+ end
565
+ $config.prompts.embed % { source: }
566
+ else
567
+ puts "Embedding is off, so I will just give a small summary of this source."
568
+ summarize(source)
569
+ end
398
570
  end
399
571
 
400
572
  def parse_content(content, images)
@@ -412,7 +584,7 @@ def parse_content(content, images)
412
584
  when 'image'
413
585
  add_image(images, source_io, source)
414
586
  when 'text', 'application'
415
- import_document(source_io, source)
587
+ embed_source(source_io, source)
416
588
  else
417
589
  STDERR.puts(
418
590
  "Cannot fetch #{source.to_s.inspect} with content type "\
@@ -438,22 +610,28 @@ ensure
438
610
  end
439
611
 
440
612
  def choose_collection(default_collection)
441
- collections = [ default_collection ] + $documents.collections.map(&:to_s)
442
- collections = collections.uniq.sort
443
- $documents.collection = collection =
444
- Ollama::Utils::Chooser.choose(collections) || default_collection
613
+ collections = [ default_collection ] + $documents.collections
614
+ collections = collections.compact.map(&:to_s).uniq.sort
615
+ collections.unshift('[NEW]')
616
+ collection = Ollama::Utils::Chooser.choose(collections) || default_collection
617
+ if collection == '[NEW]'
618
+ print "Enter name of the new collection: "
619
+ collection = STDIN.gets.chomp
620
+ end
621
+ $documents.collection = collection
445
622
  ensure
446
623
  puts "Changing to collection #{bold{collection}}."
447
624
  collection_stats
448
625
  end
449
626
 
450
627
  def collection_stats
451
- puts <<~end
628
+ puts <<~EOT
452
629
  Collection
453
630
  Name: #{bold{$documents.collection}}
631
+ Embedding model: #{bold{$embedding_model}}
454
632
  #Embeddings: #{$documents.size}
455
633
  Tags: #{$documents.tags}
456
- end
634
+ EOT
457
635
  end
458
636
 
459
637
  def configure_cache
@@ -467,46 +645,100 @@ rescue => e
467
645
  Ollama::Documents::MemoryCache
468
646
  end
469
647
 
470
- def set_markdown(value)
471
- if value
472
- puts "Using ANSI markdown to output content."
473
- true
474
- else
475
- puts "Using plaintext for outputting content."
476
- false
648
+ def show_system_prompt
649
+ puts <<~EOT
650
+ Configured system prompt is:
651
+ #{Ollama::Utils::ANSIMarkdown.parse($system.to_s).gsub(/\n+\z/, '').full? || 'n/a'}
652
+ EOT
653
+ end
654
+
655
+ def set_system_prompt(messages, system)
656
+ $system = system
657
+ messages.clear
658
+ messages << Message.new(role: 'system', content: system)
659
+ end
660
+
661
+ def change_system_prompt(messages)
662
+ prompts = $config.system_prompts.attribute_names.compact
663
+ chosen = Ollama::Utils::Chooser.choose(prompts)
664
+ system = if chosen
665
+ $config.system_prompts.send(chosen)
666
+ else
667
+ default
668
+ end
669
+ set_system_prompt(messages, system)
670
+ end
671
+
672
+ def change_voice
673
+ chosen = Ollama::Utils::Chooser.choose($config.voice.list)
674
+ $current_voice = chosen.full? || $config.voice.default
675
+ end
676
+
677
+ def info
678
+ puts "Current model is #{bold{$model}}."
679
+ collection_stats
680
+ $embedding.show
681
+ if $embedding.on?
682
+ puts "Text splitter is #{bold{$config.embedding.splitter.name}}."
683
+ end
684
+ puts "Documents database cache is #{$documents.nil? ? 'n/a' : bold{$documents.cache.class}}"
685
+ $markdown.show
686
+ $stream.show
687
+ if $voice.on?
688
+ puts "Using voice #{bold{$current_voice}} to speak."
477
689
  end
690
+ show_system_prompt
478
691
  end
479
692
 
480
693
  def clear_messages(messages)
481
694
  messages.delete_if { _1.role != 'system' }
482
695
  end
483
696
 
484
- def embedding_enabled?
485
- $config.embedding.enabled && !$opts[?E]
697
+ def copy_to_clipboard(messages)
698
+ if message = messages.last and message.role == 'assistant'
699
+ copy = `which #{$config.copy}`.chomp
700
+ if copy.present?
701
+ IO.popen(copy, 'w') do |clipboard|
702
+ clipboard.write(message.content)
703
+ end
704
+ STDOUT.puts "The last response has been copied to the system clipboard."
705
+ else
706
+ STDERR.puts "#{$config.copy.inspect} command not found in system's path!"
707
+ end
708
+ else
709
+ STDERR.puts "No response available to copy to the system clipboard."
710
+ end
486
711
  end
487
712
 
488
713
  def display_chat_help
489
- puts <<~end
490
- /paste to paste content
491
- /markdown toggle markdown output
492
- /list list the messages of the conversation
493
- /clear clear the conversation messages
494
- /clobber clear conversation messages and collection
495
- /pop [n] pop the last n exchanges, defaults to 1
496
- /model change the model
497
- /regenerate the last answer message
498
- /collection clear [tag]|stats|change|new clear or show stats of current collection
499
- /summarize source summarize the URL/file source's content
500
- /web [n] query query web search & return n or 1 results
501
- /save filename store conversation messages
502
- /load filename load conversation messages
503
- /quit to quit
504
- /help to view this help
505
- end
714
+ puts <<~EOT
715
+ /copy to copy last response to clipboard
716
+ /paste to paste content
717
+ /markdown toggle markdown output
718
+ /stream toggle stream output
719
+ /voice( change) toggle voice output or change the voice
720
+ /list [n] list the last n / all conversation exchanges
721
+ /clear clear the whole conversation
722
+ /clobber clear the conversation and collection
723
+ /pop [n] pop the last n exchanges, defaults to 1
724
+ /model change the model
725
+ /system change system prompt (clears conversation)
726
+ /regenerate the last answer message
727
+ /collection clear [tag]|change clear or show stats of current collection
728
+ /import source import the source's content
729
+ /summarize [n] source summarize the source's content in n words
730
+ /embedding toggle embedding paused or not
731
+ /embed source embed the source's content
732
+ /web [n] query query web search & return n or 1 results
733
+ /save filename store conversation messages
734
+ /load filename load conversation messages
735
+ /quit to quit
736
+ /help to view this help
737
+ EOT
506
738
  end
507
739
 
508
740
  def usage
509
- puts <<~end
741
+ puts <<~EOT
510
742
  #{File.basename($0)} [OPTIONS]
511
743
 
512
744
  -f CONFIG config file to read
@@ -518,10 +750,15 @@ def usage
518
750
  -D DOCUMENT load document and add to embeddings collection (multiple)
519
751
  -M use (empty) MemoryCache for this chat session
520
752
  -E disable embeddings for this chat session
521
- -v use voice output
753
+ -V display the current version number and quit
522
754
  -h this help
523
755
 
524
- end
756
+ EOT
757
+ exit 0
758
+ end
759
+
760
+ def version
761
+ puts "%s %s" % [ File.basename($0), Ollama::VERSION ]
525
762
  exit 0
526
763
  end
527
764
 
@@ -529,35 +766,50 @@ def ollama
529
766
  $ollama
530
767
  end
531
768
 
532
- $opts = go 'f:u:m:s:c:C:D:MEvh'
769
+ $opts = go 'f:u:m:s:c:C:D:MEVh'
533
770
 
534
771
  config = OllamaChatConfig.new($opts[?f])
535
772
  $config = config.config
536
773
 
537
- $opts[?h] and usage
774
+ setup_switches
538
775
 
539
- puts "Configuration read from #{config.filename.inspect} is:", $config
776
+ $opts[?h] and usage
777
+ $opts[?V] and version
540
778
 
541
779
  base_url = $opts[?u] || $config.url
542
780
  $ollama = Client.new(base_url:, debug: $config.debug)
543
781
 
544
- model = choose_model($opts[?m], $config.model.name)
782
+ $model = choose_model($opts[?m], $config.model.name)
545
783
  options = Options[$config.model.options]
546
- model_system = pull_model_unless_present(model, options)
784
+ model_system = pull_model_unless_present($model, options)
547
785
  messages = []
786
+ $embedding_enabled.set($config.embedding.enabled && !$opts[?E])
548
787
 
549
- if embedding_enabled?
550
- embedding_model = $config.embedding.model.name
788
+ if $opts[?c]
789
+ messages.concat load_conversation($opts[?c])
790
+ else
791
+ default = $config.system_prompts.default? || model_system
792
+ if $opts[?s] == ??
793
+ change_system_prompt(messages)
794
+ else
795
+ system = Ollama::Utils::FileArgument.get_file_argument($opts[?s], default:)
796
+ system.present? and set_system_prompt(messages, system)
797
+ end
798
+ end
799
+
800
+ if $embedding.on?
801
+ $embedding_model = $config.embedding.model.name
551
802
  embedding_model_options = Options[$config.embedding.model.options]
552
- pull_model_unless_present(embedding_model, embedding_model_options)
803
+ pull_model_unless_present($embedding_model, embedding_model_options)
553
804
  collection = $opts[?C] || $config.embedding.collection
554
805
  $documents = Documents.new(
555
806
  ollama:,
556
- model: $config.embedding.model.name,
807
+ model: $embedding_model,
557
808
  model_options: $config.embedding.model.options,
558
809
  collection:,
559
810
  cache: configure_cache,
560
- redis_url: $config.redis.url?,
811
+ redis_url: $config.redis.documents.url?,
812
+ debug: ENV['DEBUG'].to_i == 1,
561
813
  )
562
814
 
563
815
  document_list = $opts[?D].to_a
@@ -578,34 +830,27 @@ if embedding_enabled?
578
830
  document_list.each_slice(25) do |docs|
579
831
  docs.each do |doc|
580
832
  fetch_source(doc) do |doc_io|
581
- import_document(doc_io, doc)
833
+ embed_source(doc_io, doc)
582
834
  end
583
835
  end
584
836
  end
585
837
  end
586
- collection_stats
587
838
  else
588
- $documents = Documents.new(ollama:, model:)
839
+ $documents = Tins::NULL
589
840
  end
590
841
 
591
- if voice = ($config.voice if $opts[?v])
592
- puts "Using voice #{bold{voice}} to speak."
842
+ if redis_expiring_url = $config.redis.expiring.url?
843
+ $cache = Ollama::Documents::RedisCache.new(
844
+ prefix: 'Expiring-',
845
+ url: redis_expiring_url,
846
+ ex: $config.redis.expiring.ex,
847
+ )
593
848
  end
594
- markdown = set_markdown($config.markdown)
595
849
 
596
- if $opts[?c]
597
- messages.concat load_conversation($opts[?c])
598
- else
599
- if system = Ollama::Utils::FileArgument.
600
- get_file_argument($opts[?s], default: $config.prompts.system? || model_system)
601
- messages << Message.new(role: 'system', content: system)
602
- puts <<~end
603
- Configured system prompt is:
604
- #{italic{Ollama::Utils::Width.wrap(system, percentage: 90)}}
605
- end
606
- end
607
- end
850
+ $current_voice = $config.voice.default
608
851
 
852
+ puts "Configuration read from #{config.filename.inspect} is:", $config
853
+ info
609
854
  puts "\nType /help to display the chat help."
610
855
 
611
856
  images = []
@@ -618,14 +863,27 @@ loop do
618
863
  when %r(^/paste$)
619
864
  puts bold { "Paste your content and then press C-d!" }
620
865
  content = STDIN.read
621
- when %r(^/quit$)
622
- puts "Goodbye."
623
- exit 0
866
+ when %r(^/copy$)
867
+ copy_to_clipboard(messages)
868
+ next
624
869
  when %r(^/markdown$)
625
- markdown = set_markdown(!markdown)
870
+ $markdown.toggle
871
+ next
872
+ when %r(^/stream$)
873
+ $stream.toggle
626
874
  next
627
- when %r(^/list$)
628
- list_conversation(messages, markdown)
875
+ when %r(^/voice(?:\s+(change))?$)
876
+ if $1 == 'change'
877
+ change_voice
878
+ else
879
+ $voice.toggle
880
+ end
881
+ next
882
+ when %r(^/list(?:\s+(\d*))?$)
883
+ last = if $1
884
+ 2 * $1.to_i
885
+ end
886
+ list_conversation(messages, last)
629
887
  next
630
888
  when %r(^/clear$)
631
889
  clear_messages(messages)
@@ -636,7 +894,7 @@ loop do
636
894
  $documents.clear
637
895
  puts "Cleared messages and collection."
638
896
  next
639
- when %r(^/collection\s+(clear|stats|change|new)(?:\s+(.+))?$)
897
+ when %r(^/collection\s+(clear|change)(?:\s+(.+))?$)
640
898
  command, arg = $1, $2
641
899
  case command
642
900
  when 'clear'
@@ -648,24 +906,30 @@ loop do
648
906
  $documents.clear
649
907
  puts "Cleared collection #{bold{collection}}."
650
908
  end
651
- when 'stats'
652
- collection_stats
653
909
  when 'change'
654
910
  choose_collection(collection)
655
- when 'new'
656
- print "Enter name of the new collection: "
657
- $documents.collection = collection = STDIN.gets.chomp
658
- collection_stats
659
911
  end
660
912
  next
661
- when %r(^/pop?(?:\s+(\d*))?$)
662
- n = $1.to_i.clamp(1, Float::INFINITY)
663
- r = messages.pop(2 * n)
664
- m = r.size / 2
665
- puts "Popped the last #{m} exchanges."
913
+ when %r(^/system$)
914
+ change_system_prompt(messages)
915
+ info
916
+ next
917
+ when %r(/info)
918
+ info
919
+ next
920
+ when %r(^/pop(?:\s+(\d*))?$)
921
+ if messages.size > 1
922
+ n = $1.to_i.clamp(1, Float::INFINITY)
923
+ r = messages.pop(2 * n)
924
+ m = r.size / 2
925
+ puts "Popped the last #{m} exchanges."
926
+ else
927
+ puts "No more exchanges you can pop."
928
+ end
929
+ list_conversation(messages, 2)
666
930
  next
667
931
  when %r(^/model$)
668
- model = choose_model('', model)
932
+ $model = choose_model('', $model)
669
933
  next
670
934
  when %r(^/regenerate$)
671
935
  if content = messages[-2]&.content
@@ -677,23 +941,30 @@ loop do
677
941
  end
678
942
  parse_content = false
679
943
  content
680
- when %r(^/summarize\s+(.+))
944
+ when %r(^/import\s+(.+))
945
+ parse_content = false
946
+ content = import($1) or next
947
+ when %r(^/summarize\s+(?:(\d+)\s+)?(.+))
681
948
  parse_content = false
682
- content = summarize($1) or next
949
+ content = summarize($2, words: $1) or next
950
+ when %r(^/embedding$)
951
+ $embedding_paused.toggle(show: false)
952
+ $embedding.show
953
+ next
954
+ when %r(^/embed\s+(.+))
955
+ parse_content = false
956
+ content = embed($1) or next
683
957
  when %r(^/web\s+(?:(\d+)\s+)?(.+))
684
958
  parse_content = false
685
959
  urls = search_web($2, $1.to_i)
686
960
  urls.each do |url|
687
- fetch_source(url) do |url_io|
688
- import_document(url_io, url)
689
- end
961
+ fetch_source(url) { |url_io| embed_source(url_io, url) }
690
962
  end
691
963
  urls_summarized = urls.map { summarize(_1) }
692
- content = <<~end
693
- Answer the query #{$2.inspect} using these sources and summaries:
694
-
695
- #{urls.zip(urls_summarized).map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"}
696
- end
964
+ query = $2.inspect
965
+ results = urls.zip(urls_summarized).
966
+ map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
967
+ content = $config.prompts.web % { query:, results: }
697
968
  when %r(^/save\s+(.+)$)
698
969
  save_conversation($1, messages)
699
970
  puts "Saved conversation to #$1."
@@ -702,6 +973,9 @@ loop do
702
973
  messages = load_conversation($1)
703
974
  puts "Loaded conversation from #$1."
704
975
  next
976
+ when %r(^/quit$)
977
+ puts "Goodbye."
978
+ exit 0
705
979
  when %r(^/)
706
980
  display_chat_help
707
981
  next
@@ -714,12 +988,12 @@ loop do
714
988
  end
715
989
 
716
990
  content, tags = if parse_content
717
- parse_content(content, images.clear)
991
+ parse_content(content, images)
718
992
  else
719
993
  [ content, Utils::Tags.new ]
720
994
  end
721
995
 
722
- if embedding_enabled? && content
996
+ if $embedding.on? && content
723
997
  records = $documents.find_where(
724
998
  content.downcase,
725
999
  tags:,
@@ -733,11 +1007,12 @@ loop do
733
1007
  end
734
1008
  end
735
1009
 
736
- messages << Message.new(role: 'user', content:, images:)
737
- handler = FollowChat.new(messages:, markdown:, voice:)
738
- ollama.chat(model:, messages:, options:, stream: true, &handler)
1010
+ messages << Message.new(role: 'user', content:, images: images.dup)
1011
+ images.clear
1012
+ handler = FollowChat.new(messages:, markdown: $markdown.on?, voice: ($current_voice if $voice.on?))
1013
+ ollama.chat(model: $model, messages:, options:, stream: $stream.on?, &handler)
739
1014
 
740
- if embedding_enabled? && !records.empty?
1015
+ if $embedding.on? && !records.empty?
741
1016
  puts "", records.map { |record|
742
1017
  link = if record.source =~ %r(\Ahttps?://)
743
1018
  record.source