ollama-ruby 0.8.0 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/ollama_chat CHANGED
@@ -52,6 +52,7 @@ class OllamaChatConfig
  list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
  markdown: true
  stream: true
+ document_policy: importing
  embedding:
  enabled: true
  model:
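For orientation (an editorial sketch, not part of the released diff): `document_policy` is a new top-level config key whose default decides how documents referenced in user input are handled; previously such references were always handed to `embed_source`. Judging by `choose_document_policy` further down in this diff, the accepted values are `importing`, `embedding`, and `summarizing`:

```ruby
# Sketch assembled from lines that appear later in this diff; not part of the package itself.
$document_policy = $config.document_policy   # => "importing" by default
# At runtime the new /document_policy command opens a chooser over
# importing / embedding / summarizing and reassigns $document_policy.
```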
@@ -107,7 +108,7 @@ class OllamaChatConfig
  end

  class FollowChat
- include Ollama::Handlers::Concern
+ include Handlers::Concern
  include Term::ANSIColor

  def initialize(messages:, markdown: false, voice: nil, output: $stdout)
@@ -163,114 +164,142 @@ class FollowChat
  end
  end

- module CheckSwitch
- extend Tins::Concern
+ module Switches
+ module CheckSwitch
+ extend Tins::Concern

- included do
- alias_method :on?, :value
- end
+ included do
+ alias_method :on?, :value
+ end

- def off?
- !on?
- end
+ def off?
+ !on?
+ end

- def show
- puts @msg[value]
+ def show
+ puts @msg[value]
+ end
  end
- end

- class Switch
- def initialize(name, msg:, config: $config)
- @value = [ false, true ].include?(config) ? config : !!config.send("#{name}?")
- @msg = msg
- end
+ class Switch
+ def initialize(name, msg:, config: $config)
+ @value = [ false, true ].include?(config) ? config : !!config.send("#{name}?")
+ @msg = msg
+ end

- attr_reader :value
+ attr_reader :value

- def set(value, show: false)
- @value = !!value
- show && self.show
- end
+ def set(value, show: false)
+ @value = !!value
+ show && self.show
+ end

- def toggle(show: true)
- @value = !@value
- show && self.show
+ def toggle(show: true)
+ @value = !@value
+ show && self.show
+ end
+
+ include CheckSwitch
  end

- include CheckSwitch
- end
+ class CombinedSwitch
+ def initialize(value:, msg:)
+ @value = value
+ @msg = msg
+ end

- class CombinedSwitch
- def initialize(value:, msg:)
- @value = value
- @msg = msg
- end
+ def value
+ @value.()
+ end

- def value
- @value.()
+ include CheckSwitch
  end

- include CheckSwitch
- end
+ def setup_switches
+ $markdown = Switch.new(
+ :markdown,
+ msg: {
+ true => "Using #{italic{'ANSI'}} markdown to output content.",
+ false => "Using plaintext for outputting content.",
+ }
+ )

- def setup_switches
- $markdown = Switch.new(
- :markdown,
- msg: {
- true => "Using #{italic{'ANSI'}} markdown to output content.",
- false => "Using plaintext for outputting content.",
- }
- )
+ $stream = Switch.new(
+ :stream,
+ msg: {
+ true => "Streaming enabled.",
+ false => "Streaming disabled.",
+ }
+ )

- $stream = Switch.new(
- :stream,
- msg: {
- true => "Streaming enabled.",
- false => "Streaming disabled.",
- }
- )
+ $voice = Switch.new(
+ :stream,
+ msg: {
+ true => "Voice output enabled.",
+ false => "Voice output disabled.",
+ },
+ config: $config.voice
+ )

- $voice = Switch.new(
- :stream,
- msg: {
- true => "Voice output enabled.",
- false => "Voice output disabled.",
- },
- config: $config.voice
- )
+ $embedding_enabled = Switch.new(
+ :embedding_enabled,
+ msg: {
+ true => "Embedding enabled.",
+ false => "Embedding disabled.",
+ }
+ )

- $embedding_enabled = Switch.new(
- :embedding_enabled,
- msg: {
- true => "Embedding enabled.",
- false => "Embedding disabled.",
- }
- )
+ $embedding_paused = Switch.new(
+ :embedding_paused,
+ msg: {
+ true => "Embedding paused.",
+ false => "Embedding resumed.",
+ }
+ )

- $embedding_paused = Switch.new(
- :embedding_paused,
- msg: {
- true => "Embedding paused.",
- false => "Embedding resumed.",
- }
- )
+ $embedding = CombinedSwitch.new(
+ value: -> { $embedding_enabled.on? && $embedding_paused.off? },
+ msg: {
+ true => "Embedding is currently performed.",
+ false => "Embedding is currently not performed.",
+ }
+ )

- $embedding = CombinedSwitch.new(
- value: -> { $embedding_enabled.on? && $embedding_paused.off? },
- msg: {
- true => "Embedding is currently performed.",
- false => "Embedding is currently not performed.",
- }
- )
+ $location = Switch.new(
+ :location,
+ msg: {
+ true => "Location and localtime enabled.",
+ false => "Location and localtime disabled.",
+ },
+ config: $config.location.enabled
+ )
+ end
+ end
+ include Switches

- $location = Switch.new(
- :location,
- msg: {
- true => "Location and localtime enabled.",
- false => "Location and localtime disabled.",
- },
- config: $config.location.enabled
- )
+ def pull_model_unless_present(model, options, retried = false)
+ ollama.show(name: model) { |response|
+ puts "Model #{bold{model}} with architecture "\
+ "#{response.model_info['general.architecture']} found."
+ if system = response.system
+ puts "Configured model system prompt is:\n#{italic { system }}"
+ return system
+ else
+ return
+ end
+ }
+ rescue Errors::NotFoundError
+ puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
+ ollama.pull(name: model)
+ if retried
+ exit 1
+ else
+ retried = true
+ retry
+ end
+ rescue Errors::Error => e
+ warn "Caught #{e.class} while pulling model: #{e} => Exiting."
+ exit 1
  end

  def search_web(query, n = nil)
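To make the refactoring above easier to follow, here is a standalone sketch of the switch pattern (illustration only, not code from the package): `Switch` stores a boolean plus per-state messages, `CheckSwitch` contributes `on?`/`off?`/`show`, and `CombinedSwitch` derives its value from a lambda. The real code additionally uses `Tins::Concern` and reads defaults from `$config`, which this sketch omits:

```ruby
# Simplified, self-contained imitation of the Switches module shown above.
module CheckSwitch
  def on?
    value
  end

  def off?
    !on?
  end

  def show
    puts @msg[value]
  end
end

class Switch
  include CheckSwitch

  def initialize(value, msg:)
    @value = !!value
    @msg   = msg
  end

  attr_reader :value

  def toggle(show: true)
    @value = !@value
    show && self.show
  end
end

class CombinedSwitch
  include CheckSwitch

  def initialize(value:, msg:)
    @value = value
    @msg   = msg
  end

  def value
    @value.call
  end
end

enabled = Switch.new(true,  msg: { true => "Embedding enabled.", false => "Embedding disabled." })
paused  = Switch.new(false, msg: { true => "Embedding paused.",  false => "Embedding resumed." })
embedding = CombinedSwitch.new(
  value: -> { enabled.on? && paused.off? },
  msg:   { true => "Embedding is currently performed.", false => "Embedding is currently not performed." }
)
embedding.show # prints "Embedding is currently performed."
```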
@@ -281,7 +310,7 @@ def search_web(query, n = nil)
  n < 1 and n = 1
  query = URI.encode_uri_component(query)
  url = "https://www.duckduckgo.com/html/?q=#{query}"
- Ollama::Utils::Fetcher.get(url, debug: $config.debug) do |tmp|
+ Utils::Fetcher.get(url, debug: $config.debug) do |tmp|
  result = []
  doc = Nokogiri::HTML(tmp)
  doc.css('.results_links').each do |link|
@@ -302,38 +331,13 @@ def search_web(query, n = nil)
  end
  end

- def pull_model_unless_present(model, options, retried = false)
- ollama.show(name: model) { |response|
- puts "Model #{bold{model}} with architecture "\
- "#{response.model_info['general.architecture']} found."
- if system = response.system
- puts "Configured model system prompt is:\n#{italic { system }}"
- return system
- else
- return
- end
- }
- rescue Errors::NotFoundError
- puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
- ollama.pull(name: model)
- if retried
- exit 1
- else
- retried = true
- retry
- end
- rescue Errors::Error => e
- warn "Caught #{e.class} while pulling model: #{e} => Exiting."
- exit 1
- end
-
  def load_conversation(filename)
  unless File.exist?(filename)
  puts "File #{filename} doesn't exist. Choose another filename."
  return
  end
  File.open(filename, 'r') do |output|
- return JSON(output.read).map { Ollama::Message.from_hash(_1) }
+ return JSON(output.read).map { Message.from_hash(_1) }
  end
  end

@@ -479,59 +483,6 @@ def parse_source(source_io)
  end
  end

- def embed_source(source_io, source, count: nil)
- $embedding.on? or return parse_source(source_io)
- m = "Embedding #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
- if count
- puts '%u. %s' % [ count, m ]
- else
- puts m
- end
- text = parse_source(source_io) or return
- text.downcase!
- splitter_config = $config.embedding.splitter
- inputs = nil
- case splitter_config.name
- when 'Character'
- splitter = Ollama::Documents::Splitters::Character.new(
- chunk_size: splitter_config.chunk_size,
- )
- inputs = splitter.split(text)
- when 'RecursiveCharacter'
- splitter = Ollama::Documents::Splitters::RecursiveCharacter.new(
- chunk_size: splitter_config.chunk_size,
- )
- inputs = splitter.split(text)
- when 'Semantic'
- splitter = Ollama::Documents::Splitters::Semantic.new(
- ollama:, model: $config.embedding.model.name,
- chunk_size: splitter_config.chunk_size,
- )
- inputs = splitter.split(
- text,
- breakpoint: splitter_config.breakpoint.to_sym,
- percentage: splitter_config.percentage?,
- percentile: splitter_config.percentile?,
- )
- inputs = splitter.split(text)
- end
- inputs or return
- source = source.to_s
- if source.start_with?(?!)
- source = Ollama::Utils::Width.truncate(
- source[1..-1].gsub(/\W+/, ?_),
- length: 10
- )
- end
- $documents.add(inputs, source:, batch_size: $config.embedding.batch_size?)
- end
-
- def add_image(images, source_io, source)
- STDERR.puts "Adding #{source_io&.content_type} image #{source.to_s.inspect}."
- image = Image.for_io(source_io, path: source.to_s)
- (images << image).uniq!
- end
-
  def http_options(url)
  options = {}
  if ssl_no_verify = $config.ssl_no_verify?
@@ -556,7 +507,7 @@ def fetch_source(source, &block)
  source,
  cache: $cache,
  debug: $config.debug,
- http_options: http_options(source)
+ http_options: http_options(Utils::Fetcher.normalize_url(source))
  ) do |tmp|
  block.(tmp)
  end
@@ -570,33 +521,94 @@ def fetch_source(source, &block)
  raise "invalid source"
  end
  rescue => e
- STDERR.puts "Cannot fetch source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
+ STDERR.puts "Cannot fetch source #{source.to_s.inspect}: #{e.class} #{e}\n#{e.backtrace * ?\n}"
+ end
+
+ def add_image(images, source_io, source)
+ STDERR.puts "Adding #{source_io&.content_type} image #{source.to_s.inspect}."
+ image = Image.for_io(source_io, path: source.to_s)
+ (images << image).uniq!
+ end
+
+ def import_source(source_io, source)
+ source = source.to_s
+ puts "Importing #{italic { source_io&.content_type }} document #{source.inspect} now."
+ source_content = parse_source(source_io)
+ "Imported #{source.inspect}:\n#{source_content}\n\n"
  end

  def import(source)
- puts "Now importing #{source.to_s.inspect}."
  fetch_source(source) do |source_io|
- content = parse_source(source_io)
- content.present? or return
+ content = import_source(source_io, source) or return
  source_io.rewind
  content
  end
  end

- def summarize(source, words: nil)
+ def summarize_source(source_io, source, words: nil)
+ puts "Summarizing #{italic { source_io&.content_type }} document #{source.inspect} now."
  words = words.to_i
  words < 1 and words = 100
- puts "Now summarizing #{source.to_s.inspect}."
- source_content =
- fetch_source(source) do |source_io|
- content = parse_source(source_io)
- content.present? or return
- source_io.rewind
- content
- end
+ source_content = parse_source(source_io)
+ source_content.present? or return
  $config.prompts.summarize % { source_content:, words: }
  end

+ def summarize(source, words: nil)
+ fetch_source(source) do |source_io|
+ content = summarize_source(source_io, source, words:) or return
+ source_io.rewind
+ content
+ end
+ end
+
+ def embed_source(source_io, source, count: nil)
+ $embedding.on? or return parse_source(source_io)
+ m = "Embedding #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
+ if count
+ puts '%u. %s' % [ count, m ]
+ else
+ puts m
+ end
+ text = parse_source(source_io) or return
+ text.downcase!
+ splitter_config = $config.embedding.splitter
+ inputs = nil
+ case splitter_config.name
+ when 'Character'
+ splitter = Documents::Splitters::Character.new(
+ chunk_size: splitter_config.chunk_size,
+ )
+ inputs = splitter.split(text)
+ when 'RecursiveCharacter'
+ splitter = Documents::Splitters::RecursiveCharacter.new(
+ chunk_size: splitter_config.chunk_size,
+ )
+ inputs = splitter.split(text)
+ when 'Semantic'
+ splitter = Documents::Splitters::Semantic.new(
+ ollama:, model: $config.embedding.model.name,
+ chunk_size: splitter_config.chunk_size,
+ )
+ inputs = splitter.split(
+ text,
+ breakpoint: splitter_config.breakpoint.to_sym,
+ percentage: splitter_config.percentage?,
+ percentile: splitter_config.percentile?,
+ )
+ inputs = splitter.split(text)
+ end
+ inputs or return
+ source = source.to_s
+ if source.start_with?(?!)
+ source = Utils::Width.truncate(
+ source[1..-1].gsub(/\W+/, ?_),
+ length: 10
+ )
+ end
+ $documents.add(inputs, source:, batch_size: $config.embedding.batch_size?)
+ end
+
  def embed(source)
  if $embedding.on?
  puts "Now embedding #{source.to_s.inspect}."
@@ -618,6 +630,7 @@ def parse_content(content, images)
  images.clear
  tags = Utils::Tags.new

+ contents = [ content ]
  content.scan(%r((?:\.\.|[.~])?/\S+|https?://\S+|#\S+)).each do |source|
  case source
  when /\A#(\S+)/
@@ -628,8 +641,15 @@ def parse_content(content, images)
  case source_io&.content_type&.media_type
  when 'image'
  add_image(images, source_io, source)
- when 'text', 'application'
- embed_source(source_io, source)
+ when 'text', 'application', nil
+ case $document_policy
+ when 'importing'
+ contents << import_source(source_io, source)
+ when 'embedding'
+ embed_source(source_io, source)
+ when 'summarizing'
+ contents << summarize_source(source_io, source)
+ end
  else
  STDERR.puts(
  "Cannot fetch #{source.to_s.inspect} with content type "\
@@ -639,14 +659,14 @@ def parse_content(content, images)
  end
  end
  end
-
- return content, (tags unless tags.empty?)
+ new_content = contents.select(&:present?).compact * "\n\n"
+ return new_content, (tags unless tags.empty?)
  end

  def choose_model(cli_model, current_model)
  models = ollama.tags.models.map(&:name).sort
  model = if cli_model == ''
- Ollama::Utils::Chooser.choose(models) || current_model
+ Utils::Chooser.choose(models) || current_model
  else
  cli_model || current_model
  end
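The behavioural core of the `parse_content` changes above, as a self-contained toy (`apply_policy` and its lambda-free body are invented for illustration; the real code calls `import_source`, `embed_source`, and `summarize_source` on a fetched IO): depending on `$document_policy`, a referenced document is inlined in full, only stored in the embedding collection, or inlined as a summarization prompt, and the surviving pieces are joined into the message that is finally sent:

```ruby
# Toy illustration of the document-policy dispatch; not the package's code.
def apply_policy(policy, doc, store)
  case policy
  when 'importing'   then "Imported document:\n#{doc}\n\n"   # inline the full text
  when 'embedding'   then (store << doc; nil)                # store only, inline nothing
  when 'summarizing' then "Summarize in 100 words:\n#{doc}"  # inline a summary request
  end
end

store    = []                                    # stands in for $documents
contents = [ "What does the attached report conclude?" ]
contents << apply_policy('embedding', "…report text…", store)
puts contents.compact.reject(&:empty?).join("\n\n")
store.size # => 1, because the 'embedding' policy stored the document instead of inlining it
```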
@@ -663,7 +683,7 @@ def choose_collection(current_collection)
  collections = [ current_collection ] + $documents.collections
  collections = collections.compact.map(&:to_s).uniq.sort
  collections.unshift('[EXIT]').unshift('[NEW]')
- collection = Ollama::Utils::Chooser.choose(collections) || current_collection
+ collection = Utils::Chooser.choose(collections) || current_collection
  case collection
  when '[NEW]'
  $documents.collection = ask?(prompt: "Enter name of the new collection: ")
@@ -674,7 +694,29 @@ def choose_collection(current_collection)
  end
  ensure
  puts "Using collection #{bold{$documents.collection}}."
- collection_stats
+ info
+ end
+
+ def choose_document_policy
+ policies = %w[ importing embedding summarizing ].sort
+ current = if policies.index($document_policy)
+ $document_policy
+ elsif policies.index($config.document_policy)
+ $config.document_policy
+ else
+ policies.first
+ end
+ policies.unshift('[EXIT]')
+ policy = Utils::Chooser.choose(policies)
+ case policy
+ when nil, '[EXIT]'
+ puts "Exiting chooser."
+ policy = current
+ end
+ $document_policy = policy
+ ensure
+ puts "Using document policy #{bold{$document_policy}}."
+ info
  end

  def collection_stats
@@ -695,19 +737,19 @@ end

  def configure_cache
  if $opts[?M]
- Ollama::Documents::MemoryCache
+ Documents::MemoryCache
  else
  Object.const_get($config.cache)
  end
  rescue => e
  STDERR.puts "Caught #{e.class}: #{e} => Falling back to MemoryCache."
- Ollama::Documents::MemoryCache
+ Documents::MemoryCache
  end

  def show_system_prompt
  puts <<~EOT
  Configured system prompt is:
- #{Ollama::Utils::ANSIMarkdown.parse($system.to_s).gsub(/\n+\z/, '').full? || 'n/a'}
+ #{Utils::ANSIMarkdown.parse($system.to_s).gsub(/\n+\z/, '').full? || 'n/a'}
  EOT
  end

@@ -731,7 +773,7 @@ end

  def change_system_prompt(messages, default)
  prompts = $config.system_prompts.attribute_names.compact
- chosen = Ollama::Utils::Chooser.choose(prompts)
+ chosen = Utils::Chooser.choose(prompts)
  system = if chosen
  $config.system_prompts.send(chosen)
  else
@@ -741,7 +783,7 @@ def change_system_prompt(messages, default)
  end

  def change_voice
- chosen = Ollama::Utils::Chooser.choose($config.voice.list)
+ chosen = Utils::Chooser.choose($config.voice.list)
  $current_voice = chosen.full? || $config.voice.default
  end

@@ -756,6 +798,7 @@ def info
  $markdown.show
  $stream.show
  $location.show
+ puts "Document policy for references in user text: #{bold{$document_policy}}"
  if $voice.on?
  puts "Using voice #{bold{$current_voice}} to speak."
  end
@@ -799,6 +842,7 @@ def display_chat_help
  /regenerate the last answer message
  /collection( clear|change) change (default) collection or clear
  /info show information for current session
+ /document_policy pick a scan policy for document references
  /import source import the source's content
  /summarize [n] source summarize the source's content in n words
  /embedding toggle embedding paused or not
@@ -832,7 +876,7 @@ def usage
  end

  def version
- puts "%s %s" % [ File.basename($0), Ollama::VERSION ]
+ puts "%s %s" % [ File.basename($0), VERSION ]
  exit 0
  end

@@ -853,10 +897,11 @@ $opts[?V] and version
  base_url = $opts[?u] || $config.url
  $ollama = Client.new(base_url:, debug: $config.debug)

- $model = choose_model($opts[?m], $config.model.name)
- options = Options[$config.model.options]
- model_system = pull_model_unless_present($model, options)
- messages = []
+ $document_policy = $config.document_policy
+ $model = choose_model($opts[?m], $config.model.name)
+ options = Options[$config.model.options]
+ model_system = pull_model_unless_present($model, options)
+ messages = []
  $embedding_enabled.set($config.embedding.enabled && !$opts[?E])

  if $opts[?c]
@@ -866,7 +911,7 @@ else
  if $opts[?s] == ??
  change_system_prompt(messages, default)
  else
- system = Ollama::Utils::FileArgument.get_file_argument($opts[?s], default:)
+ system = Utils::FileArgument.get_file_argument($opts[?s], default:)
  system.present? and set_system_prompt(messages, system)
  end
  end
@@ -916,7 +961,7 @@ else
  end

  if redis_expiring_url = $config.redis.expiring.url?
- $cache = Ollama::Documents::RedisCache.new(
+ $cache = Documents::RedisCache.new(
  prefix: 'Expiring-',
  url: redis_expiring_url,
  ex: $config.redis.expiring.ex,
@@ -969,7 +1014,7 @@ loop do
  puts "Cleared messages."
  next
  when %r(^/clobber$)
- if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
+ if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
  clear_messages(messages)
  $documents.clear
  puts "Cleared messages and collection #{bold{$documents.collection}}."
@@ -1010,7 +1055,7 @@ loop do
  when 'clear'
  loop do
  tags = $documents.tags.add('[EXIT]').add('[ALL]')
- tag = Ollama::Utils::Chooser.choose(tags, prompt: 'Clear? %s')
+ tag = Utils::Chooser.choose(tags, prompt: 'Clear? %s')
  case tag
  when nil, '[EXIT]'
  puts "Exiting chooser."
@@ -1034,9 +1079,12 @@ loop do
  choose_collection($documents.collection)
  end
  next
- when %r(/info)
+ when %r(^/info$)
  info
  next
+ when %r(^/document_policy$)
+ choose_document_policy
+ next
  when %r(^/import\s+(.+))
  parse_content = false
  content = import($1) or next
@@ -7,7 +7,7 @@ class Ollama::Handlers::Markdown
  def initialize(output: $stdout)
  super
  @output.sync = true
- @content = ''
+ @content = ''
  end

  def call(response)
@@ -16,7 +16,6 @@ class Ollama::Handlers::Markdown
  markdown_content = Ollama::Utils::ANSIMarkdown.parse(@content)
  @output.print clear_screen, move_home, markdown_content
  end
- response.done and @output.puts
  self
  end
  end
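Finally, on the two `Ollama::Handlers::Markdown` hunks (an editorial reading, not text from the package): the handler accumulates streamed chunks in `@content`, re-parses the whole buffer with `ANSIMarkdown`, and repaints the screen on every call; removing `response.done and @output.puts` simply drops the extra newline that used to be printed once the final chunk arrived. A minimal stand-alone imitation of that repaint pattern:

```ruby
# Toy repaint loop; the real handler uses Ollama::Utils::ANSIMarkdown and
# Term::ANSIColor's clear_screen/move_home instead of raw escape codes.
class RepaintingPrinter
  def initialize(output: $stdout)
    @output = output
    @buffer = +''
  end

  def call(chunk)
    @buffer << chunk
    @output.print "\e[2J\e[H", @buffer   # clear screen, move cursor home, reprint everything
    self
  end
end

printer = RepaintingPrinter.new
[ "# Str", "eamed ", "markdown…" ].each { |chunk| printer.call(chunk) }
```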