ollama-ruby 0.5.0 → 0.6.0

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 3efc65090c37d60db61417738d3780ce7b86af805c86e1828421b86e83c11c13
- data.tar.gz: 3d18b6d92320c889bcef7b566a818867cbd55ff29a519738e3dd0e524d766aa6
+ metadata.gz: bb7858cf04c638fa3369fe7d96d96eed12e5553fd17abe9ed48c1eaf25113ffb
+ data.tar.gz: ccb56d4b85a6e74256feb52ac5642d50850f7b980f9060dbb9fdf933985be746
  SHA512:
- metadata.gz: 5086fa3669dd9a19d917a528dacf8073cf250400ef91210086b96a99e62468f3b232d55a1a04240537ab79f58cb9015c5f591b46b61e60424e068107133a944a
- data.tar.gz: 93837384222c5221e8b70db3034fe8ac4dbb86c379bd583a66c29b500376e6fc6bacba2419d673cf84f1b81a2b13b2cbd0ac8d32a2280e09e1c46acc48de5f8b
+ metadata.gz: f36aacde399c0be934425f12669fa6cc55cdfad938ea8157c5195db8c2367243cc6470dff24ec5bf83800079732bfa271450a4779246335c45c2bedbb0e3a6cf
+ data.tar.gz: 56e465f056934210a6cc45677d52518d8adb76e8702561248b6db4cd1e4b129cfd22eedbdcc0c935f69bf85eeb9bc324aef6876f39969942af4529978a277756
data/.envrc CHANGED
@@ -1 +1,2 @@
  export REDIS_URL=redis://localhost:9736
+ export REDIS_EXPRING_URL=redis://localhost:9736
data/README.md CHANGED
@@ -45,7 +45,6 @@ ollama_chat [OPTIONS]
  -D DOCUMENT load document and add to collection (multiple)
  -M use (empty) MemoryCache for this chat session
  -E disable embeddings for this chat session
- -v use voice output
  -h this help
  ```
 
@@ -154,13 +153,17 @@ subject - the young, blue-eyed cat.
  The following commands can be given inside the chat, if prefixed by a `/`:
 
  ```
+ /copy to copy last response to clipboard
  /paste to paste content
  /markdown toggle markdown output
+ /stream toggle stream output
+ /voice( change) toggle voice output or change the voice
  /list [n] list the last n / all conversation exchanges
  /clear clear the whole conversation
  /clobber clear the conversation and collection
  /pop [n] pop the last n exchanges, defaults to 1
  /model change the model
+ /system change system prompt (clears conversation)
  /regenerate the last answer message
  /collection clear [tag]|change clear or show stats of current collection
  /import source import the source's content
data/Rakefile CHANGED
@@ -41,6 +41,7 @@ GemHadar do
  dependency 'pdf-reader', '~> 2.0'
  dependency 'logger', '~> 1.0'
  dependency 'json', '~> 2.0'
+ dependency 'xdg', '~> 7.0'
  development_dependency 'all_images', '~> 0.4'
  development_dependency 'rspec', '~> 3.2'
  development_dependency 'webmock'
data/bin/ollama_chat CHANGED
@@ -17,6 +17,7 @@ require 'nokogiri'
  require 'rss'
  require 'pdf/reader'
  require 'csv'
+ require 'xdg'
 
  class OllamaChatConfig
  include ComplexConfig
@@ -31,7 +32,6 @@ class OllamaChatConfig
  options:
  num_ctx: 8192
  prompts:
- system: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
  embed: "This source was now embedded: %{source}"
  summarize: |
  Generate an abstract summary of the content in this document using
@@ -42,8 +42,14 @@ class OllamaChatConfig
  Answer the the query %{query} using these sources and summaries:
 
  %{results}
- voice: Samantha
+ system_prompts:
+ default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
+ voice:
+ enabled: false
+ default: Samantha
+ list: <%= `say -v ?`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
  markdown: true
+ stream: true
  embedding:
  enabled: true
  model:
@@ -57,14 +63,16 @@ class OllamaChatConfig
  splitter:
  name: RecursiveCharacter
  chunk_size: 1024
- cache: Ollama::Documents::Cache::RedisBackedMemoryCache
+ cache: Ollama::Documents::RedisBackedMemoryCache
  redis:
  documents:
  url: <%= ENV.fetch('REDIS_URL', 'null') %>
  expiring:
  url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
+ ex: 86400
  debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
  ssl_no_verify: []
+ copy: pbcopy
  EOT
 
  def initialize(filename = nil)
@@ -75,7 +83,7 @@ class OllamaChatConfig
  if @filename == default_path && !retried
  retried = true
  mkdir_p File.dirname(default_path)
- File.secure_write(default_path, DEFAULT_CONFIG)
+ File.secure_write(default_path.to_s, DEFAULT_CONFIG)
  retry
  else
  raise
@@ -87,17 +95,11 @@ class OllamaChatConfig
  attr_reader :config
 
  def default_path
- File.join(config_dir_path, 'config.yml')
+ config_dir_path + 'config.yml'
  end
 
  def config_dir_path
- File.join(
- ENV.fetch(
- 'XDG_CONFIG_HOME',
- File.join(ENV.fetch('HOME'), '.config')
- ),
- 'ollama_chat'
- )
+ XDG.new.config_home + 'ollama_chat'
  end
  end
 
@@ -158,12 +160,113 @@ class FollowChat
  end
  end
 
+ module CheckSwitch
+ extend Tins::Concern
+
+ included do
+ alias_method :on?, :value
+ end
+
+ def off?
+ !on?
+ end
+
+ def show
+ puts @msg[value]
+ end
+ end
+
+ class Switch
+ def initialize(name, msg:, config: $config)
+ @value = !!config.send("#{name}?")
+ @msg = msg
+ end
+
+ attr_reader :value
+
+ def set(value, show: false)
+ @value = !!value
+ show && self.show
+ end
+
+ def toggle(show: true)
+ @value = !@value
+ show && self.show
+ end
+
+ include CheckSwitch
+ end
+
+ class CombinedSwitch
+ def initialize(value:, msg:)
+ @value = value
+ @msg = msg
+ end
+
+ def value
+ @value.()
+ end
+
+ include CheckSwitch
+ end
+
+ def setup_switches
+ $markdown = Switch.new(
+ :markdown,
+ msg: {
+ true => "Using #{italic{'ANSI'}} markdown to output content.",
+ false => "Using plaintext for outputting content.",
+ }
+ )
+
+ $stream = Switch.new(
+ :stream,
+ msg: {
+ true => "Streaming enabled.",
+ false => "Streaming disabled.",
+ }
+ )
+
+ $voice = Switch.new(
+ :stream,
+ msg: {
+ true => "Voice output enabled.",
+ false => "Voice output disabled.",
+ },
+ config: $config.voice
+ )
+
+ $embedding_enabled = Switch.new(
+ :embedding_enabled,
+ msg: {
+ true => "Embedding enabled.",
+ false => "Embedding disabled.",
+ }
+ )
+
+ $embedding_paused = Switch.new(
+ :embedding_paused,
+ msg: {
+ true => "Embedding paused.",
+ false => "Embedding resumed.",
+ }
+ )
+
+ $embedding = CombinedSwitch.new(
+ value: -> { $embedding_enabled.on? && $embedding_paused.off? },
+ msg: {
+ true => "Embedding is currently performed.",
+ false => "Embedding is currently not performed.",
+ }
+ )
+ end
+
  def search_web(query, n = nil)
  n = n.to_i
  n < 1 and n = 1
  query = URI.encode_uri_component(query)
  url = "https://www.duckduckgo.com/html/?q=#{query}"
- Ollama::Utils::Fetcher.new(debug: $config.debug).get(url) do |tmp|
+ Ollama::Utils::Fetcher.get(url, debug: $config.debug) do |tmp|
  result = []
  doc = Nokogiri::HTML(tmp)
  doc.css('.results_links').each do |link|
@@ -196,7 +299,7 @@ def pull_model_unless_present(model, options, retried = false)
  end
  }
  rescue Errors::NotFoundError
- puts "Model #{bold{model}} not found, attempting to pull it now…"
+ puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
  ollama.pull(name: model)
  if retried
  exit 1
@@ -205,7 +308,7 @@ rescue Errors::NotFoundError
  retry
  end
  rescue Errors::Error => e
- warn "Caught #{e.class}: #{e} => Exiting."
+ warn "Caught #{e.class} while pulling model: #{e} => Exiting."
  exit 1
  end
 
@@ -242,7 +345,7 @@ def list_conversation(messages, last = nil)
  when 'system' then 213
  else 210
  end
- content = m.content.full? { $markdown ? Utils::ANSIMarkdown.parse(_1) : _1 }
+ content = m.content.full? { $markdown.on? ? Utils::ANSIMarkdown.parse(_1) : _1 }
  message_text = message_type(m.images) + " "
  message_text += bold { color(role_color) { m.role } }
  message_text += ":\n#{content}"
@@ -319,8 +422,6 @@ def parse_source(source_io)
  result << "\n\n"
  end
  result
- when %r(\Atext/)
- source_io.read
  when 'application/rss+xml'
  parse_rss(source_io)
  when 'application/atom+xml'
@@ -330,6 +431,8 @@ def parse_source(source_io)
  when 'application/pdf'
  reader = PDF::Reader.new(source_io)
  reader.pages.inject(+'') { |result, page| result << page.text }
+ when %r(\Atext/), nil
+ source_io.read
  else
  STDERR.puts "Cannot embed #{source_io&.content_type} document."
  return
@@ -337,7 +440,7 @@ def parse_source(source_io)
  end
 
  def embed_source(source_io, source)
- embedding_enabled? or return parse_source(source_io)
+ $embedding.on? or return parse_source(source_io)
  puts "Embedding #{italic { source_io&.content_type }} document #{source.to_s.inspect}."
  text = parse_source(source_io) or return
  text.downcase!
@@ -404,7 +507,12 @@ def fetch_source(source, &block)
  block.(tmp)
  end
  when %r(\Ahttps?://\S+)
- Utils::Fetcher.get(source, debug: $config.debug, http_options: http_options(source)) do |tmp|
+ Utils::Fetcher.get(
+ source,
+ cache: $cache,
+ debug: $config.debug,
+ http_options: http_options(source)
+ ) do |tmp|
  block.(tmp)
  end
  when %r(\Afile://(?:(?:[.-]|[[:alnum:]])*)(/\S*)|([~.]?/\S*))
@@ -417,7 +525,7 @@ def fetch_source(source, &block)
  raise "invalid source"
  end
  rescue => e
- STDERR.puts "Cannot add source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
+ STDERR.puts "Cannot fetch source #{source.to_s.inspect}: #{e}\n#{e.backtrace * ?\n}"
  end
 
  def import(source)
@@ -445,7 +553,7 @@ def summarize(source, words: nil)
  end
 
  def embed(source)
- if embedding_enabled?
+ if $embedding.on?
  puts "Now embedding #{source.to_s.inspect}."
  fetch_source(source) do |source_io|
  content = parse_source(source_io)
@@ -537,62 +645,84 @@ rescue => e
  Ollama::Documents::MemoryCache
  end
 
- def toggle_markdown
- $markdown = !$markdown
- show_markdown
- end
-
- def show_markdown
- if $markdown
- puts "Using ANSI markdown to output content."
- else
- puts "Using plaintext for outputting content."
- end
- $markdown
- end
-
- def set_embedding(embedding)
- $embedding_enabled = embedding
- show_embedding
+ def show_system_prompt
+ puts <<~EOT
+ Configured system prompt is:
+ #{Ollama::Utils::ANSIMarkdown.parse($system.to_s).gsub(/\n+\z/, '').full? || 'n/a'}
+ EOT
  end
 
- def show_embedding
- puts "Embedding is #{embedding_enabled? ? "on" : "off"}."
- $embedding_enabled
+ def set_system_prompt(messages, system)
+ $system = system
+ messages.clear
+ messages << Message.new(role: 'system', content: system)
  end
 
- def embedding_enabled?
- $embedding_enabled && !$embedding_paused
+ def change_system_prompt(messages)
+ prompts = $config.system_prompts.attribute_names.compact
+ chosen = Ollama::Utils::Chooser.choose(prompts)
+ system = if chosen
+ $config.system_prompts.send(chosen)
+ else
+ default
+ end
+ set_system_prompt(messages, system)
  end
 
- def toggle_embedding_paused
- $embedding_paused = !$embedding_paused
- show_embedding
+ def change_voice
+ chosen = Ollama::Utils::Chooser.choose($config.voice.list)
+ $current_voice = chosen.full? || $config.voice.default
  end
 
  def info
  puts "Current model is #{bold{$model}}."
  collection_stats
- if show_embedding
+ $embedding.show
+ if $embedding.on?
  puts "Text splitter is #{bold{$config.embedding.splitter.name}}."
  end
- puts "Documents database cache is #{$documents.nil? ? 'n/a' : $documents.cache.class}"
- show_markdown
+ puts "Documents database cache is #{$documents.nil? ? 'n/a' : bold{$documents.cache.class}}"
+ $markdown.show
+ $stream.show
+ if $voice.on?
+ puts "Using voice #{bold{$current_voice}} to speak."
+ end
+ show_system_prompt
  end
 
  def clear_messages(messages)
  messages.delete_if { _1.role != 'system' }
  end
 
+ def copy_to_clipboard(messages)
+ if message = messages.last and message.role == 'assistant'
+ copy = `which #{$config.copy}`.chomp
+ if copy.present?
+ IO.popen(copy, 'w') do |clipboard|
+ clipboard.write(message.content)
+ end
+ STDOUT.puts "The last response has been copied to the system clipboard."
+ else
+ STDERR.puts "#{$config.copy.inspect} command not found in system's path!"
+ end
+ else
+ STDERR.puts "No response available to copy to the system clipboard."
+ end
+ end
+
  def display_chat_help
  puts <<~EOT
+ /copy to copy last response to clipboard
  /paste to paste content
  /markdown toggle markdown output
+ /stream toggle stream output
+ /voice( change) toggle voice output or change the voice
  /list [n] list the last n / all conversation exchanges
  /clear clear the whole conversation
  /clobber clear the conversation and collection
  /pop [n] pop the last n exchanges, defaults to 1
  /model change the model
+ /system change system prompt (clears conversation)
  /regenerate the last answer message
  /collection clear [tag]|change clear or show stats of current collection
  /import source import the source's content
@@ -620,25 +750,31 @@ def usage
  -D DOCUMENT load document and add to embeddings collection (multiple)
  -M use (empty) MemoryCache for this chat session
  -E disable embeddings for this chat session
- -v use voice output
+ -V display the current version number and quit
  -h this help
 
  EOT
  exit 0
  end
 
+ def version
+ puts "%s %s" % [ File.basename($0), Ollama::VERSION ]
+ exit 0
+ end
+
  def ollama
  $ollama
  end
 
- $opts = go 'f:u:m:s:c:C:D:MEvh'
+ $opts = go 'f:u:m:s:c:C:D:MEVh'
 
  config = OllamaChatConfig.new($opts[?f])
  $config = config.config
 
- $opts[?h] and usage
+ setup_switches
 
- puts "Configuration read from #{config.filename.inspect} is:", $config
+ $opts[?h] and usage
+ $opts[?V] and version
 
  base_url = $opts[?u] || $config.url
  $ollama = Client.new(base_url:, debug: $config.debug)
@@ -647,14 +783,21 @@ $model = choose_model($opts[?m], $config.model.name)
  options = Options[$config.model.options]
  model_system = pull_model_unless_present($model, options)
  messages = []
- set_embedding($config.embedding.enabled && !$opts[?E])
+ $embedding_enabled.set($config.embedding.enabled && !$opts[?E])
 
- if voice = ($config.voice if $opts[?v])
- puts "Using voice #{bold{voice}} to speak."
+ if $opts[?c]
+ messages.concat load_conversation($opts[?c])
+ else
+ default = $config.system_prompts.default? || model_system
+ if $opts[?s] == ??
+ change_system_prompt(messages)
+ else
+ system = Ollama::Utils::FileArgument.get_file_argument($opts[?s], default:)
+ system.present? and set_system_prompt(messages, system)
+ end
  end
- $markdown = $config.markdown
 
- if embedding_enabled?
+ if $embedding.on?
  $embedding_model = $config.embedding.model.name
  embedding_model_options = Options[$config.embedding.model.options]
  pull_model_unless_present($embedding_model, embedding_model_options)
@@ -666,6 +809,7 @@ if embedding_enabled?
  collection:,
  cache: configure_cache,
  redis_url: $config.redis.documents.url?,
+ debug: ENV['DEBUG'].to_i == 1,
  )
 
  document_list = $opts[?D].to_a
@@ -691,24 +835,22 @@ if embedding_enabled?
  end
  end
  end
- collection_stats
  else
  $documents = Tins::NULL
  end
 
- if $opts[?c]
- messages.concat load_conversation($opts[?c])
- else
- if system = Ollama::Utils::FileArgument.
- get_file_argument($opts[?s], default: $config.prompts.system? || model_system)
- messages << Message.new(role: 'system', content: system)
- puts <<~EOT
- Configured system prompt is:
- #{italic{Ollama::Utils::Width.wrap(system, percentage: 90)}}
- EOT
- end
+ if redis_expiring_url = $config.redis.expiring.url?
+ $cache = Ollama::Documents::RedisCache.new(
+ prefix: 'Expiring-',
+ url: redis_expiring_url,
+ ex: $config.redis.expiring.ex,
+ )
  end
 
+ $current_voice = $config.voice.default
+
+ puts "Configuration read from #{config.filename.inspect} is:", $config
+ info
  puts "\nType /help to display the chat help."
 
  images = []
@@ -721,8 +863,21 @@ loop do
  when %r(^/paste$)
  puts bold { "Paste your content and then press C-d!" }
  content = STDIN.read
+ when %r(^/copy$)
+ copy_to_clipboard(messages)
+ next
  when %r(^/markdown$)
- $markdown = toggle_markdown
+ $markdown.toggle
+ next
+ when %r(^/stream$)
+ $stream.toggle
+ next
+ when %r(^/voice(?:\s+(change))?$)
+ if $1 == 'change'
+ change_voice
+ else
+ $voice.toggle
+ end
  next
  when %r(^/list(?:\s+(\d*))?$)
  last = if $1
@@ -755,6 +910,10 @@ loop do
  choose_collection(collection)
  end
  next
+ when %r(^/system$)
+ change_system_prompt(messages)
+ info
+ next
  when %r(/info)
  info
  next
@@ -789,7 +948,8 @@ loop do
  parse_content = false
  content = summarize($2, words: $1) or next
  when %r(^/embedding$)
- toggle_embedding_paused
+ $embedding_paused.toggle(show: false)
+ $embedding.show
  next
  when %r(^/embed\s+(.+))
  parse_content = false
@@ -828,12 +988,12 @@ loop do
  end
 
  content, tags = if parse_content
- parse_content(content, images.clear)
+ parse_content(content, images)
  else
  [ content, Utils::Tags.new ]
  end
 
- if embedding_enabled? && content
+ if $embedding.on? && content
  records = $documents.find_where(
  content.downcase,
  tags:,
@@ -847,11 +1007,12 @@ loop do
  end
  end
 
- messages << Message.new(role: 'user', content:, images:)
- handler = FollowChat.new(messages:, markdown: $markdown, voice:)
- ollama.chat(model: $model, messages:, options:, stream: true, &handler)
+ messages << Message.new(role: 'user', content:, images: images.dup)
+ images.clear
+ handler = FollowChat.new(messages:, markdown: $markdown.on?, voice: ($current_voice if $voice.on?))
+ ollama.chat(model: $model, messages:, options:, stream: $stream.on?, &handler)
 
- if embedding_enabled? && !records.empty?
+ if $embedding.on? && !records.empty?
  puts "", records.map { |record|
  link = if record.source =~ %r(\Ahttps?://)
  record.source
data/docker-compose.yml CHANGED
@@ -2,10 +2,9 @@ services:
  redis:
  image: redis:7.2.5-alpine
  restart: unless-stopped
- ports:
- - "9736:6379"
+ ports: [ "127.0.0.1:9736:6379" ]
  volumes:
- - "redis-data:/data:delegated"
- - "./config/redis.conf:/etc/redis.conf"
+ - "redis-data:/data:delegated"
+ - "./config/redis.conf:/etc/redis.conf"
  volumes:
  redis-data:
data/lib/ollama/client.rb CHANGED
@@ -19,8 +19,8 @@ class Ollama::Client
  'missing :base_url parameter or OLLAMA_URL environment variable'
  end
  base_url.is_a? URI or base_url = URI.parse(base_url)
- base_url.is_a?(URI::HTTP) || base_url.is_a?(URI::HTTPS) or
- raise ArgumentError, "require #{base_url.inspect} to be http/https-URI"
+ base_url.is_a?(URI::HTTP) || base_url.is_a?(URI::HTTPS) or
+ raise ArgumentError, "require #{base_url.inspect} to be http/https-URI"
  @ssl_verify_peer = base_url.query.to_s.split(?&).inject({}) { |h, l|
  h.merge Hash[*l.split(?=)]
  }['ssl_verify_peer'] != 'false'
data/lib/ollama/documents.rb CHANGED
@@ -35,12 +35,13 @@ class Ollama::Documents
  alias inspect to_s
  end
 
- def initialize(ollama:, model:, model_options: nil, collection: nil, cache: MemoryCache, redis_url: nil)
+ def initialize(ollama:, model:, model_options: nil, collection: nil, cache: MemoryCache, redis_url: nil, debug: false)
  collection ||= default_collection
  @ollama, @model, @model_options, @collection =
  ollama, model, model_options, collection.to_sym
  @redis_url = redis_url
- @cache = connect_cache(cache)
+ @cache = connect_cache(cache)
+ @debug = debug
  end
 
  def default_collection
@@ -64,6 +65,9 @@
  }
  inputs.reject! { |i| exist?(i) }
  inputs.empty? and return self
+ if @debug
+ puts Ollama::Utils::ColorizeTexts.new(inputs)
+ end
  batches = inputs.each_slice(batch_size).
  with_infobar(
  label: "Add #{truncate(tags.to_s, percentage: 25)}",
data/lib/ollama/utils/cache_fetcher.rb ADDED
@@ -0,0 +1,38 @@
+ require 'digest/md5'
+
+ class Ollama::Utils::CacheFetcher
+ def initialize(cache)
+ @cache = cache
+ end
+
+ def get(url, &block)
+ block or raise ArgumentError, 'require block argument'
+ body = @cache[key(:body, url)]
+ content_type = @cache[key(:content_type, url)]
+ content_type = MIME::Types[content_type].first
+ if body && content_type
+ io = StringIO.new(body)
+ io.rewind
+ io.extend(Ollama::Utils::Fetcher::ContentType)
+ io.content_type = content_type
+ block.(io)
+ end
+ end
+
+ def put(url, io)
+ io.rewind
+ body = io.read
+ body.empty? and return
+ content_type = io.content_type
+ content_type.nil? and return
+ @cache[key(:body, url)] = body
+ @cache[key(:content_type, url)] = content_type.to_s
+ self
+ end
+
+ private
+
+ def key(type, url)
+ [ type, Digest::MD5.hexdigest(url) ] * ?-
+ end
+ end
data/lib/ollama/utils/fetcher.rb CHANGED
@@ -3,6 +3,7 @@ require 'tins/unit'
  require 'infobar'
  require 'mime-types'
  require 'stringio'
+ require 'ollama/utils/cache_fetcher'
 
  class Ollama::Utils::Fetcher
  module ContentType
@@ -17,6 +18,54 @@
 
  class RetryWithoutStreaming < StandardError; end
 
+ def self.get(url, **options, &block)
+ cache = options.delete(:cache) and
+ cache = Ollama::Utils::CacheFetcher.new(cache)
+ if result = cache&.get(url, &block)
+ infobar.puts "Getting #{url.inspect} from cache."
+ return result
+ else
+ new(**options).send(:get, url) do |tmp|
+ result = block.(tmp)
+ if cache && !tmp.is_a?(StringIO)
+ tmp.rewind
+ cache.put(url, tmp)
+ end
+ result
+ end
+ end
+ end
+
+ def self.read(filename, &block)
+ if File.exist?(filename)
+ File.open(filename) do |file|
+ file.extend(Ollama::Utils::Fetcher::ContentType)
+ file.content_type = MIME::Types.type_for(filename).first
+ block.(file)
+ end
+ end
+ end
+
+ def self.execute(command, &block)
+ Tempfile.open do |tmp|
+ IO.popen(command) do |command|
+ until command.eof?
+ tmp.write command.read(4096)
+ end
+ tmp.rewind
+ tmp.extend(Ollama::Utils::Fetcher::ContentType)
+ tmp.content_type = MIME::Types['text/plain'].first
+ block.(tmp)
+ end
+ end
+ rescue => e
+ STDERR.puts "Cannot execute #{command.inspect} (#{e})"
+ if @debug && !e.is_a?(RuntimeError)
+ STDERR.puts "#{e.backtrace * ?\n}"
+ end
+ yield ContentType.failed
+ end
+
  def initialize(debug: false, http_options: {})
  @debug = debug
  @started = false
@@ -24,9 +73,7 @@
  @http_options = http_options
  end
 
- def self.get(url, **options, &block)
- new(**options).get(url, &block)
- end
+ private
 
  def excon(url, **options)
  Excon.new(url, options.merge(@http_options))
@@ -105,34 +152,4 @@
  }
  '%l ' + progress + ' in %te, ETA %e @%E'
  end
-
- def self.read(filename, &block)
- if File.exist?(filename)
- File.open(filename) do |file|
- file.extend(Ollama::Utils::Fetcher::ContentType)
- file.content_type = MIME::Types.type_for(filename).first
- block.(file)
- end
- end
- end
-
- def self.execute(command, &block)
- Tempfile.open do |tmp|
- IO.popen(command) do |command|
- until command.eof?
- tmp.write command.read(4096)
- end
- tmp.rewind
- tmp.extend(Ollama::Utils::Fetcher::ContentType)
- tmp.content_type = MIME::Types['text/plain'].first
- block.(tmp)
- end
- end
- rescue => e
- STDERR.puts "Cannot execute #{command.inspect} (#{e})"
- if @debug && !e.is_a?(RuntimeError)
- STDERR.puts "#{e.backtrace * ?\n}"
- end
- yield ContentType.failed
- end
  end
data/lib/ollama/utils/file_argument.rb CHANGED
@@ -25,8 +25,10 @@ module Ollama::Utils::FileArgument
  File.exist?(path_or_content)
  then
  File.read(path_or_content)
+ elsif path_or_content.present?
+ path_or_content
  else
- path_or_content.full? || default
+ default
  end
  end
  end
data/lib/ollama/version.rb CHANGED
@@ -1,6 +1,6 @@
  module Ollama
  # Ollama version
- VERSION = '0.5.0'
+ VERSION = '0.6.0'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama-ruby.gemspec CHANGED
@@ -1,26 +1,26 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama-ruby 0.5.0 ruby lib
+ # stub: ollama-ruby 0.6.0 ruby lib
 
  Gem::Specification.new do |s|
  s.name = "ollama-ruby".freeze
- s.version = "0.5.0".freeze
+ s.version = "0.6.0".freeze
 
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Florian Frank".freeze]
- s.date = "2024-09-26"
+ s.date = "2024-09-30"
  s.description = "Library that allows interacting with the Ollama API".freeze
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_console".freeze, "ollama_chat".freeze, "ollama_update".freeze, "ollama_cli".freeze]
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/cache/common.rb".freeze, "lib/ollama/documents/cache/memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_backed_memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/file_argument.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze]
- s.files = [".envrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "LICENSE".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_cli".freeze, "bin/ollama_console".freeze, "bin/ollama_update".freeze, "config/redis.conf".freeze, "docker-compose.yml".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/cache/common.rb".freeze, "lib/ollama/documents/cache/memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_backed_memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/file_argument.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze, "ollama-ruby.gemspec".freeze, "spec/assets/embeddings.json".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_backed_memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, 
"spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/file_argument_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/cache/common.rb".freeze, "lib/ollama/documents/cache/memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_backed_memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/cache_fetcher.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/file_argument.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze]
+ s.files = [".envrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "LICENSE".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_cli".freeze, "bin/ollama_console".freeze, "bin/ollama_update".freeze, "config/redis.conf".freeze, "docker-compose.yml".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/cache/common.rb".freeze, "lib/ollama/documents/cache/memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_backed_memory_cache.rb".freeze, "lib/ollama/documents/cache/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/cache_fetcher.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/file_argument.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze, "ollama-ruby.gemspec".freeze, "spec/assets/embeddings.json".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_backed_memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, 
"spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/cache_fetcher_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/file_argument_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
  s.homepage = "https://github.com/flori/ollama-ruby".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--title".freeze, "Ollama-ruby - Interacting with the Ollama API".freeze, "--main".freeze, "README.md".freeze]
  s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
  s.rubygems_version = "3.5.18".freeze
  s.summary = "Interacting with the Ollama API".freeze
- s.test_files = ["spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_backed_memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/file_argument_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze]
+ s.test_files = ["spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_backed_memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/cache_fetcher_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/file_argument_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze]
 
  s.specification_version = 4
 
@@ -47,4 +47,5 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency(%q<pdf-reader>.freeze, ["~> 2.0".freeze])
  s.add_runtime_dependency(%q<logger>.freeze, ["~> 1.0".freeze])
  s.add_runtime_dependency(%q<json>.freeze, ["~> 2.0".freeze])
+ s.add_runtime_dependency(%q<xdg>.freeze, ["~> 7.0".freeze])
  end
data/spec/ollama/utils/cache_fetcher_spec.rb ADDED
@@ -0,0 +1,42 @@
+ require 'spec_helper'
+
+ RSpec.describe Ollama::Utils::CacheFetcher do
+ let :url do
+ 'https://www.example.com/hello'
+ end
+
+ let :cache do
+ double('RedisCache')
+ end
+
+ let :fetcher do
+ described_class.new(cache).expose
+ end
+
+ it 'can be instantiated' do
+ expect(fetcher).to be_a described_class
+ end
+
+ it 'has #get' do
+ expect(cache).to receive(:[]).with('body-69ce405ab83f42dffa9fd22bbd47783f').and_return 'world'
+ expect(cache).to receive(:[]).with('content_type-69ce405ab83f42dffa9fd22bbd47783f').and_return 'text/plain'
+ yielded_io = nil
+ block = -> io { yielded_io = io }
+ fetcher.get(url, &block)
+ expect(yielded_io).to be_a StringIO
+ expect(yielded_io.read).to eq 'world'
+ end
+
+ it '#get needs block' do
+ expect { fetcher.get(url) }.to raise_error(ArgumentError)
+ end
+
+ it 'has #put' do
+ io = StringIO.new('world')
+ io.extend(Ollama::Utils::Fetcher::ContentType)
+ io.content_type = MIME::Types['text/plain'].first
+ expect(cache).to receive(:[]=).with('body-69ce405ab83f42dffa9fd22bbd47783f', 'world')
+ expect(cache).to receive(:[]=).with('content_type-69ce405ab83f42dffa9fd22bbd47783f', 'text/plain')
+ fetcher.put(url, io)
+ end
+ end
data/spec/ollama/utils/fetcher_spec.rb CHANGED
@@ -6,7 +6,7 @@ RSpec.describe Ollama::Utils::Fetcher do
  end
 
  let :fetcher do
- described_class.new
+ described_class.new.expose
  end
 
  it 'can be instantiated' do
@@ -36,7 +36,7 @@ RSpec.describe Ollama::Utils::Fetcher do
  it 'can #get without ssl peer verification' do
  fetcher = described_class.new(
  http_options: { ssl_verify_peer: false }
- )
+ ).expose
  stub_request(:get, 'https://www.example.com/hello').
  with(headers: fetcher.headers).
  to_return(
data/spec/spec_helper.rb CHANGED
@@ -5,6 +5,7 @@ if ENV['START_SIMPLECOV'].to_i == 1
  end
  end
  require 'rspec'
+ require 'tins/xt/expose'
  begin
  require 'debug'
  rescue LoadError
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ollama-ruby
  version: !ruby/object:Gem::Version
- version: 0.5.0
+ version: 0.6.0
  platform: ruby
  authors:
  - Florian Frank
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-09-26 00:00:00.000000000 Z
+ date: 2024-09-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gem_hadar
@@ -332,6 +332,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: '2.0'
+ - !ruby/object:Gem::Dependency
+ name: xdg
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '7.0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '7.0'
  description: Library that allows interacting with the Ollama API
  email: flori@ping.de
  executables:
@@ -387,6 +401,7 @@ extra_rdoc_files:
  - lib/ollama/tool/function/parameters.rb
  - lib/ollama/tool/function/parameters/property.rb
  - lib/ollama/utils/ansi_markdown.rb
+ - lib/ollama/utils/cache_fetcher.rb
  - lib/ollama/utils/chooser.rb
  - lib/ollama/utils/colorize_texts.rb
  - lib/ollama/utils/fetcher.rb
@@ -453,6 +468,7 @@ files:
  - lib/ollama/tool/function/parameters.rb
  - lib/ollama/tool/function/parameters/property.rb
  - lib/ollama/utils/ansi_markdown.rb
+ - lib/ollama/utils/cache_fetcher.rb
  - lib/ollama/utils/chooser.rb
  - lib/ollama/utils/colorize_texts.rb
  - lib/ollama/utils/fetcher.rb
@@ -499,6 +515,7 @@ files:
  - spec/ollama/options_spec.rb
  - spec/ollama/tool_spec.rb
  - spec/ollama/utils/ansi_markdown_spec.rb
+ - spec/ollama/utils/cache_fetcher_spec.rb
  - spec/ollama/utils/fetcher_spec.rb
  - spec/ollama/utils/file_argument_spec.rb
  - spec/ollama/utils/tags_spec.rb
@@ -566,6 +583,7 @@ test_files:
  - spec/ollama/options_spec.rb
  - spec/ollama/tool_spec.rb
  - spec/ollama/utils/ansi_markdown_spec.rb
+ - spec/ollama/utils/cache_fetcher_spec.rb
  - spec/ollama/utils/fetcher_spec.rb
  - spec/ollama/utils/file_argument_spec.rb
  - spec/ollama/utils/tags_spec.rb