ollama-ruby 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -36,9 +36,11 @@ class Ollama::Documents::MemoryCache
   end
   include Enumerable

-  private
-
   def pre(key)
     [ @prefix, key ].join
   end
+
+  def unpre(key)
+    key.sub(/\A#@prefix/, '')
+  end
 end
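The new MemoryCache#unpre is the inverse of #pre, and both are now public (the private keyword above was dropped) so that Ollama::Documents#clear(tags:) further down can map prefixed cache keys back to document keys. A minimal usage sketch, assuming the cache takes the same prefix: keyword that the specs use (the constructor itself is not part of this hunk):

    cache = Ollama::Documents::MemoryCache.new(prefix: 'test-')
    cache.pre('foo')         # => "test-foo"  (key as stored in the cache)
    cache.unpre('test-foo')  # => "foo"       (original key, prefix stripped)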
@@ -13,7 +13,10 @@ class Ollama::Documents::RedisCache
   end

   def [](key)
-    JSON(redis.get(pre(key)), object_class: Ollama::Documents::Record)
+    value = redis.get(pre(key))
+    unless value.nil?
+      JSON(value, object_class: Ollama::Documents::Record)
+    end
   end

   def []=(key, value)
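With this change a cache miss short-circuits: redis.get returns nil for an unknown key, and [] now returns nil instead of passing that nil on to JSON(). A sketch of the intended behaviour (key names illustrative):

    cache['missing']  # => nil — no JSON parsing attempted
    cache['known']    # => Ollama::Documents::Record restored from the stored JSON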
@@ -45,8 +48,6 @@ class Ollama::Documents::RedisCache
   end
   include Enumerable

-  private
-
   def pre(key)
     [ @prefix, key ].join
   end
@@ -12,6 +12,7 @@ require 'ollama/documents/splitters/semantic'

 class Ollama::Documents
   include Ollama::Utils::Math
+  include Ollama::Utils::Width

   class Record < JSON::GenericObject
     def to_s
@@ -42,7 +43,7 @@ class Ollama::Documents
   def add(inputs, batch_size: 10, source: nil, tags: [])
     inputs = Array(inputs)
     tags = Ollama::Utils::Tags.new(tags)
-    source and tags.add File.basename(source)
+    source and tags.add File.basename(source).gsub(/\?.*/, '')
     inputs.map! { |i|
       text = i.respond_to?(:read) ? i.read : i.to_s
       text
@@ -51,7 +52,7 @@ class Ollama::Documents
     inputs.empty? and return self
     batches = inputs.each_slice(batch_size).
       with_infobar(
-        label: "Add #{tags}",
+        label: "Add #{truncate(tags.to_s, percentage: 25)}",
         total: inputs.size
       )
     batches.each do |batch|
@@ -87,8 +88,18 @@ class Ollama::Documents
     @cache.size
   end

-  def clear
-    @cache.clear
+  def clear(tags: nil)
+    if tags
+      tags = Ollama::Utils::Tags.new(Array(tags)).to_a
+      @cache.each do |key, record|
+        if (tags & record.tags).size >= 1
+          @cache.delete(@cache.unpre(key))
+        end
+      end
+    else
+      @cache.clear
+    end
+    self
   end

   def find(string, tags: nil, prompt: nil)
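clear now optionally restricts deletion to records whose tags intersect the given ones, and returns self either way. A usage sketch, assuming docs is an Ollama::Documents instance (tag names illustrative):

    docs.add('draft text', tags: %w[ draft ])
    docs.add('final text', tags: %w[ final ])
    docs.clear(tags: 'draft')  # removes only records tagged #draft
    docs.clear                 # unchanged behaviour: removes everything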
@@ -96,7 +107,7 @@ class Ollama::Documents
     needle_norm = norm(needle)
     records = @cache
     if tags
-      tags = Ollama::Utils::Tags.new(tags)
+      tags = Ollama::Utils::Tags.new(tags).to_a
       records = records.select { |_key, record| (tags & record.tags).size >= 1 }
     end
     records = records.sort_by { |key, record|
@@ -111,6 +122,20 @@ class Ollama::Documents
     records.transpose.last&.reverse.to_a
   end

+  def find_where(string, text_size: nil, text_count: nil, **opts)
+    records = find(string, **opts)
+    size, count = 0, 0
+    records.take_while do |record|
+      if text_size and (size += record.text.size) > text_size
+        next false
+      end
+      if text_count and (count += 1) > text_count
+        next false
+      end
+      true
+    end
+  end
+
   def collections
     case @cache
     when MemoryCache
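find_where delegates to find and then keeps taking results only while the cumulative text size and the number of returned records stay within the given budgets; both limits are optional. A sketch, assuming docs is an Ollama::Documents instance (the query and tag are illustrative):

    docs.find_where('Why is the sky blue?', tags: 'physics', text_size: 4096, text_count: 5)
    # => at most 5 records, cut off once their combined #text length would exceed 4096 characters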
data/lib/ollama/dto.rb CHANGED
@@ -8,8 +8,8 @@ module Ollama::DTO
   module ClassMethods
     attr_accessor :attributes

-    def json_create(object)
-      new(**object.transform_keys(&:to_sym))
+    def from_hash(hash)
+      new(**hash.transform_keys(&:to_sym))
     end

     def attr_reader(*names)
@@ -27,11 +27,8 @@ module Ollama::DTO
   end

   def as_json(*)
-    {
-      json_class: self.class.name
-    }.merge(
-      self.class.attributes.each_with_object({}) { |a, h| h[a] = send(a) }
-    ).reject { _2.nil? || _2.ask_and_send(:size) == 0 }
+    self.class.attributes.each_with_object({}) { |a, h| h[a] = send(a) }.
+      reject { _2.nil? || _2.ask_and_send(:size) == 0 }
   end

   alias to_hash as_json
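Together these two changes drop the json_class type hints from serialized payloads: as_json now emits only the DTO's own attributes, and from_hash (replacing json_create) rebuilds an object from such a plain hash. A round-trip sketch using Ollama::Message, mirroring the updated message_spec further down:

    message = Ollama::Message.new(role: 'user', content: 'hello world')
    message.to_json
    # => '{"role":"user","content":"hello world"}'
    Ollama::Message.from_hash(JSON(message.to_json))
    # => an Ollama::Message instance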
@@ -65,4 +65,8 @@ class Ollama::Options
     #{@@types.keys.map { "self.#{_1} = #{_1}" }.join(?\n)}
     end
   }
+
+  def self.[](value)
+    new(**value.to_h)
+  end
 end
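Ollama::Options.[] casts anything hash-like into a type-checked Options instance, so option hashes loaded from JSON or YAML config can be validated in one step; invalid values still raise TypeError, as the new options_spec below exercises:

    Ollama::Options[{ num_ctx: 8192, temperature: 0.7 }]  # => Ollama::Options instance
    Ollama::Options[{ penalize_newline: :tertium }]       # raises TypeError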
@@ -0,0 +1,16 @@
+module Ollama::Utils::FileArgument
+  module_function
+
+  def get_file_argument(prompt, default: nil)
+    if prompt.present? && prompt.size < 2 ** 15 &&
+        File.basename(prompt).size < 2 ** 8 &&
+        File.exist?(prompt)
+    then
+      File.read(prompt)
+    elsif prompt.present?
+      prompt
+    else
+      default
+    end
+  end
+end
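get_file_argument interprets its argument as a path when a plausibly short, existing file was named, and as literal content otherwise, falling back to default when nothing usable was passed. A sketch (the path is illustrative):

    include Ollama::Utils::FileArgument
    get_file_argument('./prompts/system.txt')    # => File.read('./prompts/system.txt'), if that file exists
    get_file_argument('You are a helpful bot.')  # => "You are a helpful bot."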
@@ -1,6 +1,10 @@
 require 'sorted_set'

 class Ollama::Utils::Tags < SortedSet
+  def to_a
+    super.map(&:to_s)
+  end
+
   def to_s
     map { |t| '#%s' % t } * ' '
   end
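Tags#to_a now yields plain strings instead of the set's raw elements, which keeps the (tags & record.tags) intersections in clear(tags:) and find(tags:) above working on string arrays. A small sketch:

    tags = Ollama::Utils::Tags.new(%w[ foo bar ])
    tags.to_a  # => ["bar", "foo"]   (sorted, stringified)
    tags.to_s  # => "#bar #foo"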
@@ -12,11 +12,24 @@ module Ollama::Utils::Width
       raise ArgumentError, "either pass percentage or length argument"
     percentage and length ||= width(percentage:)
     text.gsub(/(?<!\n)\n(?!\n)/, ' ').lines.map do |line|
-      if line.length > length
+      if length >= 1 && line.length > length
         line.gsub(/(.{1,#{length}})(\s+|$)/, "\\1\n").strip
       else
         line.strip
       end
     end * ?\n
   end
+
+  def truncate(text, percentage: nil, length: nil, ellipsis: ?…)
+    percentage.nil? ^ length.nil? or
+      raise ArgumentError, "either pass percentage or length argument"
+    percentage and length ||= width(percentage:)
+    if length < 1
+      +''
+    elsif text.size > length
+      text[0, length - 1] + ?…
+    else
+      text
+    end
+  end
 end
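truncate is the counterpart to wrap: given either percentage: (of the terminal width) or length:, it shortens the text to that many characters and ends it with an ellipsis; it is what keeps the infobar label built in Ollama::Documents#add above from overflowing. A sketch using the terminal-independent length form:

    include Ollama::Utils::Width
    truncate('The quick brown fox', length: 10)  # => "The quick…"
    truncate('short', length: 10)                # => "short"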
@@ -1,6 +1,6 @@
 module Ollama
   # Ollama version
-  VERSION = '0.1.0'
+  VERSION = '0.3.0'
   VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
   VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
   VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama.rb CHANGED
@@ -18,6 +18,7 @@ require 'ollama/utils/math'
 require 'ollama/utils/colorize_texts'
 require 'ollama/utils/fetcher'
 require 'ollama/utils/chooser'
+require 'ollama/utils/file_argument'

 require 'ollama/version'
 require 'ollama/errors'
data/ollama-ruby.gemspec CHANGED
@@ -1,24 +1,24 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama-ruby 0.1.0 ruby lib
+# stub: ollama-ruby 0.3.0 ruby lib

 Gem::Specification.new do |s|
   s.name = "ollama-ruby".freeze
-  s.version = "0.1.0".freeze
+  s.version = "0.3.0".freeze

   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
   s.authors = ["Florian Frank".freeze]
-  s.date = "2024-08-30"
+  s.date = "2024-09-06"
   s.description = "Library that allows interacting with the Ollama API".freeze
   s.email = "flori@ping.de".freeze
- s.executables = ["ollama_console".freeze, "ollama_chat".freeze, "ollama_update".freeze]
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/memory_cache.rb".freeze, "lib/ollama/documents/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze]
- s.files = [".envrc".freeze, "Gemfile".freeze, "LICENSE".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_console".freeze, "bin/ollama_update".freeze, "config/redis.conf".freeze, "docker-compose.yml".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/memory_cache.rb".freeze, "lib/ollama/documents/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze, "ollama-ruby.gemspec".freeze, "spec/assets/embeddings.json".freeze, "spec/assets/kitten.jpg".freeze, "spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, 
"spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze]
+ s.executables = ["ollama_console".freeze, "ollama_chat".freeze, "ollama_update".freeze, "ollama_cli".freeze]
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/memory_cache.rb".freeze, "lib/ollama/documents/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/file_argument.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze]
+ s.files = [".envrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "LICENSE".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_cli".freeze, "bin/ollama_console".freeze, "bin/ollama_update".freeze, "config/redis.conf".freeze, "docker-compose.yml".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/documents.rb".freeze, "lib/ollama/documents/memory_cache.rb".freeze, "lib/ollama/documents/redis_cache.rb".freeze, "lib/ollama/documents/splitters/character.rb".freeze, "lib/ollama/documents/splitters/semantic.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/chooser.rb".freeze, "lib/ollama/utils/colorize_texts.rb".freeze, "lib/ollama/utils/fetcher.rb".freeze, "lib/ollama/utils/file_argument.rb".freeze, "lib/ollama/utils/math.rb".freeze, "lib/ollama/utils/tags.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze, "ollama-ruby.gemspec".freeze, "spec/assets/embeddings.json".freeze, "spec/assets/kitten.jpg".freeze, "spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, 
"spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
   s.homepage = "https://github.com/flori/ollama-ruby".freeze
   s.licenses = ["MIT".freeze]
   s.rdoc_options = ["--title".freeze, "Ollama-ruby - Interacting with the Ollama API".freeze, "--main".freeze, "README.md".freeze]
   s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
-  s.rubygems_version = "3.5.16".freeze
+  s.rubygems_version = "3.5.18".freeze
   s.summary = "Interacting with the Ollama API".freeze
  s.test_files = ["spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/documents/memory_cache_spec.rb".freeze, "spec/ollama/documents/redis_cache_spec.rb".freeze, "spec/ollama/documents/splitters/character_spec.rb".freeze, "spec/ollama/documents/splitters/semantic_spec.rb".freeze, "spec/ollama/documents_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/ollama/utils/fetcher_spec.rb".freeze, "spec/ollama/utils/tags_spec.rb".freeze, "spec/spec_helper.rb".freeze]
 
@@ -43,4 +43,5 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency(%q<complex_config>.freeze, ["~> 0.20".freeze])
   s.add_runtime_dependency(%q<search_ui>.freeze, ["~> 0.0".freeze])
   s.add_runtime_dependency(%q<amatch>.freeze, ["~> 0.4.1".freeze])
+  s.add_runtime_dependency(%q<pdf-reader>.freeze, ["~> 2.0".freeze])
 end
@@ -116,7 +116,7 @@ RSpec.describe Ollama::Client do
   it 'can generate without stream' do
     expect(excon).to receive(:send).with(
       :post,
-      body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World"}',
+      body: '{"model":"llama3.1","prompt":"Hello World"}',
       headers: hash_including(
         'Content-Type' => 'application/json; charset=utf-8',
       )
@@ -127,7 +127,7 @@ RSpec.describe Ollama::Client do
   it 'can generate with stream' do
     expect(excon).to receive(:send).with(
       :post,
-      body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World","stream":true}',
+      body: '{"model":"llama3.1","prompt":"Hello World","stream":true}',
       headers: hash_including(
         'Content-Type' => 'application/json; charset=utf-8',
       ),
@@ -32,7 +32,7 @@ RSpec.describe Ollama::Commands::Chat do
       model: 'llama3.1', messages: messages.map(&:as_json), stream: true,
     )
     expect(chat.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Chat","model":"llama3.1","messages":[{"json_class":"Ollama::Message","role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
+      '{"model":"llama3.1","messages":[{"role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
     )
   end

@@ -45,7 +45,7 @@ RSpec.describe Ollama::Commands::Chat do
     expect(ollama).to receive(:request).
       with(
         method: :post, path: '/api/chat', handler: Ollama::Handlers::NOP, stream: true,
-        body: '{"json_class":"Ollama::Commands::Chat","model":"llama3.1","messages":[{"json_class":"Ollama::Message","role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
+        body: '{"model":"llama3.1","messages":[{"role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
       )
     chat.perform(Ollama::Handlers::NOP)
   end
@@ -12,7 +12,7 @@ RSpec.describe Ollama::Commands::Copy do
       source: 'llama3.1', destination: 'camell3', stream: false
     )
     expect(copy.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Copy","source":"llama3.1","destination":"camell3","stream":false}'
+      '{"source":"llama3.1","destination":"camell3","stream":false}'
     )
   end

@@ -21,7 +21,7 @@ RSpec.describe Ollama::Commands::Copy do
     copy.client = ollama = double('Ollama::Client')
     expect(ollama).to receive(:request).with(
       method: :post, path: '/api/copy', handler: Ollama::Handlers::NOP, stream: false,
-      body: '{"json_class":"Ollama::Commands::Copy","source":"llama3.1","destination":"camell3","stream":false}'
+      body: '{"source":"llama3.1","destination":"camell3","stream":false}'
     )
     copy.perform(Ollama::Handlers::NOP)
   end
@@ -16,7 +16,7 @@ RSpec.describe Ollama::Commands::Create do
       name: 'llama3.1-wopr', modelfile: "FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.", stream: true,
     )
     expect(create.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Create","name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
+      '{"name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
     )
   end

@@ -30,7 +30,7 @@ RSpec.describe Ollama::Commands::Create do
     expect(ollama).to receive(:request).
       with(
         method: :post, path: '/api/create', handler: Ollama::Handlers::NOP, stream: true,
-        body: '{"json_class":"Ollama::Commands::Create","name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
+        body: '{"name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
       )
     create.perform(Ollama::Handlers::NOP)
   end
@@ -12,7 +12,7 @@ RSpec.describe Ollama::Commands::Delete do
       name: 'llama3.1', stream: false
     )
     expect(delete.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Delete","name":"llama3.1","stream":false}'
+      '{"name":"llama3.1","stream":false}'
     )
   end

@@ -21,7 +21,7 @@ RSpec.describe Ollama::Commands::Delete do
     delete.client = ollama = double('Ollama::Client')
     expect(ollama).to receive(:request).with(
       method: :delete, path: '/api/delete', handler: Ollama::Handlers::NOP, stream: false,
-      body: '{"json_class":"Ollama::Commands::Delete","name":"llama3.1","stream":false}'
+      body: '{"name":"llama3.1","stream":false}'
     )
     delete.perform(Ollama::Handlers::NOP)
   end
@@ -19,7 +19,7 @@ RSpec.describe Ollama::Commands::Embed do
       model: 'all-minilm', input: 'Why is the sky blue?',
     )
     expect(embed.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","options":{"json_class":"Ollama::Options","num_ctx":666},"stream":false}'
+      '{"model":"all-minilm","input":"Why is the sky blue?","options":{"num_ctx":666},"stream":false}'
     )
   end

@@ -32,7 +32,7 @@ RSpec.describe Ollama::Commands::Embed do
       model: 'all-minilm', input: [ 'Why is the sky blue?', 'Why is the grass green?' ],
     )
     expect(embed.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":["Why is the sky blue?","Why is the grass green?"],"stream":false}'
+      '{"model":"all-minilm","input":["Why is the sky blue?","Why is the grass green?"],"stream":false}'
     )
   end

@@ -46,7 +46,7 @@ RSpec.describe Ollama::Commands::Embed do
     expect(ollama).to receive(:request).
       with(
         method: :post, path: '/api/embed', handler: Ollama::Handlers::NOP, stream: false,
-        body: '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","stream":false}'
+        body: '{"model":"all-minilm","input":"Why is the sky blue?","stream":false}'
       )
     embed.perform(Ollama::Handlers::NOP)
   end
@@ -18,7 +18,7 @@ RSpec.describe Ollama::Commands::Embeddings do
       model: 'mxbai-embed-large', prompt: 'Here are the coordinates of all Soviet military installations: …',
     )
     expect(embeddings.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Embeddings","model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
+      '{"model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
    )
   end

@@ -31,7 +31,7 @@ RSpec.describe Ollama::Commands::Embeddings do
     expect(ollama).to receive(:request).
       with(
         method: :post, path: '/api/embeddings', handler: Ollama::Handlers::NOP, stream: false,
-        body: '{"json_class":"Ollama::Commands::Embeddings","model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
+        body: '{"model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
       )
     embeddings.perform(Ollama::Handlers::NOP)
   end
@@ -12,7 +12,7 @@ RSpec.describe Ollama::Commands::Generate do
       model: 'llama3.1', prompt: 'Hello World'
     )
     expect(generate.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World"}'
+      '{"model":"llama3.1","prompt":"Hello World"}'
     )
   end

@@ -22,7 +22,7 @@ RSpec.describe Ollama::Commands::Generate do
     expect(ollama).to receive(:request).
       with(
         method: :post, path: '/api/generate', handler: Ollama::Handlers::NOP, stream: true,
-        body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World","stream":true}'
+        body: '{"model":"llama3.1","prompt":"Hello World","stream":true}'
       )
     generate.perform(Ollama::Handlers::NOP)
   end
@@ -12,7 +12,7 @@ RSpec.describe Ollama::Commands::Pull do
       name: 'llama3.1', stream: true
     )
     expect(pull.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Pull","name":"llama3.1","stream":true}'
+      '{"name":"llama3.1","stream":true}'
     )
   end

@@ -21,7 +21,7 @@ RSpec.describe Ollama::Commands::Pull do
     pull.client = ollama = double('Ollama::Client')
     expect(ollama).to receive(:request).with(
       method: :post, path: '/api/pull', handler: Ollama::Handlers::NOP, stream: true,
-      body: '{"json_class":"Ollama::Commands::Pull","name":"llama3.1","stream":true}'
+      body: '{"name":"llama3.1","stream":true}'
     )
     pull.perform(Ollama::Handlers::NOP)
   end
@@ -12,7 +12,7 @@ RSpec.describe Ollama::Commands::Push do
       name: 'llama3.1', stream: true
     )
     expect(push.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Push","name":"llama3.1","stream":true}'
+      '{"name":"llama3.1","stream":true}'
     )
   end

@@ -21,7 +21,7 @@ RSpec.describe Ollama::Commands::Push do
     push.client = ollama = double('Ollama::Client')
     expect(ollama).to receive(:request).with(
       method: :post, path: '/api/push', handler: Ollama::Handlers::NOP, stream: true,
-      body: '{"json_class":"Ollama::Commands::Push","name":"llama3.1","stream":true}'
+      body: '{"name":"llama3.1","stream":true}'
     )
     push.perform(Ollama::Handlers::NOP)
   end
@@ -12,7 +12,7 @@ RSpec.describe Ollama::Commands::Show do
       name: 'llama3.1', stream: false
     )
     expect(show.to_json).to eq(
-      '{"json_class":"Ollama::Commands::Show","name":"llama3.1","stream":false}'
+      '{"name":"llama3.1","stream":false}'
     )
   end

@@ -21,7 +21,7 @@ RSpec.describe Ollama::Commands::Show do
     show.client = ollama = double('Ollama::Client')
     expect(ollama).to receive(:request).with(
       method: :post, path: '/api/show', handler: Ollama::Handlers::NOP ,stream: false,
-      body: '{"json_class":"Ollama::Commands::Show","name":"llama3.1","stream":false}'
+      body: '{"name":"llama3.1","stream":false}'
     )
     show.perform(Ollama::Handlers::NOP)
   end
@@ -74,5 +74,13 @@ RSpec.describe Ollama::Documents::RedisCache do
       expect(redis).to receive(:scan_each).with(match: 'test-*')
       redis_cache.to_a
     end
+
+    it 'can compute prefix with pre' do
+      expect(redis_cache.pre('foo')).to eq 'test-foo'
+    end
+
+    it 'can remove prefix with unpre' do
+      expect(redis_cache.unpre('test-foo')).to eq 'foo'
+    end
   end
 end
@@ -76,6 +76,34 @@ RSpec.describe Ollama::Documents do
     expect(records[0].to_s).to eq '#<Ollama::Documents::Record "foo" #test 1.0>'
   end

+  it 'can find strings conditionally' do
+    allow(ollama).to receive(:embed).
+      with(model:, input: [ 'foobar' ], options: nil).
+      and_return(double(embeddings: [ [ 0.01 ] ]))
+    allow(ollama).to receive(:embed).
+      with(model:, input: [ 'foo' ], options: nil).
+      and_return(double(embeddings: [ [ 0.1 ] ]))
+    expect(documents << 'foobar').to eq documents
+    expect(documents << 'foo').to eq documents
+    expect(ollama).to receive(:embed).at_least(:once).
+      with(model:, input: 'foo', options: nil).
+      and_return(double(embeddings: [ [ 0.1 ] ]))
+    records = documents.find_where('foo', text_count: 1)
+    expect(records).to eq [
+      Ollama::Documents::Record[text: 'foo', embedding: [ 0.1 ], similarity: 1.0 ],
+    ]
+    records = documents.find_where('foo', text_size: 3)
+    expect(records).to eq [
+      Ollama::Documents::Record[text: 'foo', embedding: [ 0.1 ], similarity: 1.0 ],
+    ]
+    records = documents.find_where('foo')
+    expect(records).to eq [
+      Ollama::Documents::Record[text: 'foo', embedding: [ 0.1 ], similarity: 1.0 ],
+      Ollama::Documents::Record[text: 'foobar', embedding: [ 0.1 ], similarity: 1.0 ],
+    ]
+  end
+
+
   context 'it uses cache' do
     before do
       allow(ollama).to receive(:embed).
@@ -103,6 +131,20 @@ RSpec.describe Ollama::Documents do
       }.to change { documents.size }.from(1).to(0)
     end

+    it 'can clear texts with tags' do
+      allow(ollama).to receive(:embed).
+        with(model:, input: %w[ bar ], options: nil).
+        and_return(double(embeddings: [ [ 0.1 ] ]))
+      expect(documents.add('foo', tags: %i[ test ])).to eq documents
+      expect(documents.add('bar', tags: %i[ test2 ])).to eq documents
+      expect {
+        documents.clear tags: 'test'
+      }.to change { documents.size }.from(2).to(1)
+      expect {
+        documents.clear tags: :test2
+      }.to change { documents.size }.from(1).to(0)
+    end
+
     it 'returns collections' do
       expect(documents.collections).to eq [ :default ]
     end
@@ -19,19 +19,18 @@ RSpec.describe Ollama::Message do

   it 'can be converted to JSON' do
     expect(message.as_json).to eq(
-      json_class: described_class.name,
       role: 'user',
       content: 'hello world',
       images: [ image ],
     )
     expect(message.to_json).to eq(
-      '{"json_class":"Ollama::Message","role":"user","content":"hello world","images":["dGVzdA==\n"]}'
+      '{"role":"user","content":"hello world","images":["dGVzdA==\n"]}'
     )
   end

   it 'can be restored from JSON' do
-    expect(JSON(<<~'end', create_additions: true)).to be_a described_class
-      {"json_class":"Ollama::Message","role":"user","content":"hello world","images":["dGVzdA==\n"]}
+    expect(described_class.from_hash(JSON(<<~'end'))).to be_a described_class
+      {"role":"user","content":"hello world","images":["dGVzdA==\n"]}
     end
   end
 end
@@ -13,6 +13,24 @@ RSpec.describe Ollama::Options do
     expect(options).to be_a described_class
   end

+  it 'can be used to cast hashes' do
+    expect(described_class[{
+      penalize_newline: true,
+      num_ctx: 8192,
+      temperature: 0.7,
+    }]).to be_a described_class
+  end
+
+  it 'raises errors when casting goes all wrong' do
+    expect {
+      described_class[{
+        penalize_newline: :tertium,
+        num_ctx: 8192,
+        temperature: 0.7,
+      }]
+    }.to raise_error(TypeError)
+  end
+
   it 'throws error for invalid types' do
     expect { described_class.new(temperature: Class.new) }.
       to raise_error(TypeError)