llm.rb 1.0.1 → 2.0.0

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -40,47 +40,68 @@ class LLM::Gemini
       end
     end
 
-    def merge_candidates!(new_candidates_list)
-      new_candidates_list.each do |new_candidate_delta|
-        index = new_candidate_delta.index
+    def merge_candidates!(deltas)
+      deltas.each do |delta|
+        index = delta.index
         @body.candidates[index] ||= LLM::Object.from_hash({content: {parts: []}})
-        existing_candidate = @body.candidates[index]
-        new_candidate_delta.each do |key, value|
+        candidate = @body.candidates[index]
+        delta.each do |key, value|
           if key.to_s == "content"
-            merge_candidate_content!(existing_candidate.content, value) if value
+            merge_candidate_content!(candidate.content, value) if value
           else
-            existing_candidate[key] = value # Overwrite other fields
+            candidate[key] = value # Overwrite other fields
           end
         end
       end
     end
 
-    def merge_candidate_content!(existing_content, new_content_delta)
-      new_content_delta.each do |key, value|
+    def merge_candidate_content!(content, delta)
+      delta.each do |key, value|
         if key.to_s == "parts"
-          existing_content.parts ||= []
-          merge_content_parts!(existing_content.parts, value) if value
+          content.parts ||= []
+          merge_content_parts!(content.parts, value) if value
         else
-          existing_content[key] = value
+          content[key] = value
         end
       end
     end
 
-    def merge_content_parts!(existing_parts, new_parts_delta)
-      new_parts_delta.each do |new_part_delta|
-        if new_part_delta.text
-          last_existing_part = existing_parts.last
-          if last_existing_part&.text
-            last_existing_part.text << new_part_delta.text
-            @io << new_part_delta.text if @io.respond_to?(:<<)
-          else
-            existing_parts << new_part_delta
-            @io << new_part_delta.text if @io.respond_to?(:<<)
-          end
-        elsif new_part_delta.functionCall
-          existing_parts << new_part_delta
+    def merge_content_parts!(parts, deltas)
+      deltas.each do |delta|
+        if delta.text
+          merge_text!(parts, delta)
+        elsif delta.functionCall
+          merge_function_call!(parts, delta)
+        elsif delta.inlineData
+          parts << delta
+        elsif delta.functionResponse
+          parts << delta
+        elsif delta.fileData
+          parts << delta
        end
      end
    end
+
+    def merge_text!(parts, delta)
+      last_existing_part = parts.last
+      if last_existing_part&.text
+        last_existing_part.text << delta.text
+        @io << delta.text if @io.respond_to?(:<<)
+      else
+        parts << delta
+        @io << delta.text if @io.respond_to?(:<<)
+      end
+    end
+
+    def merge_function_call!(parts, delta)
+      last_existing_part = parts.last
+      if last_existing_part&.functionCall
+        last_existing_part.functionCall = LLM::Object.from_hash(
+          last_existing_part.functionCall.to_h.merge(delta.functionCall.to_h)
+        )
+      else
+        parts << delta
+      end
+    end
   end
 end
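
The streaming refactor above is mostly mechanical renaming (`deltas`/`delta` in place of `new_candidates_list`/`new_candidate_delta`), plus two behavioral additions: `inlineData`, `functionResponse`, and `fileData` parts are now appended during streaming, and consecutive `functionCall` deltas are merged into one call. The text-coalescing path is unchanged; here is a minimal standalone sketch of that behavior, using plain hashes in place of LLM::Object parts (illustrative only, not the gem's API):

```ruby
# Illustrative sketch: plain hashes stand in for LLM::Object parts/deltas.
def merge_text!(parts, delta)
  last = parts.last
  if last && last[:text]
    last[:text] << delta[:text] # append to the trailing text part
  else
    parts << delta              # otherwise open a new text part
  end
end

parts = []
merge_text!(parts, {text: "Hello"})
merge_text!(parts, {text: ", world"})
parts #=> [{text: "Hello, world"}]
```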
@@ -12,7 +12,7 @@ module LLM::Ollama::Format
     end
 
     ##
-    # Returns the message for the Ollama chat completions API
+    # Formats the message for the Ollama chat completions API
     # @return [Hash]
     def format
       catch(:abort) do
@@ -28,26 +28,16 @@ module LLM::Ollama::Format
 
     def format_content(content)
       case content
-      when File
-        content.close unless content.closed?
-        format_content(LLM.File(content.path))
-      when LLM::File
-        if content.image?
-          {content: "This message has an image associated with it", images: [content.to_b64]}
-        else
-          raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
-                                  "is not an image, and therefore not supported by the " \
-                                  "Ollama API"
-        end
       when String
         {content:}
       when LLM::Message
         format_content(content.content)
       when LLM::Function::Return
         throw(:abort, {role: "tool", tool_call_id: content.id, content: JSON.dump(content.value)})
+      when LLM::Object
+        format_object(content)
       else
-        raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
-                                "is not supported by the Ollama API"
+        prompt_error!(content)
       end
     end
 
@@ -70,6 +60,35 @@ module LLM::Ollama::Format
       end
     end
 
+    def format_object(object)
+      case object.kind
+      when :local_file then format_local_file(object.value)
+      when :remote_file then prompt_error!(object)
+      when :image_url then prompt_error!(object)
+      else prompt_error!(object)
+      end
+    end
+
+    def format_local_file(file)
+      if file.image?
+        {content: "This message has an image associated with it", images: [file.to_b64]}
+      else
+        raise LLM::PromptError, "The given local file (an instance of #{file.class}) " \
+                                "is not an image, and therefore not supported by the " \
+                                "Ollama API"
+      end
+    end
+
+    def prompt_error!(object)
+      if LLM::Object === object
+        raise LLM::PromptError, "The given LLM::Object with kind '#{object.kind}' is not " \
+                                "supported by the Ollama API"
+      else
+        raise LLM::PromptError, "The given object (an instance of #{object.class}) " \
+                                "is not supported by the Ollama API"
+      end
+    end
+
     def message = @message
     def content = message.content
     def returns = content.grep(LLM::Function::Return)
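
Net effect for Ollama: message formatting now dispatches on `LLM::Object#kind` rather than on Ruby `File`/`LLM::File` classes, and every unsupported input funnels through a single `prompt_error!` helper. Inferred from the branches above (values are illustrative), `format_content` produces:

```ruby
# Payload shapes inferred from the diff above; values are illustrative.
{content: "hello"}                           # String input
{content: "This message has an image associated with it",
 images: ["<base64-encoded image data>"]}    # LLM::Object kind :local_file (image)
# kind :remote_file, kind :image_url, and non-image local files raise
# LLM::PromptError, as the error messages above spell out.
```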
@@ -28,19 +28,33 @@ module LLM::OpenAI::Format
 
     private
 
+    def format_message
+      case content
+      when Array
+        format_array
+      else
+        {role: message.role, content: format_content(content)}
+      end
+    end
+
+    def format_array
+      if content.empty?
+        nil
+      elsif returns.any?
+        returns.map { {role: "tool", tool_call_id: _1.id, content: JSON.dump(_1.value)} }
+      else
+        {role: message.role, content: content.flat_map { format_content(_1) }}
+      end
+    end
+
     def format_content(content)
       case content
-      when URI
-        [{type: :image_url, image_url: {url: content.to_s}}]
-      when File
-        content.close unless content.closed?
-        format_content(LLM.File(content.path))
-      when LLM::File
-        format_file(content)
-      when LLM::Response
-        content.file? ? [{type: :file, file: {file_id: content.id}}] : prompt_error!(content)
+      when LLM::Object
+        format_object(content)
       when String
         [{type: :text, text: content.to_s}]
+      when LLM::Response
+        format_remote_file(content)
       when LLM::Message
         format_content(content.content)
       when LLM::Function::Return
@@ -50,37 +64,43 @@ module LLM::OpenAI::Format
       end
     end
 
-    def format_file(content)
-      file = content
-      if file.image?
-        [{type: :image_url, image_url: {url: file.to_data_uri}}]
+    def format_object(object)
+      case object.kind
+      when :image_url
+        [{type: :image_url, image_url: {url: object.value}}]
+      when :local_file
+        format_local_file(object.value)
+      when :remote_file
+        format_remote_file(object.value)
       else
-        [{type: :file, file: {filename: file.basename, file_data: file.to_data_uri}}]
+        prompt_error!(object)
       end
     end
 
-    def format_message
-      case content
-      when Array
-        format_array
+    def format_local_file(file)
+      if file.image?
+        [{type: :image_url, image_url: {url: file.to_data_uri}}]
       else
-        {role: message.role, content: format_content(content)}
+        [{type: :file, file: {filename: file.basename, file_data: file.to_data_uri}}]
       end
     end
 
-    def format_array
-      if content.empty?
-        nil
-      elsif returns.any?
-        returns.map { {role: "tool", tool_call_id: _1.id, content: JSON.dump(_1.value)} }
+    def format_remote_file(file)
+      if file.file?
+        [{type: :file, file: {file_id: file.id}}]
       else
-        {role: message.role, content: content.flat_map { format_content(_1) }}
+        prompt_error!(file)
       end
     end
 
     def prompt_error!(content)
-      raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
-                              "is not supported by the OpenAI chat completions API"
+      if LLM::Object === content
+        raise LLM::PromptError, "The given LLM::Object with kind '#{content.kind}' is not " \
+                                "supported by the OpenAI chat completions API."
+      else
+        raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
+                                "is not supported by the OpenAI chat completions API."
+      end
     end
 
     def message = @message
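
The chat-completions formatter gets the same reshaping: the `URI`/`File`/`LLM::File`/`LLM::Response` branches collapse into one `LLM::Object` dispatch, while `format_message` and `format_array` merely move above `format_content` unchanged. The content-part shapes, again inferred from the branches above with illustrative values:

```ruby
# Content-part shapes inferred from the diff; values are illustrative.
[{type: :text, text: "hello"}]                                        # String
[{type: :image_url, image_url: {url: "https://example.com/cat.png"}}] # kind :image_url
[{type: :image_url, image_url: {url: "data:image/png;base64,..."}}]   # kind :local_file, image
[{type: :file, file: {filename: "doc.pdf", file_data: "data:..."}}]   # kind :local_file, other
[{type: :file, file: {file_id: "file-abc123"}}]                       # kind :remote_file
```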
@@ -4,6 +4,9 @@ module LLM::OpenAI::Format
   ##
   # @private
   class RespondFormat
+    ##
+    # @param [LLM::Message] message
+    #  The message to format
     def initialize(message)
       @message = message
     end
@@ -22,12 +25,17 @@ module LLM::OpenAI::Format
 
     def format_content(content)
       case content
-      when LLM::Response
-        content.file? ? format_file(content) : prompt_error!(content)
       when String
         [{type: :input_text, text: content.to_s}]
-      when LLM::Message
-        format_content(content.content)
+      when LLM::Response then format_remote_file(content)
+      when LLM::Message then format_content(content.content)
+      when LLM::Object
+        case content.kind
+        when :image_url then [{type: :image_url, image_url: {url: content.value.to_s}}]
+        when :remote_file then format_remote_file(content.value)
+        when :local_file then prompt_error!(content)
+        else prompt_error!(content)
+        end
       else
         prompt_error!(content)
       end
@@ -52,7 +60,8 @@ module LLM::OpenAI::Format
       end
     end
 
-    def format_file(content)
+    def format_remote_file(content)
+      prompt_error!(content) unless content.file?
       file = LLM::File(content.filename)
       if file.image?
         [{type: :input_image, file_id: content.id}]
@@ -62,9 +71,15 @@ module LLM::OpenAI::Format
     end
 
     def prompt_error!(content)
-      raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
-                              "is not supported by the OpenAI responses API"
+      if LLM::Object === content
+        raise LLM::PromptError, "The given LLM::Object with kind '#{content.kind}' is not " \
+                                "supported by the OpenAI responses API."
+      else
+        raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
+                                "is not supported by the OpenAI responses API"
+      end
     end
+
     def message = @message
     def content = message.content
     def returns = content.grep(LLM::Function::Return)
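
One behavioral change hides in the rename here: `format_remote_file` now fails fast with `prompt_error!` unless `content.file?`, where the old code only checked `file?` at the call site. Local files are rejected by the responses formatter altogether; the inferred shapes (illustrative values):

```ruby
# Input-part shapes inferred from the diff; values are illustrative.
[{type: :input_text, text: "hello"}]            # String
[{type: :input_image, file_id: "file-abc123"}]  # remote file that is an image
# kind :local_file raises LLM::PromptError here: upload the file first,
# then pass the resulting LLM::Response (or an LLM::Object kind :remote_file).
```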
@@ -7,22 +7,43 @@ class LLM::Schema
   # {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
   # can act as constraints.
   class Object < Leaf
+    ##
+    # @return [Hash]
+    attr_reader :properties
+
+    ##
+    # @param properties [Hash]
+    #  A hash of properties
+    # @return [LLM::Schema::Object]
     def initialize(properties)
       @properties = properties
     end
 
+    ##
+    # @return [Hash]
     def to_h
       super.merge!({type: "object", properties:, required:})
     end
 
+    ##
+    # @raise [TypeError]
+    #  When given an object other than Object
+    # @return [LLM::Schema::Object]
+    #  Returns self
+    def merge!(other)
+      raise TypeError, "expected #{self.class} but got #{other.class}" unless self.class === other
+      @properties.merge!(other.properties)
+      self
+    end
+
+    ##
+    # @return [String]
     def to_json(options = {})
       to_h.to_json(options)
     end
 
     private
 
-    attr_reader :properties
-
     def required
       @properties.filter_map { _2.required? ? _1 : nil }
     end
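
Promoting `properties` to a public reader exists to support the new `merge!`, which lets separate single-property objects accumulate into one schema (each `LLM::Tool.param` call below builds a one-key `schema.object`, so repeated calls presumably merge this way). A hedged sketch of the semantics; `name_leaf` and `age_leaf` are hypothetical `LLM::Schema::Leaf` values:

```ruby
# name_leaf and age_leaf are hypothetical Leaf instances.
a = LLM::Schema::Object.new({name: name_leaf})
b = LLM::Schema::Object.new({age: age_leaf})
a.merge!(b)     # a's properties now hold both :name and :age
a.merge!(:oops) # raises TypeError: expected LLM::Schema::Object but got Symbol
```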
data/lib/llm/tool/param.rb ADDED
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+class LLM::Tool
+  ##
+  # The {LLM::Tool::Param LLM::Tool::Param} module extends the
+  # {LLM::Tool LLM::Tool} class with a "param" method that can
+  # define a parameter for simple types. For complex types, use
+  # {LLM::Tool.params LLM::Tool.params} instead.
+  #
+  # @example
+  #   class Greeter < LLM::Tool
+  #     name "greeter"
+  #     description "Greets the user"
+  #     param :name, String, "The user's name", required: true
+  #
+  #     def call(name:)
+  #       puts "Hello, #{name}!"
+  #     end
+  #   end
+  module Param
+    ##
+    # @param name [Symbol]
+    #  The name of a parameter
+    # @param type [Class, Symbol]
+    #  The parameter type (eg String)
+    # @param description [String]
+    #  The description of a property
+    # @param options [Hash]
+    #  A hash of options for the parameter
+    # @option options [Boolean] :required
+    #  Whether or not the parameter is required
+    # @option options [Object] :default
+    #  The default value for a given property
+    # @option options [Array<String>] :enum
+    #  One or more possible values for a param
+    def param(name, type, description, options = {})
+      lock do
+        function.params do |schema|
+          leaf = schema.public_send(Utils.resolve(type))
+          leaf = Utils.setup(leaf, description, options)
+          schema.object(name => leaf)
+        end
+      end
+    end
+
+    ##
+    # @api private
+    module Utils
+      extend self
+
+      def resolve(type)
+        if type == String
+          :string
+        elsif type == Integer
+          :integer
+        elsif type == Float
+          :number
+        else
+          type
+        end
+      end
+
+      def setup(leaf, description, options)
+        required = options.fetch(:required, false)
+        default = options.fetch(:default, nil)
+        enum = options.fetch(:enum, nil)
+        leaf.required if required
+        leaf.description(description) if description
+        leaf.default(default) if default
+        leaf.enum(enum) if enum
+        leaf
+      end
+    end
+  end
+end
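
For the Greeter example in the docstring above, the accumulated parameter schema would serialize to roughly the following hash; the outer shape follows `LLM::Schema::Object#to_h` earlier in this diff, while the leaf's own keys are an assumption about `LLM::Schema::Leaf`:

```ruby
# Approximate params schema for Greeter (leaf keys are assumed):
{
  type: "object",
  properties: {name: {type: "string", description: "The user's name"}},
  required: [:name]
}
```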
data/lib/llm/tool.rb CHANGED
@@ -18,6 +18,9 @@
 #   end
 # end
 class LLM::Tool
+  require_relative "tool/param"
+  extend LLM::Tool::Param
+
   ##
   # Registers the tool as a function when inherited
   # @param [Class] klass The subclass
@@ -35,7 +38,7 @@ class LLM::Tool
   # @return [String]
   def self.name(name = nil)
     lock do
-      function.tap { _1.name(name) }
+      name ? function.name(name) : function.name
     end
   end
 
@@ -45,7 +48,7 @@ class LLM::Tool
   # @return [String]
   def self.description(desc = nil)
     lock do
-      function.tap { _1.description(desc) }
+      desc ? function.description(desc) : function.description
     end
   end
 
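
With this change, `name` and `description` each double as getter and setter: an argument writes through to the underlying function, no argument reads the stored value back (assuming `LLM::Function#name` and `#description` behave the same way, as the ternary suggests). A hedged sketch:

```ruby
class Greeter < LLM::Tool
  name "greeter"                # with an argument: setter
  description "Greets the user"
end

Greeter.name        #=> "greeter" (no argument: getter)
Greeter.description #=> "Greets the user"
```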
51
54
 
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module LLM
-  VERSION = "1.0.1"
+  VERSION = "2.0.0"
 end
data/lib/llm.rb CHANGED
@@ -2,6 +2,7 @@
 
 module LLM
   require "stringio"
+  require_relative "llm/builder"
   require_relative "llm/schema"
   require_relative "llm/object"
   require_relative "llm/version"
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llm.rb
 version: !ruby/object:Gem::Version
-  version: 1.0.1
+  version: 2.0.0
 platform: ruby
 authors:
 - Antar Azri
@@ -179,11 +179,8 @@ files:
 - README.md
 - lib/llm.rb
 - lib/llm/bot.rb
-- lib/llm/bot/builder.rb
-- lib/llm/bot/conversable.rb
-- lib/llm/bot/prompt/completion.rb
-- lib/llm/bot/prompt/respond.rb
 - lib/llm/buffer.rb
+- lib/llm/builder.rb
 - lib/llm/client.rb
 - lib/llm/error.rb
 - lib/llm/eventhandler.rb
@@ -278,6 +275,7 @@ files:
 - lib/llm/schema/version.rb
 - lib/llm/server_tool.rb
 - lib/llm/tool.rb
+- lib/llm/tool/param.rb
 - lib/llm/utils.rb
 - lib/llm/version.rb
 - llm.gemspec
data/lib/llm/bot/builder.rb DELETED
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-class LLM::Bot
-  ##
-  # @private
-  module Builder
-    private
-
-    ##
-    # @param [String] prompt The prompt
-    # @param [Hash] params
-    # @return [LLM::Response]
-    def create_response!(prompt, params)
-      @provider.responses.create(
-        prompt,
-        @params.merge(params.merge(@response ? {previous_response_id: @response.id} : {}))
-      )
-    end
-
-    ##
-    # @param [String] prompt The prompt
-    # @param [Hash] params
-    # @return [LLM::Response]
-    def create_completion!(prompt, params)
-      @provider.complete(
-        prompt,
-        @params.merge(params.merge(messages:))
-      )
-    end
-  end
-end
data/lib/llm/bot/conversable.rb DELETED
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-class LLM::Bot
-  ##
-  # @private
-  module Conversable
-    private
-
-    ##
-    # Queues a response to be sent to the provider.
-    # @param [String] prompt The prompt
-    # @param [Hash] params
-    # @return [void]
-    def async_response(prompt, params = {})
-      if Array === prompt and prompt.empty?
-        @messages
-      else
-        role = params.delete(:role)
-        @messages << [LLM::Message.new(role, prompt), @params.merge(params), :respond]
-      end
-    end
-
-    ##
-    # Queues a completion to be sent to the provider.
-    # @param [String] prompt The prompt
-    # @param [Hash] params
-    # @return [void]
-    def async_completion(prompt, params = {})
-      if Array === prompt and prompt.empty?
-        @messages
-      else
-        role = params.delete(:role)
-        @messages << [LLM::Message.new(role, prompt), @params.merge(params), :complete]
-      end
-    end
-  end
-end
data/lib/llm/bot/prompt/completion.rb DELETED
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-module LLM::Bot::Prompt
-  class Completion < Struct.new(:bot, :defaults)
-    ##
-    # @param [LLM::Bot] bot
-    # @param [Hash] defaults
-    # @return [LLM::Bot::Prompt::Completion]
-    def initialize(bot, defaults)
-      super(bot, defaults || {})
-    end
-
-    ##
-    # @param [String] prompt
-    # @param [Hash] params (see LLM::Provider#complete)
-    # @return [LLM::Bot]
-    def system(prompt, params = {})
-      params = defaults.merge(params)
-      bot.chat prompt, params.merge(role: :system)
-    end
-
-    ##
-    # @param [String] prompt
-    # @param [Hash] params (see LLM::Provider#complete)
-    # @return [LLM::Bot]
-    def user(prompt, params = {})
-      params = defaults.merge(params)
-      bot.chat prompt, params.merge(role: :user)
-    end
-
-    ##
-    # @param [String] prompt
-    # @param [Hash] params (see LLM::Provider#complete)
-    # @return [LLM::Bot]
-    def assistant(prompt, params = {})
-      params = defaults.merge(params)
-      bot.chat prompt, params.merge(role: :assistant)
-    end
-
-    ##
-    # @param [String] prompt
-    # @param [Hash] params (see LLM::Provider#complete)
-    # @return [LLM::Bot]
-    def model(prompt, params = {})
-      params = defaults.merge(params)
-      bot.chat prompt, params.merge(role: :model)
-    end
-  end
-end