omniai 1.5.2 → 1.6.1

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 81c34e7c05b90e85fe99ba87f0b462f4322a0dd14c74afdb5f951faccdc698d6
4
- data.tar.gz: 551b238e17c909abbe3b08ecf630a2f6aa0047a17413c7966d23ca9ccbf0d26d
3
+ metadata.gz: fc0ef904482119dedd44057ac782fdf5c0c43a72f4a212c9a10736ba6389bd9d
4
+ data.tar.gz: 9d8c20309d3652207719f0d95253df5d4bb422658f42d724aa509bec0595721b
5
5
  SHA512:
6
- metadata.gz: 0c291e2840cd9782046d7011e7becc77b2d9c64da427d6785ed121ae430a4153f7fb3d6f28f4f4c9cb943f18a0b8d7c20e345fc8c6d0097faa83eb205a302132
7
- data.tar.gz: d592b529340cacdcf458295a9914589806f6c568469c0c9548cdbd63fa639d64dcb04982dc2f7f68506e2207809a2b1b689b6267dabe262f3847c298785f5ba7
6
+ metadata.gz: 00462515ed1392ccc2a94dcdc1bc61e18ce316306dbbaf77523938a47e49e33d7f6e17d37ecc921edc82b04f8f7a8de7a122fdd0eeb787cabf95cf8c42092309
7
+ data.tar.gz: 894271b6a222be666bad5a2502ab4b040ab4e8c7f31c6c3e3dd3226b296a8198a0f3030e31c75de0fea30db8d0b5e27c0601b13d99127ab0121dbbef5b128943
data/README.md CHANGED
@@ -122,25 +122,30 @@ client = OmniAI::OpenAI::Client.new(timeout: {
122
122
 
123
123
  Clients that support chat (e.g. Anthropic w/ "Claude", Google w/ "Gemini", Mistral w/ "LeChat", OpenAI w/ "ChatGPT", etc) generate completions using the following calls:
124
124
 
125
- #### Completions using Single Message
125
+ #### Completions using a Simple Prompt
126
+
127
+ Generating a completion is as simple as sending in the text:
126
128
 
127
129
  ```ruby
128
130
  completion = client.chat('Tell me a joke.')
129
- completion.choice.message.content # '...'
131
+ completion.choice.message.content # 'Why don't scientists trust atoms? They make up everything!'
130
132
  ```
131
133
 
132
- #### Completions using Multiple Messages
134
+ #### Completions using a Complex Prompt
135
+
136
+ More complex completions are generated using a block w/ various system / user messages:
133
137
 
134
138
  ```ruby
135
- messages = [
136
- {
137
- role: OmniAI::Chat::Role::SYSTEM,
138
- content: 'You are a helpful assistant with an expertise in geography.',
139
- },
140
- 'What is the capital of Canada?'
141
- ]
142
- completion = client.chat(messages, model: '...', temperature: 0.7, format: :json)
143
- completion.choice.message.content # '...'
139
+ completion = client.chat do |prompt|
140
+ prompt.system 'You are a helpful assistant with an expertise in animals.'
141
+ prompt.user do |message|
142
+ message.text 'What animals are in the attached photos?'
143
+ message.url('https://.../cat.jpeg', "image/jpeg")
144
+ message.url('https://.../dog.jpeg', "image/jpeg")
145
+ message.file('./hamster.jpeg', "image/jpeg")
146
+ end
147
+ end
148
+ completion.choice.message.content # 'They are photos of a cat, a dog, and a hamster.'
144
149
  ```
145
150
 
146
151
  #### Completions using Streaming via Proc
@@ -167,20 +172,19 @@ client.chat('Tell me a story', stream: $stdout)
167
172
  A chat can also be initialized with tools:
168
173
 
169
174
  ```ruby
170
- client.chat('What is the weather in "London, England" and "Madrid, Spain"?', tools: [
171
- OmniAI::Tool.new(
172
- proc { |location:, unit: 'celsius'| "It is #{rand(20..50)}° #{unit} in #{location}" },
173
- name: 'Weather',
174
- description: 'Lookup the weather in a location',
175
- parameters: OmniAI::Tool::Parameters.new(
176
- properties: {
177
- location: OmniAI::Tool::Property.string(description: 'The city and country (e.g. Toronto, Canada).'),
178
- unit: OmniAI::Tool::Property.string(enum: %w[celcius farenheit]),
179
- },
180
- required: %i[location]
181
- )
175
+ tool = OmniAI::Tool.new(
176
+ proc { |location:, unit: 'celsius'| "#{rand(20..50)}° #{unit} in #{location}" },
177
+ name: 'Weather',
178
+ description: 'Lookup the weather in a location',
179
+ parameters: OmniAI::Tool::Parameters.new(
180
+ properties: {
181
+ location: OmniAI::Tool::Property.string(description: 'e.g. Toronto'),
182
+ unit: OmniAI::Tool::Property.string(enum: %w[celsius fahrenheit]),
183
+ },
184
+ required: %i[location]
182
185
  )
183
- ])
186
+ )
187
+ client.chat('What is the weather in "London" and "Madrid"?', tools: [tool])
184
188
  ```
185
189
 
186
190
  ### Transcribe
@@ -0,0 +1,29 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # A placeholder for parts of a message. Any subclass must implement the serializable interface.
6
+ class Content
7
+ # @param context [Context] optional
8
+ #
9
+ # @return [String]
10
+ def serialize(context: nil)
11
+ raise NotImplementedError, "#{self.class}#serialize undefined"
12
+ end
13
+
14
+ # @param data [Hash]
15
+ # @param context [Context] optional
16
+ #
17
+ # @return [Content]
18
+ def self.deserialize(data, context: nil)
19
+ raise ArgumentError, "untyped data=#{data.inspect}" unless data.key?('type')
20
+
21
+ case data['type']
22
+ when 'text' then Text.deserialize(data, context:)
23
+ when /(.*)_url/ then URL.deserialize(data, context:)
24
+ else raise ArgumentError, "unknown type=#{data['type'].inspect}"
25
+ end
26
+ end
27
+ end
28
+ end
29
+ end
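As a rough illustration of the dispatch above (not from the gem's docs; the hash literals are assumed inputs), `Content.deserialize` routes `'text'` payloads to `Text` and any `*_url` payload to `URL`:

```ruby
require 'omniai'

# A 'text' part is routed to Text.deserialize...
text = OmniAI::Chat::Content.deserialize({ 'type' => 'text', 'text' => 'Hello!' })
text.text # => "Hello!"

# ...and any '<kind>_url' part is routed to URL.deserialize.
url = OmniAI::Chat::Content.deserialize({
  'type' => 'image_url',
  'image_url' => { 'url' => 'https://example.com/cat.jpg' },
})
url.uri # => "https://example.com/cat.jpg"
```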
@@ -0,0 +1,42 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # Used to handle the setup of serializer / deserializer methods for each type.
6
+ #
7
+ # Usage:
8
+ #
9
+ # OmniAI::Chat::Context.build do |context|
10
+ # context.serializers[:prompt] = ->(prompt, context:) { ... }
11
+ # context.serializers[:message] = ->(message, context:) { ... }
12
+ # context.serializers[:file] = ->(file, context:) { ... }
13
+ # context.serializers[:text] = ->(text, context:) { ... }
14
+ # context.serializers[:url] = ->(url, context:) { ... }
15
+ # context.deserializers[:prompt] = ->(data, context:) { Prompt.new(...) }
16
+ # context.deserializers[:message] = ->(data, context:) { Message.new(...) }
17
+ # context.deserializers[:file] = ->(data, context:) { File.new(...) }
18
+ # context.deserializers[:text] = ->(data, context:) { Text.new(...) }
19
+ # context.deserializers[:url] = ->(data, context:) { URL.new(...) }
20
+ # end
21
+ class Context
22
+ # @return [Hash]
23
+ attr_accessor :serializers
24
+
25
+ # @return [Hash]
26
+ attr_reader :deserializers
27
+
28
+ # @return [Context]
29
+ def self.build(&block)
30
+ new.tap do |context|
31
+ block&.call(context)
32
+ end
33
+ end
34
+
35
+ # @return [Context]
36
+ def initialize
37
+ @serializers = {}
38
+ @deserializers = {}
39
+ end
40
+ end
41
+ end
42
+ end
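For illustration (a sketch, not from the gem), registering a custom `:text` serializer on a `Context` overrides the default output produced by `Text#serialize`:

```ruby
require 'omniai'

# Build a context that upcases text content when serializing.
context = OmniAI::Chat::Context.build do |ctx|
  ctx.serializers[:text] = ->(text, context:) { { type: 'text', text: text.text.upcase } }
end

text = OmniAI::Chat::Text.new('Hello!')
text.serialize                   # => { type: 'text', text: 'Hello!' }
text.serialize(context: context) # => { type: 'text', text: 'HELLO!' }
```

Presumably, provider-specific gems can use such a context to map the generic prompt structure onto their own wire formats.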
@@ -0,0 +1,39 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # A file is media that can be sent to many LLMs.
6
+ class File < Media
7
+ attr_accessor :io
8
+
9
+ # @param io [IO, Pathname, String]
10
+ # @param type [Symbol, String] :image, :video, :audio, "audio/flac", "image/jpeg", "video/mpeg", etc.
11
+ def initialize(io, type)
12
+ super(type)
13
+ @io = io
14
+ end
15
+
16
+ # @return [String]
17
+ def inspect
18
+ "#<#{self.class} io=#{@io.inspect}>"
19
+ end
20
+
21
+ # @return [String]
22
+ def fetch!
23
+ case @io
24
+ when IO then @io.read
25
+ else ::File.binread(@io)
26
+ end
27
+ end
28
+
29
+ # @param context [Context]
30
+ # @return [Hash]
31
+ def serialize(context: nil)
32
+ serializer = context&.serializers&.[](:file)
33
+ return serializer.call(self, context:) if serializer
34
+
35
+ { type: "#{kind}_url", "#{kind}_url": { url: data_uri } }
36
+ end
37
+ end
38
+ end
39
+ end
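As a usage sketch (the path is the assumed `./hamster.jpeg` from the README example), serializing a `File` inlines the bytes as a base64 data URI via `Media#data_uri`, defined in the next file:

```ruby
require 'omniai'

file = OmniAI::Chat::File.new('./hamster.jpeg', 'image/jpeg')
file.serialize
# => { type: "image_url", image_url: { url: "data:image/jpeg;base64,/9j/..." } }
```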
@@ -0,0 +1,69 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # An abstract class that represents audio / image / video and is used for both files and urls.
6
+ class Media < Content
7
+ class TypeError < Error; end
8
+
9
+ # @return [Symbol, String]
10
+ attr_accessor :type
11
+
12
+ # @param type [String] "audio/flac", "image/jpeg", "video/mpeg", etc.
13
+ def initialize(type)
14
+ super()
15
+ @type = type
16
+ end
17
+
18
+ # @return [Boolean]
19
+ def text?
20
+ @type.match?(%r{^text/})
21
+ end
22
+
23
+ # @return [Boolean]
24
+ def audio?
25
+ @type.match?(%r{^audio/})
26
+ end
27
+
28
+ # @return [Boolean]
29
+ def image?
30
+ @type.match?(%r{^image/})
31
+ end
32
+
33
+ # @return [Boolean]
34
+ def video?
35
+ @type.match?(%r{^video/})
36
+ end
37
+
38
+ # @return [:video, :audio, :image, :text]
39
+ def kind
40
+ if text? then :text
41
+ elsif audio? then :audio
42
+ elsif image? then :image
43
+ elsif video? then :video
44
+ else
45
+ raise(TypeError, "unsupported type=#{@type}")
46
+ end
47
+ end
48
+
49
+ # e.g. "Hello" -> "SGVsbG8h"
50
+ #
51
+ # @return [String]
52
+ def data
53
+ Base64.strict_encode64(fetch!)
54
+ end
55
+
56
+ # e.g. "data:text/html;base64,..."
57
+ #
58
+ # @return [String]
59
+ def data_uri
60
+ "data:#{@type};base64,#{data}"
61
+ end
62
+
63
+ # @return [String]
64
+ def fetch!
65
+ raise NotImplementedError, "#{self.class}#fetch! undefined"
66
+ end
67
+ end
68
+ end
69
+ end
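To illustrate the helpers above (a sketch using the concrete `File` subclass; the paths are assumed):

```ruby
require 'omniai'

media = OmniAI::Chat::File.new('./notes.txt', 'text/plain')
media.kind     # => :text
media.data_uri # => "data:text/plain;base64,..." (base64 of the file contents)

# An unrecognized MIME prefix raises Media::TypeError.
begin
  OmniAI::Chat::File.new('./report.pdf', 'application/pdf').kind
rescue OmniAI::Chat::Media::TypeError => e
  e.message # => "unsupported type=application/pdf"
end
```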
@@ -0,0 +1,126 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # Used to standardize the process of building a message within a prompt:
6
+ #
7
+ # completion = client.chat do |prompt|
8
+ # prompt.user do |message|
9
+ # message.text 'What are these photos of?'
10
+ # message.url 'https://example.com/cat.jpg', "image/jpeg"
11
+ # message.url 'https://example.com/dog.jpg', "image/jpeg"
12
+ # message.file File.open('hamster.jpg'), "image/jpeg"
13
+ # end
14
+ # end
15
+ class Message
16
+ # @return [Array<Content>, String]
17
+ attr_accessor :content
18
+
19
+ # @return [String]
20
+ attr_accessor :role
21
+
22
+ # @param content [String, nil]
23
+ # @param role [String]
24
+ def initialize(content: nil, role: Role::USER)
25
+ @content = content || []
26
+ @role = role
27
+ end
28
+
29
+ # @return [String]
30
+ def inspect
31
+ "#<#{self.class} role=#{@role.inspect} content=#{@content.inspect}>"
32
+ end
33
+
34
+ # Usage:
35
+ #
36
+ # Message.deserialize({ role: :user, content: 'Hello!' }) # => #<Message ...>
37
+ #
38
+ # @param data [Hash]
39
+ # @param context [Context] optional
40
+ #
41
+ # @return [Message]
42
+ def self.deserialize(data, context: nil)
43
+ deserialize = context&.deserializers&.[](:message)
44
+ return deserialize.call(data, context:) if deserialize
45
+
46
+ new(
47
+ content: data['content'].map { |content| Content.deserialize(content, context:) },
48
+ role: data['role']
49
+ )
50
+ end
51
+
52
+ # Usage:
53
+ #
54
+ # message.serialize # => { role: :user, content: 'Hello!' }
55
+ # message.serialize # => { role: :user, content: [{ type: 'text', text: 'Hello!' }] }
56
+ #
57
+ # @param context [Context] optional
58
+ #
59
+ # @return [Hash]
60
+ def serialize(context: nil)
61
+ serializer = context&.serializers&.[](:message)
62
+ return serializer.call(self, context:) if serializer
63
+
64
+ content = @content.is_a?(String) ? @content : @content.map { |content| content.serialize(context:) }
65
+
66
+ { role: @role, content: }
67
+ end
68
+
69
+ # @return [Boolean]
70
+ def role?(role)
71
+ String(@role).eql?(String(role))
72
+ end
73
+
74
+ # @return [Boolean]
75
+ def system?
76
+ role?(Role::SYSTEM)
77
+ end
78
+
79
+ # @return [Boolean]
80
+ def user?
81
+ role?(Role::USER)
82
+ end
83
+
84
+ # Usage:
85
+ #
86
+ # message.text('What are these photos of?')
87
+ #
88
+ # @param value [String]
89
+ #
90
+ # @return [Text]
91
+ def text(value)
92
+ Text.new(value).tap do |text|
93
+ @content << text
94
+ end
95
+ end
96
+
97
+ # Usage:
98
+ #
99
+ # message.url('https://example.com/hamster.jpg', "image/jpeg")
100
+ #
101
+ # @param uri [String]
102
+ # @param type [String]
103
+ #
104
+ # @return [URL]
105
+ def url(uri, type)
106
+ URL.new(uri, type).tap do |url|
107
+ @content << url
108
+ end
109
+ end
110
+
111
+ # Usage:
112
+ #
113
+ # message.file(File.open('hamster.jpg'), "image/jpeg")
114
+ #
115
+ # @param io [IO]
116
+ # @param type [String]
117
+ #
118
+ # @return [File]
119
+ def file(io, type)
120
+ File.new(io, type).tap do |file|
121
+ @content << file
122
+ end
123
+ end
124
+ end
125
+ end
126
+ end
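A quick sketch of the builder in action (URLs and paths assumed; the exact role literal follows `OmniAI::Chat::Role`):

```ruby
require 'omniai'

message = OmniAI::Chat::Message.new # defaults to the user role
message.text 'What are these photos of?'
message.url 'https://example.com/cat.jpg', 'image/jpeg'
message.file './hamster.jpg', 'image/jpeg'

message.serialize
# => {
#      role: :user,
#      content: [
#        { type: 'text', text: 'What are these photos of?' },
#        { type: 'image_url', image_url: { url: 'https://example.com/cat.jpg' } },
#        { type: 'image_url', image_url: { url: 'data:image/jpeg;base64,...' } },
#      ],
#    }
```

Note that `#url` and `#file` take the MIME type as a positional argument, matching the method signatures above.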
@@ -0,0 +1,130 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # Used to standardize the process of building complex prompts.
6
+ #
7
+ # Usage:
8
+ #
9
+ # completion = OmniAI::Chat::Prompt.build do |prompt|
10
+ # prompt.system('You are a helpful assistant.')
11
+ # prompt.user do |message|
12
+ # message.text 'What are these photos of?'
13
+ # message.url 'https://example.com/cat.jpg', "image/jpeg"
14
+ # message.url 'https://example.com/dog.jpg', "image/jpeg"
15
+ # message.file File.open('hamster.jpg'), "image/jpeg"
16
+ # end
17
+ # end
18
+ class Prompt
19
+ class MessageError < Error; end
20
+
21
+ # @return [Array<Message>]
22
+ attr_accessor :messages
23
+
24
+ # Usage:
25
+ #
26
+ # OmniAI::Chat::Prompt.build do |prompt|
27
+ # prompt.system('You are an expert in geography.')
28
+ # prompt.user('What is the capital of Canada?')
29
+ # end
30
+ #
31
+ # @return [Prompt]
32
+ # @yield [Prompt]
33
+ def self.build(&block)
34
+ new.tap do |prompt|
35
+ block&.call(prompt)
36
+ end
37
+ end
38
+
39
+ # Usage:
40
+ #
41
+ # OmniAI::Chat::Prompt.parse('What is the capital of Canada?')
42
+ #
43
+ # @param prompt [nil, String]
44
+ #
45
+ # @return [Prompt]
46
+ def self.parse(prompt)
47
+ return new if prompt.nil?
48
+ return prompt if prompt.is_a?(self)
49
+
50
+ new.tap do |instance|
51
+ instance.user(prompt)
52
+ end
53
+ end
54
+
55
+ # @param messages [Array<Message>] optional
56
+ def initialize(messages: [])
57
+ @messages = messages
58
+ end
59
+
60
+ # @return [String]
61
+ def inspect
62
+ "#<#{self.class.name} messages=#{@messages.inspect}>"
63
+ end
64
+
65
+ # Usage:
66
+ #
67
+ # prompt.serialize # => [{ content: "What is the capital of Canada?", role: :user }]
68
+ #
69
+ # @param context [Context] optional
70
+ #
71
+ # @return [Array<Hash>]
72
+ def serialize(context: nil)
73
+ serializer = context&.serializers&.[](:prompt)
74
+ return serializer.call(self, context:) if serializer
75
+
76
+ @messages.map { |message| message.serialize(context:) }
77
+ end
78
+
79
+ # Usage:
80
+ #
81
+ # prompt.message('What is the capital of Canada?')
82
+ #
83
+ # @param content [String, nil]
84
+ # @param role [Symbol]
85
+ #
86
+ # @yield [Message]
87
+ # @return [Message]
88
+ def message(content = nil, role: :user, &block)
89
+ raise ArgumentError, 'content or block is required' if content.nil? && block.nil?
90
+
91
+ Message.new(content:, role:).tap do |message|
92
+ block&.call(message)
93
+ @messages << message
94
+ end
95
+ end
96
+
97
+ # Usage:
98
+ #
99
+ # prompt.system('You are a helpful assistant.')
100
+ #
101
+ # prompt.system do |message|
102
+ # message.text 'You are a helpful assistant.'
103
+ # end
104
+ #
105
+ # @param content [String, nil]
106
+ #
107
+ # @yield [Message]
108
+ # @return [Message]
109
+ def system(content = nil, &)
110
+ message(content, role: Role::SYSTEM, &)
111
+ end
112
+
113
+ # Usage:
114
+ #
115
+ # prompt.user('What is the capital of Canada?')
116
+ #
117
+ # prompt.user do |message|
118
+ # message.text 'What is the capital of Canada?'
119
+ # end
120
+ #
121
+ # @param content [String, nil]
122
+ #
123
+ # @yield [Message]
124
+ # @return [Message]
125
+ def user(content = nil, &)
126
+ message(content, role: Role::USER, &)
127
+ end
128
+ end
129
+ end
130
+ end
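Putting it together (a sketch; the output mirrors the `serialize` comment above):

```ruby
require 'omniai'

prompt = OmniAI::Chat::Prompt.build do |builder|
  builder.system 'You are an expert in geography.'
  builder.user 'What is the capital of Canada?'
end

prompt.serialize
# => [
#      { role: :system, content: 'You are an expert in geography.' },
#      { role: :user, content: 'What is the capital of Canada?' },
#    ]
```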
@@ -10,12 +10,12 @@ module OmniAI
10
10
  @data['index']
11
11
  end
12
12
 
13
- # @return [OmniAI::Chat::Response::Part]
13
+ # @return [Part]
14
14
  def part
15
15
  raise NotImplementedError, "#{self.class.name}#part undefined"
16
16
  end
17
17
 
18
- # @return [OmniAI::Chat::Response::ToolCallList]
18
+ # @return [ToolCallList]
19
19
  def tool_call_list
20
20
  part.tool_call_list
21
21
  end
@@ -5,7 +5,7 @@ module OmniAI
5
5
  module Response
6
6
  # A chunk returned by the API.
7
7
  class Chunk < Payload
8
- # @return [Array<OmniAI::Chat::Response::DeltaChoice>]
8
+ # @return [Array<DeltaChoice>]
9
9
  def choices
10
10
  @choices ||= @data['choices'].map { |data| DeltaChoice.new(data:) }
11
11
  end
@@ -5,7 +5,7 @@ module OmniAI
5
5
  module Response
6
6
  # A completion returned by the API.
7
7
  class Completion < Payload
8
- # @return [Array<OmniAI::Chat:Response:::MessageChoice>]
8
+ # @return [Array<MessageChoice>]
9
9
  def choices
10
10
  @choices ||= @data['choices'].map { |data| MessageChoice.new(data:) }
11
11
  end
@@ -10,12 +10,12 @@ module OmniAI
10
10
  "#<#{self.class.name} index=#{index} delta=#{delta.inspect}>"
11
11
  end
12
12
 
13
- # @return [OmniAI::Chat::Response::Delta]
13
+ # @return [Delta]
14
14
  def delta
15
15
  @delta ||= Delta.new(data: @data['delta'])
16
16
  end
17
17
 
18
- # @return [OmniAI::Chat::Response::Delta]
18
+ # @return [Delta]
19
19
  def part
20
20
  delta
21
21
  end
@@ -10,12 +10,12 @@ module OmniAI
10
10
  "#<#{self.class.name} index=#{index} message=#{message.inspect}>"
11
11
  end
12
12
 
13
- # @return [OmniAI::Chat::Response::Message]
13
+ # @return [Message]
14
14
  def message
15
15
  @message ||= Message.new(data: @data['message'])
16
16
  end
17
17
 
18
- # @return [OmniAI::Chat::Response::Message]
18
+ # @return [Message]
19
19
  def part
20
20
  message
21
21
  end
@@ -20,7 +20,7 @@ module OmniAI
20
20
  @data['content']
21
21
  end
22
22
 
23
- # @return [Array<OmniAI::Chat::Response::ToolCall>]
23
+ # @return [Array<ToolCall>]
24
24
  def tool_call_list
25
25
  return [] unless @data['tool_calls']
26
26
 
@@ -28,7 +28,7 @@ module OmniAI
28
28
  end
29
29
 
30
30
  # @param index [Integer]
31
- # @return [OmniAI::Chat::Response::ToolCall, nil]
31
+ # @return [ToolCall, nil]
32
32
  def tool_call(index: 0)
33
33
  tool_call_list[index]
34
34
  end
@@ -30,24 +30,24 @@ module OmniAI
30
30
  @data['model']
31
31
  end
32
32
 
33
- # @return [Array<OmniAI::Chat::Response::Choice>]
33
+ # @return [Array<Choice>]
34
34
  def choices
35
35
  raise NotImplementedError, "#{self.class.name}#choices undefined"
36
36
  end
37
37
 
38
38
  # @param index [Integer]
39
- # @return [OmniAI::Chat::Response::DeltaChoice]
39
+ # @return [DeltaChoice]
40
40
  def choice(index: 0)
41
41
  choices[index]
42
42
  end
43
43
 
44
44
  # @param index [Integer]
45
- # @return [OmniAI::Chat::Response::Part]
45
+ # @return [Part]
46
46
  def part(index: 0)
47
47
  choice(index:).part
48
48
  end
49
49
 
50
- # @return [OmniAI::Chat::Response::Usage]
50
+ # @return [Usage]
51
51
  def usage
52
52
  @usage ||= Usage.new(data: @data['usage']) if @data['usage']
53
53
  end
@@ -62,7 +62,7 @@ module OmniAI
62
62
  choice.content?
63
63
  end
64
64
 
65
- # @return [Array<OmniAI::Chat::Response:ToolCall>]
65
+ # @return [Array<ToolCall>]
66
66
  def tool_call_list
67
67
  choice.tool_call_list
68
68
  end
@@ -20,7 +20,7 @@ module OmniAI
20
20
  @data['type']
21
21
  end
22
22
 
23
- # @return [OmniAI::Chat::Response::Function]
23
+ # @return [Function]
24
24
  def function
25
25
  @function ||= Function.new(data: @data['function']) if @data['function']
26
26
  end
@@ -0,0 +1,40 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # Just some text.
6
+ class Text < Content
7
+ # @return [String]
8
+ attr_accessor :text
9
+
10
+ # @param text [String]
11
+ def initialize(text = nil)
12
+ super()
13
+ @text = text
14
+ end
15
+
16
+ # @return [String]
17
+ def inspect
18
+ "#<#{self.class} text=#{@text.inspect}>"
19
+ end
20
+
21
+ # @param data [Hash]
22
+ def self.deserialize(data, context: nil)
23
+ deserialize = context&.deserializers&.[](:text)
24
+ return deserialize.call(data, context:) if deserialize
25
+
26
+ new(data['text'])
27
+ end
28
+
29
+ # @param context [Context] optional
30
+ #
31
+ # @return [Hash]
32
+ def serialize(context: nil)
33
+ serializer = context&.serializers&.[](:text)
34
+ return serializer.call(self, context:) if serializer
35
+
36
+ { type: 'text', text: @text }
37
+ end
38
+ end
39
+ end
40
+ end
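A tiny sketch; note that `deserialize` expects string keys (as produced by parsing a provider's JSON response), while `serialize` emits symbol keys:

```ruby
require 'omniai'

text = OmniAI::Chat::Text.new('Hello!')
text.serialize # => { type: 'text', text: 'Hello!' }

OmniAI::Chat::Text.deserialize({ 'type' => 'text', 'text' => 'Hello!' }).text
# => "Hello!"
```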
@@ -0,0 +1,69 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ class Chat
5
+ # A URL that is media that can be sent to many LLMs.
6
+ class URL < Media
7
+ # @return [URI, String]
8
+ attr_accessor :uri
9
+
10
+ class FetchError < HTTPError; end
11
+
12
+ # @param uri [URI, String] "https://example.com/cat.jpg"
13
+ # @param type [Symbol, String] "audio/flac", "image/jpeg", "video/mpeg", :audio, :image, :video, etc.
14
+ def initialize(uri, type = nil)
15
+ super(type)
16
+ @uri = uri
17
+ end
18
+
19
+ # @return [String]
20
+ def inspect
21
+ "#<#{self.class} uri=#{@uri.inspect}>"
22
+ end
23
+
24
+ # @param data [Hash]
25
+ def self.deserialize(data, context: nil)
26
+ deserialize = context&.deserializers&.[](:url)
27
+ return deserialize.call(data, context:) if deserialize
28
+
29
+ type = /(?<type>\w+)_url/.match(data['type'])[:type]
30
+ uri = data["#{type}_url"]['url']
31
+
32
+ new(uri, type)
33
+ end
34
+
35
+ # @param context [Context] optional
36
+ #
37
+ # @return [Hash]
38
+ def serialize(context: nil)
39
+ serializer = context&.serializers&.[](:url)
40
+ return serializer.call(self, context:) if serializer
41
+
42
+ {
43
+ type: "#{kind}_url",
44
+ "#{kind}_url": { url: @uri },
45
+ }
46
+ end
47
+
48
+ # @raise [FetchError]
49
+ #
50
+ # @return [String]
51
+ def fetch!
52
+ response = request!
53
+ String(response.body)
54
+ end
55
+
56
+ protected
57
+
58
+ # @raise [FetchError]
59
+ #
60
+ # @return [HTTP::Response]
61
+ def request!
62
+ response = HTTP.get(@uri)
63
+ raise FetchError, response.flush unless response.status.success?
64
+
65
+ response
66
+ end
67
+ end
68
+ end
69
+ end
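A short sketch (example URL assumed); unlike `File`, a `URL` serializes by reference and only downloads the content when `fetch!` is called:

```ruby
require 'omniai'

url = OmniAI::Chat::URL.new('https://example.com/cat.jpg', 'image/jpeg')
url.serialize
# => { type: "image_url", image_url: { url: "https://example.com/cat.jpg" } }

url.fetch! # HTTP GET on the URI; raises OmniAI::Chat::URL::FetchError on a non-2xx response
```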
data/lib/omniai/chat.rb CHANGED
@@ -50,15 +50,24 @@ module OmniAI
50
50
  new(...).process!
51
51
  end
52
52
 
53
- # @param messages [String] required
53
+ # @param prompt [OmniAI::Chat::Prompt, String, nil] optional
54
54
  # @param client [OmniAI::Client] the client
55
55
  # @param model [String] required
56
56
  # @param temperature [Float, nil] optional
57
57
  # @param stream [Proc, IO, nil] optional
58
58
  # @param tools [Array<OmniAI::Tool>] optional
59
59
  # @param format [Symbol, nil] optional - :json
60
- def initialize(messages, client:, model:, temperature: nil, stream: nil, tools: nil, format: nil)
61
- @messages = arrayify(messages)
60
+ #
61
+ # @yield [prompt] optional
62
+ # @yieldparam prompt [OmniAI::Chat::Prompt]
63
+ #
64
+ # @return [OmniAI::Chat]
65
+ def initialize(prompt = nil, client:, model:, temperature: nil, stream: nil, tools: nil, format: nil, &block)
66
+ raise ArgumentError, 'prompt or block is required' if !prompt && !block
67
+
68
+ @prompt = prompt ? Prompt.parse(prompt) : Prompt.new
69
+ block&.call(@prompt)
70
+
62
71
  @client = client
63
72
  @model = model
64
73
  @temperature = temperature
@@ -78,6 +87,22 @@ module OmniAI
78
87
 
79
88
  protected
80
89
 
90
+ # Used to spawn another chat with the same configuration using different messages.
91
+ #
92
+ # @param prompt [OmniAI::Chat::Prompt]
93
+ # @return [OmniAI::Chat::Response::Completion]
94
+ def spawn!(prompt)
95
+ self.class.new(
96
+ prompt,
97
+ client: @client,
98
+ model: @model,
99
+ temperature: @temperature,
100
+ stream: @stream,
101
+ tools: @tools,
102
+ format: @format
103
+ ).process!
104
+ end
105
+
81
106
  # @return [Hash]
82
107
  def payload
83
108
  raise NotImplementedError, "#{self.class.name}#payload undefined"
@@ -89,7 +114,7 @@ module OmniAI
89
114
  end
90
115
 
91
116
  # @param response [HTTP::Response]
92
- # @return [OmniAI::Chat::Completion]
117
+ # @return [OmniAI::Chat::Response::Completion]
93
118
  def parse!(response:)
94
119
  if @stream
95
120
  stream!(response:)
@@ -104,19 +129,18 @@ module OmniAI
104
129
  completion = self.class::Response::Completion.new(data: response.parse)
105
130
 
106
131
  if @tools && completion.tool_call_list.any?
107
- @messages = [
108
- *@messages,
132
+ spawn!([
133
+ *@prompt.serialize,
109
134
  *completion.choices.map(&:message).map(&:data),
110
135
  *(completion.tool_call_list.map { |tool_call| execute_tool_call(tool_call) }),
111
- ]
112
- process!
136
+ ])
113
137
  else
114
138
  completion
115
139
  end
116
140
  end
117
141
 
118
142
  # @param response [HTTP::Response]
119
- # @return [OmniAI::Chat::Stream]
143
+ # @return [OmniAI::Chat::Response::Stream]
120
144
  def stream!(response:)
121
145
  raise Error, "#{self.class.name}#stream! unstreamable" unless @stream
122
146
 
@@ -134,23 +158,6 @@ module OmniAI
134
158
  @stream.puts if @stream.is_a?(IO) || @stream.is_a?(StringIO)
135
159
  end
136
160
 
137
- # @return [Array<Hash>]
138
- def messages
139
- @messages.map do |content|
140
- case content
141
- when String then { role: Role::USER, content: }
142
- when Hash then content
143
- else raise Error, "Unsupported content=#{content.inspect}"
144
- end
145
- end
146
- end
147
-
148
- # @param value [Object, Array<Object>]
149
- # @return [Array<Object>]
150
- def arrayify(value)
151
- value.is_a?(Array) ? value : [value]
152
- end
153
-
154
161
  # @return [HTTP::Response]
155
162
  def request!
156
163
  @client
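For reference, a short sketch of the normalization the new constructor relies on: `Prompt.parse` wraps a plain string in a single user message and returns an existing `Prompt` untouched, so `client.chat('...')` and the block form build the same structure:

```ruby
require 'omniai'

OmniAI::Chat::Prompt.parse('What is the capital of Canada?').serialize
# => [{ role: :user, content: 'What is the capital of Canada?' }]

prompt = OmniAI::Chat::Prompt.build { |builder| builder.user('What is the capital of Canada?') }
OmniAI::Chat::Prompt.parse(prompt).equal?(prompt) # => true
```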
data/lib/omniai/client.rb CHANGED
@@ -131,8 +131,11 @@ module OmniAI
131
131
  # @param stream [Proc, nil] optional
132
132
  # @param tools [Array<OmniAI::Tool>] optional
133
133
  #
134
+ # @yield [prompt] optional
135
+ # @yieldparam prompt [OmniAI::Chat::Prompt]
136
+ #
134
137
  # @return [OmniAI::Chat::Completion]
135
- def chat(messages, model:, temperature: nil, format: nil, stream: nil, tools: nil)
138
+ def chat(messages, model:, temperature: nil, format: nil, stream: nil, tools: nil, &)
136
139
  raise NotImplementedError, "#{self.class.name}#chat undefined"
137
140
  end
138
141
 
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module OmniAI
4
- VERSION = '1.5.2'
4
+ VERSION = '1.6.1'
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: omniai
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.5.2
4
+ version: 1.6.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Kevin Sylvestre
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2024-07-12 00:00:00.000000000 Z
11
+ date: 2024-07-18 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: event_stream_parser
@@ -68,10 +68,12 @@ files:
68
68
  - exe/omniai
69
69
  - lib/omniai.rb
70
70
  - lib/omniai/chat.rb
71
- - lib/omniai/chat/content/file.rb
72
- - lib/omniai/chat/content/media.rb
73
- - lib/omniai/chat/content/text.rb
74
- - lib/omniai/chat/content/url.rb
71
+ - lib/omniai/chat/content.rb
72
+ - lib/omniai/chat/context.rb
73
+ - lib/omniai/chat/file.rb
74
+ - lib/omniai/chat/media.rb
75
+ - lib/omniai/chat/message.rb
76
+ - lib/omniai/chat/prompt.rb
75
77
  - lib/omniai/chat/response/choice.rb
76
78
  - lib/omniai/chat/response/chunk.rb
77
79
  - lib/omniai/chat/response/completion.rb
@@ -86,6 +88,8 @@ files:
86
88
  - lib/omniai/chat/response/stream.rb
87
89
  - lib/omniai/chat/response/tool_call.rb
88
90
  - lib/omniai/chat/response/usage.rb
91
+ - lib/omniai/chat/text.rb
92
+ - lib/omniai/chat/url.rb
89
93
  - lib/omniai/cli.rb
90
94
  - lib/omniai/cli/base_handler.rb
91
95
  - lib/omniai/cli/chat_handler.rb
@@ -1,27 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- class Chat
5
- module Content
6
- # A file that is either audio / image / video.
7
- class File < Media
8
- attr_accessor :io
9
-
10
- # @param io [IO, Pathname, String]
11
- # @param type [Symbol, String] :image, :video, :audio, "audio/flac", "image/jpeg", "video/mpeg", etc.
12
- def initialize(io, type)
13
- super(type)
14
- @io = io
15
- end
16
-
17
- # @return [String]
18
- def fetch!
19
- case @io
20
- when IO then @io.read
21
- else ::File.binread(@io)
22
- end
23
- end
24
- end
25
- end
26
- end
27
- end
@@ -1,56 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- class Chat
5
- module Content
6
- # An abstract class that represents audio / image / video and is used for both files and urls.
7
- class Media
8
- attr_accessor :type
9
-
10
- # @param type [String] "audio/flac", "image/jpeg", "video/mpeg", etc.
11
- def initialize(type)
12
- @type = type
13
- end
14
-
15
- # @return [Boolean]
16
- def text?
17
- @type.match?(%r{^text/})
18
- end
19
-
20
- # @return [Boolean]
21
- def audio?
22
- @type.match?(%r{^audio/})
23
- end
24
-
25
- # @return [Boolean]
26
- def image?
27
- @type.match?(%r{^image/})
28
- end
29
-
30
- # @return [Boolean]
31
- def video?
32
- @type.match?(%r{^video/})
33
- end
34
-
35
- # @yield [io]
36
- def fetch!(&)
37
- raise NotImplementedError, "#{self.class}#fetch! undefined"
38
- end
39
-
40
- # e.g. "Hello" -> "SGVsbG8h"
41
- #
42
- # @return [String]
43
- def data
44
- Base64.strict_encode64(fetch!)
45
- end
46
-
47
- # e.g. "data:text/html;base64,..."
48
- #
49
- # @return [String]
50
- def data_uri
51
- "data:#{@type};base64,#{data}"
52
- end
53
- end
54
- end
55
- end
56
- end
@@ -1,17 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- class Chat
5
- module Content
6
- # Just some text.
7
- class Text
8
- attr_accessor :text
9
-
10
- # @param text [text]
11
- def initialize(text)
12
- @text = text
13
- end
14
- end
15
- end
16
- end
17
- end
@@ -1,41 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- class Chat
5
- module Content
6
- # A url that is either audio / image / video.
7
- class URL < Media
8
- attr_accessor :url, :type
9
-
10
- class HTTPError < OmniAI::HTTPError; end
11
-
12
- # @param url [URI, String]
13
- # @param type [Symbol, String] "audio/flac", "image/jpeg", "video/mpeg", etc.
14
- def initialize(url, type)
15
- super(type)
16
- @url = url
17
- end
18
-
19
- # @raise [HTTPError]
20
- #
21
- # @return [String]
22
- def fetch!
23
- response = request!
24
- String(response.body)
25
- end
26
-
27
- private
28
-
29
- # @raise [HTTPError]
30
- #
31
- # @return [HTTP::Response]
32
- def request!
33
- response = HTTP.get(@url)
34
- raise HTTPError, response.flush unless response.status.success?
35
-
36
- response
37
- end
38
- end
39
- end
40
- end
41
- end