omniai-openai 1.3.3 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c5f48cc11134ffb023a4d1797a637ee1a0a6ae0365da799df07c484f177a58cb
- data.tar.gz: 971a2c0524376bd64cf4c1e04b82ab0373b20314d11a06eb0ebc7113f6b9515f
+ metadata.gz: ccfa9de2275327a63e5e187d40050fe86c5c22098a1caf03418fd34e3bd87462
+ data.tar.gz: 163cbd9b4dab840e0d16dbb7677c4caab56552717a6dbba27afb3326832b70c6
  SHA512:
- metadata.gz: c3765f6900534a23d7dc6033f0dc73dc31845494b938e33d345374e4e76ba16e300083cf1c2bedfc82ccc727a29636327092ea86b05f265eae40881de43fe0e8
- data.tar.gz: b37e93448e1b3314724af029fcdca1f472b6a4e55b027a864079881d5139211b4f8001c0b805479af24ff6f69e586fd919d78bf10843d7de046ca525805e75e3
+ metadata.gz: 7ce15471f2609194788cfef73679e8c95bae0d9b011aecdc1087fe3f546239be4ca9053de3e4100808b403a34bb8d997a76a67d66b299afae426a812bd35f430
+ data.tar.gz: d0d1e276d090972077b51de2fab52f710fff7f2cd78b22a95cb3b7dbea23eac64d3d3f48d5f584abf3b108a24b8566946e23f81ccb78d92752b04fcd6067dd1e
data/Gemfile CHANGED
@@ -5,7 +5,6 @@ source 'https://rubygems.org'
  gemspec

  gem 'rake'
-
  gem 'rspec'
  gem 'rspec_junit_formatter'
  gem 'rubocop'
@@ -13,3 +12,4 @@ gem 'rubocop-rake'
  gem 'rubocop-rspec'
  gem 'simplecov'
  gem 'webmock'
+ gem 'yard'
data/README.md CHANGED
@@ -70,21 +70,10 @@ completion.choice.message.content # 'Why did the chicken cross the road? To get
  ```

  ```ruby
- completion = client.chat({
- role: OmniAI::Chat::Role::USER,
- content: 'Is it wise to jump off a bridge?'
- })
- completion.choice.message.content # 'No.'
- ```
-
- ```ruby
- completion = client.chat([
- {
- role: OmniAI::Chat::Role::SYSTEM,
- content: 'You are a helpful assistant.'
- },
- 'What is the capital of Canada?',
- ])
+ completion = client.chat do |prompt|
+ prompt.system('You are an expert in geography.')
+ prompt.user('What is the capital of Canada?')
+ end
  completion.choice.message.content # 'The capital of Canada is Ottawa.'
  ```

@@ -128,10 +117,10 @@ client.chat('Be poetic.', stream:)
  `format` takes an optional symbol (`:json`) and sets the `response_format` to `json_object`:

  ```ruby
- completion = client.chat([
- { role: OmniAI::Chat::Role::SYSTEM, content: OmniAI::Chat::JSON_PROMPT },
- { role: OmniAI::Chat::Role::USER, content: 'What is the name of the drummer for the Beatles?' }
- ], format: :json)
+ completion = client.chat(format: :json) do |prompt|
+ prompt.system(OmniAI::Chat::JSON_PROMPT)
+ prompt.user('What is the name of the drummer for the Beatles?')
+ end
  JSON.parse(completion.choice.message.content) # { "name": "Ringo" }
  ```

@@ -3,6 +3,14 @@
  module OmniAI
  module OpenAI
  # An OpenAI chat implementation.
+ #
+ # Usage:
+ #
+ # completion = OmniAI::OpenAI::Chat.process!(client: client) do |prompt|
+ # prompt.system('You are an expert in the field of AI.')
+ # prompt.user('What are the biggest risks of AI?')
+ # end
+ # completion.choice.message.content # '...'
  class Chat < OmniAI::Chat
  JSON_RESPONSE_FORMAT = { type: 'json_object' }.freeze

@@ -20,11 +28,12 @@ module OmniAI
  # @return [Hash]
  def payload
  OmniAI::OpenAI.config.chat_options.merge({
- messages:,
+ messages: @prompt.serialize,
  model: @model,
  stream: @stream.nil? ? nil : !@stream.nil?,
  temperature: @temperature,
  response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
+ tools: @tools&.map(&:prepare),
  }).compact
  end

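For orientation, `messages:` is now whatever `@prompt.serialize` produces in the core omniai gem, `tools:` is the prepared form of any supplied tools, and `nil` entries are dropped by `.compact`. A rough sketch of the request body this might build for the README's geography prompt, assuming the standard OpenAI message shape and using `gpt-4o` as a stand-in for `Chat::DEFAULT_MODEL`:

```ruby
# Illustration only: the exact serialization is owned by the core omniai gem.
# With no temperature, stream, format, or tools configured, those keys are
# removed by `.compact`, leaving roughly:
{
  messages: [
    { role: 'system', content: 'You are an expert in geography.' },
    { role: 'user', content: 'What is the capital of Canada?' }
  ],
  model: 'gpt-4o' # stand-in for Chat::DEFAULT_MODEL
}
```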
@@ -67,10 +67,11 @@ module OmniAI
  # @param format [Symbol] optional :text or :json
  # @param temperature [Float, nil] optional
  # @param stream [Proc, nil] optional
+ # @param tools [Array<OmniAI::Tool>, nil] optional
  #
  # @return [OmniAI::Chat::Completion]
- def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil)
- Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
+ def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil)
+ Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self)
  end

  # @raise [OmniAI::Error]
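To show the new `tools:` parameter in use, here is a hedged sketch. Only the `Array<OmniAI::Tool>` type and the `#prepare` call in the payload are confirmed by this diff; the `OmniAI::Tool.new(callable, name:, description:)` construction below follows the core omniai gem's interface, and whether the tool call is executed automatically is up to that gem:

```ruby
# Sketch only: OmniAI::Tool construction is assumed from the core omniai gem.
weather = OmniAI::Tool.new(
  proc { |location:| "It is 20°C in #{location}." }, # toy implementation
  name: 'weather',
  description: 'Looks up the current weather for a location'
)

completion = client.chat('What is the weather in Ottawa?', tools: [weather])
completion.choice.message.content
```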
@@ -82,7 +83,7 @@ module OmniAI
  # @param temperature [Float, nil] optional
  # @param format [Symbol] :text, :srt, :vtt, or :json (default)
  #
- # @return text [OmniAI::Transcribe::Transcription]
+ # @return [OmniAI::Transcribe]
  def transcribe(path, model: Transcribe::Model::WHISPER, language: nil, prompt: nil, temperature: nil, format: nil)
  Transcribe.process!(path, model:, language:, prompt:, temperature:, format:, client: self)
  end
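For completeness, a minimal call matching this signature; the file path is illustrative, and the `#text` reader on the result comes from the core omniai gem rather than from this diff:

```ruby
# Sketch: transcribe a local audio file with the default Whisper model.
transcription = client.transcribe('path/to/audio.ogg')
transcription.text # => "..."
```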
@@ -34,7 +34,6 @@ module OmniAI
  # @param content [String, Array, nil] optional
  # @param attachments [Array, nil] optional
  # @param metadata [Hash, nil] optional
- # @param client [OmniAI::OpenAI::Client] optional
  # @return [OmniAI::OpenAI::Thread::Message]
  def build(role: nil, content: nil, attachments: [], metadata: {})
  Message.new(role:, content:, attachments:, metadata:, thread_id: @thread.id, client: @client)
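A hedged usage sketch for this builder; the `thread.messages` accessor and the `#save!` call are assumptions, as only the `#build` signature above appears in this diff:

```ruby
# Assumptions: `thread` is an OmniAI::OpenAI::Thread exposing #messages, and
# the built message persists via #save!.
message = thread.messages.build(role: 'user', content: 'What is the capital of Canada?')
message.save!
```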
@@ -183,7 +183,7 @@ module OmniAI
  self
  end

- # @param interval [Integer, Float, nil] optional (seconds)
+ # @param delay [Numeric, nil] optional (seconds)
  #
  # @return [OmniAI::OpenAI::Thread::Run]
  def poll!(delay: 2)
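For illustration, `#poll!` appears to block, re-fetching the run roughly every `delay` seconds until it finishes; a hedged sketch (the `run` object and its `#status` reader are assumed, not part of this diff):

```ruby
# Sketch: wait on an in-progress run, checking about once per second.
run.poll!(delay: 1)
run.status # e.g. 'completed' (assumed reader)
```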
@@ -31,12 +31,11 @@ module OmniAI
  end

  # @param assistant_id [String] required
- # @param model [String] optional
- # @param temperature [Float] optional
- # @param instructions [String] optional
+ # @param model [String, nil] optional
+ # @param temperature [Float, nil] optional
+ # @param instructions [String, nil] optional
  # @param tools [Array<Hash>, nil] optional
  # @param metadata [Hash, nil] optional
- # @param client [OmniAI::OpenAI::Client] optional
  # @return [OmniAI::OpenAI::Thread::Message]
  def build(assistant_id:, model: nil, temperature: nil, instructions: nil, tools: nil, metadata: {})
  Run.new(
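A hedged sketch tying this builder to the `#poll!` change above; the `thread.runs` accessor, the `#save!` call, and the assistant id are assumptions rather than content of this diff:

```ruby
# Assumptions: `thread` exposes #runs and the built run responds to #save!.
run = thread.runs.build(assistant_id: 'asst_123', temperature: 0.7) # placeholder id
run.save!
run.poll!(delay: 2)
```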
@@ -2,6 +2,6 @@

  module OmniAI
  module OpenAI
- VERSION = '1.3.3'
+ VERSION = '1.6.0'
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: omniai-openai
  version: !ruby/object:Gem::Version
- version: 1.3.3
+ version: 1.6.0
  platform: ruby
  authors:
  - Kevin Sylvestre
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-06-25 00:00:00.000000000 Z
+ date: 2024-07-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: event_stream_parser
@@ -105,7 +105,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.5.3
+ rubygems_version: 3.5.14
  signing_key:
  specification_version: 4
  summary: A generalized framework for interacting with OpenAI