omniai-openai 1.3.3 → 1.6.0
This diff shows the contents of publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only and reflects the changes between the two versions.
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/README.md +8 -19
- data/lib/omniai/openai/chat.rb +10 -1
- data/lib/omniai/openai/client.rb +4 -3
- data/lib/omniai/openai/thread/messages.rb +0 -1
- data/lib/omniai/openai/thread/run.rb +1 -1
- data/lib/omniai/openai/thread/runs.rb +3 -4
- data/lib/omniai/openai/version.rb +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ccfa9de2275327a63e5e187d40050fe86c5c22098a1caf03418fd34e3bd87462
+  data.tar.gz: 163cbd9b4dab840e0d16dbb7677c4caab56552717a6dbba27afb3326832b70c6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7ce15471f2609194788cfef73679e8c95bae0d9b011aecdc1087fe3f546239be4ca9053de3e4100808b403a34bb8d997a76a67d66b299afae426a812bd35f430
+  data.tar.gz: d0d1e276d090972077b51de2fab52f710fff7f2cd78b22a95cb3b7dbea23eac64d3d3f48d5f584abf3b108a24b8566946e23f81ccb78d92752b04fcd6067dd1e
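These SHA-256/SHA-512 entries cover the `metadata.gz` and `data.tar.gz` archives packaged inside the published `.gem`. A minimal sketch of checking one entry locally, assuming the gem file has already been unpacked (for example with `tar -xf omniai-openai-1.6.0.gem`) so that `data.tar.gz` sits in the current directory:

```ruby
require 'digest'

# Recompute the SHA-256 of data.tar.gz and compare it to the value recorded
# in checksums.yaml for 1.6.0.
expected = '163cbd9b4dab840e0d16dbb7677c4caab56552717a6dbba27afb3326832b70c6'
actual   = Digest::SHA256.file('data.tar.gz').hexdigest
puts(actual == expected ? 'checksum OK' : 'checksum mismatch')
```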
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -70,21 +70,10 @@ completion.choice.message.content # 'Why did the chicken cross the road? To get
 ```
 
 ```ruby
-completion = client.chat
-
-
-
-completion.choice.message.content # 'No.'
-```
-
-```ruby
-completion = client.chat([
-  {
-    role: OmniAI::Chat::Role::SYSTEM,
-    content: 'You are a helpful assistant.'
-  },
-  'What is the capital of Canada?',
-])
+completion = client.chat do |prompt|
+  prompt.system('Your are an expert in geography.')
+  prompt.user('What is the capital of Canada?')
+end
 completion.choice.message.content # 'The capital of Canada is Ottawa.'
 ```
 
@@ -128,10 +117,10 @@ client.chat('Be poetic.', stream:)
 `format` takes an optional symbol (`:json`) and that setes the `response_format` to `json_object`:
 
 ```ruby
-completion = client.chat(
-
-
-
+completion = client.chat(format: :json) do |prompt|
+  prompt.system(OmniAI::Chat::JSON_PROMPT)
+  prompt.user('What is the name of the drummer for the Beatles?')
+end
 JSON.parse(completion.choice.message.content) # { "name": "Ringo" }
 ```
 
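For context, the README snippets above assume a `client` has already been constructed; the constructor call is not part of this diff, so the sketch below (an explicit `api_key:` keyword reading `ENV['OPENAI_API_KEY']`) is an assumption rather than a quote from the README:

```ruby
require 'omniai/openai'

# Assumed setup for the README examples above; the gem may also pick the key
# up from the environment or from OmniAI::OpenAI.config.
client = OmniAI::OpenAI::Client.new(api_key: ENV.fetch('OPENAI_API_KEY'))
```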
data/lib/omniai/openai/chat.rb
CHANGED
@@ -3,6 +3,14 @@
 module OmniAI
   module OpenAI
     # An OpenAI chat implementation.
+    #
+    # Usage:
+    #
+    #   completion = OmniAI::OpenAI::Chat.process!(client: client) do |prompt|
+    #     prompt.system('You are an expert in the field of AI.')
+    #     prompt.user('What are the biggest risks of AI?')
+    #   end
+    #   completion.choice.message.content # '...'
     class Chat < OmniAI::Chat
       JSON_RESPONSE_FORMAT = { type: 'json_object' }.freeze
 
@@ -20,11 +28,12 @@ module OmniAI
       # @return [Hash]
       def payload
         OmniAI::OpenAI.config.chat_options.merge({
-          messages
+          messages: @prompt.serialize,
           model: @model,
           stream: @stream.nil? ? nil : !@stream.nil?,
           temperature: @temperature,
           response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
+          tools: @tools&.map(&:prepare),
         }).compact
       end
 
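To make the `payload` change concrete: with the block-based prompt from the README example and `format: :json`, the merged hash would look roughly like the sketch below. The exact shape of `@prompt.serialize` and the default model name are assumptions based on OpenAI's chat API, not something this diff shows.

```ruby
# Illustrative payload after `.compact` strips the nil entries
# (stream, temperature and tools are all nil in this example).
{
  messages: [
    { role: 'system', content: 'You are an expert in geography.' },
    { role: 'user', content: 'What is the capital of Canada?' },
  ],
  model: 'gpt-4o',                          # assumed value of Chat::DEFAULT_MODEL
  response_format: { type: 'json_object' }, # JSON_RESPONSE_FORMAT when format: :json
}
```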
data/lib/omniai/openai/client.rb
CHANGED
@@ -67,10 +67,11 @@ module OmniAI
       # @param format [Symbol] optional :text or :json
       # @param temperature [Float, nil] optional
       # @param stream [Proc, nil] optional
+      # @param tools [Array<OmniAI::Tool>, nil] optional
       #
       # @return [OmniAI::Chat::Completion]
-      def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil)
-        Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
+      def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil)
+        Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self)
       end
 
       # @raise [OmniAI::Error]
@@ -82,7 +83,7 @@ module OmniAI
       # @param temperature [Float, nil] optional
       # @param format [Symbol] :text, :srt, :vtt, or :json (default)
       #
-      # @return
+      # @return [OmniAI::Transcribe]
      def transcribe(path, model: Transcribe::Model::WHISPER, language: nil, prompt: nil, temperature: nil, format: nil)
        Transcribe.process!(path, model:, language:, prompt:, temperature:, format:, client: self)
      end
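A minimal sketch of the new `tools:` keyword on `Client#chat`. The documented type is `Array<OmniAI::Tool>`, but the only requirement visible in this diff is that each tool respond to `#prepare` (see `Chat#payload` above), so the `WeatherTool` stand-in below is hypothetical and simply returns an OpenAI-style function definition:

```ruby
# Hypothetical tool: any object whose #prepare returns an OpenAI-style
# function definition satisfies the `tools: @tools&.map(&:prepare)` payload code.
class WeatherTool
  def prepare
    {
      type: 'function',
      function: {
        name: 'weather',
        description: 'Look up the current weather for a location.',
        parameters: {
          type: 'object',
          properties: { location: { type: 'string' } },
          required: ['location'],
        },
      },
    }
  end
end

completion = client.chat('What is the weather in Ottawa?', tools: [WeatherTool.new])
```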
data/lib/omniai/openai/thread/messages.rb
CHANGED
@@ -34,7 +34,6 @@ module OmniAI
       # @param content [String, Array, nil] optional
       # @param attachments [Array, nil] optional
       # @param metadata [Hash, nil] optional
-      # @param client [OmniAI::OpenAI::Client] optional
       # @return [OmniAI::OpenAI::Thread::Message]
       def build(role: nil, content: nil, attachments: [], metadata: {})
         Message.new(role:, content:, attachments:, metadata:, thread_id: @thread.id, client: @client)
data/lib/omniai/openai/thread/runs.rb
CHANGED
@@ -31,12 +31,11 @@ module OmniAI
       end
 
       # @param assistant_id [String] required
-      # @param model [String] optional
-      # @param temperature [Float] optional
-      # @param instructions [String] optional
+      # @param model [String, nil] optional
+      # @param temperature [Float, nil] optional
+      # @param instructions [String, nil] optional
       # @param tools [Array<Hash>, nil] optional
       # @param metadata [Hash, nil] optional
-      # @param client [OmniAI::OpenAI::Client] optional
       # @return [OmniAI::OpenAI::Thread::Message]
       def build(assistant_id:, model: nil, temperature: nil, instructions: nil, tools: nil, metadata: {})
         Run.new(
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: omniai-openai
 version: !ruby/object:Gem::Version
-  version: 1.
+  version: 1.6.0
 platform: ruby
 authors:
 - Kevin Sylvestre
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-07-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -105,7 +105,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.14
 signing_key:
 specification_version: 4
 summary: A generalized framework for interacting with OpenAI