omniai-openai 1.9.1 → 1.9.2
- checksums.yaml +4 -4
- data/Gemfile +11 -10
- data/README.md +1 -1
- data/lib/omniai/openai/assistant.rb +20 -20
- data/lib/omniai/openai/chat.rb +9 -9
- data/lib/omniai/openai/client.rb +6 -6
- data/lib/omniai/openai/config.rb +5 -5
- data/lib/omniai/openai/embed.rb +4 -4
- data/lib/omniai/openai/file.rb +16 -16
- data/lib/omniai/openai/speak.rb +9 -9
- data/lib/omniai/openai/thread/annotation.rb +5 -5
- data/lib/omniai/openai/thread/attachment.rb +2 -2
- data/lib/omniai/openai/thread/content.rb +3 -3
- data/lib/omniai/openai/thread/message.rb +21 -21
- data/lib/omniai/openai/thread/run.rb +29 -29
- data/lib/omniai/openai/thread/text.rb +3 -3
- data/lib/omniai/openai/thread.rb +11 -11
- data/lib/omniai/openai/tool.rb +2 -2
- data/lib/omniai/openai/transcribe.rb +2 -2
- data/lib/omniai/openai/version.rb +1 -1
- data/lib/omniai/openai.rb +4 -4
- metadata +3 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: fecb8c5c7e0dd7b6dabcb1103dc700e84cda8ffac62b8a4af1d88bcdfe745ca1
+data.tar.gz: b60529994b5b46a294ba1549ee79432bbc55278c4411895a479571182cfea502
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: cd20bfdd8656340e48c5f97803ff3f72115588571dacf5c15bb88841c2e62c480defdc377392301692abcabdef3522d65244bc69bcc254ee5e7061c49221928f
+data.tar.gz: a57d9837b922d70e0f6e74c20a7e976682c8bdae49c59552928a4bb057865b8ecea6a27e9b3ec42c5758147d11a0e1a7f663192776f0c21a982a9b1b5e56b55c
data/Gemfile
CHANGED
@@ -1,15 +1,16 @@
 # frozen_string_literal: true

-source
+source "https://rubygems.org"

 gemspec

-gem
-gem
-gem
-gem
-gem
-gem
-gem
-gem
-gem
+gem "rake"
+gem "rspec"
+gem "rspec_junit_formatter"
+gem "rubocop"
+gem "rubocop-basic"
+gem "rubocop-rake"
+gem "rubocop-rspec"
+gem "simplecov"
+gem "webmock"
+gem "yard"
data/README.md
CHANGED
@@ -97,7 +97,7 @@ completion.content # 'The capital of Canada is Ottawa.'

 #### Model

-`model` takes an optional string (default is `
+`model` takes an optional string (default is `gpt-4o`):

 ```ruby
 completion = client.chat('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
data/lib/omniai/openai/assistant.rb
CHANGED
@@ -4,7 +4,7 @@ module OmniAI
 module OpenAI
 # An OpenAI assistants implementation.
 class Assistant
-HEADERS = {
+HEADERS = { "OpenAI-Beta": "assistants=v2" }.freeze

 # @!attribute [rw] id
 # @return [String, nil]
@@ -95,7 +95,7 @@ module OmniAI

 raise HTTPError, response.flush unless response.status.ok?

-response.parse[
+response.parse["data"].map { |data| parse(data:, client:) }
 end

 # @param id [String] required
@@ -128,17 +128,17 @@ module OmniAI
 # @raise [OmniAI::Error]
 # @return [OmniAI::OpenAI::Assistant]
 def destroy!
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot destroy a non-persisted assistant" unless @id

 data = self.class.destroy!(id: @id, client: @client)
-@deleted = data[
+@deleted = data["deleted"]
 self
 end

-
+private

 class << self
-
+private

 # @param data [Hash] required
 # @param client [OmniAI::OpenAI::Client] required
@@ -146,13 +146,13 @@ module OmniAI
 def parse(data:, client: Client.new)
 new(
 client:,
-id: data[
-name: data[
-model: data[
-description: data[
-instructions: data[
-metadata: data[
-tools: data[
+id: data["id"],
+name: data["name"],
+model: data["model"],
+description: data["description"],
+instructions: data["instructions"],
+metadata: data["metadata"],
+tools: data["tools"]
 )
 end
 end
@@ -160,13 +160,13 @@ module OmniAI
 # @param data [Hash] required
 # @return [OmniAI::OpenAI::Assistant]
 def parse(data:)
-@id = data[
-@name = data[
-@model = data[
-@description = data[
-@instructions = data[
-@metadata = data[
-@tools = data[
+@id = data["id"]
+@name = data["name"]
+@model = data["model"]
+@description = data["description"]
+@instructions = data["instructions"]
+@metadata = data["metadata"]
+@tools = data["tools"]
 end

 # @return [Hash]
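The assistant hunks above only swap hash-key and message quoting, but they show the public surface: `#destroy!` guards against a non-persisted record, and `.parse` builds instances from the `id`, `name`, `model`, `description`, `instructions`, `metadata` and `tools` keys. A minimal sketch of the guard, assuming the keyword arguments other than `client:` default to `nil`:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# A non-persisted assistant (no id yet) cannot be destroyed:
assistant = OmniAI::OpenAI::Assistant.new(client: client)
begin
  assistant.destroy!
rescue OmniAI::Error => e
  e.message # => "cannot destroy a non-persisted assistant"
end
```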
data/lib/omniai/openai/chat.rb
CHANGED
@@ -12,22 +12,22 @@ module OmniAI
 # end
 # completion.choice.message.content # '...'
 class Chat < OmniAI::Chat
-JSON_RESPONSE_FORMAT = { type:
+JSON_RESPONSE_FORMAT = { type: "json_object" }.freeze

 module Model
-GPT_4O =
-GPT_4O_MINI =
-GPT_4 =
-GPT_4_TURBO =
-GPT_3_5_TURBO =
-O1_MINI =
-O1_PREVIEW =
+GPT_4O = "gpt-4o"
+GPT_4O_MINI = "gpt-4o-mini"
+GPT_4 = "gpt-4"
+GPT_4_TURBO = "gpt-4-turbo"
+GPT_3_5_TURBO = "gpt-3.5-turbo"
+O1_MINI = "o1-mini"
+O1_PREVIEW = "o1-preview"
 O1 = O1_PREVIEW
 end

 DEFAULT_MODEL = Model::GPT_4O

-
+protected

 # @return [Hash]
 def payload
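For reference, the `Model` constants above are the literal strings sent as the `model` parameter, and `DEFAULT_MODEL` remains `Model::GPT_4O`. A minimal usage sketch based on the README example earlier in this diff (an `OPENAI_API_KEY` in the environment is assumed):

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# `model:` is optional and defaults to "gpt-4o":
completion = client.chat("How fast is a cheetah?", model: OmniAI::OpenAI::Chat::Model::GPT_4O)
completion.content # => "..."
```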
data/lib/omniai/openai/client.rb
CHANGED
@@ -20,7 +20,7 @@ module OmniAI
 #
 # client = OmniAI::OpenAI::Client.new
 class Client < OmniAI::Client
-VERSION =
+VERSION = "v1"

 attr_reader :api_prefix

@@ -33,7 +33,7 @@ module OmniAI
 # @param timeout [Integer, nil] optional - defaults to `OmniAI::OpenAI.config.timeout`
 def initialize(
 api_key: OmniAI::OpenAI.config.api_key,
-api_prefix:
+api_prefix: "",
 host: OmniAI::OpenAI.config.host,
 organization: OmniAI::OpenAI.config.organization,
 project: OmniAI::OpenAI.config.project,
@@ -53,9 +53,9 @@ module OmniAI
 @project = project

 @api_prefix = api_prefix
-return if @api_prefix.empty? || @api_prefix.start_with?(
+return if @api_prefix.empty? || @api_prefix.start_with?("/")

-@api_prefix.prepend(
+@api_prefix.prepend("/")
 end

 # @return [HTTP::Client]
@@ -63,8 +63,8 @@ module OmniAI
 @connection ||= begin
 http = super
 http = http.auth("Bearer #{@api_key}") if @api_key
-http = http.headers(
-http = http.headers(
+http = http.headers("OpenAI-Organization": @organization) if @organization
+http = http.headers("OpenAI-Project": @project) if @project
 http
 end
 end
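All of the keywords visible in the constructor above can be passed explicitly; per the normalization code, a non-empty `api_prefix` gains a leading `/` when one is missing. A sketch with placeholder values:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new(
  api_key: ENV.fetch("OPENAI_API_KEY"),
  host: "https://api.openai.com",  # default comes from OmniAI::OpenAI.config.host
  api_prefix: "proxy",             # normalized to "/proxy" by the constructor above
  organization: "org-...",         # placeholder; sent as the OpenAI-Organization header
  project: "proj_..."              # placeholder; sent as the OpenAI-Project header
)
client.api_prefix # => "/proxy"
```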
data/lib/omniai/openai/config.rb
CHANGED
@@ -4,7 +4,7 @@ module OmniAI
 module OpenAI
 # Configuration for OpenAI.
 class Config < OmniAI::Config
-DEFAULT_HOST =
+DEFAULT_HOST = "https://api.openai.com"

 # @!attribute [rw] organization
 # @return [String, nil] passed as `OpenAI-Organization` if specified
@@ -21,10 +21,10 @@ module OmniAI
 # @param logger [Logger, nil] optional
 # @param timeout [Integer, Hash, nil] optional
 def initialize(
-api_key: ENV.fetch(
-host: ENV.fetch(
-organization: ENV.fetch(
-project: ENV.fetch(
+api_key: ENV.fetch("OPENAI_API_KEY", nil),
+host: ENV.fetch("OPENAI_HOST", DEFAULT_HOST),
+organization: ENV.fetch("OPENAI_ORGANIZATION", nil),
+project: ENV.fetch("OPENAI_PROJECT", nil),
 logger: nil,
 timeout: nil
 )
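The defaults above mean the client can be configured entirely through the environment. A sketch (values are placeholders; they must be set before `OmniAI::OpenAI.config` is first built, since `Config#initialize` reads them via `ENV.fetch`):

```ruby
require "omniai/openai"

ENV["OPENAI_API_KEY"] ||= "sk-..."        # placeholder
ENV["OPENAI_ORGANIZATION"] ||= "org-..."  # optional; sent as OpenAI-Organization
ENV["OPENAI_PROJECT"] ||= "proj_..."      # optional; sent as OpenAI-Project

client = OmniAI::OpenAI::Client.new       # falls back to OmniAI::OpenAI.config for each keyword
client.api_prefix # => "" (the default shown in the client diff above)
```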
data/lib/omniai/openai/embed.rb
CHANGED
@@ -11,14 +11,14 @@ module OmniAI
 # response.embedding [0.0, ...]
 class Embed < OmniAI::Embed
 module Model
-SMALL =
-LARGE =
-ADA =
+SMALL = "text-embedding-3-small"
+LARGE = "text-embedding-3-large"
+ADA = "text-embedding-ada-002"
 end

 DEFAULT_MODEL = Model::LARGE

-
+protected

 # @return [Hash]
 def payload
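The constants above are the OpenAI embedding model names, with `DEFAULT_MODEL` set to `Model::LARGE`. A sketch based on the `response.embedding` comment at the top of this file; the `client.embed` call signature is an assumption, mirroring the other verbs:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Assumption: `client.embed` accepts the input plus a `model:` keyword.
response = client.embed("The quick brown fox", model: OmniAI::OpenAI::Embed::Model::SMALL)
response.embedding # => [0.0, ...] (per the comment above)
```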
data/lib/omniai/openai/file.rb
CHANGED
@@ -25,7 +25,7 @@ module OmniAI
 attr_accessor :deleted

 module Purpose
-ASSISTANTS =
+ASSISTANTS = "assistants"
 end

 # @param client [OmniAI::OpenAI::Client] optional
@@ -58,7 +58,7 @@ module OmniAI
 # @raise [OmniAI::Error]
 # @yield [String]
 def content(&)
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot fetch content without ID" unless @id

 response = @client.connection
 .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{@id}/content")
@@ -90,7 +90,7 @@ module OmniAI

 raise HTTPError, response.flush unless response.status.ok?

-response.parse[
+response.parse["data"].map { |data| parse(data:, client:) }
 end

 # @param id [String] required
@@ -109,7 +109,7 @@ module OmniAI
 # @raise [HTTPError]
 # @return [OmniAI::OpenAI::Assistant]
 def save!
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot save a file without IO" unless @io

 response = @client.connection
 .accept(:json)
@@ -123,14 +123,14 @@ module OmniAI
 # @raise [OmniAI::Error]
 # @return [OmniAI::OpenAI::Assistant]
 def destroy!
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot destroy w/o ID" unless @id

 data = self.class.destroy!(id: @id, client: @client)
-@deleted = data[
+@deleted = data["deleted"]
 self
 end

-
+private

 # @return [Hash]
 def payload
@@ -141,7 +141,7 @@ module OmniAI
 end

 class << self
-
+private

 # @param data [Hash] required
 # @param client [OmniAI::OpenAI::Client] required
@@ -149,10 +149,10 @@ module OmniAI
 def parse(data:, client: Client.new)
 new(
 client:,
-id: data[
-bytes: data[
-filename: data[
-purpose: data[
+id: data["id"],
+bytes: data["bytes"],
+filename: data["filename"],
+purpose: data["purpose"]
 )
 end
 end
@@ -160,10 +160,10 @@ module OmniAI
 # @param data [Hash] required
 # @return [OmniAI::OpenAI::Assistant]
 def parse(data:)
-@id = data[
-@bytes = data[
-@filename = data[
-@purpose = data[
+@id = data["id"]
+@bytes = data["bytes"]
+@filename = data["filename"]
+@purpose = data["purpose"]
 end
 end
 end
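The file hunks keep the existing shape: `#save!` needs an IO, while `#content` and `#destroy!` need an ID. A lifecycle sketch; the `io:` and `purpose:` keywords on `new` are assumptions inferred from those guards rather than part of this diff:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Assumed keywords: `io:` and `purpose:` (the guards above only prove they are needed).
file = OmniAI::OpenAI::File.new(
  client: client,
  io: File.open("notes.pdf", "rb"),
  purpose: OmniAI::OpenAI::File::Purpose::ASSISTANTS # "assistants"
)

file.save!                           # raises "cannot save a file without IO" when io is missing
file.content { |chunk| print chunk } # streams GET /v1/files/:id/content
file.destroy!                        # raises "cannot destroy w/o ID" when not yet persisted
```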
data/lib/omniai/openai/speak.rb
CHANGED
@@ -5,20 +5,20 @@ module OmniAI
 # An OpenAI transcribe implementation.
 class Speak < OmniAI::Speak
 module Model
-TTS_1 =
-TTS_1_HD =
+TTS_1 = "tts-1"
+TTS_1_HD = "tts-1-hd"
 end

 module Voice
-ALLOY =
-ECHO =
-FABLE =
-NOVA =
-ONYX =
-SHIMMER =
+ALLOY = "alloy" # https://platform.openai.com/docs/guides/text-to-speech/alloy
+ECHO = "echo" # https://platform.openai.com/docs/guides/text-to-speech/echo
+FABLE = "fable" # https://platform.openai.com/docs/guides/text-to-speech/fable
+NOVA = "nova" # https://platform.openai.com/docs/guides/text-to-speech/nova
+ONYX = "onyx" # https://platform.openai.com/docs/guides/text-to-speech/onyx
+SHIMMER = "shimmer" # https://platform.openai.com/docs/guides/text-to-speech/shimmer
 end

-
+protected

 # @return [Hash]
 def payload
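The `Voice` constants above map to the documented OpenAI voices. A sketch, assuming `client.speak` follows the same verb pattern as `chat` and yields audio chunks to a block; neither assumption is shown in this diff:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Assumption: `client.speak` takes the input text plus `model:` and `voice:` keywords.
client.speak(
  "Sally sells seashells by the seashore.",
  model: OmniAI::OpenAI::Speak::Model::TTS_1_HD,
  voice: OmniAI::OpenAI::Speak::Voice::NOVA
) do |chunk|
  # handle streamed audio bytes (assumed streaming interface)
end
```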
data/lib/omniai/openai/thread/annotation.rb
CHANGED
@@ -18,22 +18,22 @@ module OmniAI

 # @return [String] "file_citation" or "file_path"
 def type
-@data[
+@data["type"]
 end

 # @return [String]
 def text
-@data[
+@data["text"]
 end

 # @return [Integer]
 def start_index
-@data[
+@data["start_index"]
 end

 # @return [Integer]
 def end_index
-@data[
+@data["end_index"]
 end

 # @return [Range<Integer>]
@@ -43,7 +43,7 @@ module OmniAI

 # @return [String]
 def file_id
-@file_id ||= (@data[
+@file_id ||= (@data["file_citation"] || @data["file_path"])["file_id"]
 end

 # Present if type is "file_citation" or "file_path".
data/lib/omniai/openai/thread/attachment.rb
CHANGED
@@ -28,12 +28,12 @@ module OmniAI

 # @return [String] e.g. "text"
 def file_id
-@file_id ||= @data[
+@file_id ||= @data["file_id"]
 end

 # @return [Array<Hash>]
 def tools
-@tools ||= @data[
+@tools ||= @data["tools"]
 end

 # @return [OmniAI::OpenAI::File]
data/lib/omniai/openai/thread/content.rb
CHANGED
@@ -6,7 +6,7 @@ module OmniAI
 # An OpenAI content w/ annotations.
 class Content
 module Type
-TEXT =
+TEXT = "text"
 end

 # @param data [Array]
@@ -31,7 +31,7 @@ module OmniAI

 # @return [String] e.g. "text"
 def type
-@type ||= @data[
+@type ||= @data["type"]
 end

 # @return [Boolean]
@@ -41,7 +41,7 @@ module OmniAI

 # @return [OmniAI::OpenAI::Thread::Text]
 def text
-@text ||= Text.new(data: @data[
+@text ||= Text.new(data: @data["text"], client: @client) if @data["text"]
 end
 end
 end
data/lib/omniai/openai/thread/message.rb
CHANGED
@@ -110,7 +110,7 @@ module OmniAI

 raise HTTPError, response.flush unless response.status.ok?

-response.parse[
+response.parse["data"].map { |data| parse(data:, client:) }
 end

 # @param thread_id [String] required
@@ -145,17 +145,17 @@ module OmniAI
 # @raise [OmniAI::Error]
 # @return [OmniAI::OpenAI::Thread]
 def destroy!
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot destroy a non-persisted thread" unless @id

 data = self.class.destroy!(thread_id: @thread_id, id: @id, client: @client)
-@deleted = data[
+@deleted = data["deleted"]
 self
 end

-
+private

 class << self
-
+private

 # @param data [Hash] required
 # @param client [OmniAI::OpenAI::Client] required
@@ -163,28 +163,28 @@ module OmniAI
 def parse(data:, client: Client.new)
 new(
 client:,
-id: data[
-assistant_id: data[
-thread_id: data[
-run_id: data[
-role: data[
-content: Content.for(data: data[
-attachments: Attachment.for(data: data[
-metadata: data[
+id: data["id"],
+assistant_id: data["assistant_id"],
+thread_id: data["thread_id"],
+run_id: data["run_id"],
+role: data["role"],
+content: Content.for(data: data["content"], client:),
+attachments: Attachment.for(data: data["attachments"], client:),
+metadata: data["metadata"]
 )
 end
 end

 # @param data [Hash] required
 def parse(data:)
-@id = data[
-@assistant_id = data[
-@thread_id = data[
-@run_id = data[
-@role = data[
-@content = Content.for(data: data[
-@attachments = Attachment.for(data: data[
-@metadata = data[
+@id = data["id"]
+@assistant_id = data["assistant_id"]
+@thread_id = data["thread_id"]
+@run_id = data["run_id"]
+@role = data["role"]
+@content = Content.for(data: data["content"], client: @client)
+@attachments = Attachment.for(data: data["content"], client: @client)
+@metadata = data["metadata"]
 end

 # @return [Hash]
data/lib/omniai/openai/thread/run.rb
CHANGED
@@ -6,10 +6,10 @@ module OmniAI
 # An OpenAI run within a thread.
 class Run
 module Status
-CANCELLED =
-FAILED =
-COMPLETED =
-EXPIRED =
+CANCELLED = "cancelled"
+FAILED = "failed"
+COMPLETED = "completed"
+EXPIRED = "expired"
 end

 TERMINATED_STATUSES = [
@@ -125,7 +125,7 @@ module OmniAI

 raise HTTPError, response.flush unless response.status.ok?

-response.parse[
+response.parse["data"].map { |data| parse(data:, client:) }
 end

 # @param thread_id [String] required
@@ -160,7 +160,7 @@ module OmniAI
 # @raise [HTTPError]
 # @return [OmniAI::OpenAI::Thread]
 def reload!
-raise Error,
+raise Error, "unable to fetch! without an ID" unless @id

 response = @client.connection
 .accept(:json)
@@ -176,10 +176,10 @@ module OmniAI
 # @raise [OmniAI::Error]
 # @return [OmniAI::OpenAI::Thread]
 def cancel!
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot cancel a non-persisted thread" unless @id

 data = self.class.cancel!(thread_id: @thread_id, id: @id, client: @client)
-@status = data[
+@status = data["status"]
 self
 end

@@ -200,10 +200,10 @@ module OmniAI
 TERMINATED_STATUSES.include?(@status)
 end

-
+private

 class << self
-
+private

 # @param data [Hash] required
 # @param client [OmniAI::OpenAI::Client] required
@@ -211,31 +211,31 @@ module OmniAI
 def parse(data:, client: Client.new)
 new(
 client:,
-id: data[
-assistant_id: data[
-thread_id: data[
-status: data[
-model: data[
-temperature: data[
-instructions: data[
-tools: data[
-metadata: data[
+id: data["id"],
+assistant_id: data["assistant_id"],
+thread_id: data["thread_id"],
+status: data["status"],
+model: data["model"],
+temperature: data["temperature"],
+instructions: data["instructions"],
+tools: data["tools"],
+metadata: data["metadata"]
 )
 end
 end

 # @param data [Hash] required
 def parse(data:)
-@id = data[
-@assistant_id = data[
-@thread_id = data[
-@run_id = data[
-@status = data[
-@model = data[
-@temperature = data[
-@instructions = data[
-@tools = data[
-@metadata = data[
+@id = data["id"]
+@assistant_id = data["assistant_id"]
+@thread_id = data["thread_id"]
+@run_id = data["run_id"]
+@status = data["status"]
+@model = data["model"]
+@temperature = data["temperature"]
+@instructions = data["instructions"]
+@tools = data["tools"]
+@metadata = data["metadata"]
 end

 # @return [Hash]
data/lib/omniai/openai/thread/text.rb
CHANGED
@@ -30,17 +30,17 @@ module OmniAI

 # @return [String] e.g. "text"
 def type
-@data[
+@data["type"]
 end

 # @return [String]
 def value
-@data[
+@data["value"]
 end

 # @return [Array<OmniAI::OpenAI::Thread::Annotation>]
 def annotations
-@annotations ||= @data[
+@annotations ||= @data["annotations"].map { |data| Annotation.new(data:, client: @client) }
 end
 end
 end
data/lib/omniai/openai/thread.rb
CHANGED
@@ -4,7 +4,7 @@ module OmniAI
 module OpenAI
 # An OpenAI threads implementation.
 class Thread
-HEADERS = {
+HEADERS = { "OpenAI-Beta": "assistants=v2" }.freeze

 # @!attribute [rw] id
 # @return [String, nil]
@@ -86,10 +86,10 @@ module OmniAI
 # @raise [OmniAI::Error]
 # @return [OmniAI::OpenAI::Thread]
 def destroy!
-raise OmniAI::Error,
+raise OmniAI::Error, "cannot destroy a non-persisted thread" unless @id

 data = self.class.destroy!(id: @id, client: @client)
-@deleted = data[
+@deleted = data["deleted"]
 self
 end

@@ -103,10 +103,10 @@ module OmniAI
 Runs.new(client: @client, thread: self)
 end

-
+private

 class << self
-
+private

 # @param data [Hash] required
 # @param client [OmniAI::OpenAI::Client] required
@@ -114,9 +114,9 @@ module OmniAI
 def parse(data:, client: Client.new)
 new(
 client:,
-id: data[
-metadata: data[
-tool_resources: data[
+id: data["id"],
+metadata: data["metadata"],
+tool_resources: data["tool_resources"]
 )
 end
 end
@@ -124,9 +124,9 @@ module OmniAI
 # @param data [Hash] required
 # @return [OmniAI::OpenAI::Thread]
 def parse(data:)
-@id = data[
-@metadata = data[
-@tool_resources = data[
+@id = data["id"]
+@metadata = data["metadata"]
+@tool_resources = data["tool_resources"]
 end

 # @return [Hash]
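`Thread#destroy!` carries the same non-persisted guard as `Assistant#destroy!`, and the `Runs.new(client: @client, thread: self)` context line shows runs being scoped to a thread. A sketch of the guard, assuming the keywords other than `client:` default to `nil`:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new
thread = OmniAI::OpenAI::Thread.new(client: client)

begin
  thread.destroy!
rescue OmniAI::Error => e
  e.message # => "cannot destroy a non-persisted thread"
end
```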
data/lib/omniai/openai/tool.rb
CHANGED
@@ -4,8 +4,8 @@ module OmniAI
 module OpenAI
 # An set of tools.
 module Tool
-FILE_SEARCH = { type:
-CODE_INTERPRETER = { type:
+FILE_SEARCH = { type: "file_search" }.freeze
+CODE_INTERPRETER = { type: "code_interpreter" }.freeze
 end
 end
 end
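`Tool::FILE_SEARCH` and `Tool::CODE_INTERPRETER` are frozen `{ type: ... }` hashes in the shape the assistants API expects. A sketch of handing them to an assistant; the `name:`, `model:` and `tools:` keywords on `Assistant.new` are inferred from the `.parse` call in the assistant diff above:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Keywords other than `client:` are inferred, not confirmed by this diff.
assistant = OmniAI::OpenAI::Assistant.new(
  client: client,
  name: "Research Helper",                   # placeholder
  model: OmniAI::OpenAI::Chat::Model::GPT_4O,
  tools: [
    OmniAI::OpenAI::Tool::FILE_SEARCH,       # { type: "file_search" }
    OmniAI::OpenAI::Tool::CODE_INTERPRETER,  # { type: "code_interpreter" }
  ]
)
```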
data/lib/omniai/openai/transcribe.rb
CHANGED
@@ -5,11 +5,11 @@ module OmniAI
 # An OpenAI transcribe implementation.
 class Transcribe < OmniAI::Transcribe
 module Model
-WHISPER_1 =
+WHISPER_1 = "whisper-1"
 WHISPER = WHISPER_1
 end

-
+protected

 # @return [Hash]
 def payload
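`Model::WHISPER` aliases `whisper-1`. A sketch, assuming `client.transcribe` accepts a path or IO plus a `model:` keyword and exposes the text on the result; none of that is shown in this diff:

```ruby
require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Assumption: `client.transcribe` mirrors the other verbs.
transcription = client.transcribe("podcast.ogg", model: OmniAI::OpenAI::Transcribe::Model::WHISPER)
transcription.text # assumed accessor for the transcribed text
```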
data/lib/omniai/openai.rb
CHANGED
@@ -1,12 +1,12 @@
 # frozen_string_literal: true

-require
-require
-require
+require "event_stream_parser"
+require "omniai"
+require "zeitwerk"

 loader = Zeitwerk::Loader.for_gem
 loader.push_dir(__dir__, namespace: OmniAI)
-loader.inflector.inflect
+loader.inflector.inflect "openai" => "OpenAI"
 loader.setup

 module OmniAI
metadata
CHANGED
@@ -1,14 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: omniai-openai
 version: !ruby/object:Gem::Version
-version: 1.9.1
+version: 1.9.2
 platform: ruby
 authors:
 - Kevin Sylvestre
-autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2025-01-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: event_stream_parser
@@ -91,7 +90,6 @@ metadata:
 homepage_uri: https://github.com/ksylvest/omniai-openai
 changelog_uri: https://github.com/ksylvest/omniai-openai/releases
 rubygems_mfa_required: 'true'
-post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -106,8 +104,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
 version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.6.2
 specification_version: 4
 summary: A generalized framework for interacting with OpenAI
 test_files: []