omniai-openai 1.9.1 → 1.9.3

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 1b1c24b9ac02d2b147545b2ee6a90e57a84358ae73501eb74bd7075b194d2329
4
- data.tar.gz: d43256628ab71b0f80277044d9e65a33596dff1354ff819af48671d50e8935a5
3
+ metadata.gz: 261546cd2fc0144ed03b3e562ee3b8f45d96df8a5117d2797d322550e99a9267
4
+ data.tar.gz: 956aa013320d97147d6b6b2ac86fe9116c4008d50da345046494b974deb06459
5
5
  SHA512:
6
- metadata.gz: 7cb3cfa6f659c0f1f5a8cd5b844a5bc4ff72d54b6e860d61083f5aec70c91fe4ec87025b2cb92fc54e1a68a642a3a36091e43f60f624fe8d880281ad4db0cb43
7
- data.tar.gz: 01e8896f53d6a99b3f1d864165716010d27f622808f85e74d1dd1fd2f9fc9facfd9c161cf00711d418a023c6db0349972149537728c87a9bea0def49e15c7608
6
+ metadata.gz: 5ad30f7b00224bf2dc8001b116733507209789b0b3ec2178288f8696161cf9d0cda1f4c9083522099511c6e6693416fee98ef147c89991888bc7ad873e222d6e
7
+ data.tar.gz: 4386ce5cea890be1394981d12b3500ae1ec89cd34a92453b4bc0ba5c3e3dbd792f61a476d899a060fdaa4ea872d8e2aeb2f414803d9f4b216708392e497c9aa7
data/Gemfile CHANGED
@@ -1,15 +1,16 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- source 'https://rubygems.org'
3
+ source "https://rubygems.org"
4
4
 
5
5
  gemspec
6
6
 
7
- gem 'rake'
8
- gem 'rspec'
9
- gem 'rspec_junit_formatter'
10
- gem 'rubocop'
11
- gem 'rubocop-rake'
12
- gem 'rubocop-rspec'
13
- gem 'simplecov'
14
- gem 'webmock'
15
- gem 'yard'
7
+ gem "rake"
8
+ gem "rspec"
9
+ gem "rspec_junit_formatter"
10
+ gem "rubocop"
11
+ gem "rubocop-basic"
12
+ gem "rubocop-rake"
13
+ gem "rubocop-rspec"
14
+ gem "simplecov"
15
+ gem "webmock"
16
+ gem "yard"
data/README.md CHANGED
@@ -97,7 +97,7 @@ completion.content # 'The capital of Canada is Ottawa.'
97
97
 
98
98
  #### Model
99
99
 
100
- `model` takes an optional string (default is `gtp-4o`):
100
+ `model` takes an optional string (default is `gpt-4o`):
101
101
 
102
102
  ```ruby
103
103
  completion = client.chat('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
@@ -4,7 +4,7 @@ module OmniAI
4
4
  module OpenAI
5
5
  # An OpenAI assistants implementation.
6
6
  class Assistant
7
- HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
7
+ HEADERS = { "OpenAI-Beta": "assistants=v2" }.freeze
8
8
 
9
9
  # @!attribute [rw] id
10
10
  # @return [String, nil]
@@ -95,7 +95,7 @@ module OmniAI
95
95
 
96
96
  raise HTTPError, response.flush unless response.status.ok?
97
97
 
98
- response.parse['data'].map { |data| parse(data:, client:) }
98
+ response.parse["data"].map { |data| parse(data:, client:) }
99
99
  end
100
100
 
101
101
  # @param id [String] required
@@ -128,17 +128,17 @@ module OmniAI
128
128
  # @raise [OmniAI::Error]
129
129
  # @return [OmniAI::OpenAI::Assistant]
130
130
  def destroy!
131
- raise OmniAI::Error, 'cannot destroy a non-persisted assistant' unless @id
131
+ raise OmniAI::Error, "cannot destroy a non-persisted assistant" unless @id
132
132
 
133
133
  data = self.class.destroy!(id: @id, client: @client)
134
- @deleted = data['deleted']
134
+ @deleted = data["deleted"]
135
135
  self
136
136
  end
137
137
 
138
- private
138
+ private
139
139
 
140
140
  class << self
141
- private
141
+ private
142
142
 
143
143
  # @param data [Hash] required
144
144
  # @param client [OmniAI::OpenAI::Client] required
@@ -146,13 +146,13 @@ module OmniAI
146
146
  def parse(data:, client: Client.new)
147
147
  new(
148
148
  client:,
149
- id: data['id'],
150
- name: data['name'],
151
- model: data['model'],
152
- description: data['description'],
153
- instructions: data['instructions'],
154
- metadata: data['metadata'],
155
- tools: data['tools']
149
+ id: data["id"],
150
+ name: data["name"],
151
+ model: data["model"],
152
+ description: data["description"],
153
+ instructions: data["instructions"],
154
+ metadata: data["metadata"],
155
+ tools: data["tools"]
156
156
  )
157
157
  end
158
158
  end
@@ -160,13 +160,13 @@ module OmniAI
160
160
  # @param data [Hash] required
161
161
  # @return [OmniAI::OpenAI::Assistant]
162
162
  def parse(data:)
163
- @id = data['id']
164
- @name = data['name']
165
- @model = data['model']
166
- @description = data['description']
167
- @instructions = data['instructions']
168
- @metadata = data['metadata']
169
- @tools = data['tools']
163
+ @id = data["id"]
164
+ @name = data["name"]
165
+ @model = data["model"]
166
+ @description = data["description"]
167
+ @instructions = data["instructions"]
168
+ @metadata = data["metadata"]
169
+ @tools = data["tools"]
170
170
  end
171
171
 
172
172
  # @return [Hash]
@@ -12,22 +12,23 @@ module OmniAI
12
12
  # end
13
13
  # completion.choice.message.content # '...'
14
14
  class Chat < OmniAI::Chat
15
- JSON_RESPONSE_FORMAT = { type: 'json_object' }.freeze
15
+ JSON_RESPONSE_FORMAT = { type: "json_object" }.freeze
16
16
 
17
17
  module Model
18
- GPT_4O = 'gpt-4o'
19
- GPT_4O_MINI = 'gpt-4o-mini'
20
- GPT_4 = 'gpt-4'
21
- GPT_4_TURBO = 'gpt-4-turbo'
22
- GPT_3_5_TURBO = 'gpt-3.5-turbo'
23
- O1_MINI = 'o1-mini'
24
- O1_PREVIEW = 'o1-preview'
25
- O1 = O1_PREVIEW
18
+ GPT_4O = "gpt-4o"
19
+ GPT_4O_MINI = "gpt-4o-mini"
20
+ GPT_4 = "gpt-4"
21
+ GPT_4_TURBO = "gpt-4-turbo"
22
+ GPT_3_5_TURBO = "gpt-3.5-turbo"
23
+ O1_MINI = "o1-mini"
24
+ O3_MINI = "o3-mini"
25
+ O1_PREVIEW = "o1-preview"
26
+ O1 = "o1"
26
27
  end
27
28
 
28
29
  DEFAULT_MODEL = Model::GPT_4O
29
30
 
30
- protected
31
+ protected
31
32
 
32
33
  # @return [Hash]
33
34
  def payload
@@ -20,7 +20,7 @@ module OmniAI
20
20
  #
21
21
  # client = OmniAI::OpenAI::Client.new
22
22
  class Client < OmniAI::Client
23
- VERSION = 'v1'
23
+ VERSION = "v1"
24
24
 
25
25
  attr_reader :api_prefix
26
26
 
@@ -33,7 +33,7 @@ module OmniAI
33
33
  # @param timeout [Integer, nil] optional - defaults to `OmniAI::OpenAI.config.timeout`
34
34
  def initialize(
35
35
  api_key: OmniAI::OpenAI.config.api_key,
36
- api_prefix: '',
36
+ api_prefix: "",
37
37
  host: OmniAI::OpenAI.config.host,
38
38
  organization: OmniAI::OpenAI.config.organization,
39
39
  project: OmniAI::OpenAI.config.project,
@@ -53,9 +53,9 @@ module OmniAI
53
53
  @project = project
54
54
 
55
55
  @api_prefix = api_prefix
56
- return if @api_prefix.empty? || @api_prefix.start_with?('/')
56
+ return if @api_prefix.empty? || @api_prefix.start_with?("/")
57
57
 
58
- @api_prefix.prepend('/')
58
+ @api_prefix.prepend("/")
59
59
  end
60
60
 
61
61
  # @return [HTTP::Client]
@@ -63,8 +63,8 @@ module OmniAI
63
63
  @connection ||= begin
64
64
  http = super
65
65
  http = http.auth("Bearer #{@api_key}") if @api_key
66
- http = http.headers('OpenAI-Organization': @organization) if @organization
67
- http = http.headers('OpenAI-Project': @project) if @project
66
+ http = http.headers("OpenAI-Organization": @organization) if @organization
67
+ http = http.headers("OpenAI-Project": @project) if @project
68
68
  http
69
69
  end
70
70
  end
@@ -4,7 +4,7 @@ module OmniAI
4
4
  module OpenAI
5
5
  # Configuration for OpenAI.
6
6
  class Config < OmniAI::Config
7
- DEFAULT_HOST = 'https://api.openai.com'
7
+ DEFAULT_HOST = "https://api.openai.com"
8
8
 
9
9
  # @!attribute [rw] organization
10
10
  # @return [String, nil] passed as `OpenAI-Organization` if specified
@@ -21,10 +21,10 @@ module OmniAI
21
21
  # @param logger [Logger, nil] optional
22
22
  # @param timeout [Integer, Hash, nil] optional
23
23
  def initialize(
24
- api_key: ENV.fetch('OPENAI_API_KEY', nil),
25
- host: ENV.fetch('OPENAI_HOST', DEFAULT_HOST),
26
- organization: ENV.fetch('OPENAI_ORGANIZATION', nil),
27
- project: ENV.fetch('OPENAI_PROJECT', nil),
24
+ api_key: ENV.fetch("OPENAI_API_KEY", nil),
25
+ host: ENV.fetch("OPENAI_HOST", DEFAULT_HOST),
26
+ organization: ENV.fetch("OPENAI_ORGANIZATION", nil),
27
+ project: ENV.fetch("OPENAI_PROJECT", nil),
28
28
  logger: nil,
29
29
  timeout: nil
30
30
  )
@@ -11,14 +11,14 @@ module OmniAI
11
11
  # response.embedding [0.0, ...]
12
12
  class Embed < OmniAI::Embed
13
13
  module Model
14
- SMALL = 'text-embedding-3-small'
15
- LARGE = 'text-embedding-3-large'
16
- ADA = 'text-embedding-ada-002'
14
+ SMALL = "text-embedding-3-small"
15
+ LARGE = "text-embedding-3-large"
16
+ ADA = "text-embedding-ada-002"
17
17
  end
18
18
 
19
19
  DEFAULT_MODEL = Model::LARGE
20
20
 
21
- protected
21
+ protected
22
22
 
23
23
  # @return [Hash]
24
24
  def payload
@@ -25,7 +25,7 @@ module OmniAI
25
25
  attr_accessor :deleted
26
26
 
27
27
  module Purpose
28
- ASSISTANTS = 'assistants'
28
+ ASSISTANTS = "assistants"
29
29
  end
30
30
 
31
31
  # @param client [OmniAI::OpenAI::Client] optional
@@ -58,7 +58,7 @@ module OmniAI
58
58
  # @raise [OmniAI::Error]
59
59
  # @yield [String]
60
60
  def content(&)
61
- raise OmniAI::Error, 'cannot fetch content without ID' unless @id
61
+ raise OmniAI::Error, "cannot fetch content without ID" unless @id
62
62
 
63
63
  response = @client.connection
64
64
  .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{@id}/content")
@@ -90,7 +90,7 @@ module OmniAI
90
90
 
91
91
  raise HTTPError, response.flush unless response.status.ok?
92
92
 
93
- response.parse['data'].map { |data| parse(data:, client:) }
93
+ response.parse["data"].map { |data| parse(data:, client:) }
94
94
  end
95
95
 
96
96
  # @param id [String] required
@@ -109,7 +109,7 @@ module OmniAI
109
109
  # @raise [HTTPError]
110
110
  # @return [OmniAI::OpenAI::Assistant]
111
111
  def save!
112
- raise OmniAI::Error, 'cannot save a file without IO' unless @io
112
+ raise OmniAI::Error, "cannot save a file without IO" unless @io
113
113
 
114
114
  response = @client.connection
115
115
  .accept(:json)
@@ -123,14 +123,14 @@ module OmniAI
123
123
  # @raise [OmniAI::Error]
124
124
  # @return [OmniAI::OpenAI::Assistant]
125
125
  def destroy!
126
- raise OmniAI::Error, 'cannot destroy w/o ID' unless @id
126
+ raise OmniAI::Error, "cannot destroy w/o ID" unless @id
127
127
 
128
128
  data = self.class.destroy!(id: @id, client: @client)
129
- @deleted = data['deleted']
129
+ @deleted = data["deleted"]
130
130
  self
131
131
  end
132
132
 
133
- private
133
+ private
134
134
 
135
135
  # @return [Hash]
136
136
  def payload
@@ -141,7 +141,7 @@ module OmniAI
141
141
  end
142
142
 
143
143
  class << self
144
- private
144
+ private
145
145
 
146
146
  # @param data [Hash] required
147
147
  # @param client [OmniAI::OpenAI::Client] required
@@ -149,10 +149,10 @@ module OmniAI
149
149
  def parse(data:, client: Client.new)
150
150
  new(
151
151
  client:,
152
- id: data['id'],
153
- bytes: data['bytes'],
154
- filename: data['filename'],
155
- purpose: data['purpose']
152
+ id: data["id"],
153
+ bytes: data["bytes"],
154
+ filename: data["filename"],
155
+ purpose: data["purpose"]
156
156
  )
157
157
  end
158
158
  end
@@ -160,10 +160,10 @@ module OmniAI
160
160
  # @param data [Hash] required
161
161
  # @return [OmniAI::OpenAI::Assistant]
162
162
  def parse(data:)
163
- @id = data['id']
164
- @bytes = data['bytes']
165
- @filename = data['filename']
166
- @purpose = data['purpose']
163
+ @id = data["id"]
164
+ @bytes = data["bytes"]
165
+ @filename = data["filename"]
166
+ @purpose = data["purpose"]
167
167
  end
168
168
  end
169
169
  end
@@ -5,20 +5,20 @@ module OmniAI
5
5
  # An OpenAI text-to-speech implementation.
6
6
  class Speak < OmniAI::Speak
7
7
  module Model
8
- TTS_1 = 'tts-1'
9
- TTS_1_HD = 'tts-1-hd'
8
+ TTS_1 = "tts-1"
9
+ TTS_1_HD = "tts-1-hd"
10
10
  end
11
11
 
12
12
  module Voice
13
- ALLOY = 'alloy' # https://platform.openai.com/docs/guides/text-to-speech/alloy
14
- ECHO = 'echo' # https://platform.openai.com/docs/guides/text-to-speech/echo
15
- FABLE = 'fable' # https://platform.openai.com/docs/guides/text-to-speech/fable
16
- NOVA = 'nova' # https://platform.openai.com/docs/guides/text-to-speech/nova
17
- ONYX = 'onyx' # https://platform.openai.com/docs/guides/text-to-speech/onyx
18
- SHIMMER = 'shimmer' # https://platform.openai.com/docs/guides/text-to-speech/shimmer
13
+ ALLOY = "alloy" # https://platform.openai.com/docs/guides/text-to-speech/alloy
14
+ ECHO = "echo" # https://platform.openai.com/docs/guides/text-to-speech/echo
15
+ FABLE = "fable" # https://platform.openai.com/docs/guides/text-to-speech/fable
16
+ NOVA = "nova" # https://platform.openai.com/docs/guides/text-to-speech/nova
17
+ ONYX = "onyx" # https://platform.openai.com/docs/guides/text-to-speech/onyx
18
+ SHIMMER = "shimmer" # https://platform.openai.com/docs/guides/text-to-speech/shimmer
19
19
  end
20
20
 
21
- protected
21
+ protected
22
22
 
23
23
  # @return [Hash]
24
24
  def payload
@@ -18,22 +18,22 @@ module OmniAI
18
18
 
19
19
  # @return [String] "file_citation" or "file_path"
20
20
  def type
21
- @data['type']
21
+ @data["type"]
22
22
  end
23
23
 
24
24
  # @return [String]
25
25
  def text
26
- @data['text']
26
+ @data["text"]
27
27
  end
28
28
 
29
29
  # @return [Integer]
30
30
  def start_index
31
- @data['start_index']
31
+ @data["start_index"]
32
32
  end
33
33
 
34
34
  # @return [Integer]
35
35
  def end_index
36
- @data['end_index']
36
+ @data["end_index"]
37
37
  end
38
38
 
39
39
  # @return [Range<Integer>]
@@ -43,7 +43,7 @@ module OmniAI
43
43
 
44
44
  # @return [String]
45
45
  def file_id
46
- @file_id ||= (@data['file_citation'] || @data['file_path'])['file_id']
46
+ @file_id ||= (@data["file_citation"] || @data["file_path"])["file_id"]
47
47
  end
48
48
 
49
49
  # Present if type is "file_citation" or "file_path".
@@ -28,12 +28,12 @@ module OmniAI
28
28
 
29
29
  # @return [String] e.g. "text"
30
30
  def file_id
31
- @file_id ||= @data['file_id']
31
+ @file_id ||= @data["file_id"]
32
32
  end
33
33
 
34
34
  # @return [Array<Hash>]
35
35
  def tools
36
- @tools ||= @data['tools']
36
+ @tools ||= @data["tools"]
37
37
  end
38
38
 
39
39
  # @return [OmniAI::OpenAI::File]
@@ -6,7 +6,7 @@ module OmniAI
6
6
  # An OpenAI content w/ annotations.
7
7
  class Content
8
8
  module Type
9
- TEXT = 'text'
9
+ TEXT = "text"
10
10
  end
11
11
 
12
12
  # @param data [Array]
@@ -31,7 +31,7 @@ module OmniAI
31
31
 
32
32
  # @return [String] e.g. "text"
33
33
  def type
34
- @type ||= @data['type']
34
+ @type ||= @data["type"]
35
35
  end
36
36
 
37
37
  # @return [Boolean]
@@ -41,7 +41,7 @@ module OmniAI
41
41
 
42
42
  # @return [OmniAI::OpenAI::Thread::Text]
43
43
  def text
44
- @text ||= Text.new(data: @data['text'], client: @client) if @data['text']
44
+ @text ||= Text.new(data: @data["text"], client: @client) if @data["text"]
45
45
  end
46
46
  end
47
47
  end
@@ -110,7 +110,7 @@ module OmniAI
110
110
 
111
111
  raise HTTPError, response.flush unless response.status.ok?
112
112
 
113
- response.parse['data'].map { |data| parse(data:, client:) }
113
+ response.parse["data"].map { |data| parse(data:, client:) }
114
114
  end
115
115
 
116
116
  # @param thread_id [String] required
@@ -145,17 +145,17 @@ module OmniAI
145
145
  # @raise [OmniAI::Error]
146
146
  # @return [OmniAI::OpenAI::Thread]
147
147
  def destroy!
148
- raise OmniAI::Error, 'cannot destroy a non-persisted thread' unless @id
148
+ raise OmniAI::Error, "cannot destroy a non-persisted thread" unless @id
149
149
 
150
150
  data = self.class.destroy!(thread_id: @thread_id, id: @id, client: @client)
151
- @deleted = data['deleted']
151
+ @deleted = data["deleted"]
152
152
  self
153
153
  end
154
154
 
155
- private
155
+ private
156
156
 
157
157
  class << self
158
- private
158
+ private
159
159
 
160
160
  # @param data [Hash] required
161
161
  # @param client [OmniAI::OpenAI::Client] required
@@ -163,28 +163,28 @@ module OmniAI
163
163
  def parse(data:, client: Client.new)
164
164
  new(
165
165
  client:,
166
- id: data['id'],
167
- assistant_id: data['assistant_id'],
168
- thread_id: data['thread_id'],
169
- run_id: data['run_id'],
170
- role: data['role'],
171
- content: Content.for(data: data['content'], client:),
172
- attachments: Attachment.for(data: data['attachments'], client:),
173
- metadata: data['metadata']
166
+ id: data["id"],
167
+ assistant_id: data["assistant_id"],
168
+ thread_id: data["thread_id"],
169
+ run_id: data["run_id"],
170
+ role: data["role"],
171
+ content: Content.for(data: data["content"], client:),
172
+ attachments: Attachment.for(data: data["attachments"], client:),
173
+ metadata: data["metadata"]
174
174
  )
175
175
  end
176
176
  end
177
177
 
178
178
  # @param data [Hash] required
179
179
  def parse(data:)
180
- @id = data['id']
181
- @assistant_id = data['assistant_id']
182
- @thread_id = data['thread_id']
183
- @run_id = data['run_id']
184
- @role = data['role']
185
- @content = Content.for(data: data['content'], client: @client)
186
- @attachments = Attachment.for(data: data['content'], client: @client)
187
- @metadata = data['metadata']
180
+ @id = data["id"]
181
+ @assistant_id = data["assistant_id"]
182
+ @thread_id = data["thread_id"]
183
+ @run_id = data["run_id"]
184
+ @role = data["role"]
185
+ @content = Content.for(data: data["content"], client: @client)
186
+ @attachments = Attachment.for(data: data["content"], client: @client)
187
+ @metadata = data["metadata"]
188
188
  end
189
189
 
190
190
  # @return [Hash]
@@ -6,10 +6,10 @@ module OmniAI
6
6
  # An OpenAI run within a thread.
7
7
  class Run
8
8
  module Status
9
- CANCELLED = 'cancelled'
10
- FAILED = 'failed'
11
- COMPLETED = 'completed'
12
- EXPIRED = 'expired'
9
+ CANCELLED = "cancelled"
10
+ FAILED = "failed"
11
+ COMPLETED = "completed"
12
+ EXPIRED = "expired"
13
13
  end
14
14
 
15
15
  TERMINATED_STATUSES = [
@@ -125,7 +125,7 @@ module OmniAI
125
125
 
126
126
  raise HTTPError, response.flush unless response.status.ok?
127
127
 
128
- response.parse['data'].map { |data| parse(data:, client:) }
128
+ response.parse["data"].map { |data| parse(data:, client:) }
129
129
  end
130
130
 
131
131
  # @param thread_id [String] required
@@ -160,7 +160,7 @@ module OmniAI
160
160
  # @raise [HTTPError]
161
161
  # @return [OmniAI::OpenAI::Thread]
162
162
  def reload!
163
- raise Error, 'unable to fetch! without an ID' unless @id
163
+ raise Error, "unable to fetch! without an ID" unless @id
164
164
 
165
165
  response = @client.connection
166
166
  .accept(:json)
@@ -176,10 +176,10 @@ module OmniAI
176
176
  # @raise [OmniAI::Error]
177
177
  # @return [OmniAI::OpenAI::Thread]
178
178
  def cancel!
179
- raise OmniAI::Error, 'cannot cancel a non-persisted thread' unless @id
179
+ raise OmniAI::Error, "cannot cancel a non-persisted thread" unless @id
180
180
 
181
181
  data = self.class.cancel!(thread_id: @thread_id, id: @id, client: @client)
182
- @status = data['status']
182
+ @status = data["status"]
183
183
  self
184
184
  end
185
185
 
@@ -200,10 +200,10 @@ module OmniAI
200
200
  TERMINATED_STATUSES.include?(@status)
201
201
  end
202
202
 
203
- private
203
+ private
204
204
 
205
205
  class << self
206
- private
206
+ private
207
207
 
208
208
  # @param data [Hash] required
209
209
  # @param client [OmniAI::OpenAI::Client] required
@@ -211,31 +211,31 @@ module OmniAI
211
211
  def parse(data:, client: Client.new)
212
212
  new(
213
213
  client:,
214
- id: data['id'],
215
- assistant_id: data['assistant_id'],
216
- thread_id: data['thread_id'],
217
- status: data['status'],
218
- model: data['model'],
219
- temperature: data['temperature'],
220
- instructions: data['instructions'],
221
- tools: data['tools'],
222
- metadata: data['metadata']
214
+ id: data["id"],
215
+ assistant_id: data["assistant_id"],
216
+ thread_id: data["thread_id"],
217
+ status: data["status"],
218
+ model: data["model"],
219
+ temperature: data["temperature"],
220
+ instructions: data["instructions"],
221
+ tools: data["tools"],
222
+ metadata: data["metadata"]
223
223
  )
224
224
  end
225
225
  end
226
226
 
227
227
  # @param data [Hash] required
228
228
  def parse(data:)
229
- @id = data['id']
230
- @assistant_id = data['assistant_id']
231
- @thread_id = data['thread_id']
232
- @run_id = data['run_id']
233
- @status = data['status']
234
- @model = data['model']
235
- @temperature = data['temperature']
236
- @instructions = data['instructions']
237
- @tools = data['tools']
238
- @metadata = data['metadata']
229
+ @id = data["id"]
230
+ @assistant_id = data["assistant_id"]
231
+ @thread_id = data["thread_id"]
232
+ @run_id = data["run_id"]
233
+ @status = data["status"]
234
+ @model = data["model"]
235
+ @temperature = data["temperature"]
236
+ @instructions = data["instructions"]
237
+ @tools = data["tools"]
238
+ @metadata = data["metadata"]
239
239
  end
240
240
 
241
241
  # @return [Hash]
@@ -30,17 +30,17 @@ module OmniAI
30
30
 
31
31
  # @return [String] e.g. "text"
32
32
  def type
33
- @data['type']
33
+ @data["type"]
34
34
  end
35
35
 
36
36
  # @return [String]
37
37
  def value
38
- @data['value']
38
+ @data["value"]
39
39
  end
40
40
 
41
41
  # @return [Array<OmniAI::OpenAI::Thread::Annotation>]
42
42
  def annotations
43
- @annotations ||= @data['annotations'].map { |data| Annotation.new(data:, client: @client) }
43
+ @annotations ||= @data["annotations"].map { |data| Annotation.new(data:, client: @client) }
44
44
  end
45
45
  end
46
46
  end
@@ -4,7 +4,7 @@ module OmniAI
4
4
  module OpenAI
5
5
  # An OpenAI threads implementation.
6
6
  class Thread
7
- HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
7
+ HEADERS = { "OpenAI-Beta": "assistants=v2" }.freeze
8
8
 
9
9
  # @!attribute [rw] id
10
10
  # @return [String, nil]
@@ -86,10 +86,10 @@ module OmniAI
86
86
  # @raise [OmniAI::Error]
87
87
  # @return [OmniAI::OpenAI::Thread]
88
88
  def destroy!
89
- raise OmniAI::Error, 'cannot destroy a non-persisted thread' unless @id
89
+ raise OmniAI::Error, "cannot destroy a non-persisted thread" unless @id
90
90
 
91
91
  data = self.class.destroy!(id: @id, client: @client)
92
- @deleted = data['deleted']
92
+ @deleted = data["deleted"]
93
93
  self
94
94
  end
95
95
 
@@ -103,10 +103,10 @@ module OmniAI
103
103
  Runs.new(client: @client, thread: self)
104
104
  end
105
105
 
106
- private
106
+ private
107
107
 
108
108
  class << self
109
- private
109
+ private
110
110
 
111
111
  # @param data [Hash] required
112
112
  # @param client [OmniAI::OpenAI::Client] required
@@ -114,9 +114,9 @@ module OmniAI
114
114
  def parse(data:, client: Client.new)
115
115
  new(
116
116
  client:,
117
- id: data['id'],
118
- metadata: data['metadata'],
119
- tool_resources: data['tool_resources']
117
+ id: data["id"],
118
+ metadata: data["metadata"],
119
+ tool_resources: data["tool_resources"]
120
120
  )
121
121
  end
122
122
  end
@@ -124,9 +124,9 @@ module OmniAI
124
124
  # @param data [Hash] required
125
125
  # @return [OmniAI::OpenAI::Thread]
126
126
  def parse(data:)
127
- @id = data['id']
128
- @metadata = data['metadata']
129
- @tool_resources = data['tool_resources']
127
+ @id = data["id"]
128
+ @metadata = data["metadata"]
129
+ @tool_resources = data["tool_resources"]
130
130
  end
131
131
 
132
132
  # @return [Hash]
@@ -4,8 +4,8 @@ module OmniAI
4
4
  module OpenAI
5
5
  # A set of tools.
6
6
  module Tool
7
- FILE_SEARCH = { type: 'file_search' }.freeze
8
- CODE_INTERPRETER = { type: 'code_interpreter' }.freeze
7
+ FILE_SEARCH = { type: "file_search" }.freeze
8
+ CODE_INTERPRETER = { type: "code_interpreter" }.freeze
9
9
  end
10
10
  end
11
11
  end
@@ -5,11 +5,11 @@ module OmniAI
5
5
  # An OpenAI transcribe implementation.
6
6
  class Transcribe < OmniAI::Transcribe
7
7
  module Model
8
- WHISPER_1 = 'whisper-1'
8
+ WHISPER_1 = "whisper-1"
9
9
  WHISPER = WHISPER_1
10
10
  end
11
11
 
12
- protected
12
+ protected
13
13
 
14
14
  # @return [Hash]
15
15
  def payload
@@ -2,6 +2,6 @@
2
2
 
3
3
  module OmniAI
4
4
  module OpenAI
5
- VERSION = '1.9.1'
5
+ VERSION = "1.9.3"
6
6
  end
7
7
  end
data/lib/omniai/openai.rb CHANGED
@@ -1,12 +1,12 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'event_stream_parser'
4
- require 'omniai'
5
- require 'zeitwerk'
3
+ require "event_stream_parser"
4
+ require "omniai"
5
+ require "zeitwerk"
6
6
 
7
7
  loader = Zeitwerk::Loader.for_gem
8
8
  loader.push_dir(__dir__, namespace: OmniAI)
9
- loader.inflector.inflect 'openai' => 'OpenAI'
9
+ loader.inflector.inflect "openai" => "OpenAI"
10
10
  loader.setup
11
11
 
12
12
  module OmniAI
metadata CHANGED
@@ -1,14 +1,13 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: omniai-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.9.1
4
+ version: 1.9.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Kevin Sylvestre
8
- autorequire:
9
8
  bindir: exe
10
9
  cert_chain: []
11
- date: 2024-11-05 00:00:00.000000000 Z
10
+ date: 2025-01-31 00:00:00.000000000 Z
12
11
  dependencies:
13
12
  - !ruby/object:Gem::Dependency
14
13
  name: event_stream_parser
@@ -91,7 +90,6 @@ metadata:
91
90
  homepage_uri: https://github.com/ksylvest/omniai-openai
92
91
  changelog_uri: https://github.com/ksylvest/omniai-openai/releases
93
92
  rubygems_mfa_required: 'true'
94
- post_install_message:
95
93
  rdoc_options: []
96
94
  require_paths:
97
95
  - lib
@@ -106,8 +104,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
106
104
  - !ruby/object:Gem::Version
107
105
  version: '0'
108
106
  requirements: []
109
- rubygems_version: 3.5.18
110
- signing_key:
107
+ rubygems_version: 3.6.2
111
108
  specification_version: 4
112
109
  summary: A generalized framework for interacting with OpenAI
113
110
  test_files: []