omniai-openai 2.6.1 → 2.6.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: e2a82f5aa43b981c17fb2464d2df62118de751be2bc3ac29b12c07a67f500f84
4
- data.tar.gz: a2260b1de24f96c58ed2611ecbe6b477c7da4ab8f3a378b1535c8140eb4156d4
3
+ metadata.gz: e5e61a00ff0433efbc924958d49f273d17a9182420e8fcf4c1469078333243d2
4
+ data.tar.gz: bf8cd59ce36c89ad864e1c0b1a2ff69b1a38a9bc062d3ca7135851c80639956d
5
5
  SHA512:
6
- metadata.gz: f70f8eecfb376ccf582628139e992cf5f52de35e1cc3886da01400fb4cf2d086df4c97637df20c9e6b2d92d09159719b970e8c623876621b690c8226c74d74e3
7
- data.tar.gz: 7cb2a40052bd07117d660755e6827639ba11ed9cac4fffae8b473b07fd5f5677d9c8b72f969c826b91ff5ee07ae3daadb6b7431af37c2d0e0007dff850086dcd
6
+ metadata.gz: fe597fd91aab8aad25c3ad3b3f38cb59f3424b821bc15894fba58285c53262b96c850d194a8f32172b8308481e0c57786ffe72601dfd3e64fd0bdb8f2a4e5de0
7
+ data.tar.gz: f41c7dee9afb44c963b2eee6e091650848cb0d1ae3581d179c9b36875b5358bf66f14ef871d44876b52c4a38c1d49181ac984fcc8760151f0e2cb32323a14367
data/README.md CHANGED
@@ -306,160 +306,6 @@ end
306
306
  client.files.destroy!('file_...')
307
307
  ```
308
308
 
309
- ## Assistants
310
-
311
- ### Finding an Assistant
312
-
313
- ```ruby
314
- client.assistants.find(id: 'asst_...')
315
- ```
316
-
317
- ### Listing all Assistants
318
-
319
- ```ruby
320
- client.assistants.all
321
- ```
322
-
323
- ### Creating an Assistant
324
-
325
- ```ruby
326
- assistant = client.assistants.build
327
- assistant.name = 'Ringo'
328
- assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
329
- assistant.description = 'The drummer for the Beatles.'
330
- assistant.save!
331
- ```
332
-
333
- ### Updating an Assistant
334
-
335
- ```ruby
336
- assistant = client.assistants.find(id: 'asst_...')
337
- assistant.name = 'George'
338
- assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
339
- assistant.description = 'A guitarist for the Beatles.'
340
- assistant.save!
341
- ```
342
-
343
- ### Destroying an Assistant
344
-
345
- ```ruby
346
- client.assistants.destroy!('asst_...')
347
- ```
348
-
349
- ## Threads
350
-
351
- ### Finding a Thread
352
-
353
- ```ruby
354
- client.threads.find(id: 'thread_...')
355
- ```
356
-
357
- ### Creating a Thread
358
-
359
- ```ruby
360
- thread = client.threads.build
361
- thread.metadata = { user: 'Ringo' }
362
- thread.save!
363
- ```
364
-
365
- ### Updating a Thread
366
-
367
- ```ruby
368
- thread = client.threads.find(id: 'thread_...')
369
- thread.metadata = { user: 'Ringo' }
370
- thread.save!
371
- ```
372
-
373
- ### Destroying a Thread
374
-
375
- ```ruby
376
- client.threads.destroy!('thread_...')
377
- ```
378
-
379
- ### Messages
380
-
381
- #### Finding a Message
382
-
383
- ```ruby
384
- thread = client.threads.find(id: 'thread_...')
385
- message = thread.messages.find(id: 'msg_...')
386
- message.save!
387
- ```
388
-
389
- #### Listing all Messages
390
-
391
- ```ruby
392
- thread = client.threads.find(id: 'thread_...')
393
- thread.messages.all
394
- ```
395
-
396
- #### Creating a Message
397
-
398
- ```ruby
399
- thread = client.threads.find(id: 'thread_...')
400
- message = thread.messages.build(role: 'user', content: 'Hello?')
401
- message.save!
402
- ```
403
-
404
- #### Updating a Message
405
-
406
- ```ruby
407
- thread = client.threads.find(id: 'thread_...')
408
- message = thread.messages.build(role: 'user', content: 'Hello?')
409
- message.save!
410
- ```
411
-
412
- ### Runs
413
-
414
- #### Finding a Run
415
-
416
- ```ruby
417
- thread = client.threads.find(id: 'thread_...')
418
- run = thread.runs.find(id: 'run_...')
419
- run.save!
420
- ```
421
-
422
- #### Listing all Runs
423
-
424
- ```ruby
425
- thread = client.threads.find(id: 'thread_...')
426
- thread.runs.all
427
- ```
428
-
429
- #### Creating a Run
430
-
431
- ```ruby
432
- run = client.runs.find(id: 'thread_...')
433
- run = thread.runs.build
434
- run.metadata = { user: 'Ringo' }
435
- run.save!
436
- ```
437
-
438
- #### Updating a Run
439
-
440
- ```ruby
441
- thread = client.threads.find(id: 'thread_...')
442
- run = thread.messages.find(id: 'run_...')
443
- run.metadata = { user: 'Ringo' }
444
- run.save!
445
- ```
446
-
447
- #### Polling a Run
448
-
449
- ```ruby
450
- run.terminated? # false
451
- run.poll!
452
- run.terminated? # true
453
- run.status # 'cancelled' / 'failed' / 'completed' / 'expired'
454
- ```
455
-
456
- #### Cancelling a Run
457
-
458
- ```ruby
459
- thread = client.threads.find(id: 'thread_...')
460
- run = thread.runs.cancel!(id: 'run_...')
461
- ```
462
-
463
309
  ### Embed
464
310
 
465
311
  Text can be converted into a vector embedding for similarity comparison usage via:
@@ -20,7 +20,21 @@ module OmniAI
20
20
  SCHEMA_TYPE = "json_schema"
21
21
  end
22
22
 
23
+ module ReasoningEffort
24
+ NONE = "none"
25
+ LOW = "low"
26
+ MEDIUM = "medium"
27
+ HIGH = "high"
28
+ end
29
+
30
+ module VerbosityText
31
+ LOW = "low"
32
+ MEDIUM = "medium"
33
+ HIGH = "high"
34
+ end
35
+
23
36
  module Model
37
+ GPT_5_1 = "gpt-5.1"
24
38
  GPT_5 = "gpt-5"
25
39
  GPT_5_MINI = "gpt-5-mini"
26
40
  GPT_5_NANO = "gpt-5-nano"
@@ -65,7 +79,9 @@ module OmniAI
65
79
  stream_options: (DEFAULT_STREAM_OPTIONS if stream?),
66
80
  temperature:,
67
81
  tools: (@tools.map(&:serialize) if @tools&.any?),
68
- }).compact
82
+ reasoning: reasoning_payload,
83
+ verbosity: verbosity_payload,
84
+ }.merge(@kwargs || {})).compact
69
85
  end
70
86
 
71
87
  # @return [String]
@@ -86,6 +102,42 @@ module OmniAI
86
102
  else raise ArgumentError, "unknown format=#{@format}"
87
103
  end
88
104
  end
105
+
106
+ # @raise [ArgumentError]
107
+ #
108
+ # @return [Hash, nil]
109
+ def reasoning_payload
110
+ return if @reasoning.nil?
111
+
112
+ effort = @reasoning[:effort] || @reasoning["effort"]
113
+ return if effort.nil?
114
+
115
+ valid_efforts = [ReasoningEffort::NONE, ReasoningEffort::LOW, ReasoningEffort::MEDIUM, ReasoningEffort::HIGH]
116
+ unless valid_efforts.include?(effort)
117
+ raise ArgumentError,
118
+ "reasoning effort must be one of #{valid_efforts.join(', ')}"
119
+ end
120
+
121
+ { effort: }
122
+ end
123
+
124
+ # @raise [ArgumentError]
125
+ #
126
+ # @return [Hash, nil]
127
+ def verbosity_payload
128
+ return if @verbosity.nil?
129
+
130
+ text = @verbosity[:text] || @verbosity["text"]
131
+ return if text.nil?
132
+
133
+ valid_text_levels = [VerbosityText::LOW, VerbosityText::MEDIUM, VerbosityText::HIGH]
134
+ unless valid_text_levels.include?(text)
135
+ raise ArgumentError,
136
+ "verbosity text must be one of #{valid_text_levels.join(', ')}"
137
+ end
138
+
139
+ { text: }
140
+ end
89
141
  end
90
142
  end
91
143
  end
@@ -77,13 +77,16 @@ module OmniAI
77
77
  # @param temperature [Float, nil] optional
78
78
  # @param stream [Proc, nil] optional
79
79
  # @param tools [Array<OmniAI::Tool>, nil] optional
80
+ # @param reasoning [Hash, nil] optional reasoning configuration
81
+ # @param verbosity [Hash, nil] optional verbosity configuration
80
82
  #
81
83
  # @yield [prompt]
82
84
  # @yieldparam prompt [OmniAI::Chat::Prompt]
83
85
  #
84
86
  # @return [OmniAI::Chat::Completion]
85
- def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil, &)
86
- Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self, &)
87
+ def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil,
88
+ reasoning: nil, verbosity: nil, &)
89
+ Chat.process!(messages, model:, temperature:, format:, stream:, tools:, reasoning:, verbosity:, client: self, &)
87
90
  end
88
91
 
89
92
  # @raise [OmniAI::Error]
@@ -133,16 +136,6 @@ module OmniAI
133
136
  def files
134
137
  Files.new(client: self)
135
138
  end
136
-
137
- # @return [OmniAI::OpenAI::Assistants]
138
- def assistants
139
- Assistants.new(client: self)
140
- end
141
-
142
- # @return [OmniAI::OpenAI::Threads]
143
- def threads
144
- Threads.new(client: self)
145
- end
146
139
  end
147
140
  end
148
141
  end
@@ -2,6 +2,6 @@
2
2
 
3
3
  module OmniAI
4
4
  module OpenAI
5
- VERSION = "2.6.1"
5
+ VERSION = "2.6.3"
6
6
  end
7
7
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: omniai-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.6.1
4
+ version: 2.6.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Kevin Sylvestre
@@ -61,8 +61,6 @@ files:
61
61
  - Gemfile
62
62
  - README.md
63
63
  - lib/omniai/openai.rb
64
- - lib/omniai/openai/assistant.rb
65
- - lib/omniai/openai/assistants.rb
66
64
  - lib/omniai/openai/chat.rb
67
65
  - lib/omniai/openai/client.rb
68
66
  - lib/omniai/openai/config.rb
@@ -70,16 +68,6 @@ files:
70
68
  - lib/omniai/openai/file.rb
71
69
  - lib/omniai/openai/files.rb
72
70
  - lib/omniai/openai/speak.rb
73
- - lib/omniai/openai/thread.rb
74
- - lib/omniai/openai/thread/annotation.rb
75
- - lib/omniai/openai/thread/attachment.rb
76
- - lib/omniai/openai/thread/content.rb
77
- - lib/omniai/openai/thread/message.rb
78
- - lib/omniai/openai/thread/messages.rb
79
- - lib/omniai/openai/thread/run.rb
80
- - lib/omniai/openai/thread/runs.rb
81
- - lib/omniai/openai/thread/text.rb
82
- - lib/omniai/openai/threads.rb
83
71
  - lib/omniai/openai/tool.rb
84
72
  - lib/omniai/openai/transcribe.rb
85
73
  - lib/omniai/openai/version.rb
@@ -104,7 +92,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
104
92
  - !ruby/object:Gem::Version
105
93
  version: '0'
106
94
  requirements: []
107
- rubygems_version: 3.6.9
95
+ rubygems_version: 3.7.2
108
96
  specification_version: 4
109
97
  summary: A generalized framework for interacting with OpenAI
110
98
  test_files: []
@@ -1,185 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- module OpenAI
5
- # An OpenAI assistants implementation.
6
- class Assistant
7
- HEADERS = { "OpenAI-Beta": "assistants=v2" }.freeze
8
-
9
- # @!attribute [rw] id
10
- # @return [String, nil]
11
- attr_accessor :id
12
-
13
- # @!attribute [rw] name
14
- # @return [String, nil]
15
- attr_accessor :name
16
-
17
- # @!attribute [rw] model
18
- # @return [String, nil]
19
- attr_accessor :model
20
-
21
- # @!attribute [rw] description
22
- # @return [String, nil]
23
- attr_accessor :description
24
-
25
- # @!attribute [rw] instructions
26
- # @return [String, nil]
27
- attr_accessor :instructions
28
-
29
- # @!attribute [rw] metadata
30
- # @return [Hash]
31
- attr_accessor :metadata
32
-
33
- # @!attribute [rw] deleted
34
- # @return [Boolean, nil]
35
- attr_accessor :deleted
36
-
37
- # @!attribute [r] tools
38
- # @return [Array<Hash>, nil]
39
- attr_accessor :tools
40
-
41
- # @param client [OmniAI::OpenAI::Client] optional
42
- # @param id [String]
43
- # @param name [String]
44
- # @param model [String]
45
- # @param description [String, nil] optional
46
- # @param instructions [String,nil] optional
47
- # @param metadata [Hash] optional
48
- def initialize(
49
- client: Client.new,
50
- id: nil,
51
- name: nil,
52
- model: OmniAI::Chat::DEFAULT_MODEL,
53
- description: nil,
54
- instructions: nil,
55
- metadata: {},
56
- tools: []
57
- )
58
- @client = client
59
- @id = id
60
- @name = name
61
- @model = model
62
- @description = description
63
- @instructions = instructions
64
- @metadata = metadata
65
- @tools = tools
66
- end
67
-
68
- # @return [String]
69
- def inspect
70
- "#<#{self.class.name} id=#{@id.inspect} name=#{@name.inspect} model=#{@model.inspect}>"
71
- end
72
-
73
- # @param id [String] required
74
- # @param client [OmniAI::OpenAI::Client] optional
75
- # @return [OmniAI::OpenAI::Assistant]
76
- def self.find(id:, client: Client.new)
77
- response = client.connection
78
- .accept(:json)
79
- .headers(HEADERS)
80
- .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
81
-
82
- raise HTTPError, response.flush unless response.status.ok?
83
-
84
- parse(data: response.parse)
85
- end
86
-
87
- # @param limit [Integer] optional
88
- # @param client [OmniAI::OpenAI::Client] optional
89
- # @return [Array<OmniAI::OpenAI::Assistant>]
90
- def self.all(limit: nil, client: Client.new)
91
- response = client.connection
92
- .accept(:json)
93
- .headers(HEADERS)
94
- .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants", params: { limit: }.compact)
95
-
96
- raise HTTPError, response.flush unless response.status.ok?
97
-
98
- response.parse["data"].map { |data| parse(data:, client:) }
99
- end
100
-
101
- # @param id [String] required
102
- # @param client [OmniAI::OpenAI::Client] optional
103
- # @return [void]
104
- def self.destroy!(id:, client: Client.new)
105
- response = client.connection
106
- .accept(:json)
107
- .headers(HEADERS)
108
- .delete("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
109
-
110
- raise HTTPError, response.flush unless response.status.ok?
111
-
112
- response.parse
113
- end
114
-
115
- # @raise [HTTPError]
116
- # @return [OmniAI::OpenAI::Assistant]
117
- def save!
118
- response = @client.connection
119
- .accept(:json)
120
- .headers(HEADERS)
121
- .post("/#{OmniAI::OpenAI::Client::VERSION}/assistants#{"/#{@id}" if @id}", json: payload)
122
- raise HTTPError, response.flush unless response.status.ok?
123
-
124
- parse(data: response.parse)
125
- self
126
- end
127
-
128
- # @raise [OmniAI::Error]
129
- # @return [OmniAI::OpenAI::Assistant]
130
- def destroy!
131
- raise OmniAI::Error, "cannot destroy a non-persisted assistant" unless @id
132
-
133
- data = self.class.destroy!(id: @id, client: @client)
134
- @deleted = data["deleted"]
135
- self
136
- end
137
-
138
- private
139
-
140
- class << self
141
- private
142
-
143
- # @param data [Hash] required
144
- # @param client [OmniAI::OpenAI::Client] required
145
- # @return [OmniAI::OpenAI::Assistant]
146
- def parse(data:, client: Client.new)
147
- new(
148
- client:,
149
- id: data["id"],
150
- name: data["name"],
151
- model: data["model"],
152
- description: data["description"],
153
- instructions: data["instructions"],
154
- metadata: data["metadata"],
155
- tools: data["tools"]
156
- )
157
- end
158
- end
159
-
160
- # @param data [Hash] required
161
- # @return [OmniAI::OpenAI::Assistant]
162
- def parse(data:)
163
- @id = data["id"]
164
- @name = data["name"]
165
- @model = data["model"]
166
- @description = data["description"]
167
- @instructions = data["instructions"]
168
- @metadata = data["metadata"]
169
- @tools = data["tools"]
170
- end
171
-
172
- # @return [Hash]
173
- def payload
174
- {
175
- name: @name,
176
- model: @model,
177
- description: @description,
178
- instructions: @instructions,
179
- metadata: @metadata,
180
- tools: @tools,
181
- }.compact
182
- end
183
- end
184
- end
185
- end
@@ -1,38 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- module OpenAI
5
- # An OpenAI scope for establishing assistants.
6
- class Assistants
7
- # @param client [OmniAI::OpenAI::Client] required
8
- def initialize(client:)
9
- @client = client
10
- end
11
-
12
- # @param id [String] required
13
- def find(id:)
14
- Assistant.find(id:, client: @client)
15
- end
16
-
17
- # @param limit [Integer] optional
18
- def all(limit: nil)
19
- Assistant.all(limit:, client: @client)
20
- end
21
-
22
- # @param id [String] required
23
- def destroy!(id:)
24
- Assistant.destroy!(id:, client: @client)
25
- end
26
-
27
- # @param name [String]
28
- # @param model [String]
29
- # @param description [String, nil] optional
30
- # @param instructions [String,nil] optional
31
- # @param metadata [Hash] optional
32
- # @param tools [Array<Hash>] optional
33
- def build(name: nil, description: nil, instructions: nil, model: Chat::Model, metadata: {}, tools: [])
34
- Assistant.new(name:, model:, description:, instructions:, metadata:, tools:, client: @client)
35
- end
36
- end
37
- end
38
- end
@@ -1,58 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- module OpenAI
5
- class Thread
6
- # An OpenAI content w/ annotations.
7
- class Annotation
8
- # @!attribute [rw] data
9
- # @return [Hash, nil]
10
- attr_accessor :data
11
-
12
- # @param data [Hash] required
13
- # @param client [OmniAI::OpenAI::Client] optional
14
- def initialize(data:, client: Client.new)
15
- @data = data
16
- @client = client
17
- end
18
-
19
- # @return [String] "file_citation" or "file_path"
20
- def type
21
- @data["type"]
22
- end
23
-
24
- # @return [String]
25
- def text
26
- @data["text"]
27
- end
28
-
29
- # @return [Integer]
30
- def start_index
31
- @data["start_index"]
32
- end
33
-
34
- # @return [Integer]
35
- def end_index
36
- @data["end_index"]
37
- end
38
-
39
- # @return [Range<Integer>]
40
- def range
41
- start_index..end_index
42
- end
43
-
44
- # @return [String]
45
- def file_id
46
- @file_id ||= (@data["file_citation"] || @data["file_path"])["file_id"]
47
- end
48
-
49
- # Present if type is "file_citation" or "file_path".
50
- #
51
- # @return [OmniAI::OpenAI::File, nil]
52
- def file!
53
- @file ||= @client.files.find(id: file_id)
54
- end
55
- end
56
- end
57
- end
58
- end
@@ -1,46 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module OmniAI
4
- module OpenAI
5
- class Thread
6
- # An OpenAI attachment.
7
- class Attachment
8
- # @!attribute [rw] data
9
- # @return [Hash, nil]
10
- attr_accessor :data
11
-
12
- # @param data [Array]
13
- # @param client [OmniAI::OpenAI::Client]
14
- #
15
- # @return [Array<OmniAI::OpenAI::Thread::Content>, String, nil]
16
- def self.for(data:, client: Client.new)
17
- return data unless data.is_a?(Enumerable)
18
-
19
- data.map { |attachment| new(data: attachment, client:) }
20
- end
21
-
22
- # @param data [Hash]
23
- # @param client [OmniAI::OpenAI::Client]
24
- def initialize(data:, client: Client.new)
25
- @data = data
26
- @client = client
27
- end
28
-
29
- # @return [String] e.g. "text"
30
- def file_id
31
- @file_id ||= @data["file_id"]
32
- end
33
-
34
- # @return [Array<Hash>]
35
- def tools
36
- @tools ||= @data["tools"]
37
- end
38
-
39
- # @return [OmniAI::OpenAI::File]
40
- def file!
41
- @file ||= @client.files.find(id: file_id)
42
- end
43
- end
44
- end
45
- end
46
- end