omniai-openai 2.6.1 → 2.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e2a82f5aa43b981c17fb2464d2df62118de751be2bc3ac29b12c07a67f500f84
- data.tar.gz: a2260b1de24f96c58ed2611ecbe6b477c7da4ab8f3a378b1535c8140eb4156d4
+ metadata.gz: a607626741751d7baefb2565a8cdb8ca049b42f68a98650f78aad0ab52efd8f4
+ data.tar.gz: 14ad9a8c536d7e45111d2278f1b6f9f301d253975d8a85e30092933418ad57d7
  SHA512:
- metadata.gz: f70f8eecfb376ccf582628139e992cf5f52de35e1cc3886da01400fb4cf2d086df4c97637df20c9e6b2d92d09159719b970e8c623876621b690c8226c74d74e3
- data.tar.gz: 7cb2a40052bd07117d660755e6827639ba11ed9cac4fffae8b473b07fd5f5677d9c8b72f969c826b91ff5ee07ae3daadb6b7431af37c2d0e0007dff850086dcd
+ metadata.gz: 7ee6ce9705bcfe870a08dcb73357e5d6080e8e5086bad45b85bedad091510e3737bfdb0adad3b9386695b42836eba76a5f3b7c8d59dd74293baf3ea96b1ccdf8
+ data.tar.gz: 9f7c0a5aaf69b74575b51b81acf45e81a131786f27e3455689b7b4706df20310b6a709188fdc897df772901e5e5e2f5b5d46db18473e5dd7368a2d3850a9f1d7
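
The checksums above apply to the metadata.gz and data.tar.gz members packed inside the published .gem archive, not to the .gem file itself. A minimal sketch for recomputing them locally, assuming the gem has already been downloaded to the illustrative path below:

```ruby
require 'digest'
require 'rubygems/package'

# A .gem file is a tar archive whose members include metadata.gz and
# data.tar.gz -- the two files covered by checksums.yaml above.
File.open('omniai-openai-2.6.2.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)

    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```
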
data/README.md CHANGED
@@ -306,160 +306,6 @@ end
  client.files.destroy!('file_...')
  ```
 
- ## Assistants
-
- ### Finding an Assistant
-
- ```ruby
- client.assistants.find(id: 'asst_...')
- ```
-
- ### Listing all Assistants
-
- ```ruby
- client.assistants.all
- ```
-
- ### Creating an Assistant
-
- ```ruby
- assistant = client.assistants.build
- assistant.name = 'Ringo'
- assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
- assistant.description = 'The drummer for the Beatles.'
- assistant.save!
- ```
-
- ### Updating an Assistant
-
- ```ruby
- assistant = client.assistants.find(id: 'asst_...')
- assistant.name = 'George'
- assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
- assistant.description = 'A guitarist for the Beatles.'
- assistant.save!
- ```
-
- ### Destroying an Assistant
-
- ```ruby
- client.assistants.destroy!('asst_...')
- ```
-
- ## Threads
-
- ### Finding a Thread
-
- ```ruby
- client.threads.find(id: 'thread_...')
- ```
-
- ### Creating a Thread
-
- ```ruby
- thread = client.threads.build
- thread.metadata = { user: 'Ringo' }
- thread.save!
- ```
-
- ### Updating a Thread
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- thread.metadata = { user: 'Ringo' }
- thread.save!
- ```
-
- ### Destroying a Threads
-
- ```ruby
- client.threads.destroy!('thread_...')
- ```
-
- ### Messages
-
- #### Finding a Message
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- message = thread.messages.find(id: 'msg_...')
- message.save!
- ```
-
- #### Listing all Messages
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- thread.messages.all
- ```
-
- #### Creating a Message
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- message = thread.messages.build(role: 'user', content: 'Hello?')
- message.save!
- ```
-
- #### Updating a Message
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- message = thread.messages.build(role: 'user', content: 'Hello?')
- message.save!
- ```
-
- ### Runs
-
- #### Finding a Run
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- run = thread.runs.find(id: 'run_...')
- run.save!
- ```
-
- #### Listing all Runs
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- thread.runs.all
- ```
-
- #### Creating a Run
-
- ```ruby
- run = client.runs.find(id: 'thread_...')
- run = thread.runs.build
- run.metadata = { user: 'Ringo' }
- run.save!
- ```
-
- #### Updating a Run
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- run = thread.messages.find(id: 'run_...')
- run.metadata = { user: 'Ringo' }
- run.save!
- ```
-
- #### Polling a Run
-
- ```ruby
- run.terminated? # false
- run.poll!
- run.terminated? # true
- run.status # 'cancelled' / 'failed' / 'completed' / 'expired'
- ```
-
- #### Cancelling a Run
-
- ```ruby
- thread = client.threads.find(id: 'thread_...')
- run = thread.runs.cancel!(id: 'run_...')
- ```
-
  ### Embed
 
  Text can be converted into a vector embedding for similarity comparison usage via:
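
The hunk above ends just before the README's embed example. For orientation only, the call it introduces looks roughly like the following; this block is not part of the diff, and the accessor name is assumed from the OmniAI embed API rather than taken from the shipped README:

```ruby
client = OmniAI::OpenAI::Client.new

# Convert text into a vector embedding for similarity comparisons.
response = client.embed('The quick brown fox jumps over a lazy dog.')
response.embedding # => [0.0, ...] (assumed accessor)
```
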
data/lib/omniai/openai/chat.rb CHANGED
@@ -21,6 +21,7 @@ module OmniAI
  end
 
  module Model
+ GPT_5_1 = "gpt-5.1"
  GPT_5 = "gpt-5"
  GPT_5_MINI = "gpt-5-mini"
  GPT_5_NANO = "gpt-5-nano"
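
The new `GPT_5_1` constant is the only functional addition in this release. A minimal sketch of selecting it explicitly, assuming the standard `client.chat` interface documented in the README and a `text` accessor on the response (both assumptions, not part of this diff):

```ruby
client = OmniAI::OpenAI::Client.new

# Passing the new constant is equivalent to passing model: "gpt-5.1".
completion = client.chat('Tell me a joke.', model: OmniAI::OpenAI::Chat::Model::GPT_5_1)

completion.text # assumed response accessor
```
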
data/lib/omniai/openai/client.rb CHANGED
@@ -133,16 +133,6 @@ module OmniAI
  def files
  Files.new(client: self)
  end
-
- # @return [OmniAI::OpenAI::Assistants]
- def assistants
- Assistants.new(client: self)
- end
-
- # @return [OmniAI::OpenAI::Threads]
- def threads
- Threads.new(client: self)
- end
  end
  end
  end
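
Together with the deleted files listed further down, removing these two accessors drops the Assistants/Threads surface from the client entirely. A hedged sketch of the practical effect for callers, using only method names that appear in this diff (and assuming no method_missing fallback):

```ruby
client = OmniAI::OpenAI::Client.new

client.files      # still defined in 2.6.2 per this diff
client.assistants # now raises NoMethodError: the accessor above was removed
client.threads    # likewise removed, along with the Thread classes
```
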
data/lib/omniai/openai/version.rb CHANGED
@@ -2,6 +2,6 @@
 
  module OmniAI
  module OpenAI
- VERSION = "2.6.1"
+ VERSION = "2.6.2"
  end
  end
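
Since this patch-level bump also removes the Assistants/Threads API, projects that still depend on it can stay on the previous release while migrating. A standard Gemfile pin, using only the gem name and versions that appear in this diff:

```ruby
# Gemfile
gem 'omniai-openai', '2.6.1' # the 2.6.1 side of this diff still ships client.assistants / client.threads
```
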
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: omniai-openai
  version: !ruby/object:Gem::Version
- version: 2.6.1
+ version: 2.6.2
  platform: ruby
  authors:
  - Kevin Sylvestre
@@ -61,8 +61,6 @@ files:
  - Gemfile
  - README.md
  - lib/omniai/openai.rb
- - lib/omniai/openai/assistant.rb
- - lib/omniai/openai/assistants.rb
  - lib/omniai/openai/chat.rb
  - lib/omniai/openai/client.rb
  - lib/omniai/openai/config.rb
@@ -70,16 +68,6 @@ files:
  - lib/omniai/openai/file.rb
  - lib/omniai/openai/files.rb
  - lib/omniai/openai/speak.rb
- - lib/omniai/openai/thread.rb
- - lib/omniai/openai/thread/annotation.rb
- - lib/omniai/openai/thread/attachment.rb
- - lib/omniai/openai/thread/content.rb
- - lib/omniai/openai/thread/message.rb
- - lib/omniai/openai/thread/messages.rb
- - lib/omniai/openai/thread/run.rb
- - lib/omniai/openai/thread/runs.rb
- - lib/omniai/openai/thread/text.rb
- - lib/omniai/openai/threads.rb
  - lib/omniai/openai/tool.rb
  - lib/omniai/openai/transcribe.rb
  - lib/omniai/openai/version.rb
@@ -104,7 +92,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.6.9
+ rubygems_version: 3.7.2
  specification_version: 4
  summary: A generalized framework for interacting with OpenAI
  test_files: []
@@ -1,185 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
- module OpenAI
- # An OpenAI assistants implementation.
- class Assistant
- HEADERS = { "OpenAI-Beta": "assistants=v2" }.freeze
-
- # @!attribute [rw] id
- # @return [String, nil]
- attr_accessor :id
-
- # @!attribute [rw] name
- # @return [String, nil]
- attr_accessor :name
-
- # @!attribute [rw] model
- # @return [String, nil]
- attr_accessor :model
-
- # @!attribute [rw] description
- # @return [String, nil]
- attr_accessor :description
-
- # @!attribute [rw] instructions
- # @return [String, nil]
- attr_accessor :instructions
-
- # @!attribute [rw] metadata
- # @return [Hash]
- attr_accessor :metadata
-
- # @!attribute [rw] deleted
- # @return [Boolean, nil]
- attr_accessor :deleted
-
- # @!attribute [r] tools
- # @return [Array<Hash>, nil]
- attr_accessor :tools
-
- # @param client [OmniAI::OpenAI::Client] optional
- # @param id [String]
- # @param name [String]
- # @param model [String]
- # @param description [String, nil] optional
- # @param instructions [String,nil] optional
- # @param metadata [Hash] optional
- def initialize(
- client: Client.new,
- id: nil,
- name: nil,
- model: OmniAI::Chat::DEFAULT_MODEL,
- description: nil,
- instructions: nil,
- metadata: {},
- tools: []
- )
- @client = client
- @id = id
- @name = name
- @model = model
- @description = description
- @instructions = instructions
- @metadata = metadata
- @tools = tools
- end
-
- # @return [String]
- def inspect
- "#<#{self.class.name} id=#{@id.inspect} name=#{@name.inspect} model=#{@model.inspect}>"
- end
-
- # @param id [String] required
- # @param client [OmniAI::OpenAI::Client] optional
- # @return [OmniAI::OpenAI::Assistant]
- def self.find(id:, client: Client.new)
- response = client.connection
- .accept(:json)
- .headers(HEADERS)
- .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
-
- raise HTTPError, response.flush unless response.status.ok?
-
- parse(data: response.parse)
- end
-
- # @param limit [Integer] optional
- # @param client [OmniAI::OpenAI::Client] optional
- # @return [Array<OmniAI::OpenAI::Assistant>]
- def self.all(limit: nil, client: Client.new)
- response = client.connection
- .accept(:json)
- .headers(HEADERS)
- .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants", params: { limit: }.compact)
-
- raise HTTPError, response.flush unless response.status.ok?
-
- response.parse["data"].map { |data| parse(data:, client:) }
- end
-
- # @param id [String] required
- # @param client [OmniAI::OpenAI::Client] optional
- # @return [void]
- def self.destroy!(id:, client: Client.new)
- response = client.connection
- .accept(:json)
- .headers(HEADERS)
- .delete("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
-
- raise HTTPError, response.flush unless response.status.ok?
-
- response.parse
- end
-
- # @raise [HTTPError]
- # @return [OmniAI::OpenAI::Assistant]
- def save!
- response = @client.connection
- .accept(:json)
- .headers(HEADERS)
- .post("/#{OmniAI::OpenAI::Client::VERSION}/assistants#{"/#{@id}" if @id}", json: payload)
- raise HTTPError, response.flush unless response.status.ok?
-
- parse(data: response.parse)
- self
- end
-
- # @raise [OmniAI::Error]
- # @return [OmniAI::OpenAI::Assistant]
- def destroy!
- raise OmniAI::Error, "cannot destroy a non-persisted assistant" unless @id
-
- data = self.class.destroy!(id: @id, client: @client)
- @deleted = data["deleted"]
- self
- end
-
- private
-
- class << self
- private
-
- # @param data [Hash] required
- # @param client [OmniAI::OpenAI::Client] required
- # @return [OmniAI::OpenAI::Assistant]
- def parse(data:, client: Client.new)
- new(
- client:,
- id: data["id"],
- name: data["name"],
- model: data["model"],
- description: data["description"],
- instructions: data["instructions"],
- metadata: data["metadata"],
- tools: data["tools"]
- )
- end
- end
-
- # @param data [Hash] required
- # @return [OmniAI::OpenAI::Assistant]
- def parse(data:)
- @id = data["id"]
- @name = data["name"]
- @model = data["model"]
- @description = data["description"]
- @instructions = data["instructions"]
- @metadata = data["metadata"]
- @tools = data["tools"]
- end
-
- # @return [Hash]
- def payload
- {
- name: @name,
- model: @model,
- description: @description,
- instructions: @instructions,
- metadata: @metadata,
- tools: @tools,
- }.compact
- end
- end
- end
- end
@@ -1,38 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
- module OpenAI
- # An OpenAI scope for establishing assistants.
- class Assistants
- # @param client [OmniAI::OpenAI::Client] required
- def initialize(client:)
- @client = client
- end
-
- # @param id [String] required
- def find(id:)
- Assistant.find(id:, client: @client)
- end
-
- # @param limit [Integer] optional
- def all(limit: nil)
- Assistant.all(limit:, client: @client)
- end
-
- # @param id [String] required
- def destroy!(id:)
- Assistant.destroy!(id:, client: @client)
- end
-
- # @param name [String]
- # @param model [String]
- # @param description [String, nil] optional
- # @param instructions [String,nil] optional
- # @param metadata [Hash] optional
- # @param tools [Array<Hash>] optional
- def build(name: nil, description: nil, instructions: nil, model: Chat::Model, metadata: {}, tools: [])
- Assistant.new(name:, model:, description:, instructions:, metadata:, tools:, client: @client)
- end
- end
- end
- end
@@ -1,58 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
- module OpenAI
- class Thread
- # An OpenAI content w/ annotations.
- class Annotation
- # @!attribute [rw] data
- # @return [Hash, nil]
- attr_accessor :data
-
- # @param data [Hash] required
- # @param client [OmniAI::OpenAI::Client] optional
- def initialize(data:, client: Client.new)
- @data = data
- @client = client
- end
-
- # @return [String] "file_citation" or "file_path"
- def type
- @data["type"]
- end
-
- # @return [String]
- def text
- @data["text"]
- end
-
- # @return [Integer]
- def start_index
- @data["start_index"]
- end
-
- # @return [Integer]
- def end_index
- @data["end_index"]
- end
-
- # @return [Range<Integer>]
- def range
- start_index..end_index
- end
-
- # @return [String]
- def file_id
- @file_id ||= (@data["file_citation"] || @data["file_path"])["file_id"]
- end
-
- # Present if type is "file_citation" or "file_path".
- #
- # @return [OmniAI::OpenAI::File, nil]
- def file!
- @file ||= @client.files.find(id: file_id)
- end
- end
- end
- end
- end
@@ -1,46 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
- module OpenAI
- class Thread
- # An OpenAI attachment.
- class Attachment
- # @!attribute [rw] data
- # @return [Hash, nil]
- attr_accessor :data
-
- # @param data [Array]
- # @param client [OmniAI::OpenAI::Client]
- #
- # @return [Array<OmniAI::OpenAI::Thread::Content>, String, nil]
- def self.for(data:, client: Client.new)
- return data unless data.is_a?(Enumerable)
-
- data.map { |attachment| new(data: attachment, client:) }
- end
-
- # @param data [Hash]
- # @param client [OmniAI::OpenAI::Client]
- def initialize(data:, client: Client.new)
- @data = data
- @client = client
- end
-
- # @return [String] e.g. "text"
- def file_id
- @file_id ||= @data["file_id"]
- end
-
- # @return [Array<Hash>]
- def tools
- @tools ||= @data["tools"]
- end
-
- # @return [OmniAI::OpenAI::File]
- def file!
- @file ||= @client.files.find(id: file_id)
- end
- end
- end
- end
- end
@@ -1,49 +0,0 @@
- # frozen_string_literal: true
-
- module OmniAI
- module OpenAI
- class Thread
- # An OpenAI content w/ annotations.
- class Content
- module Type
- TEXT = "text"
- end
-
- # @param data [Array]
- # @param client [OmniAI::OpenAI::Client]
- #
- # @return [Array<OmniAI::OpenAI::Thread::Content>, String, nil]
- def self.for(data:, client: Client.new)
- return data unless data.is_a?(Enumerable)
-
- data.map { |attachment| new(data: attachment, client:) }
- end
-
- # @!attribute [rw] data
- # @return [Hash, nil]
- attr_accessor :data
-
- # @param data [Hash]
- def initialize(data:, client:)
- @data = data
- @client = client
- end
-
- # @return [String] e.g. "text"
- def type
- @type ||= @data["type"]
- end
-
- # @return [Boolean]
- def text?
- type.eql?(Type::TEXT)
- end
-
- # @return [OmniAI::OpenAI::Thread::Text]
- def text
- @text ||= Text.new(data: @data["text"], client: @client) if @data["text"]
- end
- end
- end
- end
- end