omniai-openai 1.3.0 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 8a95f906f1bf54382e8ace7ae6b1622ad22d2502a23c4804038b1ca66ff43086
4
- data.tar.gz: 6694a63f7f76e2d41220a18e2ea852a19e956a8258761b71e9a0ec747197c9a9
3
+ metadata.gz: 32b555cd7f80abb2cc11a12986a0a8df15e5c1d9b6d31113f5e00b47174f4ec6
4
+ data.tar.gz: 81046a31442f6f2b1a1eb5d37723deefbb82cf7c145907ae0bb72db738e09080
5
5
  SHA512:
6
- metadata.gz: c19cd98191bd1826f24af180bcf87e96670fc162c6555bf368e4eda9305cdf5fa465585c74764d55481181752f563a581b09e3b1583220df70d9b227861a51d8
7
- data.tar.gz: b6eb2fdeb33dcf4dda02f2b608e13ae3cc29c687b0c9eb31bdecaf72d07548b0201c68efe3e40cd754b368d2b2f84813e5eeb207878dbc6355fe4a67a8308f3e
6
+ metadata.gz: '0939f67a3bc54221fc1141b00ab8eb1c38323178da2c3c78057cd25a288df776397eb204c18f5425005a92f05556a67b28b902b6520716c359b7901a70d7e183'
7
+ data.tar.gz: 7611023c850321bec21b71dfb626b5c308905e55a1485d5c965d1e31f2e9ced8fa09929ada98eff681a933499e5735b88243c404cf7452c2fbf0aab3fd515606
data/README.md CHANGED
@@ -251,3 +251,186 @@ client.speak('A pessemistic pest exists amidst us.', format: OmniAI::OpenAI::Spe
251
251
  ```
252
252
 
253
253
  [OpenAI API Reference `format`](https://platform.openai.com/docs/api-reference/audio/createSpeech#audio-createspeech-response_format)
254
+
255
+ ## Files
256
+
257
+ ### Finding a File
258
+
259
+ ```ruby
260
+ client.files.find(id: 'file_...')
261
+ ```
262
+
263
+ ### Listing all Files
264
+
265
+ ```ruby
266
+ client.files.all
267
+ ```
268
+
269
+ ### Uploading a File
270
+
271
+ ```ruby
272
+ file = client.files.build(io: File.open('...', 'rb'))
273
+ file.save!
274
+ ```
275
+
276
+ ### Downloading a File
277
+
278
+ ```ruby
279
+ file = client.files.find(id: 'file_...')
280
+ File.open('...', 'wb') do |output|
281
+ file.content do |chunk|
282
+ output << chunk
283
+ end
284
+ end
285
+ ```
286
+
287
+ ### Destroying a File
288
+
289
+ ```ruby
290
+ client.files.destroy!('file_...')
291
+ ```
292
+
293
+ ## Assistants
294
+
295
+ ### Finding an Assistant
296
+
297
+ ```ruby
298
+ client.assistants.find(id: 'asst_...')
299
+ ```
300
+
301
+ ### Listing all Assistants
302
+
303
+ ```ruby
304
+ client.assistants.all
305
+ ```
306
+
307
+ ### Creating an Assistant
308
+
309
+ ```ruby
310
+ assistant = client.assistants.build
311
+ assistant.name = 'Ringo'
312
+ assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
313
+ assistant.description = 'The drummer for the Beatles.'
314
+ assistant.save!
315
+ ```
316
+
317
+ ### Updating an Assistant
318
+
319
+ ```ruby
320
+ assistant = client.assistants.find(id: 'asst_...')
321
+ assistant.name = 'George'
322
+ assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
323
+ assistant.description = 'A guitarist for the Beatles.'
324
+ assistant.save!
325
+ ```
326
+
327
+ ### Destroying an Assistant
328
+
329
+ ```ruby
330
+ client.assistants.destroy!('asst_...')
331
+ ```
332
+
333
+ ## Threads
334
+
335
+ ### Finding a Thread
336
+
337
+ ```ruby
338
+ client.threads.find(id: 'thread_...')
339
+ ```
340
+
341
+ ### Creating a Thread
342
+
343
+ ```ruby
344
+ thread = client.threads.build
345
+ thread.metadata = { user: 'Ringo' }
346
+ thread.save!
347
+ ```
348
+
349
+ ### Updating a Thread
350
+
351
+ ```ruby
352
+ thread = client.threads.find(id: 'thread_...')
353
+ thread.metadata = { user: 'Ringo' }
354
+ thread.save!
355
+ ```
356
+
357
+ ### Destroying a Thread
358
+
359
+ ```ruby
360
+ client.threads.destroy!('thread_...')
361
+ ```
362
+
363
+ ### Messages
364
+
365
+ #### Finding a Message
366
+
367
+ ```ruby
368
+ thread = client.threads.find(id: 'thread_...')
369
+ message = thread.messages.find(id: 'msg_...')
370
+ message.save!
371
+ ```
372
+
373
+ #### Listing all Messages
374
+
375
+ ```ruby
376
+ thread = client.threads.find(id: 'thread_...')
377
+ thread.messages.all
378
+ ```
379
+
380
+ #### Creating a Message
381
+
382
+ ```ruby
383
+ thread = client.threads.find(id: 'thread_...')
384
+ message = thread.messages.build(role: 'user', content: 'Hello?')
385
+ message.save!
386
+ ```
387
+
388
+ #### Updating a Message
389
+
390
+ ```ruby
391
+ thread = client.threads.find(id: 'thread_...')
392
+ message = thread.messages.find(id: 'msg_...')
393
+ message.save!
394
+ ```
395
+
396
+ ### Runs
397
+
398
+ #### Finding a Run
399
+
400
+ ```ruby
401
+ thread = client.threads.find(id: 'thread_...')
402
+ run = thread.runs.find(id: 'run_...')
403
+ run.save!
404
+ ```
405
+
406
+ #### Listing all Runs
407
+
408
+ ```ruby
409
+ thread = client.threads.find(id: 'thread_...')
410
+ thread.runs.all
411
+ ```
412
+
413
+ #### Creating a Run
414
+
415
+ ```ruby
416
+ thread = client.threads.find(id: 'thread_...')
417
+ run = thread.runs.build
418
+ run.metadata = { user: 'Ringo' }
419
+ run.save!
420
+ ```
421
+
422
+ #### Updating a Run
423
+
424
+ ```ruby
425
+ thread = client.threads.find(id: 'thread_...')
426
+ run = thread.runs.find(id: 'run_...')
427
+ run.metadata = { user: 'Ringo' }
428
+ run.save!
429
+ ```
430
+
431
+ #### Cancelling a Run
432
+
433
+ ```ruby
434
+ thread = client.threads.find(id: 'thread_...')
435
+ run = thread.runs.cancel!(id: 'run_...')
436
+ ```
@@ -0,0 +1,185 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ module OpenAI
5
+ # An OpenAI assistants implementation.
6
+ class Assistant
7
+ HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
8
+
9
+ # @!attribute [rw] id
10
+ # @return [String, nil]
11
+ attr_accessor :id
12
+
13
+ # @!attribute [rw] name
14
+ # @return [String, nil]
15
+ attr_accessor :name
16
+
17
+ # @!attribute [rw] model
18
+ # @return [String, nil]
19
+ attr_accessor :model
20
+
21
+ # @!attribute [rw] description
22
+ # @return [String, nil]
23
+ attr_accessor :description
24
+
25
+ # @!attribute [rw] instructions
26
+ # @return [String, nil]
27
+ attr_accessor :instructions
28
+
29
+ # @!attribute [rw] metadata
30
+ # @return [Hash]
31
+ attr_accessor :metadata
32
+
33
+ # @!attribute [rw] deleted
34
+ # @return [Boolean, nil]
35
+ attr_accessor :deleted
36
+
37
+ # @!attribute [rw] tools
38
+ # @return [Array<Hash>, nil]
39
+ attr_accessor :tools
40
+
41
+ # @param client [OmniAI::OpenAI::Client] optional
42
+ # @param id [String]
43
+ # @param name [String]
44
+ # @param model [String]
45
+ # @param description [String, nil] optional
46
+ # @param instructions [String,nil] optional
47
+ # @param metadata [Hash] optional
48
+ def initialize(
49
+ client: Client.new,
50
+ id: nil,
51
+ name: nil,
52
+ model: nil,
53
+ description: nil,
54
+ instructions: nil,
55
+ metadata: {},
56
+ tools: []
57
+ )
58
+ @client = client
59
+ @id = id
60
+ @name = name
61
+ @model = model
62
+ @description = description
63
+ @instructions = instructions
64
+ @metadata = metadata
65
+ @tools = tools
66
+ end
67
+
68
+ # @return [String]
69
+ def inspect
70
+ "#<#{self.class.name} id=#{@id.inspect} name=#{@name.inspect} model=#{@model.inspect}>"
71
+ end
72
+
73
+ # @param id [String] required
74
+ # @param client [OmniAI::OpenAI::Client] optional
75
+ # @return [OmniAI::OpenAI::Assistant]
76
+ def self.find(id:, client: Client.new)
77
+ response = client.connection
78
+ .accept(:json)
79
+ .headers(HEADERS)
80
+ .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
81
+
82
+ raise HTTPError, response.flush unless response.status.ok?
83
+
84
+ parse(data: response.parse)
85
+ end
86
+
87
+ # @param limit [Integer] optional
88
+ # @param client [OmniAI::OpenAI::Client] optional
89
+ # @return [Array<OmniAI::OpenAI::Assistant>]
90
+ def self.all(limit: nil, client: Client.new)
91
+ response = client.connection
92
+ .accept(:json)
93
+ .headers(HEADERS)
94
+ .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants", params: { limit: }.compact)
95
+
96
+ raise HTTPError, response.flush unless response.status.ok?
97
+
98
+ response.parse['data'].map { |data| parse(data:, client:) }
99
+ end
100
+
101
+ # @param id [String] required
102
+ # @param client [OmniAI::OpenAI::Client] optional
103
+ # @return [void]
104
+ def self.destroy!(id:, client: Client.new)
105
+ response = client.connection
106
+ .accept(:json)
107
+ .headers(HEADERS)
108
+ .delete("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
109
+
110
+ raise HTTPError, response.flush unless response.status.ok?
111
+
112
+ response.parse
113
+ end
114
+
115
+ # @raise [HTTPError]
116
+ # @return [OmniAI::OpenAI::Assistant]
117
+ def save!
118
+ response = @client.connection
119
+ .accept(:json)
120
+ .headers(HEADERS)
121
+ .post("/#{OmniAI::OpenAI::Client::VERSION}/assistants#{"/#{@id}" if @id}", json: payload)
122
+ raise HTTPError, response.flush unless response.status.ok?
123
+
124
+ parse(data: response.parse)
125
+ self
126
+ end
127
+
128
+ # @raise [OmniAI::Error]
129
+ # @return [OmniAI::OpenAI::Assistant]
130
+ def destroy!
131
+ raise OmniAI::Error, 'cannot destroy a non-persisted assistant' unless @id
132
+
133
+ data = self.class.destroy!(id: @id, client: @client)
134
+ @deleted = data['deleted']
135
+ self
136
+ end
137
+
138
+ private
139
+
140
+ class << self
141
+ private
142
+
143
+ # @param data [Hash] required
144
+ # @param client [OmniAI::OpenAI::Client] required
145
+ # @return [OmniAI::OpenAI::Assistant]
146
+ def parse(data:, client: Client.new)
147
+ new(
148
+ client:,
149
+ id: data['id'],
150
+ name: data['name'],
151
+ model: data['model'],
152
+ description: data['description'],
153
+ instructions: data['instructions'],
154
+ metadata: data['metadata'],
155
+ tools: data['tools']
156
+ )
157
+ end
158
+ end
159
+
160
+ # @param data [Hash] required
161
+ # @return [OmniAI::OpenAI::Assistant]
162
+ def parse(data:)
163
+ @id = data['id']
164
+ @name = data['name']
165
+ @model = data['model']
166
+ @description = data['description']
167
+ @instructions = data['instructions']
168
+ @metadata = data['metadata']
169
+ @tools = data['tools']
170
+ end
171
+
172
+ # @return [Hash]
173
+ def payload
174
+ {
175
+ name: @name,
176
+ model: @model,
177
+ description: @description,
178
+ instructions: @instructions,
179
+ metadata: @metadata,
180
+ tools: @tools,
181
+ }.compact
182
+ end
183
+ end
184
+ end
185
+ end
@@ -0,0 +1,38 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ module OpenAI
5
+ # An OpenAI scope for establishing assistants.
6
+ class Assistants
7
+ # @param client [OmniAI::OpenAI::Client] required
8
+ def initialize(client:)
9
+ @client = client
10
+ end
11
+
12
+ # @param id [String] required
13
+ def find(id:)
14
+ Assistant.find(id:, client: @client)
15
+ end
16
+
17
+ # @param limit [Integer] optional
18
+ def all(limit: nil)
19
+ Assistant.all(limit:, client: @client)
20
+ end
21
+
22
+ # @param id [String] required
23
+ def destroy!(id:)
24
+ Assistant.destroy!(id:, client: @client)
25
+ end
26
+
27
+ # @param name [String]
28
+ # @param model [String]
29
+ # @param description [String, nil] optional
30
+ # @param instructions [String,nil] optional
31
+ # @param metadata [Hash] optional
32
+ # @param tools [Array<Hash>] optional
33
+ def build(name: nil, description: nil, instructions: nil, model: Chat::DEFAULT_MODEL, metadata: {}, tools: [])
34
+ Assistant.new(name:, model:, description:, instructions:, metadata:, tools:, client: @client)
35
+ end
36
+ end
37
+ end
38
+ end
@@ -13,6 +13,8 @@ module OmniAI
13
13
  GPT_3_5_TURBO = 'gpt-3.5-turbo'
14
14
  end
15
15
 
16
+ DEFAULT_MODEL = Model::GPT_4O
17
+
16
18
  protected
17
19
 
18
20
  # @return [Hash]
@@ -69,7 +69,7 @@ module OmniAI
69
69
  # @param stream [Proc, nil] optional
70
70
  #
71
71
  # @return [OmniAI::Chat::Completion]
72
- def chat(messages, model: Chat::Model::GPT_4O, temperature: nil, format: nil, stream: nil)
72
+ def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil)
73
73
  Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
74
74
  end
75
75
 
@@ -107,6 +107,21 @@ module OmniAI
107
107
  def speak(input, model: Speak::Model::TTS_1_HD, voice: Speak::Voice::ALLOY, speed: nil, format: nil, &)
108
108
  Speak.process!(input, model:, voice:, speed:, format:, client: self, &)
109
109
  end
110
+
111
+ # @return [OmniAI::OpenAI::Files]
112
+ def files
113
+ Files.new(client: self)
114
+ end
115
+
116
+ # @return [OmniAI::OpenAI::Assistants]
117
+ def assistants
118
+ Assistants.new(client: self)
119
+ end
120
+
121
+ # @return [OmniAI::OpenAI::Threads]
122
+ def threads
123
+ Threads.new(client: self)
124
+ end
110
125
  end
111
126
  end
112
127
  end
@@ -0,0 +1,170 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ module OpenAI
5
+ # An OpenAI file implementation.
6
+ class File
7
+ # @!attribute [rw] id
8
+ # @return [String, nil]
9
+ attr_accessor :id
10
+
11
+ # @!attribute [rw] bytes
12
+ # @return [Integer, nil]
13
+ attr_accessor :bytes
14
+
15
+ # @!attribute [rw] filename
16
+ # @return [String, nil]
17
+ attr_accessor :filename
18
+
19
+ # @!attribute [rw] purpose
20
+ # @return [String, nil]
21
+ attr_accessor :purpose
22
+
23
+ # @!attribute [rw] deleted
24
+ # @return [Boolean, nil]
25
+ attr_accessor :deleted
26
+
27
+ module Purpose
28
+ ASSISTANTS = 'assistants'
29
+ end
30
+
31
+ # @param client [OmniAI::OpenAI::Client] optional
32
+ # @param io [IO] optional
33
+ # @param id [String] optional
34
+ # @param bytes [Integer] optional
35
+ # @param filename [String] optional
36
+ # @param purpose [String] optional
37
+ def initialize(
38
+ client: Client.new,
39
+ io: nil,
40
+ id: nil,
41
+ bytes: nil,
42
+ filename: nil,
43
+ purpose: Purpose::ASSISTANTS
44
+ )
45
+ @client = client
46
+ @io = io
47
+ @id = id
48
+ @bytes = bytes
49
+ @filename = filename
50
+ @purpose = purpose
51
+ end
52
+
53
+ # @return [String]
54
+ def inspect
55
+ "#<#{self.class.name} id=#{@id.inspect} filename=#{@filename.inspect}>"
56
+ end
57
+
58
+ # @raise [OmniAI::Error]
59
+ # @yield [String]
60
+ def content(&)
61
+ raise OmniAI::Error, 'cannot fetch content without ID' unless @id
62
+
63
+ response = @client.connection
64
+ .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{@id}/content")
65
+
66
+ raise HTTPError, response.flush unless response.status.ok?
67
+
68
+ response.body.each(&)
69
+ end
70
+
71
+ # @param id [String] required
72
+ # @param client [OmniAI::OpenAI::Client] optional
73
+ # @return [OmniAI::OpenAI::File]
74
+ def self.find(id:, client: Client.new)
75
+ response = client.connection
76
+ .accept(:json)
77
+ .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{id}")
78
+
79
+ raise HTTPError, response.flush unless response.status.ok?
80
+
81
+ parse(data: response.parse)
82
+ end
83
+
84
+ # @param client [OmniAI::OpenAI::Client] optional
85
+ # @return [Array<OmniAI::OpenAI::File>]
86
+ def self.all(client: Client.new)
87
+ response = client.connection
88
+ .accept(:json)
89
+ .get("/#{OmniAI::OpenAI::Client::VERSION}/files")
90
+
91
+ raise HTTPError, response.flush unless response.status.ok?
92
+
93
+ response.parse['data'].map { |data| parse(data:, client:) }
94
+ end
95
+
96
+ # @param id [String] required
97
+ # @param client [OmniAI::OpenAI::Client] optional
98
+ # @return [Hash]
99
+ def self.destroy!(id:, client: Client.new)
100
+ response = client.connection
101
+ .accept(:json)
102
+ .delete("/#{OmniAI::OpenAI::Client::VERSION}/files/#{id}")
103
+
104
+ raise HTTPError, response.flush unless response.status.ok?
105
+
106
+ response.parse
107
+ end
108
+
109
+ # @raise [HTTPError]
110
+ # @return [OmniAI::OpenAI::File]
111
+ def save!
112
+ raise OmniAI::Error, 'cannot save a file without IO' unless @io
113
+
114
+ response = @client.connection
115
+ .accept(:json)
116
+ .post("/#{OmniAI::OpenAI::Client::VERSION}/files", form: payload)
117
+ raise HTTPError, response.flush unless response.status.ok?
118
+
119
+ parse(data: response.parse)
120
+ self
121
+ end
122
+
123
+ # @raise [OmniAI::Error]
124
+ # @return [OmniAI::OpenAI::File]
125
+ def destroy!
126
+ raise OmniAI::Error, 'cannot destroy w/o ID' unless @id
127
+
128
+ data = self.class.destroy!(id: @id, client: @client)
129
+ @deleted = data['deleted']
130
+ self
131
+ end
132
+
133
+ private
134
+
135
+ # @return [Hash]
136
+ def payload
137
+ {
138
+ file: HTTP::FormData::File.new(@io),
139
+ purpose: @purpose,
140
+ }
141
+ end
142
+
143
+ class << self
144
+ private
145
+
146
+ # @param data [Hash] required
147
+ # @param client [OmniAI::OpenAI::Client] required
148
+ # @return [OmniAI::OpenAI::File]
149
+ def parse(data:, client: Client.new)
150
+ new(
151
+ client:,
152
+ id: data['id'],
153
+ bytes: data['bytes'],
154
+ filename: data['filename'],
155
+ purpose: data['purpose']
156
+ )
157
+ end
158
+ end
159
+
160
+ # @param data [Hash] required
161
+ # @return [OmniAI::OpenAI::File]
162
+ def parse(data:)
163
+ @id = data['id']
164
+ @bytes = data['bytes']
165
+ @filename = data['filename']
166
+ @purpose = data['purpose']
167
+ end
168
+ end
169
+ end
170
+ end
@@ -0,0 +1,44 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ module OpenAI
5
+ # An OpenAI scope for establishing files.
6
+ class Files
7
+ # @param client [OmniAI::OpenAI::Client] required
8
+ def initialize(client:)
9
+ @client = client
10
+ end
11
+
12
+ # @raise [OmniAI::Error]
13
+ #
14
+ # @param id [String] required
15
+ #
16
+ # @return [OmniAI::OpenAI::File]
17
+ def find(id:)
18
+ File.find(id:, client: @client)
19
+ end
20
+
21
+ # @raise [OmniAI::Error]
22
+ #
23
+ # @return [Array<OmniAI::OpenAI::File>]
24
+ def all
25
+ File.all(client: @client)
26
+ end
27
+
28
+ # @raise [OmniAI::Error]
29
+ #
30
+ # @param id [String] required
31
+ def destroy!(id:)
32
+ File.destroy!(id:, client: @client)
33
+ end
34
+
35
+ # @param io [IO] optional
36
+ # @param purpose [String] optional
37
+ #
38
+ # @return [OmniAI::OpenAI::File]
39
+ def build(io: nil, purpose: File::Purpose::ASSISTANTS)
40
+ File.new(io:, purpose:, client: @client)
41
+ end
42
+ end
43
+ end
44
+ end