omniai-openai 1.3.0 → 1.3.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 8a95f906f1bf54382e8ace7ae6b1622ad22d2502a23c4804038b1ca66ff43086
- data.tar.gz: 6694a63f7f76e2d41220a18e2ea852a19e956a8258761b71e9a0ec747197c9a9
+ metadata.gz: 7cb12222b6c13b368f2f25d55c66ff57e98114830a32bdd54ee82947aaf38674
+ data.tar.gz: 7600584957b977be573321a25a995f720fde9e9112fdd3e1b8676ce3d75461e0
  SHA512:
- metadata.gz: c19cd98191bd1826f24af180bcf87e96670fc162c6555bf368e4eda9305cdf5fa465585c74764d55481181752f563a581b09e3b1583220df70d9b227861a51d8
- data.tar.gz: b6eb2fdeb33dcf4dda02f2b608e13ae3cc29c687b0c9eb31bdecaf72d07548b0201c68efe3e40cd754b368d2b2f84813e5eeb207878dbc6355fe4a67a8308f3e
+ metadata.gz: 75f8bdbfff09e789058e15bf962efcbbe0146b3e522e5efc701ca1303ef734b68f6d19094502c2e8fd89f5db213687b6c5001967e3dfd1b46eaab060b4c4e4c0
+ data.tar.gz: 5b206397432e32277bda4767bad41707699607cf93e9a396570cbe78ab668f42b5c34c75f99d26d93cccd042b9c08359a899a4f375fabae7fc6c55a587515550
data/README.md CHANGED
@@ -251,3 +251,204 @@ client.speak('A pessemistic pest exists amidst us.', format: OmniAI::OpenAI::Spe
  ```

  [OpenAI API Reference `format`](https://platform.openai.com/docs/api-reference/audio/createSpeech#audio-createspeech-response_format)
+
+ ## Files
+
+ ### Finding a File
+
+ ```ruby
+ client.files.find(id: 'file_...')
+ ```
+
+ ### Listing all Files
+
+ ```ruby
+ client.files.all
+ ```
+
+ ### Uploading a File
+
+ #### Using a File
+
+ ```ruby
+ file = client.files.build(io: File.open('demo.pdf', 'rb'))
+ file.save!
+ ```
+
+ #### Using a Path
+
+ ```ruby
+ file = client.files.build(io: 'demo.pdf')
+ file.save!
+ ```
+
+ ### Downloading a File
+
+ ```ruby
+ file = client.files.find(id: 'file_...')
+ File.open('...', 'wb') do |output|
+   file.content do |chunk|
+     output << chunk
+   end
+ end
+ ```
+
+ ### Destroying a File
+
+ ```ruby
+ client.files.destroy!(id: 'file_...')
+ ```
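+
+ Putting the calls above together, a minimal end-to-end sketch (paths and IDs are placeholders):
+
+ ```ruby
+ # Upload a local file, stream its contents back out, then remove it.
+ file = client.files.build(io: File.open('demo.pdf', 'rb'))
+ file.save!
+
+ File.open('copy.pdf', 'wb') do |output|
+   file.content { |chunk| output << chunk }
+ end
+
+ file.destroy!
+ ```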
+
+ ## Assistants
+
+ ### Finding an Assistant
+
+ ```ruby
+ client.assistants.find(id: 'asst_...')
+ ```
+
+ ### Listing all Assistants
+
+ ```ruby
+ client.assistants.all
+ ```
+
+ ### Creating an Assistant
+
+ ```ruby
+ assistant = client.assistants.build
+ assistant.name = 'Ringo'
+ assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
+ assistant.description = 'The drummer for the Beatles.'
+ assistant.save!
+ ```
+
+ ### Updating an Assistant
+
+ ```ruby
+ assistant = client.assistants.find(id: 'asst_...')
+ assistant.name = 'George'
+ assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
+ assistant.description = 'A guitarist for the Beatles.'
+ assistant.save!
+ ```
+
+ ### Destroying an Assistant
+
+ ```ruby
+ client.assistants.destroy!(id: 'asst_...')
+ ```
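+
+ An assistant can also be destroyed through the instance itself; a sketch based on the `Assistant#destroy!` method added in this release:
+
+ ```ruby
+ assistant = client.assistants.find(id: 'asst_...')
+ assistant.destroy!
+ assistant.deleted # true once the API confirms the deletion
+ ```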
+
+ ## Threads
+
+ ### Finding a Thread
+
+ ```ruby
+ client.threads.find(id: 'thread_...')
+ ```
+
+ ### Creating a Thread
+
+ ```ruby
+ thread = client.threads.build
+ thread.metadata = { user: 'Ringo' }
+ thread.save!
+ ```
+
+ ### Updating a Thread
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ thread.metadata = { user: 'Ringo' }
+ thread.save!
+ ```
+
+ ### Destroying a Thread
+
+ ```ruby
+ client.threads.destroy!(id: 'thread_...')
+ ```
+
+ ### Messages
+
+ #### Finding a Message
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ message = thread.messages.find(id: 'msg_...')
+ ```
+
+ #### Listing all Messages
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ thread.messages.all
+ ```
+
+ #### Creating a Message
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ message = thread.messages.build(role: 'user', content: 'Hello?')
+ message.save!
+ ```
+
+ #### Updating a Message
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ message = thread.messages.find(id: 'msg_...')
+ message.content = 'Goodbye?'
+ message.save!
+ ```
+
+ ### Runs
+
+ #### Finding a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.find(id: 'run_...')
+ ```
+
+ #### Listing all Runs
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ thread.runs.all
+ ```
+
+ #### Creating a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.build
+ run.metadata = { user: 'Ringo' }
+ run.save!
+ ```
+
+ #### Updating a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.find(id: 'run_...')
+ run.metadata = { user: 'Ringo' }
+ run.save!
+ ```
+
+ #### Polling a Run
+
+ ```ruby
+ run.terminated? # false
+ run.poll!
+ run.terminated? # true
+ run.status # 'cancelled' / 'failed' / 'completed' / 'expired'
+ ```
+
+ #### Cancelling a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.cancel!(id: 'run_...')
+ ```
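+
+ Putting it together, a minimal sketch chaining the calls above (IDs are placeholders; run configuration such as which assistant to use is omitted here):
+
+ ```ruby
+ # Create a thread, post a message, start a run, then wait for it to finish.
+ thread = client.threads.build
+ thread.metadata = { user: 'Ringo' }
+ thread.save!
+
+ message = thread.messages.build(role: 'user', content: 'Hello?')
+ message.save!
+
+ run = thread.runs.build
+ run.save!
+
+ run.poll!
+ run.terminated? # true once the run has finished
+ run.status # e.g. 'completed'
+
+ thread.messages.all # includes any messages generated by the run
+ ```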
data/lib/omniai/openai/assistant.rb ADDED
@@ -0,0 +1,185 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI assistants implementation.
+     class Assistant
+       HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
+
+       # @!attribute [rw] id
+       # @return [String, nil]
+       attr_accessor :id
+
+       # @!attribute [rw] name
+       # @return [String, nil]
+       attr_accessor :name
+
+       # @!attribute [rw] model
+       # @return [String, nil]
+       attr_accessor :model
+
+       # @!attribute [rw] description
+       # @return [String, nil]
+       attr_accessor :description
+
+       # @!attribute [rw] instructions
+       # @return [String, nil]
+       attr_accessor :instructions
+
+       # @!attribute [rw] metadata
+       # @return [Hash]
+       attr_accessor :metadata
+
+       # @!attribute [rw] deleted
+       # @return [Boolean, nil]
+       attr_accessor :deleted
+
+       # @!attribute [rw] tools
+       # @return [Array<Hash>, nil]
+       attr_accessor :tools
+
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @param id [String]
+       # @param name [String]
+       # @param model [String]
+       # @param description [String, nil] optional
+       # @param instructions [String, nil] optional
+       # @param metadata [Hash] optional
+       # @param tools [Array<Hash>] optional
+       def initialize(
+         client: Client.new,
+         id: nil,
+         name: nil,
+         model: Chat::DEFAULT_MODEL,
+         description: nil,
+         instructions: nil,
+         metadata: {},
+         tools: []
+       )
+         @client = client
+         @id = id
+         @name = name
+         @model = model
+         @description = description
+         @instructions = instructions
+         @metadata = metadata
+         @tools = tools
+       end
+
+       # @return [String]
+       def inspect
+         "#<#{self.class.name} id=#{@id.inspect} name=#{@name.inspect} model=#{@model.inspect}>"
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [OmniAI::OpenAI::Assistant]
+       def self.find(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+       end
+
+       # @param limit [Integer] optional
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Array<OmniAI::OpenAI::Assistant>]
+       def self.all(limit: nil, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants", params: { limit: }.compact)
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse['data'].map { |data| parse(data:, client:) }
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Hash]
+       def self.destroy!(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .delete("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse
+       end
+
+       # @raise [HTTPError]
+       # @return [OmniAI::OpenAI::Assistant]
+       def save!
+         response = @client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .post("/#{OmniAI::OpenAI::Client::VERSION}/assistants#{"/#{@id}" if @id}", json: payload)
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+         self
+       end
+
+       # @raise [OmniAI::Error]
+       # @return [OmniAI::OpenAI::Assistant]
+       def destroy!
+         raise OmniAI::Error, 'cannot destroy a non-persisted assistant' unless @id
+
+         data = self.class.destroy!(id: @id, client: @client)
+         @deleted = data['deleted']
+         self
+       end
+
+       private
+
+       class << self
+         private
+
+         # @param data [Hash] required
+         # @param client [OmniAI::OpenAI::Client] required
+         # @return [OmniAI::OpenAI::Assistant]
+         def parse(data:, client: Client.new)
+           new(
+             client:,
+             id: data['id'],
+             name: data['name'],
+             model: data['model'],
+             description: data['description'],
+             instructions: data['instructions'],
+             metadata: data['metadata'],
+             tools: data['tools']
+           )
+         end
+       end
+
+       # @param data [Hash] required
+       # @return [void]
+       def parse(data:)
+         @id = data['id']
+         @name = data['name']
+         @model = data['model']
+         @description = data['description']
+         @instructions = data['instructions']
+         @metadata = data['metadata']
+         @tools = data['tools']
+       end
+
+       # @return [Hash]
+       def payload
+         {
+           name: @name,
+           model: @model,
+           description: @description,
+           instructions: @instructions,
+           metadata: @metadata,
+           tools: @tools,
+         }.compact
+       end
+     end
+   end
+ end
data/lib/omniai/openai/assistants.rb ADDED
@@ -0,0 +1,38 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI scope for establishing assistants.
+     class Assistants
+       # @param client [OmniAI::OpenAI::Client] required
+       def initialize(client:)
+         @client = client
+       end
+
+       # @param id [String] required
+       def find(id:)
+         Assistant.find(id:, client: @client)
+       end
+
+       # @param limit [Integer] optional
+       def all(limit: nil)
+         Assistant.all(limit:, client: @client)
+       end
+
+       # @param id [String] required
+       def destroy!(id:)
+         Assistant.destroy!(id:, client: @client)
+       end
+
+       # @param name [String] optional
+       # @param model [String] optional
+       # @param description [String, nil] optional
+       # @param instructions [String, nil] optional
+       # @param metadata [Hash] optional
+       # @param tools [Array<Hash>] optional
+       def build(name: nil, description: nil, instructions: nil, model: Chat::DEFAULT_MODEL, metadata: {}, tools: [])
+         Assistant.new(name:, model:, description:, instructions:, metadata:, tools:, client: @client)
+       end
+     end
+   end
+ end
data/lib/omniai/openai/chat.rb CHANGED
@@ -13,6 +13,8 @@ module OmniAI
          GPT_3_5_TURBO = 'gpt-3.5-turbo'
        end
 
+       DEFAULT_MODEL = Model::GPT_4O
+
        protected
 
        # @return [Hash]
data/lib/omniai/openai/client.rb CHANGED
@@ -69,7 +69,7 @@ module OmniAI
        # @param stream [Proc, nil] optional
        #
        # @return [OmniAI::Chat::Completion]
-       def chat(messages, model: Chat::Model::GPT_4O, temperature: nil, format: nil, stream: nil)
+       def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil)
          Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
        end
 
@@ -107,6 +107,21 @@ module OmniAI
        def speak(input, model: Speak::Model::TTS_1_HD, voice: Speak::Voice::ALLOY, speed: nil, format: nil, &)
          Speak.process!(input, model:, voice:, speed:, format:, client: self, &)
        end
+
+       # @return [OmniAI::OpenAI::Files]
+       def files
+         Files.new(client: self)
+       end
+
+       # @return [OmniAI::OpenAI::Assistants]
+       def assistants
+         Assistants.new(client: self)
+       end
+
+       # @return [OmniAI::OpenAI::Threads]
+       def threads
+         Threads.new(client: self)
+       end
      end
    end
  end
data/lib/omniai/openai/file.rb ADDED
@@ -0,0 +1,170 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI file implementation.
+     class File
+       # @!attribute [rw] id
+       # @return [String, nil]
+       attr_accessor :id
+
+       # @!attribute [rw] bytes
+       # @return [Integer, nil]
+       attr_accessor :bytes
+
+       # @!attribute [rw] filename
+       # @return [String, nil]
+       attr_accessor :filename
+
+       # @!attribute [rw] purpose
+       # @return [String, nil]
+       attr_accessor :purpose
+
+       # @!attribute [rw] deleted
+       # @return [Boolean, nil]
+       attr_accessor :deleted
+
+       module Purpose
+         ASSISTANTS = 'assistants'
+       end
+
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @param io [IO] optional
+       # @param id [String] optional
+       # @param bytes [Integer] optional
+       # @param filename [String] optional
+       # @param purpose [String] optional
+       def initialize(
+         client: Client.new,
+         io: nil,
+         id: nil,
+         bytes: nil,
+         filename: nil,
+         purpose: Purpose::ASSISTANTS
+       )
+         @client = client
+         @io = io
+         @id = id
+         @bytes = bytes
+         @filename = filename
+         @purpose = purpose
+       end
+
+       # @return [String]
+       def inspect
+         "#<#{self.class.name} id=#{@id.inspect} filename=#{@filename.inspect}>"
+       end
+
+       # @raise [OmniAI::Error]
+       # @yield [String]
+       def content(&)
+         raise OmniAI::Error, 'cannot fetch content without ID' unless @id
+
+         response = @client.connection
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{@id}/content")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.body.each(&)
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [OmniAI::OpenAI::File]
+       def self.find(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+       end
+
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Array<OmniAI::OpenAI::File>]
+       def self.all(client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/files")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse['data'].map { |data| parse(data:, client:) }
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Hash]
+       def self.destroy!(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .delete("/#{OmniAI::OpenAI::Client::VERSION}/files/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse
+       end
+
+       # @raise [HTTPError]
+       # @return [OmniAI::OpenAI::File]
+       def save!
+         raise OmniAI::Error, 'cannot save a file without IO' unless @io
+
+         response = @client.connection
+           .accept(:json)
+           .post("/#{OmniAI::OpenAI::Client::VERSION}/files", form: payload)
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+         self
+       end
+
+       # @raise [OmniAI::Error]
+       # @return [OmniAI::OpenAI::File]
+       def destroy!
+         raise OmniAI::Error, 'cannot destroy w/o ID' unless @id
+
+         data = self.class.destroy!(id: @id, client: @client)
+         @deleted = data['deleted']
+         self
+       end
+
+       private
+
+       # @return [Hash]
+       def payload
+         {
+           file: HTTP::FormData::File.new(@io),
+           purpose: @purpose,
+         }
+       end
+
+       class << self
+         private
+
+         # @param data [Hash] required
+         # @param client [OmniAI::OpenAI::Client] required
+         # @return [OmniAI::OpenAI::File]
+         def parse(data:, client: Client.new)
+           new(
+             client:,
+             id: data['id'],
+             bytes: data['bytes'],
+             filename: data['filename'],
+             purpose: data['purpose']
+           )
+         end
+       end
+
+       # @param data [Hash] required
+       # @return [void]
+       def parse(data:)
+         @id = data['id']
+         @bytes = data['bytes']
+         @filename = data['filename']
+         @purpose = data['purpose']
+       end
+     end
+   end
+ end
data/lib/omniai/openai/files.rb ADDED
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI scope for establishing files.
+     class Files
+       # @param client [OmniAI::OpenAI::Client] required
+       def initialize(client:)
+         @client = client
+       end
+
+       # @raise [OmniAI::Error]
+       #
+       # @param id [String] required
+       #
+       # @return [OmniAI::OpenAI::File]
+       def find(id:)
+         File.find(id:, client: @client)
+       end
+
+       # @raise [OmniAI::Error]
+       #
+       # @return [Array<OmniAI::OpenAI::File>]
+       def all
+         File.all(client: @client)
+       end
+
+       # @raise [OmniAI::Error]
+       #
+       # @param id [String] required
+       def destroy!(id:)
+         File.destroy!(id:, client: @client)
+       end
+
+       # @param io [IO] optional
+       # @param purpose [String] optional
+       #
+       # @return [OmniAI::OpenAI::File]
+       def build(io: nil, purpose: File::Purpose::ASSISTANTS)
+         File.new(io:, purpose:, client: @client)
+       end
+     end
+   end
+ end