omniai-openai 1.2.1 → 1.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: e3afb339ec2a0e69d5a268fd134aae4119e3140ccad493f22d7db43b817145a4
-   data.tar.gz: f49c3cfd25228e49555981cc93e05e6702d3ce4edde9fa07277bc68c83312361
+   metadata.gz: 32b555cd7f80abb2cc11a12986a0a8df15e5c1d9b6d31113f5e00b47174f4ec6
+   data.tar.gz: 81046a31442f6f2b1a1eb5d37723deefbb82cf7c145907ae0bb72db738e09080
  SHA512:
-   metadata.gz: 4455db3523b81301793b936528312394cb0f98b8f51ad7a49f5f6fe5811c2969dcaf679649deb0cb90bfec5fdf5838f3ba6444eea645d560ceaaaf96b9c15f5f
-   data.tar.gz: 804f70fd6067cba057aaa7dbbe7d65f5edaa389e5315fb4a107f154fbafa1139d33ad637439e8b838036767c02e4e2cfed0d69c43c0a36cb805bb4dfe21eb907
+   metadata.gz: '0939f67a3bc54221fc1141b00ab8eb1c38323178da2c3c78057cd25a288df776397eb204c18f5425005a92f05556a67b28b902b6520716c359b7901a70d7e183'
+   data.tar.gz: 7611023c850321bec21b71dfb626b5c308905e55a1485d5c965d1e31f2e9ced8fa09929ada98eff681a933499e5735b88243c404cf7452c2fbf0aab3fd515606
data/README.md CHANGED
@@ -2,7 +2,7 @@

  [![CircleCI](https://circleci.com/gh/ksylvest/omniai-openai.svg?style=svg)](https://circleci.com/gh/ksylvest/omniai-openai)

- An OpenAI implementation of the [OmniAI](https://github.com/ksylvest/omniai) APIs.
+ An OpenAI implementation of the [OmniAI](https://github.com/ksylvest/omniai) interface supporting ChatGPT, Whisper, Text-to-Voice, Voice-to-Text, and more. This library is community maintained.

  ## Installation

@@ -251,3 +251,186 @@ client.speak('A pessemistic pest exists amidst us.', format: OmniAI::OpenAI::Spe
  ```

  [OpenAI API Reference `format`](https://platform.openai.com/docs/api-reference/audio/createSpeech#audio-createspeech-response_format)
+
+ ## Files
+
+ ### Finding a File
+
+ ```ruby
+ client.files.find(id: 'file_...')
+ ```
+
+ ### Listing all Files
+
+ ```ruby
+ client.files.all
+ ```
+
+ ### Uploading a File
+
+ ```ruby
+ file = client.files.build(io: File.open('...', 'rb'))
+ file.save!
+ ```
+
+ ### Downloading a File
+
+ ```ruby
+ file = client.files.find(id: 'file_...')
+ File.open('...', 'wb') do |output|
+   file.content do |chunk|
+     output << chunk
+   end
+ end
+ ```
+
+ ### Destroying a File
+
+ ```ruby
+ client.files.destroy!('file_...')
+ ```
+
+ ## Assistants
+
+ ### Finding an Assistant
+
+ ```ruby
+ client.assistants.find(id: 'asst_...')
+ ```
+
+ ### Listing all Assistants
+
+ ```ruby
+ client.assistants.all
+ ```
+
+ ### Creating an Assistant
+
+ ```ruby
+ assistant = client.assistants.build
+ assistant.name = 'Ringo'
+ assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
+ assistant.description = 'The drummer for the Beatles.'
+ assistant.save!
+ ```
+
+ ### Updating an Assistant
+
+ ```ruby
+ assistant = client.assistants.find(id: 'asst_...')
+ assistant.name = 'George'
+ assistant.model = OmniAI::OpenAI::Chat::Model::GPT_4
+ assistant.description = 'A guitarist for the Beatles.'
+ assistant.save!
+ ```
+
+ ### Destroying an Assistant
+
+ ```ruby
+ client.assistants.destroy!('asst_...')
+ ```
+
+ ## Threads
+
+ ### Finding a Thread
+
+ ```ruby
+ client.threads.find(id: 'thread_...')
+ ```
+
+ ### Creating a Thread
+
+ ```ruby
+ thread = client.threads.build
+ thread.metadata = { user: 'Ringo' }
+ thread.save!
+ ```
+
+ ### Updating a Thread
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ thread.metadata = { user: 'Ringo' }
+ thread.save!
+ ```
+
+ ### Destroying a Thread
+
+ ```ruby
+ client.threads.destroy!('thread_...')
+ ```
+
+ ### Messages
+
+ #### Finding a Message
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ message = thread.messages.find(id: 'msg_...')
+ ```
+
+ #### Listing all Messages
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ thread.messages.all
+ ```
+
+ #### Creating a Message
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ message = thread.messages.build(role: 'user', content: 'Hello?')
+ message.save!
+ ```
+
+ #### Updating a Message
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ message = thread.messages.find(id: 'msg_...')
+ message.content = 'Hello!'
+ message.save!
+ ```
+
+ ### Runs
+
+ #### Finding a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.find(id: 'run_...')
+ ```
+
+ #### Listing all Runs
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ thread.runs.all
+ ```
+
+ #### Creating a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.build
+ run.metadata = { user: 'Ringo' }
+ run.save!
+ ```
+
+ #### Updating a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.find(id: 'run_...')
+ run.metadata = { user: 'Ringo' }
+ run.save!
+ ```
+
+ #### Cancelling a Run
+
+ ```ruby
+ thread = client.threads.find(id: 'thread_...')
+ run = thread.runs.cancel!(id: 'run_...')
+ ```
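Taken together, the new README sections describe a thread-centric workflow. A minimal end-to-end sketch using only the calls documented above; identifiers are placeholders, `run.id` is assumed to be populated after `save!`, and wiring the run to a specific assistant is not covered by this diff:

```ruby
# Create a thread, post a user message, start a run, then cancel it.
thread = client.threads.build
thread.metadata = { user: 'Ringo' }
thread.save!

message = thread.messages.build(role: 'user', content: 'Hello?')
message.save!

run = thread.runs.build
run.metadata = { user: 'Ringo' }
run.save!

thread.runs.cancel!(id: run.id)
```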
@@ -0,0 +1,185 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI assistants implementation.
+     class Assistant
+       HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
+
+       # @!attribute [rw] id
+       # @return [String, nil]
+       attr_accessor :id
+
+       # @!attribute [rw] name
+       # @return [String, nil]
+       attr_accessor :name
+
+       # @!attribute [rw] model
+       # @return [String, nil]
+       attr_accessor :model
+
+       # @!attribute [rw] description
+       # @return [String, nil]
+       attr_accessor :description
+
+       # @!attribute [rw] instructions
+       # @return [String, nil]
+       attr_accessor :instructions
+
+       # @!attribute [rw] metadata
+       # @return [Hash]
+       attr_accessor :metadata
+
+       # @!attribute [rw] deleted
+       # @return [Boolean, nil]
+       attr_accessor :deleted
+
+       # @!attribute [rw] tools
+       # @return [Array<Hash>, nil]
+       attr_accessor :tools
+
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @param id [String, nil] optional
+       # @param name [String, nil] optional
+       # @param model [String, nil] optional
+       # @param description [String, nil] optional
+       # @param instructions [String, nil] optional
+       # @param metadata [Hash] optional
+       # @param tools [Array<Hash>] optional
+       def initialize(
+         client: Client.new,
+         id: nil,
+         name: nil,
+         model: nil,
+         description: nil,
+         instructions: nil,
+         metadata: {},
+         tools: []
+       )
+         @client = client
+         @id = id
+         @name = name
+         @model = model
+         @description = description
+         @instructions = instructions
+         @metadata = metadata
+         @tools = tools
+       end
+
+       # @return [String]
+       def inspect
+         "#<#{self.class.name} id=#{@id.inspect} name=#{@name.inspect} model=#{@model.inspect}>"
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [OmniAI::OpenAI::Assistant]
+       def self.find(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse, client:)
+       end
+
+       # @param limit [Integer] optional
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Array<OmniAI::OpenAI::Assistant>]
+       def self.all(limit: nil, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/assistants", params: { limit: }.compact)
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse['data'].map { |data| parse(data:, client:) }
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Hash]
+       def self.destroy!(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .delete("/#{OmniAI::OpenAI::Client::VERSION}/assistants/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse
+       end
+
+       # @raise [HTTPError]
+       # @return [OmniAI::OpenAI::Assistant]
+       def save!
+         response = @client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .post("/#{OmniAI::OpenAI::Client::VERSION}/assistants#{"/#{@id}" if @id}", json: payload)
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+         self
+       end
+
+       # @raise [OmniAI::Error]
+       # @return [OmniAI::OpenAI::Assistant]
+       def destroy!
+         raise OmniAI::Error, 'cannot destroy a non-persisted assistant' unless @id
+
+         data = self.class.destroy!(id: @id, client: @client)
+         @deleted = data['deleted']
+         self
+       end
+
+       private
+
+       class << self
+         private
+
+         # @param data [Hash] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Assistant]
+         def parse(data:, client: Client.new)
+           new(
+             client:,
+             id: data['id'],
+             name: data['name'],
+             model: data['model'],
+             description: data['description'],
+             instructions: data['instructions'],
+             metadata: data['metadata'],
+             tools: data['tools']
+           )
+         end
+       end
+
+       # @param data [Hash] required
+       # @return [OmniAI::OpenAI::Assistant]
+       def parse(data:)
+         @id = data['id']
+         @name = data['name']
+         @model = data['model']
+         @description = data['description']
+         @instructions = data['instructions']
+         @metadata = data['metadata']
+         @tools = data['tools']
+       end
+
+       # @return [Hash]
+       def payload
+         {
+           name: @name,
+           model: @model,
+           description: @description,
+           instructions: @instructions,
+           metadata: @metadata,
+           tools: @tools,
+         }.compact
+       end
+     end
+   end
+ end
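For reference, a small usage sketch of the class methods and accessors defined in this file; it assumes `OPENAI_API_KEY` is set (so `Client.new` does not raise) and uses placeholder values:

```ruby
client = OmniAI::OpenAI::Client.new

# Create and persist an assistant, then fetch, update, and delete it by id.
assistant = OmniAI::OpenAI::Assistant.new(name: 'Ringo', model: OmniAI::OpenAI::Chat::Model::GPT_4O, client: client)
assistant.save!                 # POST /v1/assistants (create, since no id yet)

found = OmniAI::OpenAI::Assistant.find(id: assistant.id, client: client)
found.instructions = 'Answer like a drummer.'
found.save!                     # POST /v1/assistants/{id} (update, since id is present)

found.destroy!                  # DELETE /v1/assistants/{id}
found.deleted                   # => true
```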
@@ -0,0 +1,38 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI scope for managing assistants.
+     class Assistants
+       # @param client [OmniAI::OpenAI::Client] required
+       def initialize(client:)
+         @client = client
+       end
+
+       # @param id [String] required
+       def find(id:)
+         Assistant.find(id:, client: @client)
+       end
+
+       # @param limit [Integer] optional
+       def all(limit: nil)
+         Assistant.all(limit:, client: @client)
+       end
+
+       # @param id [String] required
+       def destroy!(id:)
+         Assistant.destroy!(id:, client: @client)
+       end
+
+       # @param name [String, nil] optional
+       # @param model [String] optional
+       # @param description [String, nil] optional
+       # @param instructions [String, nil] optional
+       # @param metadata [Hash] optional
+       # @param tools [Array<Hash>] optional
+       def build(name: nil, description: nil, instructions: nil, model: Chat::DEFAULT_MODEL, metadata: {}, tools: [])
+         Assistant.new(name:, model:, description:, instructions:, metadata:, tools:, client: @client)
+       end
+     end
+   end
+ end
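This scope is a thin wrapper that forwards to `Assistant` with the wrapped client, and it is what `client.assistants` returns (see the `Client` changes below). A brief sketch under the same assumptions as above, with placeholder values:

```ruby
client = OmniAI::OpenAI::Client.new
assistants = OmniAI::OpenAI::Assistants.new(client: client) # equivalent to client.assistants

assistants.all(limit: 5)                                    # Assistant.all(limit: 5, client: client)
assistant = assistants.build(name: 'George', model: OmniAI::OpenAI::Chat::Model::GPT_4O)
assistant.save!
assistants.destroy!(id: assistant.id)
```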
@@ -13,6 +13,8 @@ module OmniAI
          GPT_3_5_TURBO = 'gpt-3.5-turbo'
        end

+       DEFAULT_MODEL = Model::GPT_4O
+
        protected

        # @return [Hash]
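With the default model extracted into `DEFAULT_MODEL`, callers can rely on it implicitly or name a model explicitly. A sketch assuming the existing OmniAI chat interface that accepts a plain string prompt; the prompt is a placeholder:

```ruby
client = OmniAI::OpenAI::Client.new

client.chat('Tell me a joke.')                                                    # uses Chat::DEFAULT_MODEL (Model::GPT_4O)
client.chat('Tell me a joke.', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO) # explicit override
```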
@@ -22,16 +22,19 @@ module OmniAI
      class Client < OmniAI::Client
        VERSION = 'v1'

-       # @param api_key [String] optional - defaults to `OmniAI::OpenAI.config.api_key`
-       # @param project_id [String] optional - defaults to `OmniAI::OpenAI.config.project`
-       # @param organization_id [String] optional - defaults to `OmniAI::OpenAI.config.organization`
-       # @param logger [Logger] optional - defaults to `OmniAI::OpenAI.config.logger`
+       # @param api_key [String, nil] optional - defaults to `OmniAI::OpenAI.config.api_key`
+       # @param host [String] optional - defaults to `OmniAI::OpenAI.config.host`
+       # @param project [String, nil] optional - defaults to `OmniAI::OpenAI.config.project`
+       # @param organization [String, nil] optional - defaults to `OmniAI::OpenAI.config.organization`
+       # @param logger [Logger, nil] optional - defaults to `OmniAI::OpenAI.config.logger`
+       # @param timeout [Integer, nil] optional - defaults to `OmniAI::OpenAI.config.timeout`
        def initialize(
          api_key: OmniAI::OpenAI.config.api_key,
+         host: OmniAI::OpenAI.config.host,
          organization: OmniAI::OpenAI.config.organization,
          project: OmniAI::OpenAI.config.project,
          logger: OmniAI::OpenAI.config.logger,
-         host: OmniAI::OpenAI.config.host
+         timeout: OmniAI::OpenAI.config.timeout
        )
          if api_key.nil? && host.eql?(Config::DEFAULT_HOST)
            raise(
@@ -40,7 +43,7 @@ module OmniAI
            )
          end

-         super(api_key:, host:, logger:)
+         super(api_key:, host:, logger:, timeout:)

          @organization = organization
          @project = project
@@ -49,7 +52,7 @@ module OmniAI
        # @return [HTTP::Client]
        def connection
          @connection ||= begin
-           http = HTTP.persistent(@host)
+           http = super
            http = http.auth("Bearer #{@api_key}") if @api_key
            http = http.headers('OpenAI-Organization': @organization) if @organization
            http = http.headers('OpenAI-Project': @project) if @project
@@ -66,7 +69,7 @@ module OmniAI
        # @param stream [Proc, nil] optional
        #
        # @return [OmniAI::Chat::Completion]
-       def chat(messages, model: Chat::Model::GPT_4O, temperature: nil, format: nil, stream: nil)
+       def chat(messages, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil)
          Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
        end

@@ -104,6 +107,21 @@ module OmniAI
        def speak(input, model: Speak::Model::TTS_1_HD, voice: Speak::Voice::ALLOY, speed: nil, format: nil, &)
          Speak.process!(input, model:, voice:, speed:, format:, client: self, &)
        end
+
+       # @return [OmniAI::OpenAI::Files]
+       def files
+         Files.new(client: self)
+       end
+
+       # @return [OmniAI::OpenAI::Assistants]
+       def assistants
+         Assistants.new(client: self)
+       end
+
+       # @return [OmniAI::OpenAI::Threads]
+       def threads
+         Threads.new(client: self)
+       end
      end
    end
  end
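The client now accepts `host` and `timeout` alongside the existing options and exposes the new scopes. A construction sketch based on the signature above; values are placeholders, and a non-default `host` allows the API key to be omitted:

```ruby
client = OmniAI::OpenAI::Client.new(
  api_key: ENV.fetch('OPENAI_API_KEY', nil),
  host: 'http://localhost:8080', # e.g. an OpenAI-compatible proxy
  timeout: 15                    # forwarded to OmniAI::Client via super
)

client.files      # => OmniAI::OpenAI::Files
client.assistants # => OmniAI::OpenAI::Assistants
client.threads    # => OmniAI::OpenAI::Threads
```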
@@ -2,21 +2,36 @@

  module OmniAI
    module OpenAI
-     # Configuration for managing the OpenAI `api_key` / `organization` / `project` / `logger`.
+     # Configuration for OpenAI.
      class Config < OmniAI::Config
-       attr_accessor :organization, :project, :chat_options, :transcribe_options, :speak_options
-
        DEFAULT_HOST = 'https://api.openai.com'

-       def initialize
-         super
-         @api_key = ENV.fetch('OPENAI_API_KEY', nil)
-         @organization = ENV.fetch('OPENAI_ORGANIZATION', nil)
-         @project = ENV.fetch('OPENAI_PROJECT', nil)
-         @host = ENV.fetch('OPENAI_HOST', DEFAULT_HOST)
-         @chat_options = {}
-         @transcribe_options = {}
-         @speak_options = {}
+       # @!attribute [rw] organization
+       # @return [String, nil] passed as `OpenAI-Organization` if specified
+       attr_accessor :organization
+
+       # @!attribute [rw] project
+       # @return [String, nil] passed as `OpenAI-Project` if specified
+       attr_accessor :project
+
+       # @param api_key [String, nil] optional - defaults to `ENV['OPENAI_API_KEY']`
+       # @param host [String, nil] optional - defaults to `ENV['OPENAI_HOST']` w/ fallback to `DEFAULT_HOST`
+       # @param organization [String, nil] optional - defaults to `ENV['OPENAI_ORGANIZATION']`
+       # @param project [String, nil] optional - defaults to `ENV['OPENAI_PROJECT']`
+       # @param logger [Logger, nil] optional
+       # @param timeout [Integer, Hash, nil] optional
+       def initialize(
+         api_key: ENV.fetch('OPENAI_API_KEY', nil),
+         host: ENV.fetch('OPENAI_HOST', DEFAULT_HOST),
+         organization: ENV.fetch('OPENAI_ORGANIZATION', nil),
+         project: ENV.fetch('OPENAI_PROJECT', nil),
+         logger: nil,
+         timeout: nil
+       )
+         super(api_key:, host:, logger:, timeout:)
+
+         @organization = organization
+         @project = project
        end
      end
    end
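The rewritten `Config#initialize` replaces direct instance-variable assignment with keyword arguments that default to environment variables. A sketch of constructing it by hand with placeholder values:

```ruby
config = OmniAI::OpenAI::Config.new(
  api_key: 'sk-...',       # falls back to ENV['OPENAI_API_KEY'] when omitted
  organization: 'org-...', # sent as the OpenAI-Organization header by Client#connection
  project: 'proj-...',     # sent as the OpenAI-Project header by Client#connection
  timeout: 30
)

config.host # => 'https://api.openai.com' unless ENV['OPENAI_HOST'] is set
```

Note that the former `chat_options` / `transcribe_options` / `speak_options` accessors are no longer defined in this class.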