omniai-openai 2.0.1 → 2.2.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: d9aa6adb553c3bb4ee1c6463b1d5654d4e9deb01044ee3e5e3974cb9fc762e2c
-  data.tar.gz: 3d2347a9ca7a198bc96b93afc670b1283fd9f871ff73de35ea5c8d0a6c78d56b
+  metadata.gz: 41ef95afc5946c5c220a540871805830c94f04f6b6e890782a30e44b56af9033
+  data.tar.gz: cd627890f6206f48e7187bae2dc713d875fd274b0aa1b1d3643b4cb89a63912e
 SHA512:
-  metadata.gz: d671369f7115e0b63193aa47815bd4552c2a375b8719c7585f376cd01e79f5b8dcfc163d8e910e75426146c011a7e3d74d3ae1994a38b9120d2eda3ab866a9bb
-  data.tar.gz: 0124e71067ecde28627f775ff4c27781d72a223a081163404a2fa0c67d1c890455c409baf7793a629f4158c9d85bd828eb971000ca4d431a31c9f01165eb2a81
+  metadata.gz: 50fe670e52459ea9fa0365d0d8ba1c586fe020c0fe36dc2e0de99114925e56e2aa6d6c7527d09d48482cbd391900d98301b3316b67e4ccb9a53773eb62abf34c
+  data.tar.gz: b3baa12324acea0b5b1165e721a25670baf77ffba23839903c68cf6c2d09a78063e273ddae6f10a0a1c19c2ca42c7d63964a47c9c1f78306e2bf0c5cdd509aeb
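These digests cover the metadata.gz and data.tar.gz entries packed inside the .gem archive; a minimal verification sketch (the local file name is illustrative):

require "digest"

# A .gem file is a plain tar archive containing metadata.gz, data.tar.gz and
# checksums.yaml.gz; after unpacking it (e.g. `tar -xf omniai-openai-2.2.0.gem`),
# the digest of data.tar.gz should match the SHA256 value recorded above.
puts Digest::SHA256.file("data.tar.gz").hexdigest
# => "cd627890f6206f48e7187bae2dc713d875fd274b0aa1b1d3643b4cb89a63912e"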
@@ -13,6 +13,7 @@ module OmniAI
     #   completion.choice.message.content # '...'
     class Chat < OmniAI::Chat
       JSON_RESPONSE_FORMAT = { type: "json_object" }.freeze
+      DEFAULT_STREAM_OPTIONS = { include_usage: ENV.fetch("OMNIAI_STREAM_USAGE", "on").eql?("on") }.freeze
 
       module Model
         GPT_4O = "gpt-4o"
@@ -35,9 +36,10 @@ module OmniAI
         OmniAI::OpenAI.config.chat_options.merge({
           messages: @prompt.serialize,
           model: @model,
-          stream: @stream.nil? ? nil : !@stream.nil?,
-          temperature: @temperature,
           response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
+          stream: stream? || nil,
+          stream_options: (DEFAULT_STREAM_OPTIONS if stream?),
+          temperature: @temperature,
           tools: (@tools.map(&:serialize) if @tools&.any?),
         }).compact
       end
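A minimal usage sketch (assuming an OPENAI_API_KEY in the environment; the prompt is illustrative) of what the new constant changes for streamed requests:

require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# With a stream, the serialized payload now carries stream: true plus
# stream_options: { include_usage: true }, so token usage is reported.
# Setting OMNIAI_STREAM_USAGE to anything other than "on" before the class
# loads turns include_usage off (the constant is evaluated at load time).
client.chat("Tell me a joke.", model: OmniAI::OpenAI::Chat::Model::GPT_4O, stream: $stdout)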
@@ -104,7 +104,7 @@ module OmniAI
       # @param format [Symbol] :text, :srt, :vtt, or :json (default)
       #
       # @return [OmniAI::Transcribe]
-      def transcribe(path, model: Transcribe::Model::WHISPER, language: nil, prompt: nil, temperature: nil, format: nil)
+      def transcribe(path, model: Transcribe::DEFAULT_MODEL, language: nil, prompt: nil, temperature: nil, format: nil)
         Transcribe.process!(path, model:, language:, prompt:, temperature:, format:, client: self)
       end
 
@@ -125,7 +125,7 @@ module OmniAI
       # @yield [output] optional
       #
       # @return [Tempfile]
-      def speak(input, model: Speak::Model::TTS_1_HD, voice: Speak::Voice::ALLOY, speed: nil, format: nil, &)
+      def speak(input, model: Speak::DEFAULT_MODEL, voice: Speak::DEFAULT_VOICE, speed: nil, format: nil, &)
         Speak.process!(input, model:, voice:, speed:, format:, client: self, &)
       end
 
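A short sketch of the effect on callers (file paths are illustrative, and the client reads ENV["OPENAI_API_KEY"] by default): omitting model: and voice: now picks up the new DEFAULT_* constants instead of the previously hard-coded values.

require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# No model: given, so Transcribe::DEFAULT_MODEL ("whisper-1") is used.
transcription = client.transcribe("./interview.wav")
puts transcription.text

# No model: or voice: given, so Speak::DEFAULT_MODEL ("gpt-4o-mini-tts")
# and Speak::DEFAULT_VOICE ("alloy") are used; the result is a Tempfile.
tempfile = client.speak("Sally sells seashells by the seashore.")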
@@ -7,17 +7,25 @@ module OmniAI
       module Model
         TTS_1 = "tts-1"
         TTS_1_HD = "tts-1-hd"
+        GPT_4O_MINI_TTS = "gpt-4o-mini-tts"
       end
 
       module Voice
         ALLOY = "alloy" # https://platform.openai.com/docs/guides/text-to-speech/alloy
+        ASH = "ash" # https://platform.openai.com/docs/guides/text-to-speech/ash
+        BALLAD = "ballad" # https://platform.openai.com/docs/guides/text-to-speech/ballad
+        CORAL = "coral" # https://platform.openai.com/docs/guides/text-to-speech/coral
         ECHO = "echo" # https://platform.openai.com/docs/guides/text-to-speech/echo
         FABLE = "fable" # https://platform.openai.com/docs/guides/text-to-speech/fable
         NOVA = "nova" # https://platform.openai.com/docs/guides/text-to-speech/nova
         ONYX = "onyx" # https://platform.openai.com/docs/guides/text-to-speech/onyx
+        SAGE = "sage" # https://platform.openai.com/docs/guides/text-to-speech/sage
         SHIMMER = "shimmer" # https://platform.openai.com/docs/guides/text-to-speech/shimmer
       end
 
+      DEFAULT_MODEL = Model::GPT_4O_MINI_TTS
+      DEFAULT_VOICE = Voice::ALLOY
+
       protected
 
       # @return [Hash]
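As a sketch of opting into the additions explicitly (output path and input text are illustrative):

require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Explicitly pick the new gpt-4o-mini-tts model and one of the added voices,
# streaming the audio chunks straight into a file.
File.open("./welcome.mp3", "wb") do |file|
  client.speak(
    "Welcome back!",
    model: OmniAI::OpenAI::Speak::Model::GPT_4O_MINI_TTS,
    voice: OmniAI::OpenAI::Speak::Voice::CORAL
  ) do |chunk|
    file << chunk
  end
end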
@@ -6,9 +6,13 @@ module OmniAI
     class Transcribe < OmniAI::Transcribe
       module Model
         WHISPER_1 = "whisper-1"
+        GPT_4O_TRANSCRIBE = "gpt-4o-transcribe"
+        GPT_4O_MINI_TRANSCRIBE = "gpt-4o-mini-transcribe"
         WHISPER = WHISPER_1
       end
 
+      DEFAULT_MODEL = Model::WHISPER
+
       protected
 
       # @return [Hash]
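And a sketch of selecting one of the newly added transcription models by hand (the audio path is illustrative):

require "omniai/openai"

client = OmniAI::OpenAI::Client.new

# Override the whisper-1 default with the new gpt-4o-transcribe model.
transcription = client.transcribe(
  "./meeting.mp3",
  model: OmniAI::OpenAI::Transcribe::Model::GPT_4O_TRANSCRIBE
)
puts transcription.text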
@@ -2,6 +2,6 @@
 
 module OmniAI
   module OpenAI
-    VERSION = "2.0.1"
+    VERSION = "2.2.0"
   end
 end
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: omniai-openai
 version: !ruby/object:Gem::Version
-  version: 2.0.1
+  version: 2.2.0
 platform: ruby
 authors:
 - Kevin Sylvestre
 bindir: exe
 cert_chain: []
-date: 2025-03-10 00:00:00.000000000 Z
+date: 2025-03-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -29,14 +29,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '2.0'
+        version: '2.2'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '2.0'
+        version: '2.2'
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
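The runtime dependency on omniai is bumped from ~> 2.0 to ~> 2.2, so upgrading the gem pulls in the matching core version; a minimal Gemfile sketch:

# Gemfile
source "https://rubygems.org"

# omniai-openai 2.2.0 declares a runtime dependency on omniai ~> 2.2,
# so Bundler resolves both gems together.
gem "omniai-openai", "~> 2.2"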