omniai 1.1.2 → 1.1.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: facf65abfc03bc2fedc0572bbdebe5baca776da08b3eba9f4bde4b082bf037a7
-  data.tar.gz: 265613b4ab126f34d68d079241d8f4d2eae96c3e5c4f083aa9ff2761003a5678
+  metadata.gz: 62558072fec9583fe2df59e34498e31b449fc551a7d262a21ad45c1aa8bef721
+  data.tar.gz: 2b410f4fdccea4fdae2d839c60f703cbf311c3ad9b43f4f87b862f719c5e2ca8
 SHA512:
-  metadata.gz: a689d6591eabb97d47fea58a73c2b7fcc133c62aea51180bffe846976086b3819099708757cae85e20c5db3e38964b9dc7c0dc1cbc9fa8b076bc322a56dec4cb
-  data.tar.gz: bc80184100a4ca2888fb8a0ee3b93ac8d9d301e875e541e692b9a08564eb85fa59d58fcd88e35c4d72142624621ef73174b276c8da95ae53bfcabd1fc1ae776d
+  metadata.gz: 5d975d4d7767392a20cab800bbcc17c988ae18adf0d6c31c816e99ab6b33cbeb9fac319d6e15bdbd425da3e00d2843c28004223f24c3471c1e2c60c55428fba4
+  data.tar.gz: e9967f2302492db51c6af31c101a908bf19392d5eefe87829a2d146431ea6d17b6d3516493a92beeae780b75cfb0e61a7214ddfd52817df5199f6d1e9b36642e
data/README.md CHANGED
@@ -1,6 +1,8 @@
 # OmniAI
 
-OmniAI is a flexible AI library that standardizes the APIs for multipe AI providers:
+[![CircleCI](https://circleci.com/gh/ksylvest/omniai.svg?style=svg)](https://circleci.com/gh/ksylvest/omniai)
+
+OmniAI is a flexible AI library that standardizes the APIs for multiple AI providers:
 
 - [OmniAI::Anthropic](https://github.com/ksylvest/omniai-anthropic)
 - [OmniAI::Google](https://github.com/ksylvest/omniai-google)
@@ -29,7 +31,7 @@ OmniAI implements APIs for a number of popular clients by default. A client can
 #### [OmniAI::Anthropic](https://github.com/ksylvest/omniai-anthropic)
 
 ```ruby
-require 'omniai-anthropic'
+require 'omniai/anthropic'
 
 client = OmniAI::Anthropic::Client.new
 ```
@@ -37,7 +39,7 @@ client = OmniAI::Anthropic::Client.new
 #### [OmniAI::Google](https://github.com/ksylvest/omniai-google)
 
 ```ruby
-require 'omniai-google'
+require 'omniai/google'
 
 client = OmniAI::Google::Client.new
 ```
@@ -45,7 +47,7 @@ client = OmniAI::Google::Client.new
 #### [OmniAI::Mistral](https://github.com/ksylvest/omniai-mistral)
 
 ```ruby
-require 'omniai-mistral'
+require 'omniai/mistral'
 
 client = OmniAI::Mistral::Client.new
 ```
@@ -53,7 +55,7 @@ client = OmniAI::Mistral::Client.new
 #### [OmniAI::OpenAI](https://github.com/ksylvest/omniai-openai)
 
 ```ruby
-require 'omniai-openai'
+require 'omniai/openai'
 
 client = OmniAI::OpenAI::Client.new
 ```
@@ -62,19 +64,19 @@ client = OmniAI::OpenAI::Client.new
 
 Clients that support chat (e.g. Anthropic w/ "Claude", Google w/ "Gemini", Mistral w/ "LeChat", OpenAI w/ "ChatGPT", etc) generate completions using the following calls:
 
-#### w/ a Simple Prompt
+#### Completions using Single Message
 
 ```ruby
 completion = client.chat('Tell me a joke.')
 completion.choice.message.content # '...'
 ```
 
-#### w/ a Collection of Messages
+#### Completions using Multiple Messages
 
 ```ruby
 messages = [
   {
-    role: 'system',
+    role: OmniAI::Chat::Role::SYSTEM,
     content: 'You are a helpful assistant with an expertise in geography.',
   },
   'What is the capital of Canada?'
@@ -83,27 +85,7 @@ completion = client.chat(messages, model: '...', temperature: 0.7, format: :json
 completion.choice.message.content # '...'
 ```
 
-#### w/ a Collection of Files
-
-```ruby
-
-image_a_url = "https://images.unsplash.com/photo-1517849845537-4d257902454a?w=800&h=800&format=jpeg&fit=crop"
-image_b_url = "https://images.unsplash.com/photo-1537151625747-768eb6cf92b2?q=80&w=1024&h=1024&format=jpeg"
-
-message = {
-  role: 'user',
-  content: [
-    OmniAI::Chat::Content.new('What are in these images and are they different?'),
-    OmniAI::Chat::Content.new(image_a_url, type: :image),
-    OmniAI::Chat::Content.new(image_b_url, type: :image),
-  ]
-}
-
-completion = client.chat(message)
-completion.choice.message.content # '...'
-```
-
-#### Streaming
+#### Completions using Real-Time Streaming
 
 ```ruby
 stream = proc do |chunk|
@@ -116,7 +98,17 @@ client.chat('Tell me a joke.', stream:)
 
 Clients that support chat (e.g. OpenAI w/ "Whisper", etc) convert recordings to text via the following calls:
 
+#### Transcriptions with Path
+
+```ruby
+transcription = client.transcribe("example.ogg")
+transcription.text # '...'
+```
+
+#### Transcriptions with Files
+
 ```ruby
-transcription = client.transcribe(file.path)
+file = File.open("example.ogg", "rb")
+transcription = client.transcribe(file)
 transcription.text # '...'
 ```
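The streaming hunk above cuts off after the opening of the proc. A minimal sketch of the complete call pattern the README describes, assuming the chunk exposes the first choice's delta text via `chunk.choice.delta.content` (consistent with the `Choice`/`Delta` doc comments later in this diff); the `print` body and the chosen provider are illustrative:

```ruby
require 'omniai/openai' # or any of the provider gems listed above

client = OmniAI::OpenAI::Client.new

# Each chunk is yielded to the proc as the provider streams it back.
stream = proc do |chunk|
  print(chunk.choice.delta.content) # partial text for the first choice
end

client.chat('Tell me a joke.', stream:)
```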
@@ -16,7 +16,9 @@ module OmniAI
   new(index:, delta:, message:)
 end
 
-# @param data [Hash]
+# @param index [Integer]
+# @param delta [OmniAI::Chat::Delta] optional
+# @param message [OmniAI::Chat::Message] optional
 def initialize(index:, delta:, message:)
   @index = index
   @delta = delta
@@ -36,7 +36,7 @@ module OmniAI
   @choices ||= @data['choices'].map { |data| Choice.for(data:) }
 end
 
-# @param [index] [Integer]
+# @param index [Integer]
 # @return [OmniAI::Chat::Delta]
 def choice(index: 0)
   choices[index]
@@ -43,7 +43,7 @@ module OmniAI
   @choices ||= @data['choices'].map { |data| Choice.for(data:) }
 end
 
-# @param [index] [Integer] optional - default is 0
+# @param index [Integer] optional - default is 0
 # @return [OmniAI::Chat::Choice]
 def choice(index: 0)
   choices[index]
@@ -6,8 +6,7 @@ module OmniAI
 class Content
   attr_accessor :type, :value
 
-  # @param url [String]
-  # @param text [String]
+  # @param value [String]
   # @param type [Symbol] :image / :video / :audio / :text
   def initialize(value, type: :text)
     @value = value
@@ -5,7 +5,6 @@ module OmniAI
 # A stream given when streaming.
 class Stream
   # @param response [HTTP::Response]
-  # @param block [Proc]
   def initialize(response:)
     @response = response
     @parser = EventStreamParser::Parser.new
data/lib/omniai/client.rb CHANGED
@@ -48,7 +48,7 @@ module OmniAI
 
 # @raise [OmniAI::Error]
 #
-# @param file [IO]
+# @param io [String, Pathname, IO] required
 # @param model [String]
 # @param language [String, nil] optional
 # @param prompt [String, nil] optional
@@ -56,7 +56,7 @@ module OmniAI
 # @param format [Symbol] :text, :srt, :vtt, or :json (default)
 #
 # @return text [OmniAI::Transcribe::Transcription]
-def transcribe(file, model:, language: nil, prompt: nil, temperature: nil, format: nil)
+def transcribe(io, model:, language: nil, prompt: nil, temperature: nil, format: nil)
   raise NotImplementedError, "#{self.class.name}#speak undefined"
 end
 end
@@ -96,15 +96,15 @@ module OmniAI
   new(...).process!
 end
 
-# @param path [String] required
+# @param io [String, Pathname, IO] required
 # @param client [OmniAI::Client] the client
 # @param model [String] required
 # @param language [String, nil] optional
 # @param prompt [String, nil] optional
 # @param temperature [Float, nil] optional
 # @param format [String, nil] optional
-def initialize(path, client:, model:, language: nil, prompt: nil, temperature: nil, format: Format::JSON)
-  @path = path
+def initialize(io, client:, model:, language: nil, prompt: nil, temperature: nil, format: Format::JSON)
+  @io = io
   @model = model
   @language = language
   @prompt = prompt
@@ -128,7 +128,7 @@ module OmniAI
 # @return [Hash]
 def payload
   {
-    file: HTTP::FormData::File.new(@path),
+    file: HTTP::FormData::File.new(@io),
     model: @model,
     language: @language,
     prompt: @prompt,
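The `@path` to `@io` rename in the hunks above needs no further change to `payload` because `HTTP::FormData::File` (from the `http-form_data` library used by the `http` gem) accepts either a filename string or an already-open IO. A small sketch of the two inputs the new signature supports; the filename is just an example:

```ruby
require 'http/form_data'

# Passing a filename: HTTP::FormData::File opens the file itself.
from_path = HTTP::FormData::File.new('example.ogg')

# Passing an IO: the open handle is wrapped as-is, so callers can supply
# data that never touches the filesystem.
io = File.open('example.ogg', 'rb')
from_io = HTTP::FormData::File.new(io)
```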
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module OmniAI
-  VERSION = '1.1.2'
+  VERSION = '1.1.4'
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: omniai
 version: !ruby/object:Gem::Version
-  version: 1.1.2
+  version: 1.1.4
 platform: ruby
 authors:
 - Kevin Sylvestre
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-06-17 00:00:00.000000000 Z
+date: 2024-06-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -80,7 +80,8 @@ files:
 - lib/omniai/transcribe/transcription.rb
 - lib/omniai/version.rb
 homepage: https://github.com/ksylvest/omniai
-licenses: []
+licenses:
+- MIT
 metadata:
   homepage_uri: https://github.com/ksylvest/omniai
   changelog_uri: https://github.com/ksylvest/omniai/releases