omniai-openai 1.1.1 → 1.2.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 8fa0f1acc820456282d48d42e9bc1154f8aa9b83fe9fa86817e1d3446a8cd31d
-  data.tar.gz: cd4c84c579985a17c96edadcb91324b545035bf170fb20c3172855e13e83586c
+  metadata.gz: e3afb339ec2a0e69d5a268fd134aae4119e3140ccad493f22d7db43b817145a4
+  data.tar.gz: f49c3cfd25228e49555981cc93e05e6702d3ce4edde9fa07277bc68c83312361
 SHA512:
-  metadata.gz: c4583c5da031c749e56b6ec2bbcf59daaebe8f873c5fe9a9cbdfdfb67acf8d8eabc798a754fe5e2784fd73495bb91181deb3e323d00e0e84d962ff86624192a5
-  data.tar.gz: fcf7612d1f7361697820a3e14cf5558fe83fb096b1154a5c3a67c3c6c64823d230e1c1d029252ee91fa0e9cd26bbba398fc18612189171db891d47a830536266
+  metadata.gz: 4455db3523b81301793b936528312394cb0f98b8f51ad7a49f5f6fe5811c2969dcaf679649deb0cb90bfec5fdf5838f3ba6444eea645d560ceaaaf96b9c15f5f
+  data.tar.gz: 804f70fd6067cba057aaa7dbbe7d65f5edaa389e5315fb4a107f154fbafa1139d33ad637439e8b838036767c02e4e2cfed0d69c43c0a36cb805bb4dfe21eb907
data/README.md CHANGED
@@ -25,7 +25,7 @@ A client may also be passed the following options:
 - `api_key` (required - default is `ENV['OPENAI_API_KEY']`)
 - `organization` (optional)
 - `project` (optional)
-- `host` (optional)
+- `host` (optional) useful for usage with Ollama or LocalAI
 
 ### Configuration
 
@@ -36,21 +36,41 @@ OmniAI::OpenAI.configure do |config|
   config.api_key = 'sk-...' # default: ENV['OPENAI_API_KEY']
   config.organization = '...' # default: ENV['OPENAI_ORGANIZATION']
   config.project = '...' # default: ENV['OPENAI_PROJECT']
-  config.host = '...' # default: 'https://api.openai.com'
+  config.host = '...' # default: 'https://api.openai.com' - override for usage with LocalAI / Ollama
 end
 ```
 
+#### Usage with [LocalAI](https://localai.io/)
+
+LocalAI offers [built-in compatibility with the OpenAI specification](https://localai.io/). To initialize a client that points to LocalAI, change the host accordingly:
+
+```ruby
+client = OmniAI::OpenAI::Client.new(host: 'http://localhost:8080', api_key: nil)
+```
+
+_For details on installing or running LocalAI, see the [getting started tutorial](https://localai.io/basics/getting_started/)._
+
+#### Usage with [Ollama](https://www.ollama.com/)
+
+Ollama offers [built-in compatibility with the OpenAI specification](https://ollama.com/blog/openai-compatibility). To initialize a client that points to Ollama, change the host accordingly:
+
+```ruby
+client = OmniAI::OpenAI::Client.new(host: 'http://localhost:11434', api_key: nil)
+```
+
+_For details on installing or running Ollama, check out [the project README](https://github.com/ollama/ollama)._
+
 ### Chat
 
 A chat completion is generated by passing in prompts using any of a variety of formats:
 
 ```ruby
-completion = client.chat.completion('Tell me a joke!')
+completion = client.chat('Tell me a joke!')
 completion.choice.message.content # 'Why did the chicken cross the road? To get to the other side.'
 ```
 
 ```ruby
-completion = client.chat.completion({
+completion = client.chat({
   role: OmniAI::Chat::Role::USER,
   content: 'Is it wise to jump off a bridge?'
 })
@@ -58,7 +78,7 @@ completion.choice.message.content # 'No.'
 ```
 
 ```ruby
-completion = client.chat.completion([
+completion = client.chat([
   {
     role: OmniAI::Chat::Role::SYSTEM,
     content: 'You are a helpful assistant.'
@@ -73,7 +93,7 @@ completion.choice.message.content # 'The capital of Canada is Ottawa.'
 `model` takes an optional string (default is `gpt-4o`):
 
 ```ruby
-completion = client.chat.completion('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
+completion = client.chat('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
 completion.choice.message.content # 'A cheetah can reach speeds over 100 km/h.'
 ```
 
@@ -84,7 +104,7 @@ completion.choice.message.content # 'A cheetah can reach speeds over 100 km/h.'
 `temperature` takes an optional float between `0.0` and `2.0` (default is `0.7`):
 
 ```ruby
-completion = client.chat.completion('Pick a number between 1 and 5', temperature: 2.0)
+completion = client.chat('Pick a number between 1 and 5', temperature: 2.0)
 completion.choice.message.content # '3'
 ```
 
@@ -98,7 +118,7 @@ completion.choice.message.content # '3'
 stream = proc do |chunk|
   print(chunk.choice.delta.content) # 'Better', 'three', 'hours', ...
 end
-client.chat.completion('Be poetic.', stream:)
+client.chat('Be poetic.', stream:)
 ```
 
 [OpenAI API Reference `stream`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream)
@@ -108,7 +128,7 @@ client.chat.completion('Be poetic.', stream:)
 `format` takes an optional symbol (`:json`) that sets the `response_format` to `json_object`:
 
 ```ruby
-completion = client.chat.completion([
+completion = client.chat([
   { role: OmniAI::Chat::Role::SYSTEM, content: OmniAI::Chat::JSON_PROMPT },
   { role: OmniAI::Chat::Role::USER, content: 'What is the name of the drummer for the Beatles?' }
 ], format: :json)
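
The README changes above amount to two usage-level differences: a completion is now requested with `client.chat(...)` rather than `client.chat.completion(...)`, and the client can target a non-OpenAI host without an API key. A minimal sketch combining the two (assuming the gem is loaded via `require 'omniai/openai'` and an Ollama server is already listening on its default port):

```ruby
require 'omniai/openai'

# Point the client at a local Ollama server; with a non-default host, no API key is required.
client = OmniAI::OpenAI::Client.new(host: 'http://localhost:11434', api_key: nil)

# 1.2.x shorthand: call `chat` directly instead of `chat.completion`.
completion = client.chat('Tell me a joke!')
puts completion.choice.message.content
```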
data/lib/omniai/openai/client.rb CHANGED
@@ -33,19 +33,24 @@ module OmniAI
         logger: OmniAI::OpenAI.config.logger,
         host: OmniAI::OpenAI.config.host
       )
-        raise(ArgumentError, %(ENV['OPENAI_API_KEY'] must be defined or `api_key` must be passed)) if api_key.nil?
+        if api_key.nil? && host.eql?(Config::DEFAULT_HOST)
+          raise(
+            ArgumentError,
+            %(ENV['OPENAI_API_KEY'] must be defined or `api_key` must be passed when using #{Config::DEFAULT_HOST})
+          )
+        end
 
-        super(api_key:, logger:)
+        super(api_key:, host:, logger:)
 
         @organization = organization
         @project = project
-        @host = host
       end
 
       # @return [HTTP::Client]
       def connection
         @connection ||= begin
-          http = HTTP.auth("Bearer #{api_key}").persistent(@host)
+          http = HTTP.persistent(@host)
+          http = http.auth("Bearer #{@api_key}") if @api_key
           http = http.headers('OpenAI-Organization': @organization) if @organization
           http = http.headers('OpenAI-Project': @project) if @project
           http
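
The net effect of the client change: the API-key requirement is now scoped to the default OpenAI host, and the `Bearer` auth header is only attached when a key is actually present. A rough sketch of the resulting behaviour (error text taken from the diff above):

```ruby
# Default host and no key: still raises, as it did in 1.1.x.
OmniAI::OpenAI::Client.new(api_key: nil)
# => ArgumentError: ENV['OPENAI_API_KEY'] must be defined or `api_key` must be passed when using https://api.openai.com

# Non-default host and no key: now permitted; the Bearer auth header is simply omitted.
client = OmniAI::OpenAI::Client.new(host: 'http://localhost:8080', api_key: nil)
```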
data/lib/omniai/openai/config.rb CHANGED
@@ -6,12 +6,14 @@ module OmniAI
     class Config < OmniAI::Config
       attr_accessor :organization, :project, :chat_options, :transcribe_options, :speak_options
 
+      DEFAULT_HOST = 'https://api.openai.com'
+
       def initialize
         super
         @api_key = ENV.fetch('OPENAI_API_KEY', nil)
         @organization = ENV.fetch('OPENAI_ORGANIZATION', nil)
         @project = ENV.fetch('OPENAI_PROJECT', nil)
-        @host = ENV.fetch('OPENAI_HOST', 'https://api.openai.com')
+        @host = ENV.fetch('OPENAI_HOST', DEFAULT_HOST)
         @chat_options = {}
         @transcribe_options = {}
         @speak_options = {}
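
Because the default host now reads `ENV.fetch('OPENAI_HOST', DEFAULT_HOST)`, the override can also come from the environment rather than per-client arguments. A sketch, assuming `OPENAI_HOST` is set before `OmniAI::OpenAI.config` is first built:

```ruby
# Typically exported in the shell; set inline here only for illustration.
ENV['OPENAI_HOST'] = 'http://localhost:11434'

# Every client created without an explicit `host:` now targets the local server,
# and the api_key check is skipped because the host is no longer the default.
client = OmniAI::OpenAI::Client.new(api_key: nil)
```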
data/lib/omniai/openai/version.rb CHANGED
@@ -2,6 +2,6 @@
 
 module OmniAI
   module OpenAI
-    VERSION = '1.1.1'
+    VERSION = '1.2.1'
   end
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: omniai-openai
 version: !ruby/object:Gem::Version
-  version: 1.1.1
+  version: 1.2.1
 platform: ruby
 authors:
 - Kevin Sylvestre
@@ -69,7 +69,8 @@ files:
 - lib/omniai/openai/transcribe.rb
 - lib/omniai/openai/version.rb
 homepage: https://github.com/ksylvest/omniai-openai
-licenses: []
+licenses:
+- MIT
 metadata:
   homepage_uri: https://github.com/ksylvest/omniai-openai
   changelog_uri: https://github.com/ksylvest/omniai-openai/releases