ruby-openai 4.1.0 → 4.3.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: '06199aaabd11e965f6d07f3948323b5226fdda82f09326c065cda4b5b2cc9237'
4
- data.tar.gz: 59cce00dfeb08270e11b2c33b23af4558dfc53796c87ebb6cf64b1cb76739790
3
+ metadata.gz: 666cf605fbc981b59131ef5005ec32fd0eabd604f924d907ae0ee0d707b739bb
4
+ data.tar.gz: fe938931bad97952bd8829ab1ec336896c12ed247c18bc62f3df116917919498
5
5
  SHA512:
6
- metadata.gz: 1d9b9409e915f49284f8acdf4be10cde81d7065fadb254dc135aeaad5572c2278c3bb6115752e8c43619d4ea60d696bc55a8128af8981ec544db2320c2d4b89a
7
- data.tar.gz: a202ca506f515e8933f00e0302dcd8a59b8d15f6b11c13263499ac140b9b045162089141f27cab5dda2dbde3d5b960c05c8081415a721b29cbdffcd3d375ddd0
6
+ metadata.gz: 2799064a7bbfc79dc47ea4168b0a7755e53aad89e3f3d55a79defba569592141aca70bdfd86747d1690b4ab72acf0d542844d6dd7f1fc8e235c6e3bf7f911854
7
+ data.tar.gz: d81441ee5b5a1b4070c438ae02f363b95f133d571971c9876bd1cc239225cbd193955c4ce23efbba0c45af6d2726d06bd6d27b418a2a682edcf9de5c1c701b7f
data/.gitignore CHANGED
@@ -1,16 +1,74 @@
1
- /.bundle/
2
- /.yardoc
3
- /_yardoc/
1
+ ### Ruby ###
2
+ *.gem
3
+ *.rbc
4
+ /.config
4
5
  /coverage/
5
- /doc/
6
+ /InstalledFiles
6
7
  /pkg/
7
8
  /spec/reports/
9
+ /spec/examples.txt
10
+ /test/tmp/
11
+ /test/version_tmp/
8
12
  /tmp/
13
+ /.bundle/
14
+ /.yardoc
15
+ /_yardoc/
16
+ /doc/
17
+
18
+
19
+ # Used by dotenv library to load environment variables.
20
+ .env
21
+
22
+ # Ignore Byebug command history file.
23
+ .byebug_history
24
+
25
+ ## Specific to RubyMotion:
26
+ .dat*
27
+ .repl_history
28
+ build/
29
+ *.bridgesupport
30
+ build-iPhoneOS/
31
+ build-iPhoneSimulator/
32
+
33
+ ## Specific to RubyMotion (use of CocoaPods):
34
+ #
35
+ # We recommend against adding the Pods directory to your .gitignore. However
36
+ # you should judge for yourself, the pros and cons are mentioned at:
37
+ # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
38
+ # vendor/Pods/
39
+
40
+ ## Documentation cache and generated files:
41
+ /.yardoc/
42
+ /_yardoc/
43
+ /doc/
44
+ /rdoc/
45
+
46
+ ## Environment normalization:
47
+ /.bundle/
48
+ /vendor/bundle
49
+ /lib/bundler/man/
50
+
51
+ # for a library or gem, you might want to ignore these files since the code is
52
+ # intended to run in multiple environments; otherwise, check them in:
53
+ # Gemfile.lock
54
+ # .ruby-version
55
+ # .ruby-gemset
56
+
57
+ # unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
58
+ .rvmrc
59
+
60
+ # Used by RuboCop. Remote config files pulled in from inherit_from directive.
61
+ # .rubocop-https?--*
9
62
 
10
63
  # rspec failure tracking
11
64
  .rspec_status
12
65
 
13
- .byebug_history
14
- .env
66
+ # IDE
67
+ .idea
68
+ .idea/
69
+ .idea/*
70
+ .vscode
71
+ .vs/
15
72
 
16
- *.gem
73
+ # Mac
74
+ .DS_Store
data/CHANGELOG.md CHANGED
@@ -5,6 +5,18 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [4.3.0] - 2023-06-20
9
+
10
+ ### Added
11
+
12
+ - Add extra-headers to config to allow setting openai-caching-proxy-worker TTL, Helicone Auth and anything else ya need. Ty to [@deltaguita](https://github.com/deltaguita) and [@marckohlbrugge](https://github.com/marckohlbrugge) for the PR!
13
+
14
+ ## [4.2.0] - 2023-06-20
15
+
16
+ ### Added
17
+
18
+ - Add Azure OpenAI Service support. Thanks to [@rmachielse](https://github.com/rmachielse) and [@steffansluis](https://github.com/steffansluis) for the PR and to everyone who requested this feature!
19
+
8
20
  ## [4.1.0] - 2023-05-15
9
21
 
10
22
  ### Added
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- ruby-openai (4.1.0)
4
+ ruby-openai (4.3.0)
5
5
  faraday (>= 1)
6
6
  faraday-multipart (>= 1)
7
7
 
@@ -16,7 +16,7 @@ GEM
16
16
  rexml
17
17
  diff-lcs (1.5.0)
18
18
  dotenv (2.8.1)
19
- faraday (2.7.4)
19
+ faraday (2.7.10)
20
20
  faraday-net_http (>= 2.0, < 3.1)
21
21
  ruby2_keywords (>= 0.0.4)
22
22
  faraday-multipart (1.0.4)
data/README.md CHANGED
@@ -70,13 +70,18 @@ client = OpenAI::Client.new
70
70
 
71
71
  #### Custom timeout or base URI
72
72
 
73
- The default timeout for any request using this library is 120 seconds. You can change that by passing a number of seconds to the `request_timeout` when initializing the client. You can also change the base URI used for all requests, eg. to use observability tools like [Helicone](https://docs.helicone.ai/quickstart/integrate-in-one-line-of-code):
73
+ The default timeout for any request using this library is 120 seconds. You can change that by passing a number of seconds to the `request_timeout` when initializing the client. You can also change the base URI used for all requests, eg. to use observability tools like [Helicone](https://docs.helicone.ai/quickstart/integrate-in-one-line-of-code), and add arbitrary other headers e.g. for [openai-caching-proxy-worker](https://github.com/6/openai-caching-proxy-worker):
74
74
 
75
75
  ```ruby
76
76
  client = OpenAI::Client.new(
77
77
  access_token: "access_token_goes_here",
78
78
  uri_base: "https://oai.hconeai.com/",
79
- request_timeout: 240
79
+ request_timeout: 240,
80
+ extra_headers: {
81
+ "X-Proxy-TTL" => "43200", # For https://github.com/6/openai-caching-proxy-worker#specifying-a-cache-ttl
82
+ "X-Proxy-Refresh": "true" # For https://github.com/6/openai-caching-proxy-worker#refreshing-the-cache
83
+ "Helicone-Auth": "Bearer HELICONE_API_KEY", # For https://docs.helicone.ai/getting-started/integration-method/openai-proxy
84
+ }
80
85
  )
81
86
  ```
82
87
 
@@ -88,9 +93,29 @@ OpenAI.configure do |config|
88
93
  config.organization_id = ENV.fetch("OPENAI_ORGANIZATION_ID") # Optional
89
94
  config.uri_base = "https://oai.hconeai.com/" # Optional
90
95
  config.request_timeout = 240 # Optional
96
+ config.extra_headers = {
97
+ "X-Proxy-TTL" => "43200", # For https://github.com/6/openai-caching-proxy-worker#specifying-a-cache-ttl
98
+ "X-Proxy-Refresh": "true" # For https://github.com/6/openai-caching-proxy-worker#refreshing-the-cache
99
+ "Helicone-Auth": "Bearer HELICONE_API_KEY", # For https://docs.helicone.ai/getting-started/integration-method/openai-proxy
100
+ } # Optional
91
101
  end
92
102
  ```
93
103
 
104
+ #### Azure
105
+
106
+ To use the [Azure OpenAI Service](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/) API, you can configure the gem like this:
107
+
108
+ ```ruby
109
+ OpenAI.configure do |config|
110
+ config.access_token = ENV.fetch("AZURE_OPENAI_API_KEY")
111
+ config.uri_base = ENV.fetch("AZURE_OPENAI_URI")
112
+ config.api_type = :azure
113
+ config.api_version = "2023-03-15-preview"
114
+ end
115
+ ```
116
+
117
+ where `AZURE_OPENAI_URI` is e.g. `https://custom-domain.openai.azure.com/openai/deployments/gpt-35-turbo`
118
+
94
119
  ### Models
95
120
 
96
121
  There are different models that can be used to generate text. For a full list and to retrieve information about a single model:
@@ -149,6 +174,68 @@ client.chat(
149
174
  # => "Anna is a young woman in her mid-twenties, with wavy chestnut hair that falls to her shoulders..."
150
175
  ```
151
176
 
177
+ Note: the API docs state that token usage is included in the streamed chat chunk objects, but this doesn't currently appear to be the case. If you need to work out how many tokens are being used while streaming, try [tiktoken_ruby](https://github.com/IAPark/tiktoken_ruby).
178
+
179
+ ### Functions
180
+
181
+ You can describe and pass in functions and the model will intelligently choose to output a JSON object containing arguments to call them. For example, if you want the model to use your method `get_current_weather` to get the current weather in a given location:
182
+
183
+ ```ruby
184
+ def get_current_weather(location:, unit: "fahrenheit")
185
+ # use a weather api to fetch weather
186
+ end
187
+
188
+ response =
189
+ client.chat(
190
+ parameters: {
191
+ model: "gpt-3.5-turbo-0613",
192
+ messages: [
193
+ {
194
+ "role": "user",
195
+ "content": "What is the weather like in San Francisco?",
196
+ },
197
+ ],
198
+ functions: [
199
+ {
200
+ name: "get_current_weather",
201
+ description: "Get the current weather in a given location",
202
+ parameters: {
203
+ type: :object,
204
+ properties: {
205
+ location: {
206
+ type: :string,
207
+ description: "The city and state, e.g. San Francisco, CA",
208
+ },
209
+ unit: {
210
+ type: "string",
211
+ enum: %w[celsius fahrenheit],
212
+ },
213
+ },
214
+ required: ["location"],
215
+ },
216
+ },
217
+ ],
218
+ },
219
+ )
220
+
221
+ message = response.dig("choices", 0, "message")
222
+
223
+ if message["role"] == "assistant" && message["function_call"]
224
+ function_name = message.dig("function_call", "name")
225
+ args =
226
+ JSON.parse(
227
+ message.dig("function_call", "arguments"),
228
+ { symbolize_names: true },
229
+ )
230
+
231
+ case function_name
232
+ when "get_current_weather"
233
+ get_current_weather(**args)
234
+ end
235
+ end
236
+ # => "The weather is nice 🌞"
237
+ ```
238
+
152
239
  ### Completions
153
240
 
154
241
  Hit the OpenAI API for a completion using other GPT-3 models:
@@ -185,12 +272,15 @@ puts response.dig("choices", 0, "text")
185
272
  You can use the embeddings endpoint to get a vector of numbers representing an input. You can then compare these vectors for different inputs to efficiently check how similar the inputs are.
186
273
 
187
274
  ```ruby
188
- client.embeddings(
275
+ response = client.embeddings(
189
276
  parameters: {
190
- model: "babbage-similarity",
277
+ model: "text-embedding-ada-002",
191
278
  input: "The food was delicious and the waiter..."
192
279
  }
193
280
  )
281
+
282
+ puts response.dig("data", 0, "embedding")
283
+ # => Vector representation of your embedding
194
284
  ```
195
285
 
196
286
  ### Files
data/lib/openai/client.rb CHANGED
@@ -2,11 +2,13 @@ module OpenAI
2
2
  class Client
3
3
  extend OpenAI::HTTP
4
4
 
5
- def initialize(access_token: nil, organization_id: nil, uri_base: nil, request_timeout: nil)
5
+ def initialize(access_token: nil, organization_id: nil, uri_base: nil, request_timeout: nil,
6
+ extra_headers: {})
6
7
  OpenAI.configuration.access_token = access_token if access_token
7
8
  OpenAI.configuration.organization_id = organization_id if organization_id
8
9
  OpenAI.configuration.uri_base = uri_base if uri_base
9
10
  OpenAI.configuration.request_timeout = request_timeout if request_timeout
11
+ OpenAI.configuration.extra_headers = extra_headers
10
12
  end
11
13
 
12
14
  def chat(parameters: {})
data/lib/openai/http.rb CHANGED
@@ -70,14 +70,28 @@ module OpenAI
70
70
  end
71
71
 
72
72
  def uri(path:)
73
- OpenAI.configuration.uri_base + OpenAI.configuration.api_version + path
73
+ if OpenAI.configuration.api_type == :azure
74
+ base = File.join(OpenAI.configuration.uri_base, path)
75
+ "#{base}?api-version=#{OpenAI.configuration.api_version}"
76
+ else
77
+ File.join(OpenAI.configuration.uri_base, OpenAI.configuration.api_version, path)
78
+ end
74
79
  end
75
80
 
76
81
  def headers
82
+ return azure_headers if OpenAI.configuration.api_type == :azure
83
+
77
84
  {
78
85
  "Content-Type" => "application/json",
79
86
  "Authorization" => "Bearer #{OpenAI.configuration.access_token}",
80
87
  "OpenAI-Organization" => OpenAI.configuration.organization_id
88
+ }.merge(OpenAI.configuration.extra_headers)
89
+ end
90
+
91
+ def azure_headers
92
+ {
93
+ "Content-Type" => "application/json",
94
+ "api-key" => OpenAI.configuration.access_token
81
95
  }
82
96
  end
83
97
 
@@ -1,3 +1,3 @@
1
1
  module OpenAI
2
- VERSION = "4.1.0".freeze
2
+ VERSION = "4.3.0".freeze
3
3
  end
data/lib/openai.rb CHANGED
@@ -15,7 +15,8 @@ module OpenAI
15
15
 
16
16
  class Configuration
17
17
  attr_writer :access_token
18
- attr_accessor :api_version, :organization_id, :uri_base, :request_timeout
18
+ attr_accessor :api_type, :api_version, :organization_id, :uri_base, :request_timeout,
19
+ :extra_headers
19
20
 
20
21
  DEFAULT_API_VERSION = "v1".freeze
21
22
  DEFAULT_URI_BASE = "https://api.openai.com/".freeze
@@ -23,6 +24,7 @@ module OpenAI
23
24
 
24
25
  def initialize
25
26
  @access_token = nil
27
+ @api_type = nil
26
28
  @api_version = DEFAULT_API_VERSION
27
29
  @organization_id = nil
28
30
  @uri_base = DEFAULT_URI_BASE
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.1.0
4
+ version: 4.3.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Alex
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2023-05-15 00:00:00.000000000 Z
11
+ date: 2023-08-12 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: faraday