ruby-openai-transitory-v2 6.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.circleci/config.yml +45 -0
- data/.devcontainer/Dockerfile +16 -0
- data/.devcontainer/devcontainer.json +36 -0
- data/.devcontainer/docker-compose.yml +19 -0
- data/.github/FUNDING.yml +13 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +38 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.github/dependabot.yml +15 -0
- data/.gitignore +74 -0
- data/.rspec +3 -0
- data/.rubocop.yml +31 -0
- data/CHANGELOG.md +398 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/CONTRIBUTING.md +3 -0
- data/Gemfile +12 -0
- data/Gemfile.lock +88 -0
- data/LICENSE.txt +21 -0
- data/README.md +885 -0
- data/Rakefile +19 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/lib/openai/assistants.rb +27 -0
- data/lib/openai/audio.rb +19 -0
- data/lib/openai/client.rb +95 -0
- data/lib/openai/compatibility.rb +10 -0
- data/lib/openai/files.rb +42 -0
- data/lib/openai/finetunes.rb +27 -0
- data/lib/openai/http.rb +127 -0
- data/lib/openai/http_headers.rb +36 -0
- data/lib/openai/images.rb +27 -0
- data/lib/openai/messages.rb +23 -0
- data/lib/openai/models.rb +15 -0
- data/lib/openai/run_steps.rb +15 -0
- data/lib/openai/runs.rb +32 -0
- data/lib/openai/threads.rb +23 -0
- data/lib/openai/version.rb +3 -0
- data/lib/openai.rb +88 -0
- data/lib/ruby/openai.rb +2 -0
- data/pull_request_template.md +5 -0
- data/ruby-openai.gemspec +32 -0
- metadata +137 -0
data/Rakefile
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
require "bundler/gem_tasks"
require "rspec/core/rake_task"
require "rubocop/rake_task"

RSpec::Core::RakeTask.new(:spec)

# Running `rake` with no arguments performs the full check suite.
task :default do
  %w[test lint].each { |name| Rake::Task[name].invoke }
end

# Run the RSpec suite via the :spec task defined above.
task :test do
  Rake::Task["spec"].invoke
end

# Define the RuboCop task lazily, then run it.
task :lint do
  RuboCop::RakeTask.new(:rubocop)
  Rake::Task["rubocop"].invoke
end
|
data/bin/console
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
#!/usr/bin/env ruby
# Development console: loads the bundled gems and this library, then starts
# an IRB session with the OpenAI namespace available for experimentation.

require "bundler/setup"
require "openai"

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require "irb"
IRB.start(__FILE__)
|
data/lib/openai/assistants.rb
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
module OpenAI
  # Thin wrapper around the beta Assistants endpoints.
  class Assistants
    # Keeps a beta-scoped copy of the client (adds the OpenAI-Beta header).
    def initialize(client:)
      @client = client.beta(assistants: "v1")
    end

    # GET /assistants — list all assistants.
    def list
      @client.get(path: assistants_path)
    end

    # GET /assistants/:id — fetch a single assistant.
    def retrieve(id:)
      @client.get(path: assistants_path(id))
    end

    # POST /assistants — create an assistant.
    def create(parameters: {})
      @client.json_post(path: assistants_path, parameters: parameters)
    end

    # POST /assistants/:id — update an assistant.
    def modify(id:, parameters: {})
      @client.json_post(path: assistants_path(id), parameters: parameters)
    end

    # DELETE /assistants/:id — remove an assistant.
    def delete(id:)
      @client.delete(path: assistants_path(id))
    end

    private

    # Build "/assistants" or "/assistants/<id>".
    def assistants_path(id = nil)
      id ? "/assistants/#{id}" : "/assistants"
    end
  end
end
|
data/lib/openai/audio.rb
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
module OpenAI
  # API surface for the /audio endpoints: Whisper speech-to-text and
  # text-to-speech generation.
  class Audio
    def initialize(client:)
      @client = client
    end

    # POST /audio/transcriptions — transcribe audio in its source language.
    def transcribe(parameters: {})
      client.multipart_post(path: "/audio/transcriptions", parameters: parameters)
    end

    # POST /audio/translations — transcribe audio into English.
    def translate(parameters: {})
      client.multipart_post(path: "/audio/translations", parameters: parameters)
    end

    # POST /audio/speech — generate spoken audio from text.
    def speech(parameters: {})
      client.json_post(path: "/audio/speech", parameters: parameters)
    end

    private

    attr_reader :client
  end
end
|
data/lib/openai/client.rb
ADDED
@@ -0,0 +1,95 @@
|
|
1
|
+
module OpenAI
  # Main entry point: holds per-client configuration and exposes one method
  # (or one memoized resource object) per OpenAI API area.
  class Client
    include OpenAI::HTTP

    # Settings configurable per client; anything omitted falls back to the
    # global OpenAI.configuration.
    CONFIG_KEYS = %i[
      api_type
      api_version
      access_token
      organization_id
      uri_base
      request_timeout
      extra_headers
    ].freeze
    attr_reader(*CONFIG_KEYS, :faraday_middleware)

    # @param config [Hash] overrides for any CONFIG_KEYS entry.
    # @param faraday_middleware [Proc] optional block that receives the
    #   Faraday connection so callers can install their own middleware.
    def initialize(config = {}, &faraday_middleware)
      # Set instance variables like api_type & access_token. Fall back to
      # the global config if not present.
      CONFIG_KEYS.each do |key|
        instance_variable_set("@#{key}", config[key] || OpenAI.configuration.send(key))
      end
      @faraday_middleware = faraday_middleware
    end

    # POST /chat/completions
    def chat(parameters: {})
      json_post(path: "/chat/completions", parameters: parameters)
    end

    # POST /edits (legacy endpoint)
    def edits(parameters: {})
      json_post(path: "/edits", parameters: parameters)
    end

    # POST /embeddings
    def embeddings(parameters: {})
      json_post(path: "/embeddings", parameters: parameters)
    end

    # POST /completions (legacy text completions)
    def completions(parameters: {})
      json_post(path: "/completions", parameters: parameters)
    end

    # POST /moderations
    def moderations(parameters: {})
      json_post(path: "/moderations", parameters: parameters)
    end

    # Memoized resource wrappers, one per API area.
    def audio
      @audio ||= Audio.new(client: self)
    end

    def files
      @files ||= Files.new(client: self)
    end

    def finetunes
      @finetunes ||= Finetunes.new(client: self)
    end

    def images
      @images ||= Images.new(client: self)
    end

    def models
      @models ||= Models.new(client: self)
    end

    def assistants
      @assistants ||= Assistants.new(client: self)
    end

    def threads
      @threads ||= Threads.new(client: self)
    end

    def messages
      @messages ||= Messages.new(client: self)
    end

    def runs
      @runs ||= Runs.new(client: self)
    end

    def run_steps
      @run_steps ||= RunSteps.new(client: self)
    end

    # True when this client targets Azure OpenAI rather than openai.com.
    def azure?
      @api_type&.to_sym == :azure
    end

    # Return a copy of this client with the OpenAI-Beta header set, e.g.
    # beta(assistants: "v1") yields "OpenAI-Beta: assistants=v1".
    def beta(apis)
      dup.tap do |client|
        client.add_headers("OpenAI-Beta": apis.map { |k, v| "#{k}=#{v}" }.join(";"))
      end
    end
  end
end
|
data/lib/openai/files.rb
ADDED
@@ -0,0 +1,42 @@
|
|
1
|
+
module OpenAI
  # Wrapper for the /files endpoints (upload, list, fetch, delete).
  class Files
    def initialize(client:)
      @client = client
    end

    # GET /files — list uploaded files.
    def list
      @client.get(path: "/files")
    end

    # POST /files (multipart) — upload a file.
    #
    # JSONL payloads are validated line-by-line locally first so a malformed
    # file fails fast with a useful message instead of an opaque API error.
    # The file handle is opened in a block so it is always closed after the
    # request (the original leaked it).
    def upload(parameters: {})
      file_path = parameters[:file]
      validate(file: file_path) if file_path.include?(".jsonl")

      File.open(file_path) do |file|
        @client.multipart_post(
          path: "/files",
          parameters: parameters.merge(file: file)
        )
      end
    end

    # GET /files/:id — fetch a file's metadata.
    def retrieve(id:)
      @client.get(path: "/files/#{id}")
    end

    # GET /files/:id/content — download a file's contents.
    def content(id:)
      @client.get(path: "/files/#{id}/content")
    end

    # DELETE /files/:id — remove a file.
    def delete(id:)
      @client.delete(path: "/files/#{id}")
    end

    private

    # Parse every line of a .jsonl file, raising a JSON::ParserError that
    # pinpoints the offending line number on failure.
    #
    # File.foreach streams the file and never leaves a handle open, unlike
    # the previous `File.open(file).each_line`, which leaked the descriptor.
    def validate(file:)
      File.foreach(file).with_index do |line, index|
        JSON.parse(line)
      rescue JSON::ParserError => e
        raise JSON::ParserError, "#{e.message} - found on line #{index + 1} of #{file}"
      end
    end
  end
end
|
data/lib/openai/finetunes.rb
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
module OpenAI
  # Client-side wrapper for the /fine_tuning/jobs endpoints.
  class Finetunes
    def initialize(client:)
      @client = client
    end

    # GET /fine_tuning/jobs — list fine-tuning jobs.
    def list
      @client.get(path: jobs_path)
    end

    # POST /fine_tuning/jobs — start a fine-tuning job.
    def create(parameters: {})
      @client.json_post(path: jobs_path, parameters: parameters)
    end

    # GET /fine_tuning/jobs/:id — fetch a single job.
    def retrieve(id:)
      @client.get(path: jobs_path(id))
    end

    # POST /fine_tuning/jobs/:id/cancel — stop a running job.
    def cancel(id:)
      @client.json_post(path: "#{jobs_path(id)}/cancel", parameters: {})
    end

    # GET /fine_tuning/jobs/:id/events — list status events for a job.
    def list_events(id:)
      @client.get(path: "#{jobs_path(id)}/events")
    end

    private

    # Build "/fine_tuning/jobs" or "/fine_tuning/jobs/<id>".
    def jobs_path(id = nil)
      id ? "/fine_tuning/jobs/#{id}" : "/fine_tuning/jobs"
    end
  end
end
|
data/lib/openai/http.rb
ADDED
@@ -0,0 +1,127 @@
|
|
1
|
+
require "event_stream_parser"

require_relative "http_headers"

module OpenAI
  # Low-level HTTP plumbing shared by the client: wraps Faraday for GET,
  # POST (JSON and multipart) and DELETE, plus server-sent-event streaming.
  # Expects the including class to provide @uri_base, @api_version,
  # @request_timeout, @faraday_middleware and azure? (see Client).
  module HTTP
    include HTTPHeaders

    # GET a path; JSONL string bodies are normalised into parsed JSON.
    def get(path:)
      parse_jsonl(conn.get(uri(path: path)) do |req|
        req.headers = headers
      end&.body)
    end

    # POST with no request body (e.g. run cancellation).
    def post(path:)
      parse_jsonl(conn.post(uri(path: path)) do |req|
        req.headers = headers
      end&.body)
    end

    # POST a JSON body; supports streaming when parameters[:stream]
    # responds to #call (see configure_json_post_request).
    def json_post(path:, parameters:)
      conn.post(uri(path: path)) do |req|
        configure_json_post_request(req, parameters)
      end&.body
    end

    # POST multipart/form-data (file uploads). `parameters` may contain
    # File/IO values, which are wrapped by multipart_parameters below.
    def multipart_post(path:, parameters: nil)
      conn(multipart: true).post(uri(path: path)) do |req|
        req.headers = headers.merge({ "Content-Type" => "multipart/form-data" })
        req.body = multipart_parameters(parameters)
      end&.body
    end

    def delete(path:)
      conn.delete(uri(path: path)) do |req|
        req.headers = headers
      end&.body
    end

    private

    # Pass non-String bodies through untouched (Faraday's :json middleware
    # already parsed them); parse JSONL strings into an array of objects.
    def parse_jsonl(response)
      return unless response
      return response unless response.is_a?(String)

      # Convert a multiline string of JSON objects to a JSON array.
      response = response.gsub("}\n{", "},{").prepend("[").concat("]")

      JSON.parse(response)
    end

    # Given a proc, returns an outer proc that can be used to iterate over a JSON stream of chunks.
    # For each chunk, the inner user_proc is called giving it the JSON object. The JSON object could
    # be a data object or an error object as described in the OpenAI API documentation.
    #
    # @param user_proc [Proc] The inner proc to call for each JSON object in the chunk.
    # @return [Proc] An outer proc that iterates over a raw stream, converting it to JSON.
    def to_json_stream(user_proc:)
      parser = EventStreamParser::Parser.new

      proc do |chunk, _bytes, env|
        # Surface non-200 streaming responses as Faraday errors instead of
        # feeding the error payload to the user's proc.
        if env && env.status != 200
          raise_error = Faraday::Response::RaiseError.new
          raise_error.on_complete(env.merge(body: try_parse_json(chunk)))
        end

        parser.feed(chunk) do |_type, data|
          # The stream terminates with a literal "[DONE]" sentinel event.
          user_proc.call(JSON.parse(data)) unless data == "[DONE]"
        end
      end
    end

    # Build a Faraday connection (optionally with multipart support) and let
    # the user-supplied middleware block customise it before use.
    def conn(multipart: false)
      connection = Faraday.new do |f|
        f.options[:timeout] = @request_timeout
        f.request(:multipart) if multipart
        f.use MiddlewareErrors
        f.response :raise_error
        f.response :json
      end

      @faraday_middleware&.call(connection)

      connection
    end

    # Azure routes the API version via a query parameter; OpenAI embeds it
    # in the URL path.
    def uri(path:)
      if azure?
        base = File.join(@uri_base, path)
        "#{base}?api-version=#{@api_version}"
      else
        File.join(@uri_base, @api_version, path)
      end
    end

    # Wrap File/IO values in Faraday::UploadIO so they can be encoded as
    # multipart parts; scalar values pass through unchanged.
    def multipart_parameters(parameters)
      parameters&.transform_values do |value|
        next value unless value.respond_to?(:close) # File or IO object.

        # Doesn't seem like OpenAI needs mime_type yet, so not worth
        # the library to figure this out. Hence the empty string
        # as the second argument.
        Faraday::UploadIO.new(value, "", value.path)
      end
    end

    # Set headers and body for a JSON POST; wires up server-sent-event
    # streaming when parameters[:stream] responds to #call.
    def configure_json_post_request(req, parameters)
      req_parameters = parameters.dup

      if parameters[:stream].respond_to?(:call)
        req.options.on_data = to_json_stream(user_proc: parameters[:stream])
        req_parameters[:stream] = true # Necessary to tell OpenAI to stream.
      elsif parameters[:stream]
        raise ArgumentError, "The stream parameter must be a Proc or have a #call method"
      end

      req.headers = headers
      req.body = req_parameters.to_json
    end

    # Best-effort JSON parse: return the raw input when it isn't valid JSON.
    def try_parse_json(maybe_json)
      JSON.parse(maybe_json)
    rescue JSON::ParserError
      maybe_json
    end
  end
end
|
data/lib/openai/http_headers.rb
ADDED
@@ -0,0 +1,36 @@
|
|
1
|
+
module OpenAI
  # Builds the HTTP header set for each request, varying by provider
  # (OpenAI vs. Azure) and merging in any caller-supplied extras.
  module HTTPHeaders
    # Merge additional headers (keys normalised to strings) into the
    # per-client extras; extras override the provider defaults.
    def add_headers(headers)
      stringified = headers.transform_keys(&:to_s)
      @extra_headers = extra_headers.merge(stringified)
    end

    private

    # Full header hash for a request: provider defaults plus extras.
    def headers
      base = azure? ? azure_headers : openai_headers
      base.merge(extra_headers)
    end

    def openai_headers
      {
        "Content-Type" => "application/json",
        "Authorization" => "Bearer #{@access_token}",
        "OpenAI-Organization" => @organization_id
      }
    end

    # Azure authenticates with an api-key header instead of a Bearer token.
    def azure_headers
      {
        "Content-Type" => "application/json",
        "api-key" => @access_token
      }
    end

    # Lazily-initialised store of caller-added headers.
    def extra_headers
      @extra_headers ||= {}
    end
  end
end
|
data/lib/openai/images.rb
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
module OpenAI
  # Wrapper for the image generation/edit/variation endpoints.
  class Images
    def initialize(client: nil)
      @client = client
    end

    # POST /images/generations — create images from a text prompt.
    def generate(parameters: {})
      @client.json_post(path: "/images/generations", parameters: parameters)
    end

    # POST /images/edits — edit an image, optionally constrained by a mask.
    def edit(parameters: {})
      @client.multipart_post(path: "/images/edits", parameters: open_files(parameters))
    end

    # POST /images/variations — produce variations of an existing image.
    def variations(parameters: {})
      @client.multipart_post(path: "/images/variations", parameters: open_files(parameters))
    end

    private

    # Replace the :image (and optional :mask) path entries with open File
    # objects so they can be sent as multipart uploads.
    def open_files(parameters)
      opened = { image: File.open(parameters[:image]) }
      opened[:mask] = File.open(parameters[:mask]) if parameters[:mask]
      parameters.merge(opened)
    end
  end
end
|
data/lib/openai/messages.rb
ADDED
@@ -0,0 +1,23 @@
|
|
1
|
+
module OpenAI
  # Wrapper for thread-scoped message endpoints (beta Assistants API).
  class Messages
    # Keeps a beta-scoped copy of the client (adds the OpenAI-Beta header).
    def initialize(client:)
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:thread_id/messages — list messages in a thread.
    def list(thread_id:)
      @client.get(path: messages_path(thread_id))
    end

    # GET /threads/:thread_id/messages/:id — fetch one message.
    def retrieve(thread_id:, id:)
      @client.get(path: messages_path(thread_id, id))
    end

    # POST /threads/:thread_id/messages — add a message to a thread.
    def create(thread_id:, parameters: {})
      @client.json_post(path: messages_path(thread_id), parameters: parameters)
    end

    # POST /threads/:thread_id/messages/:id — update a message.
    def modify(id:, thread_id:, parameters: {})
      @client.json_post(path: messages_path(thread_id, id), parameters: parameters)
    end

    private

    # Build "/threads/<tid>/messages" or "/threads/<tid>/messages/<id>".
    def messages_path(thread_id, id = nil)
      path = "/threads/#{thread_id}/messages"
      id ? "#{path}/#{id}" : path
    end
  end
end
|
data/lib/openai/run_steps.rb
ADDED
@@ -0,0 +1,15 @@
|
|
1
|
+
module OpenAI
  # Read-only wrapper for the run-step endpoints (beta Assistants API).
  class RunSteps
    # Keeps a beta-scoped copy of the client (adds the OpenAI-Beta header).
    def initialize(client:)
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:thread_id/runs/:run_id/steps — list a run's steps.
    def list(thread_id:, run_id:)
      @client.get(path: steps_path(thread_id, run_id))
    end

    # GET /threads/:thread_id/runs/:run_id/steps/:id — fetch one step.
    def retrieve(thread_id:, run_id:, id:)
      @client.get(path: "#{steps_path(thread_id, run_id)}/#{id}")
    end

    private

    # Build "/threads/<tid>/runs/<rid>/steps".
    def steps_path(thread_id, run_id)
      "/threads/#{thread_id}/runs/#{run_id}/steps"
    end
  end
end
|
data/lib/openai/runs.rb
ADDED
@@ -0,0 +1,32 @@
|
|
1
|
+
module OpenAI
  # Wrapper for thread-scoped run endpoints (beta Assistants API).
  class Runs
    # Keeps a beta-scoped copy of the client (adds the OpenAI-Beta header).
    def initialize(client:)
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:thread_id/runs — list runs in a thread.
    def list(thread_id:)
      @client.get(path: runs_path(thread_id))
    end

    # GET /threads/:thread_id/runs/:id — fetch one run.
    def retrieve(thread_id:, id:)
      @client.get(path: runs_path(thread_id, id))
    end

    # POST /threads/:thread_id/runs — start a run on a thread.
    def create(thread_id:, parameters: {})
      @client.json_post(path: runs_path(thread_id), parameters: parameters)
    end

    # POST /threads/:thread_id/runs/:id — update a run.
    def modify(id:, thread_id:, parameters: {})
      @client.json_post(path: runs_path(thread_id, id), parameters: parameters)
    end

    # POST /threads/:thread_id/runs/:id/cancel — request cancellation.
    def cancel(id:, thread_id:)
      @client.post(path: "#{runs_path(thread_id, id)}/cancel")
    end

    # POST /threads/:thread_id/runs/:run_id/submit_tool_outputs — hand tool
    # results back to a run waiting on required actions.
    def submit_tool_outputs(thread_id:, run_id:, parameters: {})
      @client.json_post(path: "#{runs_path(thread_id, run_id)}/submit_tool_outputs",
                        parameters: parameters)
    end

    private

    # Build "/threads/<tid>/runs" or "/threads/<tid>/runs/<id>".
    def runs_path(thread_id, id = nil)
      path = "/threads/#{thread_id}/runs"
      id ? "#{path}/#{id}" : path
    end
  end
end
|
data/lib/openai/threads.rb
ADDED
@@ -0,0 +1,23 @@
|
|
1
|
+
module OpenAI
  # Wrapper for the /threads endpoints (beta Assistants API).
  class Threads
    # Keeps a beta-scoped copy of the client (adds the OpenAI-Beta header).
    def initialize(client:)
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:id — fetch a thread.
    def retrieve(id:)
      @client.get(path: thread_path(id))
    end

    # POST /threads — create a thread.
    def create(parameters: {})
      @client.json_post(path: "/threads", parameters: parameters)
    end

    # POST /threads/:id — update a thread.
    def modify(id:, parameters: {})
      @client.json_post(path: thread_path(id), parameters: parameters)
    end

    # DELETE /threads/:id — remove a thread.
    def delete(id:)
      @client.delete(path: thread_path(id))
    end

    private

    # Build "/threads/<id>".
    def thread_path(id)
      "/threads/#{id}"
    end
  end
end
|
data/lib/openai.rb
ADDED
@@ -0,0 +1,88 @@
|
|
1
|
+
require "faraday"
require "faraday/multipart"

require_relative "openai/http"
require_relative "openai/client"
require_relative "openai/files"
require_relative "openai/finetunes"
require_relative "openai/images"
require_relative "openai/models"
require_relative "openai/assistants"
require_relative "openai/threads"
require_relative "openai/messages"
require_relative "openai/runs"
require_relative "openai/run_steps"
require_relative "openai/audio"
require_relative "openai/version"

module OpenAI
  class Error < StandardError; end
  class ConfigurationError < Error; end

  # Faraday middleware that logs API error bodies (in red) before
  # re-raising, so HTTP failures are visible even when callers swallow
  # the exception.
  class MiddlewareErrors < Faraday::Middleware
    def call(env)
      @app.call(env)
    rescue Faraday::Error => e
      raise e unless e.response.is_a?(Hash)

      logger = Logger.new($stdout)
      logger.formatter = proc do |_severity, _datetime, _progname, msg|
        "\033[31mOpenAI HTTP Error (spotted in ruby-openai #{VERSION}): #{msg}\n\033[0m"
      end
      logger.error(e.response[:body])

      raise e
    end
  end

  # Global configuration; per-client settings fall back to these values.
  class Configuration
    attr_writer :access_token
    attr_accessor :api_type, :api_version, :organization_id, :uri_base, :request_timeout,
                  :extra_headers

    # The public OpenAI REST API is versioned under /v1 — there is no /v2.
    # (The previous default of "v2" sent every request to a nonexistent
    # endpoint; upstream ruby-openai 6.5.0 defaults to "v1".)
    DEFAULT_API_VERSION = "v1".freeze
    DEFAULT_URI_BASE = "https://api.openai.com/".freeze
    DEFAULT_REQUEST_TIMEOUT = 120

    def initialize
      @access_token = nil
      @api_type = nil
      @api_version = DEFAULT_API_VERSION
      @organization_id = nil
      @uri_base = DEFAULT_URI_BASE
      @request_timeout = DEFAULT_REQUEST_TIMEOUT
      @extra_headers = {}
    end

    # @return [String] the configured token.
    # @raise [ConfigurationError] when no token has been configured.
    def access_token
      return @access_token if @access_token

      error_text = "OpenAI access token missing! See https://github.com/alexrudall/ruby-openai#usage"
      raise ConfigurationError, error_text
    end
  end

  class << self
    attr_writer :configuration
  end

  # Lazily-initialised global configuration object.
  def self.configuration
    @configuration ||= OpenAI::Configuration.new
  end

  # Yields the global configuration for block-style setup.
  def self.configure
    yield(configuration)
  end

  # Estimate the number of tokens in a string, using the rules of thumb from OpenAI:
  # https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
  def self.rough_token_count(content = "")
    raise ArgumentError, "rough_token_count requires a string" unless content.is_a? String
    return 0 if content.empty?

    # Average of the chars/4 and words*4/3 heuristics, clamped to >= 1.
    count_by_chars = content.size / 4.0
    count_by_words = content.split.size * 4.0 / 3
    estimate = ((count_by_chars + count_by_words) / 2.0).round
    [1, estimate].max
  end
end
|
data/pull_request_template.md
ADDED
@@ -0,0 +1,5 @@
|
|
1
|
+
## All Submissions:
|
2
|
+
|
3
|
+
* [ ] Have you followed the guidelines in our [Contributing document](../blob/main/CONTRIBUTING.md)?
|
4
|
+
* [ ] Have you checked to ensure there aren't other open [Pull Requests](../pulls) for the same update/change?
|
5
|
+
* [ ] Have you added an explanation of what your changes do and why you'd like us to include them?
|