ruby-openai 7.3.1 → 8.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.circleci/config.yml +1 -1
- data/.gitignore +3 -0
- data/CHANGELOG.md +29 -0
- data/Gemfile +6 -5
- data/Gemfile.lock +34 -28
- data/README.md +574 -262
- data/lib/openai/batches.rb +1 -1
- data/lib/openai/client.rb +24 -12
- data/lib/openai/files.rb +7 -3
- data/lib/openai/http.rb +16 -11
- data/lib/openai/models.rb +4 -0
- data/lib/openai/responses.rb +23 -0
- data/lib/openai/usage.rb +70 -0
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +36 -24
- data/lib/ruby/openai.rb +0 -1
- metadata +4 -3
- data/lib/openai/compatibility.rb +0 -10
data/lib/openai/batches.rb
CHANGED
data/lib/openai/client.rb
CHANGED
@@ -2,18 +2,11 @@ module OpenAI
|
|
2
2
|
class Client
|
3
3
|
include OpenAI::HTTP
|
4
4
|
|
5
|
-
SENSITIVE_ATTRIBUTES = %i[@access_token @organization_id @extra_headers].freeze
|
6
|
-
CONFIG_KEYS = %i[
|
7
|
-
|
8
|
-
|
9
|
-
|
10
|
-
log_errors
|
11
|
-
organization_id
|
12
|
-
uri_base
|
13
|
-
request_timeout
|
14
|
-
extra_headers
|
15
|
-
].freeze
|
16
|
-
attr_reader *CONFIG_KEYS, :faraday_middleware
|
5
|
+
SENSITIVE_ATTRIBUTES = %i[@access_token @admin_token @organization_id @extra_headers].freeze
|
6
|
+
CONFIG_KEYS = %i[access_token admin_token api_type api_version extra_headers
|
7
|
+
log_errors organization_id request_timeout uri_base].freeze
|
8
|
+
attr_reader(*CONFIG_KEYS, :faraday_middleware)
|
9
|
+
attr_writer :access_token
|
17
10
|
|
18
11
|
def initialize(config = {}, &faraday_middleware)
|
19
12
|
CONFIG_KEYS.each do |key|
|
@@ -59,6 +52,10 @@ module OpenAI
|
|
59
52
|
@models ||= OpenAI::Models.new(client: self)
|
60
53
|
end
|
61
54
|
|
55
|
+
def responses
|
56
|
+
@responses ||= OpenAI::Responses.new(client: self)
|
57
|
+
end
|
58
|
+
|
62
59
|
def assistants
|
63
60
|
@assistants ||= OpenAI::Assistants.new(client: self)
|
64
61
|
end
|
@@ -99,10 +96,25 @@ module OpenAI
|
|
99
96
|
json_post(path: "/moderations", parameters: parameters)
|
100
97
|
end
|
101
98
|
|
99
|
+
def usage
|
100
|
+
@usage ||= OpenAI::Usage.new(client: self)
|
101
|
+
end
|
102
|
+
|
102
103
|
def azure?
|
103
104
|
@api_type&.to_sym == :azure
|
104
105
|
end
|
105
106
|
|
107
|
+
def admin
|
108
|
+
unless admin_token
|
109
|
+
e = "You must set an OPENAI_ADMIN_TOKEN= to use administrative endpoints:\n\n https://platform.openai.com/settings/organization/admin-keys"
|
110
|
+
raise AuthenticationError, e
|
111
|
+
end
|
112
|
+
|
113
|
+
dup.tap do |client|
|
114
|
+
client.access_token = client.admin_token
|
115
|
+
end
|
116
|
+
end
|
117
|
+
|
106
118
|
def beta(apis)
|
107
119
|
dup.tap do |client|
|
108
120
|
client.add_headers("OpenAI-Beta": apis.map { |k, v| "#{k}=#{v}" }.join(";"))
|
data/lib/openai/files.rb
CHANGED
@@ -5,6 +5,8 @@ module OpenAI
|
|
5
5
|
batch
|
6
6
|
fine-tune
|
7
7
|
vision
|
8
|
+
user_data
|
9
|
+
evals
|
8
10
|
].freeze
|
9
11
|
|
10
12
|
def initialize(client:)
|
@@ -18,9 +20,7 @@ module OpenAI
|
|
18
20
|
def upload(parameters: {})
|
19
21
|
file_input = parameters[:file]
|
20
22
|
file = prepare_file_input(file_input: file_input)
|
21
|
-
|
22
23
|
validate(file: file, purpose: parameters[:purpose], file_input: file_input)
|
23
|
-
|
24
24
|
@client.multipart_post(
|
25
25
|
path: "/files",
|
26
26
|
parameters: parameters.merge(file: file)
|
@@ -55,8 +55,12 @@ module OpenAI
|
|
55
55
|
|
56
56
|
def validate(file:, purpose:, file_input:)
|
57
57
|
raise ArgumentError, "`file` is required" if file.nil?
|
58
|
+
|
58
59
|
unless PURPOSES.include?(purpose)
|
59
|
-
|
60
|
+
filename = file_input.is_a?(String) ? File.basename(file_input) : "uploaded file"
|
61
|
+
message = "The purpose '#{purpose}' for file '#{filename}' is not in the known purpose "
|
62
|
+
message += "list: #{PURPOSES.join(', ')}."
|
63
|
+
OpenAI.log_message("Warning", message, :warn)
|
60
64
|
end
|
61
65
|
|
62
66
|
validate_jsonl(file: file) if file_input.is_a?(String) && file_input.end_with?(".jsonl")
|
data/lib/openai/http.rb
CHANGED
@@ -7,47 +7,52 @@ module OpenAI
|
|
7
7
|
include HTTPHeaders
|
8
8
|
|
9
9
|
def get(path:, parameters: nil)
|
10
|
-
|
10
|
+
parse_json(conn.get(uri(path: path), parameters) do |req|
|
11
11
|
req.headers = headers
|
12
12
|
end&.body)
|
13
13
|
end
|
14
14
|
|
15
15
|
def post(path:)
|
16
|
-
|
16
|
+
parse_json(conn.post(uri(path: path)) do |req|
|
17
17
|
req.headers = headers
|
18
18
|
end&.body)
|
19
19
|
end
|
20
20
|
|
21
21
|
def json_post(path:, parameters:, query_parameters: {})
|
22
|
-
conn.post(uri(path: path)) do |req|
|
22
|
+
parse_json(conn.post(uri(path: path)) do |req|
|
23
23
|
configure_json_post_request(req, parameters)
|
24
24
|
req.params = req.params.merge(query_parameters)
|
25
|
-
end&.body
|
25
|
+
end&.body)
|
26
26
|
end
|
27
27
|
|
28
28
|
def multipart_post(path:, parameters: nil)
|
29
|
-
conn(multipart: true).post(uri(path: path)) do |req|
|
29
|
+
parse_json(conn(multipart: true).post(uri(path: path)) do |req|
|
30
30
|
req.headers = headers.merge({ "Content-Type" => "multipart/form-data" })
|
31
31
|
req.body = multipart_parameters(parameters)
|
32
|
-
end&.body
|
32
|
+
end&.body)
|
33
33
|
end
|
34
34
|
|
35
35
|
def delete(path:)
|
36
|
-
conn.delete(uri(path: path)) do |req|
|
36
|
+
parse_json(conn.delete(uri(path: path)) do |req|
|
37
37
|
req.headers = headers
|
38
|
-
end&.body
|
38
|
+
end&.body)
|
39
39
|
end
|
40
40
|
|
41
41
|
private
|
42
42
|
|
43
|
-
def parse_jsonl(response)
|
43
|
+
def parse_json(response)
|
44
44
|
return unless response
|
45
45
|
return response unless response.is_a?(String)
|
46
46
|
|
47
|
-
|
48
|
-
|
47
|
+
original_response = response.dup
|
48
|
+
if response.include?("}\n{")
|
49
|
+
# Attempt to convert what looks like a multiline string of JSON objects to a JSON array.
|
50
|
+
response = response.gsub("}\n{", "},{").prepend("[").concat("]")
|
51
|
+
end
|
49
52
|
|
50
53
|
JSON.parse(response)
|
54
|
+
rescue JSON::ParserError
|
55
|
+
original_response
|
51
56
|
end
|
52
57
|
|
53
58
|
# Given a proc, returns an outer proc that can be used to iterate over a JSON stream of chunks.
|
data/lib/openai/responses.rb
ADDED
@@ -0,0 +1,23 @@
|
|
1
|
+
module OpenAI
|
2
|
+
class Responses
|
3
|
+
def initialize(client:)
|
4
|
+
@client = client
|
5
|
+
end
|
6
|
+
|
7
|
+
def create(parameters: {})
|
8
|
+
@client.json_post(path: "/responses", parameters: parameters)
|
9
|
+
end
|
10
|
+
|
11
|
+
def retrieve(response_id:)
|
12
|
+
@client.get(path: "/responses/#{response_id}")
|
13
|
+
end
|
14
|
+
|
15
|
+
def delete(response_id:)
|
16
|
+
@client.delete(path: "/responses/#{response_id}")
|
17
|
+
end
|
18
|
+
|
19
|
+
def input_items(response_id:, parameters: {})
|
20
|
+
@client.get(path: "/responses/#{response_id}/input_items", parameters: parameters)
|
21
|
+
end
|
22
|
+
end
|
23
|
+
end
|
data/lib/openai/usage.rb
ADDED
@@ -0,0 +1,70 @@
|
|
1
|
+
module OpenAI
|
2
|
+
class Usage
|
3
|
+
def initialize(client:)
|
4
|
+
@client = client
|
5
|
+
end
|
6
|
+
|
7
|
+
def completions(parameters: {})
|
8
|
+
@client.admin.get(
|
9
|
+
path: "/organization/usage/completions",
|
10
|
+
parameters: parameters
|
11
|
+
)
|
12
|
+
end
|
13
|
+
|
14
|
+
def embeddings(parameters: {})
|
15
|
+
@client.admin.get(
|
16
|
+
path: "/organization/usage/embeddings",
|
17
|
+
parameters: parameters
|
18
|
+
)
|
19
|
+
end
|
20
|
+
|
21
|
+
def moderations(parameters: {})
|
22
|
+
@client.admin.get(
|
23
|
+
path: "/organization/usage/moderations",
|
24
|
+
parameters: parameters
|
25
|
+
)
|
26
|
+
end
|
27
|
+
|
28
|
+
def images(parameters: {})
|
29
|
+
@client.admin.get(
|
30
|
+
path: "/organization/usage/images",
|
31
|
+
parameters: parameters
|
32
|
+
)
|
33
|
+
end
|
34
|
+
|
35
|
+
def audio_speeches(parameters: {})
|
36
|
+
@client.admin.get(
|
37
|
+
path: "/organization/usage/audio_speeches",
|
38
|
+
parameters: parameters
|
39
|
+
)
|
40
|
+
end
|
41
|
+
|
42
|
+
def audio_transcriptions(parameters: {})
|
43
|
+
@client.admin.get(
|
44
|
+
path: "/organization/usage/audio_transcriptions",
|
45
|
+
parameters: parameters
|
46
|
+
)
|
47
|
+
end
|
48
|
+
|
49
|
+
def vector_stores(parameters: {})
|
50
|
+
@client.admin.get(
|
51
|
+
path: "/organization/usage/vector_stores",
|
52
|
+
parameters: parameters
|
53
|
+
)
|
54
|
+
end
|
55
|
+
|
56
|
+
def code_interpreter_sessions(parameters: {})
|
57
|
+
@client.admin.get(
|
58
|
+
path: "/organization/usage/code_interpreter_sessions",
|
59
|
+
parameters: parameters
|
60
|
+
)
|
61
|
+
end
|
62
|
+
|
63
|
+
def costs(parameters: {})
|
64
|
+
@client.admin.get(
|
65
|
+
path: "/organization/costs",
|
66
|
+
parameters: parameters
|
67
|
+
)
|
68
|
+
end
|
69
|
+
end
|
70
|
+
end
|
data/lib/openai/version.rb
CHANGED
data/lib/openai.rb
CHANGED
@@ -1,12 +1,12 @@
|
|
1
1
|
require "faraday"
|
2
|
-
require "faraday/multipart"
|
3
|
-
|
2
|
+
require "faraday/multipart" if Gem::Version.new(Faraday::VERSION) >= Gem::Version.new("2.0")
|
4
3
|
require_relative "openai/http"
|
5
4
|
require_relative "openai/client"
|
6
5
|
require_relative "openai/files"
|
7
6
|
require_relative "openai/finetunes"
|
8
7
|
require_relative "openai/images"
|
9
8
|
require_relative "openai/models"
|
9
|
+
require_relative "openai/responses"
|
10
10
|
require_relative "openai/assistants"
|
11
11
|
require_relative "openai/threads"
|
12
12
|
require_relative "openai/messages"
|
@@ -18,10 +18,12 @@ require_relative "openai/vector_store_file_batches"
|
|
18
18
|
require_relative "openai/audio"
|
19
19
|
require_relative "openai/version"
|
20
20
|
require_relative "openai/batches"
|
21
|
+
require_relative "openai/usage"
|
21
22
|
|
22
23
|
module OpenAI
|
23
24
|
class Error < StandardError; end
|
24
25
|
class ConfigurationError < Error; end
|
26
|
+
class AuthenticationError < Error; end
|
25
27
|
|
26
28
|
class MiddlewareErrors < Faraday::Middleware
|
27
29
|
def call(env)
|
@@ -29,18 +31,14 @@ module OpenAI
|
|
29
31
|
rescue Faraday::Error => e
|
30
32
|
raise e unless e.response.is_a?(Hash)
|
31
33
|
|
32
|
-
|
33
|
-
logger.formatter = proc do |_severity, _datetime, _progname, msg|
|
34
|
-
"\033[31mOpenAI HTTP Error (spotted in ruby-openai #{VERSION}): #{msg}\n\033[0m"
|
35
|
-
end
|
36
|
-
logger.error(e.response[:body])
|
37
|
-
|
34
|
+
OpenAI.log_message("OpenAI HTTP Error", e.response[:body], :error)
|
38
35
|
raise e
|
39
36
|
end
|
40
37
|
end
|
41
38
|
|
42
39
|
class Configuration
|
43
40
|
attr_accessor :access_token,
|
41
|
+
:admin_token,
|
44
42
|
:api_type,
|
45
43
|
:api_version,
|
46
44
|
:log_errors,
|
@@ -56,6 +54,7 @@ module OpenAI
|
|
56
54
|
|
57
55
|
def initialize
|
58
56
|
@access_token = nil
|
57
|
+
@admin_token = nil
|
59
58
|
@api_type = nil
|
60
59
|
@api_version = DEFAULT_API_VERSION
|
61
60
|
@log_errors = DEFAULT_LOG_ERRORS
|
@@ -68,25 +67,38 @@ module OpenAI
|
|
68
67
|
|
69
68
|
class << self
|
70
69
|
attr_writer :configuration
|
71
|
-
end
|
72
70
|
|
73
|
-
|
74
|
-
|
75
|
-
|
71
|
+
def configuration
|
72
|
+
@configuration ||= OpenAI::Configuration.new
|
73
|
+
end
|
76
74
|
|
77
|
-
|
78
|
-
|
79
|
-
|
75
|
+
def configure
|
76
|
+
yield(configuration)
|
77
|
+
end
|
80
78
|
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
79
|
+
# Estimate the number of tokens in a string, using the rules of thumb from OpenAI:
|
80
|
+
# https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
|
81
|
+
def rough_token_count(content = "")
|
82
|
+
raise ArgumentError, "rough_token_count requires a string" unless content.is_a? String
|
83
|
+
return 0 if content.empty?
|
84
|
+
|
85
|
+
count_by_chars = content.size / 4.0
|
86
|
+
count_by_words = content.split.size * 4.0 / 3
|
87
|
+
estimate = ((count_by_chars + count_by_words) / 2.0).round
|
88
|
+
[1, estimate].max
|
89
|
+
end
|
86
90
|
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
[
|
91
|
+
# Log a message with appropriate formatting
|
92
|
+
# @param prefix [String] Prefix to add to the message
|
93
|
+
# @param message [String] The message to log
|
94
|
+
# @param level [Symbol] The log level (:error, :warn, etc.)
|
95
|
+
def log_message(prefix, message, level = :warn)
|
96
|
+
color = level == :error ? "\033[31m" : "\033[33m"
|
97
|
+
logger = Logger.new($stdout)
|
98
|
+
logger.formatter = proc do |_severity, _datetime, _progname, msg|
|
99
|
+
"#{color}#{prefix} (spotted in ruby-openai #{VERSION}): #{msg}\n\033[0m"
|
100
|
+
end
|
101
|
+
logger.send(level, message)
|
102
|
+
end
|
91
103
|
end
|
92
104
|
end
|
data/lib/ruby/openai.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ruby-openai
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 7.3.1
|
4
|
+
version: 8.0.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Alex
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2025-03-14 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: event_stream_parser
|
@@ -91,7 +91,6 @@ files:
|
|
91
91
|
- lib/openai/audio.rb
|
92
92
|
- lib/openai/batches.rb
|
93
93
|
- lib/openai/client.rb
|
94
|
-
- lib/openai/compatibility.rb
|
95
94
|
- lib/openai/files.rb
|
96
95
|
- lib/openai/finetunes.rb
|
97
96
|
- lib/openai/http.rb
|
@@ -99,9 +98,11 @@ files:
|
|
99
98
|
- lib/openai/images.rb
|
100
99
|
- lib/openai/messages.rb
|
101
100
|
- lib/openai/models.rb
|
101
|
+
- lib/openai/responses.rb
|
102
102
|
- lib/openai/run_steps.rb
|
103
103
|
- lib/openai/runs.rb
|
104
104
|
- lib/openai/threads.rb
|
105
|
+
- lib/openai/usage.rb
|
105
106
|
- lib/openai/vector_store_file_batches.rb
|
106
107
|
- lib/openai/vector_store_files.rb
|
107
108
|
- lib/openai/vector_stores.rb
|
data/lib/openai/compatibility.rb
DELETED