ruby-openai 3.6.0 → 6.5.0
This diff covers the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.devcontainer/Dockerfile +16 -0
- data/.devcontainer/devcontainer.json +36 -0
- data/.devcontainer/docker-compose.yml +19 -0
- data/.github/FUNDING.yml +13 -0
- data/.gitignore +65 -7
- data/.rubocop.yml +8 -0
- data/CHANGELOG.md +141 -0
- data/Gemfile +3 -3
- data/Gemfile.lock +28 -20
- data/README.md +595 -56
- data/Rakefile +14 -1
- data/lib/openai/assistants.rb +27 -0
- data/lib/openai/audio.rb +19 -0
- data/lib/openai/client.rb +53 -63
- data/lib/openai/compatibility.rb +1 -0
- data/lib/openai/files.rb +8 -9
- data/lib/openai/finetunes.rb +8 -17
- data/lib/openai/http.rb +127 -0
- data/lib/openai/http_headers.rb +36 -0
- data/lib/openai/images.rb +5 -6
- data/lib/openai/messages.rb +23 -0
- data/lib/openai/models.rb +4 -5
- data/lib/openai/run_steps.rb +15 -0
- data/lib/openai/runs.rb +32 -0
- data/lib/openai/threads.rb +23 -0
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +43 -2
- data/ruby-openai.gemspec +5 -4
- metadata +58 -14
data/Rakefile
CHANGED
@@ -1,6 +1,19 @@
 require "bundler/gem_tasks"
 require "rspec/core/rake_task"
+require "rubocop/rake_task"

 RSpec::Core::RakeTask.new(:spec)

-task default
+task :default do
+  Rake::Task["test"].invoke
+  Rake::Task["lint"].invoke
+end
+
+task :test do
+  Rake::Task["spec"].invoke
+end
+
+task :lint do
+  RuboCop::RakeTask.new(:rubocop)
+  Rake::Task["rubocop"].invoke
+end
data/lib/openai/assistants.rb
ADDED
@@ -0,0 +1,27 @@
+module OpenAI
+  class Assistants
+    def initialize(client:)
+      @client = client.beta(assistants: "v1")
+    end
+
+    def list
+      @client.get(path: "/assistants")
+    end
+
+    def retrieve(id:)
+      @client.get(path: "/assistants/#{id}")
+    end
+
+    def create(parameters: {})
+      @client.json_post(path: "/assistants", parameters: parameters)
+    end
+
+    def modify(id:, parameters: {})
+      @client.json_post(path: "/assistants/#{id}", parameters: parameters)
+    end
+
+    def delete(id:)
+      @client.delete(path: "/assistants/#{id}")
+    end
+  end
+end
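The new Assistants class is reached through Client#assistants, and its constructor tags every request with the OpenAI-Beta assistants=v1 header via Client#beta. A minimal usage sketch based on the methods above; the environment variable, model name, and assistant name are illustrative, not part of the diff:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

# Create an assistant, then read it back, list all assistants, and delete it.
assistant = client.assistants.create(
  parameters: { model: "gpt-4", name: "Diff demo assistant" } # illustrative values
)
client.assistants.retrieve(id: assistant["id"])
client.assistants.list
client.assistants.delete(id: assistant["id"])
```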
data/lib/openai/audio.rb
ADDED
@@ -0,0 +1,19 @@
+module OpenAI
+  class Audio
+    def initialize(client:)
+      @client = client
+    end
+
+    def transcribe(parameters: {})
+      @client.multipart_post(path: "/audio/transcriptions", parameters: parameters)
+    end
+
+    def translate(parameters: {})
+      @client.multipart_post(path: "/audio/translations", parameters: parameters)
+    end
+
+    def speech(parameters: {})
+      @client.json_post(path: "/audio/speech", parameters: parameters)
+    end
+  end
+end
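Audio keeps a plain (non-beta) client and mixes transports: transcribe and translate are multipart uploads, while speech posts JSON and returns the audio bytes. A sketch of calling the three methods; file names and model names are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

# Multipart endpoints take a File/IO object for the :file parameter.
client.audio.transcribe(parameters: { model: "whisper-1", file: File.open("speech.mp3", "rb") })
client.audio.translate(parameters: { model: "whisper-1", file: File.open("speech.mp3", "rb") })

# speech returns the binary audio body, which can be written straight to disk.
response = client.audio.speech(parameters: { model: "tts-1", input: "Hello!", voice: "alloy" })
File.binwrite("hello.mp3", response)
```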
data/lib/openai/client.rb
CHANGED
@@ -1,105 +1,95 @@
 module OpenAI
   class Client
-
-
-
-
-
-
+    include OpenAI::HTTP
+
+    CONFIG_KEYS = %i[
+      api_type
+      api_version
+      access_token
+      organization_id
+      uri_base
+      request_timeout
+      extra_headers
+    ].freeze
+    attr_reader *CONFIG_KEYS, :faraday_middleware
+
+    def initialize(config = {}, &faraday_middleware)
+      CONFIG_KEYS.each do |key|
+        # Set instance variables like api_type & access_token. Fall back to global config
+        # if not present.
+        instance_variable_set("@#{key}", config[key] || OpenAI.configuration.send(key))
+      end
+      @faraday_middleware = faraday_middleware
     end

     def chat(parameters: {})
-
-    end
-
-    def completions(parameters: {})
-      OpenAI::Client.json_post(path: "/completions", parameters: parameters)
+      json_post(path: "/chat/completions", parameters: parameters)
     end

     def edits(parameters: {})
-
+      json_post(path: "/edits", parameters: parameters)
     end

     def embeddings(parameters: {})
-
+      json_post(path: "/embeddings", parameters: parameters)
     end

-    def
-
+    def completions(parameters: {})
+      json_post(path: "/completions", parameters: parameters)
     end

-    def
-      @
+    def audio
+      @audio ||= OpenAI::Audio.new(client: self)
     end

-    def
-      @
+    def files
+      @files ||= OpenAI::Files.new(client: self)
     end

-    def
-      @
+    def finetunes
+      @finetunes ||= OpenAI::Finetunes.new(client: self)
     end

-    def
-      OpenAI::
+    def images
+      @images ||= OpenAI::Images.new(client: self)
     end

-    def
-      OpenAI::
+    def models
+      @models ||= OpenAI::Models.new(client: self)
     end

-    def
-      OpenAI::
+    def assistants
+      @assistants ||= OpenAI::Assistants.new(client: self)
     end

-    def
-
-        uri(path: path),
-        headers: headers,
-        timeout: request_timeout
-      )
+    def threads
+      @threads ||= OpenAI::Threads.new(client: self)
     end

-    def
-
-        uri(path: path),
-        headers: headers,
-        body: parameters&.to_json,
-        timeout: request_timeout
-      )
+    def messages
+      @messages ||= OpenAI::Messages.new(client: self)
     end

-    def
-
-        uri(path: path),
-        headers: headers.merge({ "Content-Type" => "multipart/form-data" }),
-        body: parameters,
-        timeout: request_timeout
-      )
+    def runs
+      @runs ||= OpenAI::Runs.new(client: self)
     end

-    def
-
-        uri(path: path),
-        headers: headers,
-        timeout: request_timeout
-      )
+    def run_steps
+      @run_steps ||= OpenAI::RunSteps.new(client: self)
     end

-
-
+    def moderations(parameters: {})
+      json_post(path: "/moderations", parameters: parameters)
     end

-
-
-        "Content-Type" => "application/json",
-        "Authorization" => "Bearer #{OpenAI.configuration.access_token}",
-        "OpenAI-Organization" => OpenAI.configuration.organization_id
-      }
+    def azure?
+      @api_type&.to_sym == :azure
     end

-
-
+    def beta(apis)
+      dup.tap do |client|
+        client.add_headers("OpenAI-Beta": apis.map { |k, v| "#{k}=#{v}" }.join(";"))
+      end
     end
   end
 end
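The rewritten client replaces class-level helpers with per-instance configuration (each CONFIG_KEYS entry falls back to the global OpenAI.configuration), memoized resource accessors, and thin delegations to the json_post helper mixed in from OpenAI::HTTP. A sketch of per-client configuration and a streaming chat call; the model name and prompt are illustrative:

```ruby
require "openai"

# Any CONFIG_KEYS entry can be overridden per client; unset keys fall back to
# the global OpenAI.configuration.
client = OpenAI::Client.new(
  access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"),
  request_timeout: 240
)

# A callable :stream parameter switches json_post to Faraday's on_data callback
# and forces stream: true in the request body.
client.chat(
  parameters: {
    model: "gpt-4", # illustrative
    messages: [{ role: "user", content: "Say hello" }],
    stream: proc { |chunk| print chunk.dig("choices", 0, "delta", "content") }
  }
)
```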
data/lib/openai/compatibility.rb
CHANGED
data/lib/openai/files.rb
CHANGED
@@ -1,33 +1,32 @@
 module OpenAI
   class Files
-    def initialize(
-
-      OpenAI.configuration.organization_id = organization_id if organization_id
+    def initialize(client:)
+      @client = client
     end

     def list
-
+      @client.get(path: "/files")
     end

     def upload(parameters: {})
-      validate(file: parameters[:file])
+      validate(file: parameters[:file]) if parameters[:file].include?(".jsonl")

-
+      @client.multipart_post(
         path: "/files",
         parameters: parameters.merge(file: File.open(parameters[:file]))
       )
     end

     def retrieve(id:)
-
+      @client.get(path: "/files/#{id}")
     end

     def content(id:)
-
+      @client.get(path: "/files/#{id}/content")
     end

     def delete(id:)
-
+      @client.delete(path: "/files/#{id}")
     end

     private
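Files now routes every call through the injected client, and the JSONL validation only runs for JSONL uploads. A sketch; the file name and purpose value are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

file = client.files.upload(parameters: { file: "training.jsonl", purpose: "fine-tune" })
client.files.list
client.files.retrieve(id: file["id"])
client.files.content(id: file["id"])
client.files.delete(id: file["id"])
```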
data/lib/openai/finetunes.rb
CHANGED
@@ -1,36 +1,27 @@
 module OpenAI
   class Finetunes
-    def initialize(
-
-      OpenAI.configuration.organization_id = organization_id if organization_id
+    def initialize(client:)
+      @client = client
     end

     def list
-
+      @client.get(path: "/fine_tuning/jobs")
     end

     def create(parameters: {})
-
+      @client.json_post(path: "/fine_tuning/jobs", parameters: parameters)
     end

     def retrieve(id:)
-
+      @client.get(path: "/fine_tuning/jobs/#{id}")
     end

     def cancel(id:)
-
+      @client.json_post(path: "/fine_tuning/jobs/#{id}/cancel", parameters: {})
     end

-    def
-
-    end
-
-    def delete(fine_tuned_model:)
-      if fine_tuned_model.start_with?("ft-")
-        raise ArgumentError, "Please give a fine_tuned_model name, not a fine-tune ID"
-      end
-
-      OpenAI::Client.delete(path: "/models/#{fine_tuned_model}")
+    def list_events(id:)
+      @client.get(path: "/fine_tuning/jobs/#{id}/events")
     end
   end
 end
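Fine-tuning now targets the /fine_tuning/jobs endpoints, drops the old delete(fine_tuned_model:) helper, and adds list_events. A sketch; the training file ID and model are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

job = client.finetunes.create(
  parameters: { training_file: "file-abc123", model: "gpt-3.5-turbo" } # illustrative values
)
client.finetunes.retrieve(id: job["id"])
client.finetunes.list_events(id: job["id"])
client.finetunes.cancel(id: job["id"])
```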
data/lib/openai/http.rb
ADDED
@@ -0,0 +1,127 @@
+require "event_stream_parser"
+
+require_relative "http_headers"
+
+module OpenAI
+  module HTTP
+    include HTTPHeaders
+
+    def get(path:)
+      parse_jsonl(conn.get(uri(path: path)) do |req|
+        req.headers = headers
+      end&.body)
+    end
+
+    def post(path:)
+      parse_jsonl(conn.post(uri(path: path)) do |req|
+        req.headers = headers
+      end&.body)
+    end
+
+    def json_post(path:, parameters:)
+      conn.post(uri(path: path)) do |req|
+        configure_json_post_request(req, parameters)
+      end&.body
+    end
+
+    def multipart_post(path:, parameters: nil)
+      conn(multipart: true).post(uri(path: path)) do |req|
+        req.headers = headers.merge({ "Content-Type" => "multipart/form-data" })
+        req.body = multipart_parameters(parameters)
+      end&.body
+    end
+
+    def delete(path:)
+      conn.delete(uri(path: path)) do |req|
+        req.headers = headers
+      end&.body
+    end
+
+    private
+
+    def parse_jsonl(response)
+      return unless response
+      return response unless response.is_a?(String)
+
+      # Convert a multiline string of JSON objects to a JSON array.
+      response = response.gsub("}\n{", "},{").prepend("[").concat("]")
+
+      JSON.parse(response)
+    end
+
+    # Given a proc, returns an outer proc that can be used to iterate over a JSON stream of chunks.
+    # For each chunk, the inner user_proc is called giving it the JSON object. The JSON object could
+    # be a data object or an error object as described in the OpenAI API documentation.
+    #
+    # @param user_proc [Proc] The inner proc to call for each JSON object in the chunk.
+    # @return [Proc] An outer proc that iterates over a raw stream, converting it to JSON.
+    def to_json_stream(user_proc:)
+      parser = EventStreamParser::Parser.new
+
+      proc do |chunk, _bytes, env|
+        if env && env.status != 200
+          raise_error = Faraday::Response::RaiseError.new
+          raise_error.on_complete(env.merge(body: try_parse_json(chunk)))
+        end
+
+        parser.feed(chunk) do |_type, data|
+          user_proc.call(JSON.parse(data)) unless data == "[DONE]"
+        end
+      end
+    end
+
+    def conn(multipart: false)
+      connection = Faraday.new do |f|
+        f.options[:timeout] = @request_timeout
+        f.request(:multipart) if multipart
+        f.use MiddlewareErrors
+        f.response :raise_error
+        f.response :json
+      end
+
+      @faraday_middleware&.call(connection)
+
+      connection
+    end
+
+    def uri(path:)
+      if azure?
+        base = File.join(@uri_base, path)
+        "#{base}?api-version=#{@api_version}"
+      else
+        File.join(@uri_base, @api_version, path)
+      end
+    end
+
+    def multipart_parameters(parameters)
+      parameters&.transform_values do |value|
+        next value unless value.respond_to?(:close) # File or IO object.
+
+        # Doesn't seem like OpenAI needs mime_type yet, so not worth
+        # the library to figure this out. Hence the empty string
+        # as the second argument.
+        Faraday::UploadIO.new(value, "", value.path)
+      end
+    end
+
+    def configure_json_post_request(req, parameters)
+      req_parameters = parameters.dup
+
+      if parameters[:stream].respond_to?(:call)
+        req.options.on_data = to_json_stream(user_proc: parameters[:stream])
+        req_parameters[:stream] = true # Necessary to tell OpenAI to stream.
+      elsif parameters[:stream]
+        raise ArgumentError, "The stream parameter must be a Proc or have a #call method"
+      end
+
+      req.headers = headers
+      req.body = req_parameters.to_json
+    end
+
+    def try_parse_json(maybe_json)
+      JSON.parse(maybe_json)
+    rescue JSON::ParserError
+      maybe_json
+    end
+  end
+end
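Two details of the new HTTP module are easy to miss: get and post run responses through parse_jsonl, which turns a multi-line JSONL body into a JSON array, and configure_json_post_request only streams when the :stream parameter responds to #call (a bare stream: true raises ArgumentError). A small sketch of the JSONL conversion, applying the same gsub/prepend/concat steps as parse_jsonl to an assumed example body:

```ruby
require "json"

# The kind of body /files/{id}/content can return: one JSON object per line.
jsonl = "{\"prompt\":\"a\"}\n{\"prompt\":\"b\"}"

# parse_jsonl's transformation: join the objects with commas and wrap in [ ].
array = JSON.parse(jsonl.gsub("}\n{", "},{").prepend("[").concat("]"))
array # => [{"prompt"=>"a"}, {"prompt"=>"b"}]
```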
data/lib/openai/http_headers.rb
ADDED
@@ -0,0 +1,36 @@
+module OpenAI
+  module HTTPHeaders
+    def add_headers(headers)
+      @extra_headers = extra_headers.merge(headers.transform_keys(&:to_s))
+    end
+
+    private
+
+    def headers
+      if azure?
+        azure_headers
+      else
+        openai_headers
+      end.merge(extra_headers)
+    end
+
+    def openai_headers
+      {
+        "Content-Type" => "application/json",
+        "Authorization" => "Bearer #{@access_token}",
+        "OpenAI-Organization" => @organization_id
+      }
+    end
+
+    def azure_headers
+      {
+        "Content-Type" => "application/json",
+        "api-key" => @access_token
+      }
+    end
+
+    def extra_headers
+      @extra_headers ||= {}
+    end
+  end
+end
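Header construction moves into its own module: azure? selects either the standard Bearer/organization headers or Azure's api-key header, and anything registered via add_headers (which Client#beta uses for the OpenAI-Beta header) is merged on top. A sketch of an Azure-style configuration under those assumptions; the API version and resource URL are illustrative:

```ruby
require "openai"

# With api_type: :azure the client sends an "api-key" header instead of
# "Authorization: Bearer ...", and uri() appends ?api-version=... to the path.
azure_client = OpenAI::Client.new(
  access_token: ENV.fetch("AZURE_OPENAI_API_KEY"),
  api_type: :azure,
  api_version: "2023-05-15", # illustrative
  uri_base: "https://example-resource.openai.azure.com/openai/deployments/example-deployment" # illustrative
)

azure_client.chat(parameters: { messages: [{ role: "user", content: "Hi" }] })
```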
data/lib/openai/images.rb
CHANGED
@@ -1,20 +1,19 @@
 module OpenAI
   class Images
-    def initialize(
-
-      OpenAI.configuration.organization_id = organization_id if organization_id
+    def initialize(client: nil)
+      @client = client
     end

     def generate(parameters: {})
-
+      @client.json_post(path: "/images/generations", parameters: parameters)
     end

     def edit(parameters: {})
-
+      @client.multipart_post(path: "/images/edits", parameters: open_files(parameters))
     end

     def variations(parameters: {})
-
+      @client.multipart_post(path: "/images/variations", parameters: open_files(parameters))
     end

     private
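Image calls delegate to the client, with edit and variations going through multipart uploads; open_files is a private helper below the visible hunk that presumably opens the image paths passed in parameters. A sketch under that assumption; the prompt and file name are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

client.images.generate(parameters: { prompt: "A painting of a lighthouse", size: "512x512" })
client.images.variations(parameters: { image: "lighthouse.png", n: 2 })
client.images.edit(parameters: { image: "lighthouse.png", prompt: "Add a sunset" })
```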
data/lib/openai/messages.rb
ADDED
@@ -0,0 +1,23 @@
+module OpenAI
+  class Messages
+    def initialize(client:)
+      @client = client.beta(assistants: "v1")
+    end
+
+    def list(thread_id:)
+      @client.get(path: "/threads/#{thread_id}/messages")
+    end
+
+    def retrieve(thread_id:, id:)
+      @client.get(path: "/threads/#{thread_id}/messages/#{id}")
+    end
+
+    def create(thread_id:, parameters: {})
+      @client.json_post(path: "/threads/#{thread_id}/messages", parameters: parameters)
+    end
+
+    def modify(id:, thread_id:, parameters: {})
+      @client.json_post(path: "/threads/#{thread_id}/messages/#{id}", parameters: parameters)
+    end
+  end
+end
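Messages are always scoped to a thread, so every method takes thread_id. A sketch; the thread ID, content, and metadata are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))
thread_id = "thread_abc123" # illustrative

message = client.messages.create(
  thread_id: thread_id,
  parameters: { role: "user", content: "What is a Ruby gem?" }
)
client.messages.list(thread_id: thread_id)
client.messages.modify(
  id: message["id"],
  thread_id: thread_id,
  parameters: { metadata: { reviewed: "true" } }
)
```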
data/lib/openai/models.rb
CHANGED
@@ -1,16 +1,15 @@
 module OpenAI
   class Models
-    def initialize(
-
-      OpenAI.configuration.organization_id = organization_id if organization_id
+    def initialize(client:)
+      @client = client
     end

     def list
-
+      @client.get(path: "/models")
     end

     def retrieve(id:)
-
+      @client.get(path: "/models/#{id}")
     end
   end
 end
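Models keeps its two read-only calls, now routed through the injected client. A sketch; the model ID is illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

client.models.list
client.models.retrieve(id: "gpt-4") # illustrative model ID
```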
data/lib/openai/run_steps.rb
ADDED
@@ -0,0 +1,15 @@
+module OpenAI
+  class RunSteps
+    def initialize(client:)
+      @client = client.beta(assistants: "v1")
+    end
+
+    def list(thread_id:, run_id:)
+      @client.get(path: "/threads/#{thread_id}/runs/#{run_id}/steps")
+    end
+
+    def retrieve(thread_id:, run_id:, id:)
+      @client.get(path: "/threads/#{thread_id}/runs/#{run_id}/steps/#{id}")
+    end
+  end
+end
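Run steps are read-only and nested two levels deep, under a thread and a run. A sketch; the IDs are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

steps = client.run_steps.list(thread_id: "thread_abc123", run_id: "run_abc123")
client.run_steps.retrieve(
  thread_id: "thread_abc123",
  run_id: "run_abc123",
  id: steps.dig("data", 0, "id")
)
```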
data/lib/openai/runs.rb
ADDED
@@ -0,0 +1,32 @@
+module OpenAI
+  class Runs
+    def initialize(client:)
+      @client = client.beta(assistants: "v1")
+    end
+
+    def list(thread_id:)
+      @client.get(path: "/threads/#{thread_id}/runs")
+    end
+
+    def retrieve(thread_id:, id:)
+      @client.get(path: "/threads/#{thread_id}/runs/#{id}")
+    end
+
+    def create(thread_id:, parameters: {})
+      @client.json_post(path: "/threads/#{thread_id}/runs", parameters: parameters)
+    end
+
+    def modify(id:, thread_id:, parameters: {})
+      @client.json_post(path: "/threads/#{thread_id}/runs/#{id}", parameters: parameters)
+    end
+
+    def cancel(id:, thread_id:)
+      @client.post(path: "/threads/#{thread_id}/runs/#{id}/cancel")
+    end
+
+    def submit_tool_outputs(thread_id:, run_id:, parameters: {})
+      @client.json_post(path: "/threads/#{thread_id}/runs/#{run_id}/submit_tool_outputs",
+                        parameters: parameters)
+    end
+  end
+end
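Runs supply the execution half of the Assistants workflow: create a run on a thread, re-fetch it until its status is terminal, and answer a requires_action status with submit_tool_outputs. Note that cancel goes through the plain post helper, with no JSON body. A sketch; the IDs and tool output are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))
thread_id = "thread_abc123" # illustrative

run = client.runs.create(thread_id: thread_id, parameters: { assistant_id: "asst_abc123" })
run = client.runs.retrieve(thread_id: thread_id, id: run["id"]) # poll until finished

if run["status"] == "requires_action"
  client.runs.submit_tool_outputs(
    thread_id: thread_id,
    run_id: run["id"],
    parameters: { tool_outputs: [{ tool_call_id: "call_abc123", output: "42" }] }
  )
end
```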
data/lib/openai/threads.rb
ADDED
@@ -0,0 +1,23 @@
+module OpenAI
+  class Threads
+    def initialize(client:)
+      @client = client.beta(assistants: "v1")
+    end
+
+    def retrieve(id:)
+      @client.get(path: "/threads/#{id}")
+    end
+
+    def create(parameters: {})
+      @client.json_post(path: "/threads", parameters: parameters)
+    end
+
+    def modify(id:, parameters: {})
+      @client.json_post(path: "/threads/#{id}", parameters: parameters)
+    end
+
+    def delete(id:)
+      @client.delete(path: "/threads/#{id}")
+    end
+  end
+end
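Threads complete the Assistants surface added in this release: a container that is created, optionally annotated with metadata, and deleted when the conversation is done. A sketch; the metadata values are illustrative:

```ruby
require "openai"

client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"))

thread = client.threads.create(parameters: { metadata: { topic: "demo" } })
client.threads.retrieve(id: thread["id"])
client.threads.modify(id: thread["id"], parameters: { metadata: { topic: "updated demo" } })
client.threads.delete(id: thread["id"])
```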
data/lib/openai/version.rb
CHANGED