omniai-openai 1.2.1 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +184 -1
- data/lib/omniai/openai/assistant.rb +185 -0
- data/lib/omniai/openai/assistants.rb +38 -0
- data/lib/omniai/openai/chat.rb +2 -0
- data/lib/omniai/openai/client.rb +26 -8
- data/lib/omniai/openai/config.rb +27 -12
- data/lib/omniai/openai/file.rb +170 -0
- data/lib/omniai/openai/files.rb +44 -0
- data/lib/omniai/openai/thread/message.rb +207 -0
- data/lib/omniai/openai/thread/messages.rb +45 -0
- data/lib/omniai/openai/thread/run.rb +213 -0
- data/lib/omniai/openai/thread/runs.rb +56 -0
- data/lib/omniai/openai/thread.rb +141 -0
- data/lib/omniai/openai/threads.rb +29 -0
- data/lib/omniai/openai/tool.rb +11 -0
- data/lib/omniai/openai/version.rb +1 -1
- metadata +13 -2
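
Taken together, the new files add an Assistants API surface to the gem: assistants, uploaded files, and threads with messages and runs. The sketch below is illustrative only and sticks to the class-level methods visible in the hunks that follow; the `client.rb` and `thread.rb` changes are not shown in this section, so the client setup and all IDs are assumptions.

```ruby
require 'omniai/openai'

client = OmniAI::OpenAI::Client.new # assumed to pick up the API key from its configuration

# Upload a file for use with assistants (see file.rb below).
file = OmniAI::OpenAI::File.new(io: ::File.open('notes.txt', 'rb'), client: client)
file.save!

# Add a message to an existing thread and start a run (see thread/message.rb and thread/run.rb below).
message = OmniAI::OpenAI::Thread::Message.new(thread_id: 'thread_123', role: 'user', content: 'Hello!', client: client)
message.save!

run = OmniAI::OpenAI::Thread::Run.new(thread_id: 'thread_123', assistant_id: 'asst_123', client: client)
run.save!
puts run.status
```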
data/lib/omniai/openai/file.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module OpenAI
+    # An OpenAI file implementation.
+    class File
+      # @!attribute [rw] id
+      # @return [String, nil]
+      attr_accessor :id
+
+      # @!attribute [rw] bytes
+      # @return [Integer, nil]
+      attr_accessor :bytes
+
+      # @!attribute [rw] filename
+      # @return [String, nil]
+      attr_accessor :filename
+
+      # @!attribute [rw] purpose
+      # @return [String, nil]
+      attr_accessor :purpose
+
+      # @!attribute [rw] deleted
+      # @return [Boolean, nil]
+      attr_accessor :deleted
+
+      module Purpose
+        ASSISTANTS = 'assistants'
+      end
+
+      # @param client [OmniAI::OpenAI::Client] optional
+      # @param io [IO] optional
+      # @param id [String] optional
+      # @param bytes [Integer] optional
+      # @param filename [String] optional
+      # @param purpose [String] optional
+      def initialize(
+        client: Client.new,
+        io: nil,
+        id: nil,
+        bytes: nil,
+        filename: nil,
+        purpose: Purpose::ASSISTANTS
+      )
+        @client = client
+        @io = io
+        @id = id
+        @bytes = bytes
+        @filename = filename
+        @purpose = purpose
+      end
+
+      # @return [String]
+      def inspect
+        "#<#{self.class.name} id=#{@id.inspect} filename=#{@filename.inspect}>"
+      end
+
+      # @raise [OmniAI::Error]
+      # @yield [String]
+      def content(&)
+        raise OmniAI::Error, 'cannot fetch content without ID' unless @id
+
+        response = @client.connection
+          .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{@id}/content")
+
+        raise HTTPError, response.flush unless response.status.ok?
+
+        response.body.each(&)
+      end
+
+      # @param id [String] required
+      # @param client [OmniAI::OpenAI::Client] optional
+      # @return [OmniAI::OpenAI::Assistant]
+      def self.find(id:, client: Client.new)
+        response = client.connection
+          .accept(:json)
+          .get("/#{OmniAI::OpenAI::Client::VERSION}/files/#{id}")
+
+        raise HTTPError, response.flush unless response.status.ok?
+
+        parse(data: response.parse)
+      end
+
+      # @param client [OmniAI::OpenAI::Client] optional
+      # @return [Array<OmniAI::OpenAI::File>]
+      def self.all(client: Client.new)
+        response = client.connection
+          .accept(:json)
+          .get("/#{OmniAI::OpenAI::Client::VERSION}/files")
+
+        raise HTTPError, response.flush unless response.status.ok?
+
+        response.parse['data'].map { |data| parse(data:, client:) }
+      end
+
+      # @param id [String] required
+      # @param client [OmniAI::OpenAI::Client] optional
+      # @return [Hash]
+      def self.destroy!(id:, client: Client.new)
+        response = client.connection
+          .accept(:json)
+          .delete("/#{OmniAI::OpenAI::Client::VERSION}/files/#{id}")
+
+        raise HTTPError, response.flush unless response.status.ok?
+
+        response.parse
+      end
+
+      # @raise [HTTPError]
+      # @return [OmniAI::OpenAI::Assistant]
+      def save!
+        raise OmniAI::Error, 'cannot save a file without IO' unless @io
+
+        response = @client.connection
+          .accept(:json)
+          .post("/#{OmniAI::OpenAI::Client::VERSION}/files", form: payload)
+        raise HTTPError, response.flush unless response.status.ok?
+
+        parse(data: response.parse)
+        self
+      end
+
+      # @raise [OmniAI::Error]
+      # @return [OmniAI::OpenAI::Assistant]
+      def destroy!
+        raise OmniAI::Error, 'cannot destroy w/o ID' unless @id
+
+        data = self.class.destroy!(id: @id, client: @client)
+        @deleted = data['deleted']
+        self
+      end
+
+      private
+
+      # @return [Hash]
+      def payload
+        {
+          file: HTTP::FormData::File.new(@io),
+          purpose: @purpose,
+        }
+      end
+
+      class << self
+        private
+
+        # @param data [Hash] required
+        # @param client [OmniAI::OpenAI::Client] required
+        # @return [OmniAI::OpenAI::Assistant]
+        def parse(data:, client: Client.new)
+          new(
+            client:,
+            id: data['id'],
+            bytes: data['bytes'],
+            filename: data['filename'],
+            purpose: data['purpose']
+          )
+        end
+      end
+
+      # @param data [Hash] required
+      # @return [OmniAI::OpenAI::Assistant]
+      def parse(data:)
+        @id = data['id']
+        @bytes = data['bytes']
+        @filename = data['filename']
+        @purpose = data['purpose']
+      end
+    end
+  end
+end
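
A minimal usage sketch for the new `File` class, using only the methods shown above; the client setup, file name, and IDs are placeholders. Note that the `@return [OmniAI::OpenAI::Assistant]` tags on `find`, `save!`, `destroy!`, and the `parse` helpers read like copy-paste leftovers from `assistant.rb`; these methods return or update `File` instances.

```ruby
client = OmniAI::OpenAI::Client.new

# Upload: save! POSTs the IO as multipart form data with the configured purpose.
file = OmniAI::OpenAI::File.new(io: ::File.open('report.pdf', 'rb'), client: client)
file.save!

OmniAI::OpenAI::File.all(client: client)           # list files
found = OmniAI::OpenAI::File.find(id: file.id, client: client)

found.content { |chunk| print chunk }               # stream the raw contents
found.destroy!                                      # sets #deleted from the API response
```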
data/lib/omniai/openai/files.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module OpenAI
+    # An OpenAI scope for establishing files.
+    class Files
+      # @param client [OmniAI::OpenAI::Client] required
+      def initialize(client:)
+        @client = client
+      end
+
+      # @raise [OmniAI::Error]
+      #
+      # @param id [String] required
+      #
+      # @return [OmniAI::OpenAI::File]
+      def find(id:)
+        File.find(id:, client: @client)
+      end
+
+      # @raise [OmniAI::Error]
+      #
+      # @return [Array<OmniAI::OpenAI::File>]
+      def all
+        File.all(client: @client)
+      end
+
+      # @raise [OmniAI::Error]
+      #
+      # @param id [String] required
+      def destroy!(id:)
+        File.destroy!(id:, client: @client)
+      end
+
+      # @param io [IO] optional
+      # @param purpose [String] optional
+      #
+      # @return [OmniAI::OpenAI::File]
+      def build(io: nil, purpose: File::Purpose::ASSISTANTS)
+        File.new(io:, purpose:, client: @client)
+      end
+    end
+  end
+end
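
`Files` is a thin scope over the `File` class that pins a client. The `client.rb` diff is not shown in this section, but the pattern suggests the client exposes this scope (something like `client.files`); the sketch below avoids that assumption and builds the scope directly.

```ruby
client = OmniAI::OpenAI::Client.new
files = OmniAI::OpenAI::Files.new(client: client)

file = files.build(io: ::File.open('data.csv', 'rb')) # build constructs without uploading...
file.save!                                             # ...save! performs the upload

files.all                # => Array of OmniAI::OpenAI::File
files.find(id: file.id)
files.destroy!(id: file.id)
```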
data/lib/omniai/openai/thread/message.rb
@@ -0,0 +1,207 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module OpenAI
+    class Thread
+      # An OpenAI message within a thread.
+      class Message
+        # @!attribute [rw] id
+        # @return [String, nil]
+        attr_accessor :id
+
+        # @!attribute [rw] assistant_id
+        # @return [String, nil]
+        attr_accessor :assistant_id
+
+        # @!attribute [rw] thread_id
+        # @return [String, nil]
+        attr_accessor :thread_id
+
+        # @!attribute [rw] run_id
+        # @return [String, nil]
+        attr_accessor :run_id
+
+        # @!attribute [rw] role
+        # @return [String, nil]
+        attr_accessor :role
+
+        # @!attribute [rw] content
+        # @return [String, Array, nil]
+        attr_accessor :content
+
+        # @!attribute [rw] attachments
+        # @return [Array, nil]
+        attr_accessor :attachments
+
+        # @!attribute [rw] metadata
+        # @return [Array, nil]
+        attr_accessor :metadata
+
+        # @!attribute [rw] deleted
+        # @return [Boolean, nil]
+        attr_accessor :deleted
+
+        # @param id [String, nil] optional
+        # @param assistant_id [String, nil] optional
+        # @param thread_id [String, nil] optional
+        # @param run_id [String, nil] optional
+        # @param role [String, nil] optional
+        # @param content [String, Array, nil] optional
+        # @param attachments [Array, nil] optional
+        # @param metadata [Hash, nil] optional
+        # @param client [OmniAI::OpenAI::Client] optional
+        def initialize(
+          id: nil,
+          assistant_id: nil,
+          thread_id: nil,
+          run_id: nil,
+          role: nil,
+          content: nil,
+          attachments: [],
+          metadata: {},
+          client: Client.new
+        )
+          @id = id
+          @assistant_id = assistant_id
+          @thread_id = thread_id
+          @run_id = run_id
+          @role = role
+          @content = content
+          @attachments = attachments
+          @metadata = metadata
+          @client = client
+        end
+
+        # @return [String]
+        def inspect
+          props = [
+            "id=#{@id.inspect}",
+            ("assistant_id=#{@assistant_id.inspect}" if @assistant_id),
+            ("thread_id=#{@thread_id.inspect}" if @thread_id),
+            ("content=#{@content.inspect}" if @content),
+          ].compact
+
+          "#<#{self.class.name} #{props.join(' ')}>"
+        end
+
+        # @param thread_id [String] required
+        # @param id [String] required
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [OmniAI::OpenAI::Thread::Message]
+        def self.find(thread_id:, id:, client: Client.new)
+          response = client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages/#{id}")
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          parse(data: response.parse)
+        end
+
+        # @param thread_id [String] required
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [Array<OmniAI::OpenAI::Thread::Message>]
+        def self.all(thread_id:, limit: nil, client: Client.new)
+          response = client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages", params: { limit: }.compact)
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          response.parse['data'].map { |data| parse(data:, client:) }
+        end
+
+        # @param thread_id [String] required
+        # @param id [String] required
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [Hash]
+        def self.destroy!(thread_id:, id:, client: Client.new)
+          response = client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .delete("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages/#{id}")
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          response.parse
+        end
+
+        # @raise [HTTPError]
+        # @return [OmniAI::OpenAI::Thread]
+        def save!
+          response = @client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .post(path, json: payload)
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          parse(data: response.parse)
+          self
+        end
+
+        # @raise [OmniAI::Error]
+        # @return [OmniAI::OpenAI::Thread]
+        def destroy!
+          raise OmniAI::Error, 'cannot destroy a non-persisted thread' unless @id
+
+          data = self.class.destroy!(thread_id: @thread_id, id: @id, client: @client)
+          @deleted = data['deleted']
+          self
+        end
+
+        private
+
+        class << self
+          private
+
+          # @param data [Hash] required
+          # @param client [OmniAI::OpenAI::Client] required
+          # @return [OmniAI::OpenAI::Thread]
+          def parse(data:, client: Client.new)
+            new(
+              client:,
+              id: data['id'],
+              assistant_id: data['assistant_id'],
+              thread_id: data['thread_id'],
+              run_id: data['run_id'],
+              role: data['role'],
+              content: data['content'],
+              attachments: data['attachments'],
+              metadata: data['metadata']
+            )
+          end
+        end
+
+        # @param data [Hash] required
+        def parse(data:)
+          @id = data['id']
+          @assistant_id = data['assistant_id']
+          @thread_id = data['thread_id']
+          @run_id = data['run_id']
+          @role = data['role']
+          @content = data['content']
+          @attachments = data['attachments']
+          @metadata = data['metadata']
+        end
+
+        # @return [Hash]
+        def payload
+          {
+            role: @role,
+            content: @content,
+            attachments: @attachments,
+            metadata: @metadata,
+          }.compact
+        end
+
+        # @return [String]
+        def path
+          "/#{OmniAI::OpenAI::Client::VERSION}/threads/#{@thread_id}/messages#{"/#{@id}" if @id}"
+        end
+      end
+    end
+  end
+end
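
A short sketch of the `Thread::Message` lifecycle based on the methods above; the thread ID is a placeholder. Two small nits worth flagging: `save!` and the `parse` helpers are tagged `@return [OmniAI::OpenAI::Thread]` although they return or update a `Message`, and the `destroy!` guard message says "thread" where it means "message".

```ruby
client = OmniAI::OpenAI::Client.new

message = OmniAI::OpenAI::Thread::Message.new(
  thread_id: 'thread_123', # placeholder ID
  role: 'user',
  content: 'What does the attached report say?',
  client: client
)
message.save! # POSTs to the threads/:thread_id/messages path built by #path

OmniAI::OpenAI::Thread::Message.all(thread_id: 'thread_123', limit: 10, client: client)
message.destroy!
```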
data/lib/omniai/openai/thread/messages.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module OpenAI
+    class Thread
+      # An OpenAI scope for establishing messages.
+      class Messages
+        # @param client [OmniAI::OpenAI::Client] required
+        # @param thread [OmniAI::OpenAI::Thread] required
+        def initialize(client:, thread:)
+          @client = client
+          @thread = thread
+        end
+
+        # @param limit [Integer] optional
+        # @return [Array<OmniAI::Thread::Message>]
+        def all(limit:)
+          Message.all(thread_id: @thread.id, limit:, client: @client)
+        end
+
+        # @param id [String] required
+        # @return [OmniAI::OpenAI::Thread::Message]
+        def find(id:)
+          Message.find(id:, thread_id: @thread.id, client: @client)
+        end
+
+        # @param id [String] required
+        # @return [Hash]
+        def destroy!(id:)
+          Message.destroy!(id:, thread_id: @thread.id, client: @client)
+        end
+
+        # @param role [String, nil] optional
+        # @param content [String, Array, nil] optional
+        # @param attachments [Array, nil] optional
+        # @param metadata [Hash, nil] optional
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [OmniAI::OpenAI::Thread::Message]
+        def build(role: nil, content: nil, attachments: [], metadata: {})
+          Message.new(role:, content:, attachments:, metadata:, thread_id: @thread.id, client: @client)
+        end
+      end
+    end
+  end
+end
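
`Thread::Messages` scopes message operations to a thread object (the `Thread` class itself lives in `thread.rb`, which is not shown here, so `thread` below is assumed to be a persisted `OmniAI::OpenAI::Thread`). One inconsistency: `all` documents `limit` as optional but declares `limit:` without a default, so callers must pass it explicitly.

```ruby
client = OmniAI::OpenAI::Client.new
messages = OmniAI::OpenAI::Thread::Messages.new(client: client, thread: thread)

messages.build(role: 'user', content: 'Hi!').save!
messages.all(limit: 20)            # limit: is required by the signature
messages.find(id: 'msg_123')       # placeholder ID
messages.destroy!(id: 'msg_123')
```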
data/lib/omniai/openai/thread/run.rb
@@ -0,0 +1,213 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module OpenAI
+    class Thread
+      # An OpenAI run within a thread.
+      class Run
+        # @!attribute [rw] id
+        # @return [String, nil]
+        attr_accessor :id
+
+        # @!attribute [rw] assistant_id
+        # @return [String, nil]
+        attr_accessor :assistant_id
+
+        # @!attribute [rw] thread_id
+        # @return [String, nil]
+        attr_accessor :thread_id
+
+        # @!attribute [rw] status
+        # @return [String, nil]
+        attr_accessor :status
+
+        # @!attribute [rw] model
+        # @return [String, nil]
+        attr_accessor :model
+
+        # @!attribute [rw] temperature
+        # @return [Float, nil]
+        attr_accessor :temperature
+
+        # @!attribute [rw] instructions
+        # @return [String, nil]
+        attr_accessor :instructions
+
+        # @!attribute [rw] tools
+        # @return [Array<Hash>, nil]
+        attr_accessor :tools
+
+        # @!attribute [rw] metadata
+        # @return [Hash, nil]
+        attr_accessor :metadata
+
+        # @param id [String, nil] optional
+        # @param assistant_id [String, nil] optional
+        # @param thread_id [String, nil] optional
+        # @param status [String, nil] optional
+        # @param temperature [Decimal, nil] optional
+        # @param instructions [String, nil] optional
+        # @param metadata [Hash, nil] optional
+        # @param tools [Array<Hash>, nil] optional
+        # @param client [OmniAI::OpenAI::Client] optional
+        def initialize(
+          id: nil,
+          assistant_id: nil,
+          thread_id: nil,
+          status: nil,
+          model: nil,
+          temperature: nil,
+          instructions: nil,
+          metadata: {},
+          tools: [],
+          client: Client.new
+        )
+          @id = id
+          @assistant_id = assistant_id
+          @thread_id = thread_id
+          @status = status
+          @model = model
+          @temperature = temperature
+          @instructions = instructions
+          @metadata = metadata
+          @tools = tools
+          @client = client
+        end
+
+        # @return [String]
+        def inspect
+          props = [
+            "id=#{@id.inspect}",
+            ("assistant_id=#{@assistant_id.inspect}" if @assistant_id),
+            ("thread_id=#{@thread_id.inspect}" if @thread_id),
+            ("status=#{@status.inspect}" if @status),
+          ].compact
+          "#<#{self.class.name} #{props.join(' ')}>"
+        end
+
+        # @param thread_id [String] required
+        # @param id [String] required
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [OmniAI::OpenAI::Thread::Run]
+        def self.find(thread_id:, id:, client: Client.new)
+          response = client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs/#{id}")
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          parse(data: response.parse)
+        end
+
+        # @param thread_id [String] required
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [Array<OmniAI::OpenAI::Thread::Run>]
+        def self.all(thread_id:, limit: nil, client: Client.new)
+          response = client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs", params: { limit: }.compact)
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          response.parse['data'].map { |data| parse(data:, client:) }
+        end
+
+        # @param thread_id [String] required
+        # @param id [String] required
+        # @param client [OmniAI::OpenAI::Client] optional
+        # @return [Hash]
+        def self.cancel!(thread_id:, id:, client: Client.new)
+          response = client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .post("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs/#{id}/cancel")
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          response.parse
+        end
+
+        # @raise [HTTPError]
+        # @return [OmniAI::OpenAI::Thread]
+        def save!
+          response = @client.connection
+            .accept(:json)
+            .headers(HEADERS)
+            .post(path, json: payload)
+
+          raise HTTPError, response.flush unless response.status.ok?
+
+          parse(data: response.parse)
+          self
+        end
+
+        # @raise [OmniAI::Error]
+        # @return [OmniAI::OpenAI::Thread]
+        def cancel!
+          raise OmniAI::Error, 'cannot cancel a non-persisted thread' unless @id
+
+          data = self.class.cancel!(thread_id: @thread_id, id: @id, client: @client)
+          @status = data['status']
+          self
+        end
+
+        private
+
+        class << self
+          private
+
+          # @param data [Hash] required
+          # @param client [OmniAI::OpenAI::Client] required
+          # @return [OmniAI::OpenAI::Thread]
+          def parse(data:, client: Client.new)
+            new(
+              client:,
+              id: data['id'],
+              assistant_id: data['assistant_id'],
+              thread_id: data['thread_id'],
+              status: data['status'],
+              model: data['model'],
+              temperature: data['temperature'],
+              instructions: data['instructions'],
+              tools: data['tools'],
+              metadata: data['metadata']
+            )
+          end
+        end
+
+        # @param data [Hash] required
+        def parse(data:)
+          @id = data['id']
+          @assistant_id = data['assistant_id']
+          @thread_id = data['thread_id']
+          @run_id = data['run_id']
+          @status = data['status']
+          @model = data['model']
+          @temperature = data['temperature']
+          @instructions = data['instructions']
+          @tools = data['tools']
+          @metadata = data['metadata']
+        end
+
+        # @return [Hash]
+        def payload
+          {
+            assistant_id: @assistant_id,
+            model: @model,
+            temperature: @temperature,
+            instructions: @instructions,
+            tools: @tools,
+            metadata: @metadata,
+          }.compact
+        end
+
+        # @return [String]
+        def path
+          "/#{OmniAI::OpenAI::Client::VERSION}/threads/#{@thread_id}/runs#{"/#{@id}" if @id}"
+        end
+      end
+    end
+  end
+end
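
A sketch of creating and polling a run with the methods above; the IDs are placeholders and the status strings come from the OpenAI Assistants API rather than this file. Also note that the instance-level `parse` assigns `@run_id`, which `Run` never declares as an attribute; it looks copied from `Message#parse`.

```ruby
client = OmniAI::OpenAI::Client.new

run = OmniAI::OpenAI::Thread::Run.new(
  thread_id: 'thread_123',  # placeholder IDs
  assistant_id: 'asst_123',
  client: client
)
run.save! # POSTs to the threads/:thread_id/runs path built by #path

# Re-fetch until the run reaches a terminal status (status names per the OpenAI API).
until %w[completed failed cancelled expired].include?(run.status)
  sleep 1
  run = OmniAI::OpenAI::Thread::Run.find(thread_id: run.thread_id, id: run.id, client: client)
end

# A run that is still in flight can also be cancelled:
# run.cancel!
```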