omniai-openai 1.3.0 → 1.3.2

@@ -0,0 +1,207 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI message within a thread.
+       class Message
+         # @!attribute [rw] id
+         # @return [String, nil]
+         attr_accessor :id
+
+         # @!attribute [rw] assistant_id
+         # @return [String, nil]
+         attr_accessor :assistant_id
+
+         # @!attribute [rw] thread_id
+         # @return [String, nil]
+         attr_accessor :thread_id
+
+         # @!attribute [rw] run_id
+         # @return [String, nil]
+         attr_accessor :run_id
+
+         # @!attribute [rw] role
+         # @return [String, nil]
+         attr_accessor :role
+
+         # @!attribute [rw] content
+         # @return [String, Array, nil]
+         attr_accessor :content
+
+         # @!attribute [rw] attachments
+         # @return [Array, nil]
+         attr_accessor :attachments
+
+         # @!attribute [rw] metadata
+         # @return [Hash, nil]
+         attr_accessor :metadata
+
+         # @!attribute [rw] deleted
+         # @return [Boolean, nil]
+         attr_accessor :deleted
+
+         # @param id [String, nil] optional
+         # @param assistant_id [String, nil] optional
+         # @param thread_id [String, nil] optional
+         # @param run_id [String, nil] optional
+         # @param role [String, nil] optional
+         # @param content [String, Array, nil] optional
+         # @param attachments [Array, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         def initialize(
+           id: nil,
+           assistant_id: nil,
+           thread_id: nil,
+           run_id: nil,
+           role: nil,
+           content: nil,
+           attachments: [],
+           metadata: {},
+           client: Client.new
+         )
+           @id = id
+           @assistant_id = assistant_id
+           @thread_id = thread_id
+           @run_id = run_id
+           @role = role
+           @content = content
+           @attachments = attachments
+           @metadata = metadata
+           @client = client
+         end
+
+         # @return [String]
+         def inspect
+           props = [
+             "id=#{@id.inspect}",
+             ("assistant_id=#{@assistant_id.inspect}" if @assistant_id),
+             ("thread_id=#{@thread_id.inspect}" if @thread_id),
+             ("content=#{@content.inspect}" if @content),
+           ].compact
+
+           "#<#{self.class.name} #{props.join(' ')}>"
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def self.find(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages/#{id}")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+         end
+
+         # @param thread_id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Message>]
+         def self.all(thread_id:, limit: nil, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages", params: { limit: }.compact)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse['data'].map { |data| parse(data:, client:) }
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Hash]
+         def self.destroy!(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .delete("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages/#{id}")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse
+         end
+
+         # @raise [HTTPError]
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def save!
+           response = @client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .post(path, json: payload)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+           self
+         end
+
+         # @raise [OmniAI::Error]
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def destroy!
+           raise OmniAI::Error, 'cannot destroy a non-persisted message' unless @id
+
+           data = self.class.destroy!(thread_id: @thread_id, id: @id, client: @client)
+           @deleted = data['deleted']
+           self
+         end
+
+         private
+
+         class << self
+           private
+
+           # @param data [Hash] required
+           # @param client [OmniAI::OpenAI::Client] required
+           # @return [OmniAI::OpenAI::Thread::Message]
+           def parse(data:, client: Client.new)
+             new(
+               client:,
+               id: data['id'],
+               assistant_id: data['assistant_id'],
+               thread_id: data['thread_id'],
+               run_id: data['run_id'],
+               role: data['role'],
+               content: data['content'],
+               attachments: data['attachments'],
+               metadata: data['metadata']
+             )
+           end
+         end
+
+         # @param data [Hash] required
+         def parse(data:)
+           @id = data['id']
+           @assistant_id = data['assistant_id']
+           @thread_id = data['thread_id']
+           @run_id = data['run_id']
+           @role = data['role']
+           @content = data['content']
+           @attachments = data['attachments']
+           @metadata = data['metadata']
+         end
+
+         # @return [Hash]
+         def payload
+           {
+             role: @role,
+             content: @content,
+             attachments: @attachments,
+             metadata: @metadata,
+           }.compact
+         end
+
+         # @return [String]
+         def path
+           "/#{OmniAI::OpenAI::Client::VERSION}/threads/#{@thread_id}/messages#{"/#{@id}" if @id}"
+         end
+       end
+     end
+   end
+ end
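For reference, a minimal usage sketch of the new OmniAI::OpenAI::Thread::Message API above. The require path, the placeholder thread ID, and the assumption that the default Client.new picks up an API key from the environment are not part of the diff.

require 'omniai/openai' # assumed entry point for the gem

# Create a message in an existing thread ('thread_...' is a placeholder ID).
message = OmniAI::OpenAI::Thread::Message.new(
  thread_id: 'thread_...',
  role: 'user',
  content: 'What is the weather like today?'
)
message.save! # POSTs to the messages path built by #path

# Fetch a single message or list recent messages.
found = OmniAI::OpenAI::Thread::Message.find(thread_id: 'thread_...', id: message.id)
recent = OmniAI::OpenAI::Thread::Message.all(thread_id: 'thread_...', limit: 10)

# Remove the message again; #deleted is populated from the response.
message.destroy!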
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI scope for establishing messages.
+       class Messages
+         # @param client [OmniAI::OpenAI::Client] required
+         # @param thread [OmniAI::OpenAI::Thread] required
+         def initialize(client:, thread:)
+           @client = client
+           @thread = thread
+         end
+
+         # @param limit [Integer] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Message>]
+         def all(limit: nil)
+           Message.all(thread_id: @thread.id, limit:, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def find(id:)
+           Message.find(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [Hash]
+         def destroy!(id:)
+           Message.destroy!(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param role [String, nil] optional
+         # @param content [String, Array, nil] optional
+         # @param attachments [Array, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def build(role: nil, content: nil, attachments: [], metadata: {})
+           Message.new(role:, content:, attachments:, metadata:, thread_id: @thread.id, client: @client)
+         end
+       end
+     end
+   end
+ end
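The same operations are reachable through this scope class, so the thread_id and client never need to be repeated. A sketch, assuming a persisted thread and a placeholder thread ID:

thread = OmniAI::OpenAI::Thread.find(id: 'thread_...') # placeholder ID

# Build and persist a message scoped to the thread.
message = thread.messages.build(role: 'user', content: 'Hello!')
message.save!

thread.messages.all(limit: 5)             # => Array<OmniAI::OpenAI::Thread::Message>
thread.messages.find(id: message.id)      # => OmniAI::OpenAI::Thread::Message
thread.messages.destroy!(id: message.id)  # => Hash (deletion payload)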
@@ -0,0 +1,260 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI run within a thread.
+       class Run
+         module Status
+           CANCELLED = 'cancelled'
+           FAILED = 'failed'
+           COMPLETED = 'completed'
+           EXPIRED = 'expired'
+         end
+
+         TERMINATED_STATUSES = [
+           Status::CANCELLED,
+           Status::FAILED,
+           Status::COMPLETED,
+           Status::EXPIRED,
+         ].freeze
+
+         # @!attribute [rw] id
+         # @return [String, nil]
+         attr_accessor :id
+
+         # @!attribute [rw] assistant_id
+         # @return [String, nil]
+         attr_accessor :assistant_id
+
+         # @!attribute [rw] thread_id
+         # @return [String, nil]
+         attr_accessor :thread_id
+
+         # @!attribute [rw] status
+         # @return [String, nil]
+         attr_accessor :status
+
+         # @!attribute [rw] model
+         # @return [String, nil]
+         attr_accessor :model
+
+         # @!attribute [rw] temperature
+         # @return [Float, nil]
+         attr_accessor :temperature
+
+         # @!attribute [rw] instructions
+         # @return [String, nil]
+         attr_accessor :instructions
+
+         # @!attribute [rw] tools
+         # @return [Array<Hash>, nil]
+         attr_accessor :tools
+
+         # @!attribute [rw] metadata
+         # @return [Hash, nil]
+         attr_accessor :metadata
+
+         # @param id [String, nil] optional
+         # @param assistant_id [String, nil] optional
+         # @param thread_id [String, nil] optional
+         # @param status [String, nil] optional
+         # @param temperature [Float, nil] optional
+         # @param instructions [String, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param tools [Array<Hash>, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         def initialize(
+           id: nil,
+           assistant_id: nil,
+           thread_id: nil,
+           status: nil,
+           model: nil,
+           temperature: nil,
+           instructions: nil,
+           metadata: {},
+           tools: [],
+           client: Client.new
+         )
+           @id = id
+           @assistant_id = assistant_id
+           @thread_id = thread_id
+           @status = status
+           @model = model
+           @temperature = temperature
+           @instructions = instructions
+           @metadata = metadata
+           @tools = tools
+           @client = client
+         end
+
+         # @return [String]
+         def inspect
+           props = [
+             "id=#{@id.inspect}",
+             ("assistant_id=#{@assistant_id.inspect}" if @assistant_id),
+             ("thread_id=#{@thread_id.inspect}" if @thread_id),
+             ("status=#{@status.inspect}" if @status),
+           ].compact
+           "#<#{self.class.name} #{props.join(' ')}>"
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def self.find(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs/#{id}")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+         end
+
+         # @param thread_id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Run>]
+         def self.all(thread_id:, limit: nil, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs", params: { limit: }.compact)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse['data'].map { |data| parse(data:, client:) }
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Hash]
+         def self.cancel!(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .post("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs/#{id}/cancel")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse
+         end
+
+         # @raise [HTTPError]
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def save!
+           response = @client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .post(path, json: payload)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+           self
+         end
+
+         # @raise [HTTPError]
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def reload!
+           raise Error, 'unable to reload! without an ID' unless @id
+
+           response = @client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get(path)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+           self
+         end
+
+         # @raise [OmniAI::Error]
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def cancel!
+           raise OmniAI::Error, 'cannot cancel a non-persisted run' unless @id
+
+           data = self.class.cancel!(thread_id: @thread_id, id: @id, client: @client)
+           @status = data['status']
+           self
+         end
+
+         # @param delay [Integer, Float, nil] optional (seconds)
+         #
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def poll!(delay: 2)
+           loop do
+             reload!
+             break if terminated?
+
+             sleep(delay) if delay
+           end
+         end
+
+         # @return [Boolean]
+         def terminated?
+           TERMINATED_STATUSES.include?(@status)
+         end
+
+         private
+
+         class << self
+           private
+
+           # @param data [Hash] required
+           # @param client [OmniAI::OpenAI::Client] required
+           # @return [OmniAI::OpenAI::Thread::Run]
+           def parse(data:, client: Client.new)
+             new(
+               client:,
+               id: data['id'],
+               assistant_id: data['assistant_id'],
+               thread_id: data['thread_id'],
+               status: data['status'],
+               model: data['model'],
+               temperature: data['temperature'],
+               instructions: data['instructions'],
+               tools: data['tools'],
+               metadata: data['metadata']
+             )
+           end
+         end
+
+         # @param data [Hash] required
+         def parse(data:)
+           @id = data['id']
+           @assistant_id = data['assistant_id']
+           @thread_id = data['thread_id']
+           @run_id = data['run_id']
+           @status = data['status']
+           @model = data['model']
+           @temperature = data['temperature']
+           @instructions = data['instructions']
+           @tools = data['tools']
+           @metadata = data['metadata']
+         end
+
+         # @return [Hash]
+         def payload
+           {
+             assistant_id: @assistant_id,
+             model: @model,
+             temperature: @temperature,
+             instructions: @instructions,
+             tools: @tools,
+             metadata: @metadata,
+           }.compact
+         end
+
+         # @return [String]
+         def path
+           "/#{OmniAI::OpenAI::Client::VERSION}/threads/#{@thread_id}/runs#{"/#{@id}" if @id}"
+         end
+       end
+     end
+   end
+ end
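A sketch of driving a run to completion with the class above. The thread and assistant IDs are placeholders; poll! blocks, calling reload! every delay seconds until the run reaches one of the TERMINATED_STATUSES.

run = OmniAI::OpenAI::Thread::Run.new(
  thread_id: 'thread_...',  # placeholder
  assistant_id: 'asst_...', # placeholder
  temperature: 0.7
)
run.save!           # POSTs to the runs path built by #path
run.poll!(delay: 2) # reload! in a loop until terminated?

case run.status
when OmniAI::OpenAI::Thread::Run::Status::COMPLETED then puts 'done'
when OmniAI::OpenAI::Thread::Run::Status::FAILED    then puts 'failed'
end

# A run that should not continue can be cancelled instead:
# run.cancel!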
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI scope for establishing runs.
+       class Runs
+         # @param client [OmniAI::OpenAI::Client] required
+         # @param thread [OmniAI::OpenAI::Thread] required
+         def initialize(client:, thread:)
+           @client = client
+           @thread = thread
+         end
+
+         # @param limit [Integer] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Run>]
+         def all(limit: nil)
+           Run.all(thread_id: @thread.id, limit:, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def find(id:)
+           Run.find(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [Hash]
+         def cancel!(id:)
+           Run.cancel!(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param assistant_id [String] required
+         # @param model [String] optional
+         # @param temperature [Float] optional
+         # @param instructions [String] optional
+         # @param tools [Array<Hash>, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def build(assistant_id:, model: nil, temperature: nil, instructions: nil, tools: nil, metadata: {})
+           Run.new(
+             assistant_id:,
+             thread_id: @thread.id,
+             model:,
+             temperature:,
+             instructions:,
+             tools:,
+             metadata:,
+             client: @client
+           )
+         end
+       end
+     end
+   end
+ end
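Through this scope a run can be built directly from a thread. A sketch with placeholder IDs:

thread = OmniAI::OpenAI::Thread.find(id: 'thread_...') # placeholder ID

run = thread.runs.build(assistant_id: 'asst_...', instructions: 'Answer briefly.')
run.save!
run.poll!

thread.runs.all(limit: 5)    # => Array<OmniAI::OpenAI::Thread::Run>
thread.runs.find(id: run.id) # => OmniAI::OpenAI::Thread::Run
thread.runs.cancel!(id: run.id) unless run.terminated?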
@@ -0,0 +1,141 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI threads implementation.
+     class Thread
+       HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
+
+       # @!attribute [rw] id
+       # @return [String, nil]
+       attr_accessor :id
+
+       # @!attribute [rw] metadata
+       # @return [Hash]
+       attr_accessor :metadata
+
+       # @!attribute [rw] tool_resources
+       # @return [Hash]
+       attr_accessor :tool_resources
+
+       # @!attribute [rw] deleted
+       # @return [Boolean, nil]
+       attr_accessor :deleted
+
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @param id [String, nil] optional
+       # @param metadata [Hash] optional
+       def initialize(
+         client: Client.new,
+         id: nil,
+         metadata: {},
+         tool_resources: {}
+       )
+         @client = client
+         @id = id
+         @metadata = metadata
+         @tool_resources = tool_resources
+       end
+
+       # @return [String]
+       def inspect
+         "#<#{self.class.name} id=#{@id.inspect}>"
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [OmniAI::OpenAI::Thread]
+       def self.find(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Hash]
+       def self.destroy!(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .delete("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse
+       end
+
+       # @raise [HTTPError]
+       # @return [OmniAI::OpenAI::Thread]
+       def save!
+         response = @client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .post("/#{OmniAI::OpenAI::Client::VERSION}/threads#{"/#{@id}" if @id}", json: payload)
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+         self
+       end
+
+       # @raise [OmniAI::Error]
+       # @return [OmniAI::OpenAI::Thread]
+       def destroy!
+         raise OmniAI::Error, 'cannot destroy a non-persisted thread' unless @id
+
+         data = self.class.destroy!(id: @id, client: @client)
+         @deleted = data['deleted']
+         self
+       end
+
+       # @return [OmniAI::OpenAI::Thread::Messages]
+       def messages
+         Messages.new(client: @client, thread: self)
+       end
+
+       # @return [OmniAI::OpenAI::Thread::Runs]
+       def runs
+         Runs.new(client: @client, thread: self)
+       end
+
+       private
+
+       class << self
+         private
+
+         # @param data [Hash] required
+         # @param client [OmniAI::OpenAI::Client] required
+         # @return [OmniAI::OpenAI::Thread]
+         def parse(data:, client: Client.new)
+           new(
+             client:,
+             id: data['id'],
+             metadata: data['metadata'],
+             tool_resources: data['tool_resources']
+           )
+         end
+       end
+
+       # @param data [Hash] required
+       # @return [OmniAI::OpenAI::Thread]
+       def parse(data:)
+         @id = data['id']
+         @metadata = data['metadata']
+         @tool_resources = data['tool_resources']
+       end
+
+       # @return [Hash]
+       def payload
+         {
+           metadata: @metadata,
+           tool_resources: @tool_resources,
+         }.compact
+       end
+     end
+   end
+ end
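Putting the pieces together, a minimal end-to-end sketch of the Thread class and its messages/runs scopes. The assistant ID is a placeholder and the client configuration (an API key picked up by the default Client.new) is assumed, not shown in the diff.

thread = OmniAI::OpenAI::Thread.new(metadata: { 'purpose' => 'demo' })
thread.save! # creates the thread and populates thread.id

thread.messages.build(role: 'user', content: 'Hi there!').save!
run = thread.runs.build(assistant_id: 'asst_...') # placeholder assistant ID
run.save!
run.poll!

thread.messages.all(limit: 10).each { |message| puts message.inspect }

thread.destroy! # marks the thread deleted
thread.deleted  # => true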