omniai-openai 1.3.0 → 1.3.1

@@ -0,0 +1,207 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI message within a thread.
+       class Message
+         # @!attribute [rw] id
+         #   @return [String, nil]
+         attr_accessor :id
+
+         # @!attribute [rw] assistant_id
+         #   @return [String, nil]
+         attr_accessor :assistant_id
+
+         # @!attribute [rw] thread_id
+         #   @return [String, nil]
+         attr_accessor :thread_id
+
+         # @!attribute [rw] run_id
+         #   @return [String, nil]
+         attr_accessor :run_id
+
+         # @!attribute [rw] role
+         #   @return [String, nil]
+         attr_accessor :role
+
+         # @!attribute [rw] content
+         #   @return [String, Array, nil]
+         attr_accessor :content
+
+         # @!attribute [rw] attachments
+         #   @return [Array, nil]
+         attr_accessor :attachments
+
+         # @!attribute [rw] metadata
+         #   @return [Hash, nil]
+         attr_accessor :metadata
+
+         # @!attribute [rw] deleted
+         #   @return [Boolean, nil]
+         attr_accessor :deleted
+
+         # @param id [String, nil] optional
+         # @param assistant_id [String, nil] optional
+         # @param thread_id [String, nil] optional
+         # @param run_id [String, nil] optional
+         # @param role [String, nil] optional
+         # @param content [String, Array, nil] optional
+         # @param attachments [Array, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         def initialize(
+           id: nil,
+           assistant_id: nil,
+           thread_id: nil,
+           run_id: nil,
+           role: nil,
+           content: nil,
+           attachments: [],
+           metadata: {},
+           client: Client.new
+         )
+           @id = id
+           @assistant_id = assistant_id
+           @thread_id = thread_id
+           @run_id = run_id
+           @role = role
+           @content = content
+           @attachments = attachments
+           @metadata = metadata
+           @client = client
+         end
+
+         # @return [String]
+         def inspect
+           props = [
+             "id=#{@id.inspect}",
+             ("assistant_id=#{@assistant_id.inspect}" if @assistant_id),
+             ("thread_id=#{@thread_id.inspect}" if @thread_id),
+             ("content=#{@content.inspect}" if @content),
+           ].compact
+
+           "#<#{self.class.name} #{props.join(' ')}>"
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def self.find(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages/#{id}")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+         end
+
+         # @param thread_id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Message>]
+         def self.all(thread_id:, limit: nil, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages", params: { limit: }.compact)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse['data'].map { |data| parse(data:, client:) }
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Hash]
+         def self.destroy!(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .delete("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/messages/#{id}")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse
+         end
+
+         # @raise [HTTPError]
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def save!
+           response = @client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .post(path, json: payload)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+           self
+         end
+
+         # @raise [OmniAI::Error]
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def destroy!
+           raise OmniAI::Error, 'cannot destroy a non-persisted message' unless @id
+
+           data = self.class.destroy!(thread_id: @thread_id, id: @id, client: @client)
+           @deleted = data['deleted']
+           self
+         end
+
+         private
+
+         class << self
+           private
+
+           # @param data [Hash] required
+           # @param client [OmniAI::OpenAI::Client] required
+           # @return [OmniAI::OpenAI::Thread::Message]
+           def parse(data:, client: Client.new)
+             new(
+               client:,
+               id: data['id'],
+               assistant_id: data['assistant_id'],
+               thread_id: data['thread_id'],
+               run_id: data['run_id'],
+               role: data['role'],
+               content: data['content'],
+               attachments: data['attachments'],
+               metadata: data['metadata']
+             )
+           end
+         end
+
+         # @param data [Hash] required
+         def parse(data:)
+           @id = data['id']
+           @assistant_id = data['assistant_id']
+           @thread_id = data['thread_id']
+           @run_id = data['run_id']
+           @role = data['role']
+           @content = data['content']
+           @attachments = data['attachments']
+           @metadata = data['metadata']
+         end
+
+         # @return [Hash]
+         def payload
+           {
+             role: @role,
+             content: @content,
+             attachments: @attachments,
+             metadata: @metadata,
+           }.compact
+         end
+
+         # @return [String]
+         def path
+           "/#{OmniAI::OpenAI::Client::VERSION}/threads/#{@thread_id}/messages#{"/#{@id}" if @id}"
+         end
+       end
+     end
+   end
+ end
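
For reference, a minimal usage sketch for the new `Thread::Message` class. The IDs are placeholders and an API key is assumed to be configured on the default `Client.new`:

    # Hypothetical IDs for illustration only.
    message = OmniAI::OpenAI::Thread::Message.new(thread_id: 'thread_123', role: 'user', content: 'Hello!')
    message.save!    # POST /{VERSION}/threads/thread_123/messages
    found = OmniAI::OpenAI::Thread::Message.find(thread_id: 'thread_123', id: message.id)
    found.destroy!   # DELETE the persisted message and populate #deleted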
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI scope for establishing messages.
+       class Messages
+         # @param client [OmniAI::OpenAI::Client] required
+         # @param thread [OmniAI::OpenAI::Thread] required
+         def initialize(client:, thread:)
+           @client = client
+           @thread = thread
+         end
+
+         # @param limit [Integer] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Message>]
+         def all(limit: nil)
+           Message.all(thread_id: @thread.id, limit:, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def find(id:)
+           Message.find(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [Hash]
+         def destroy!(id:)
+           Message.destroy!(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param role [String, nil] optional
+         # @param content [String, Array, nil] optional
+         # @param attachments [Array, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Message]
+         def build(role: nil, content: nil, attachments: [], metadata: {})
+           Message.new(role:, content:, attachments:, metadata:, thread_id: @thread.id, client: @client)
+         end
+       end
+     end
+   end
+ end
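
The `Messages` scope is reached through `Thread#messages`; a short sketch, again with placeholder IDs and an assumed configured client:

    thread = OmniAI::OpenAI::Thread.find(id: 'thread_123')   # 'thread_123' is a placeholder
    thread.messages.build(role: 'user', content: 'What is OmniAI?').save!
    thread.messages.all(limit: 20).each { |message| puts message.inspect }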
@@ -0,0 +1,213 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI run within a thread.
+       class Run
+         # @!attribute [rw] id
+         #   @return [String, nil]
+         attr_accessor :id
+
+         # @!attribute [rw] assistant_id
+         #   @return [String, nil]
+         attr_accessor :assistant_id
+
+         # @!attribute [rw] thread_id
+         #   @return [String, nil]
+         attr_accessor :thread_id
+
+         # @!attribute [rw] status
+         #   @return [String, nil]
+         attr_accessor :status
+
+         # @!attribute [rw] model
+         #   @return [String, nil]
+         attr_accessor :model
+
+         # @!attribute [rw] temperature
+         #   @return [Float, nil]
+         attr_accessor :temperature
+
+         # @!attribute [rw] instructions
+         #   @return [String, nil]
+         attr_accessor :instructions
+
+         # @!attribute [rw] tools
+         #   @return [Array<Hash>, nil]
+         attr_accessor :tools
+
+         # @!attribute [rw] metadata
+         #   @return [Hash, nil]
+         attr_accessor :metadata
+
+         # @param id [String, nil] optional
+         # @param assistant_id [String, nil] optional
+         # @param thread_id [String, nil] optional
+         # @param status [String, nil] optional
+         # @param temperature [Float, nil] optional
+         # @param instructions [String, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param tools [Array<Hash>, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         def initialize(
+           id: nil,
+           assistant_id: nil,
+           thread_id: nil,
+           status: nil,
+           model: nil,
+           temperature: nil,
+           instructions: nil,
+           metadata: {},
+           tools: [],
+           client: Client.new
+         )
+           @id = id
+           @assistant_id = assistant_id
+           @thread_id = thread_id
+           @status = status
+           @model = model
+           @temperature = temperature
+           @instructions = instructions
+           @metadata = metadata
+           @tools = tools
+           @client = client
+         end
+
+         # @return [String]
+         def inspect
+           props = [
+             "id=#{@id.inspect}",
+             ("assistant_id=#{@assistant_id.inspect}" if @assistant_id),
+             ("thread_id=#{@thread_id.inspect}" if @thread_id),
+             ("status=#{@status.inspect}" if @status),
+           ].compact
+           "#<#{self.class.name} #{props.join(' ')}>"
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def self.find(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs/#{id}")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+         end
+
+         # @param thread_id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Run>]
+         def self.all(thread_id:, limit: nil, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs", params: { limit: }.compact)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse['data'].map { |data| parse(data:, client:) }
+         end
+
+         # @param thread_id [String] required
+         # @param id [String] required
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [Hash]
+         def self.cancel!(thread_id:, id:, client: Client.new)
+           response = client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .post("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{thread_id}/runs/#{id}/cancel")
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           response.parse
+         end
+
+         # @raise [HTTPError]
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def save!
+           response = @client.connection
+             .accept(:json)
+             .headers(HEADERS)
+             .post(path, json: payload)
+
+           raise HTTPError, response.flush unless response.status.ok?
+
+           parse(data: response.parse)
+           self
+         end
+
+         # @raise [OmniAI::Error]
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def cancel!
+           raise OmniAI::Error, 'cannot cancel a non-persisted run' unless @id
+
+           data = self.class.cancel!(thread_id: @thread_id, id: @id, client: @client)
+           @status = data['status']
+           self
+         end
+
+         private
+
+         class << self
+           private
+
+           # @param data [Hash] required
+           # @param client [OmniAI::OpenAI::Client] required
+           # @return [OmniAI::OpenAI::Thread::Run]
+           def parse(data:, client: Client.new)
+             new(
+               client:,
+               id: data['id'],
+               assistant_id: data['assistant_id'],
+               thread_id: data['thread_id'],
+               status: data['status'],
+               model: data['model'],
+               temperature: data['temperature'],
+               instructions: data['instructions'],
+               tools: data['tools'],
+               metadata: data['metadata']
+             )
+           end
+         end
+
+         # @param data [Hash] required
+         def parse(data:)
+           @id = data['id']
+           @assistant_id = data['assistant_id']
+           @thread_id = data['thread_id']
+           @run_id = data['run_id']
+           @status = data['status']
+           @model = data['model']
+           @temperature = data['temperature']
+           @instructions = data['instructions']
+           @tools = data['tools']
+           @metadata = data['metadata']
+         end
+
+         # @return [Hash]
+         def payload
+           {
+             assistant_id: @assistant_id,
+             model: @model,
+             temperature: @temperature,
+             instructions: @instructions,
+             tools: @tools,
+             metadata: @metadata,
+           }.compact
+         end
+
+         # @return [String]
+         def path
+           "/#{OmniAI::OpenAI::Client::VERSION}/threads/#{@thread_id}/runs#{"/#{@id}" if @id}"
+         end
+       end
+     end
+   end
+ end
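
A hedged sketch of the new `Thread::Run` lifecycle: `save!` starts the run and `find` re-fetches its status. The IDs are placeholders and the default client is assumed to hold credentials:

    run = OmniAI::OpenAI::Thread::Run.new(thread_id: 'thread_123', assistant_id: 'asst_456')
    run.save!   # POST /{VERSION}/threads/thread_123/runs
    run = OmniAI::OpenAI::Thread::Run.find(thread_id: 'thread_123', id: run.id)
    run.cancel! if %w[queued in_progress].include?(run.status)   # POST .../runs/{id}/cancel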
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     class Thread
+       # An OpenAI scope for establishing runs.
+       class Runs
+         # @param client [OmniAI::OpenAI::Client] required
+         # @param thread [OmniAI::OpenAI::Thread] required
+         def initialize(client:, thread:)
+           @client = client
+           @thread = thread
+         end
+
+         # @param limit [Integer] optional
+         # @return [Array<OmniAI::OpenAI::Thread::Run>]
+         def all(limit: nil)
+           Run.all(thread_id: @thread.id, limit:, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def find(id:)
+           Run.find(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param id [String] required
+         # @return [Hash]
+         def cancel!(id:)
+           Run.cancel!(id:, thread_id: @thread.id, client: @client)
+         end
+
+         # @param assistant_id [String] required
+         # @param model [String] optional
+         # @param temperature [Float] optional
+         # @param instructions [String] optional
+         # @param tools [Array<Hash>, nil] optional
+         # @param metadata [Hash, nil] optional
+         # @param client [OmniAI::OpenAI::Client] optional
+         # @return [OmniAI::OpenAI::Thread::Run]
+         def build(assistant_id:, model: nil, temperature: nil, instructions: nil, tools: nil, metadata: {})
+           Run.new(
+             assistant_id:,
+             thread_id: @thread.id,
+             model:,
+             temperature:,
+             instructions:,
+             tools:,
+             metadata:,
+             client: @client
+           )
+         end
+       end
+     end
+   end
+ end
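
As with messages, the `Runs` scope hangs off `Thread#runs`; a placeholder-ID sketch:

    thread = OmniAI::OpenAI::Thread.find(id: 'thread_123')
    run = thread.runs.build(assistant_id: 'asst_456', temperature: 0.7)
    run.save!
    thread.runs.all(limit: 10).each { |r| puts r.inspect }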
@@ -0,0 +1,141 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI threads implementation.
+     class Thread
+       HEADERS = { 'OpenAI-Beta': 'assistants=v2' }.freeze
+
+       # @!attribute [rw] id
+       #   @return [String, nil]
+       attr_accessor :id
+
+       # @!attribute [rw] metadata
+       #   @return [Hash]
+       attr_accessor :metadata
+
+       # @!attribute [rw] tool_resources
+       #   @return [Hash]
+       attr_accessor :tool_resources
+
+       # @!attribute [rw] deleted
+       #   @return [Boolean, nil]
+       attr_accessor :deleted
+
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @param id [String, nil] optional
+       # @param metadata [Hash] optional
+       def initialize(
+         client: Client.new,
+         id: nil,
+         metadata: {},
+         tool_resources: {}
+       )
+         @client = client
+         @id = id
+         @metadata = metadata
+         @tool_resources = tool_resources
+       end
+
+       # @return [String]
+       def inspect
+         "#<#{self.class.name} id=#{@id.inspect}>"
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [OmniAI::OpenAI::Thread]
+       def self.find(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .get("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+       end
+
+       # @param id [String] required
+       # @param client [OmniAI::OpenAI::Client] optional
+       # @return [Hash]
+       def self.destroy!(id:, client: Client.new)
+         response = client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .delete("/#{OmniAI::OpenAI::Client::VERSION}/threads/#{id}")
+
+         raise HTTPError, response.flush unless response.status.ok?
+
+         response.parse
+       end
+
+       # @raise [HTTPError]
+       # @return [OmniAI::OpenAI::Thread]
+       def save!
+         response = @client.connection
+           .accept(:json)
+           .headers(HEADERS)
+           .post("/#{OmniAI::OpenAI::Client::VERSION}/threads#{"/#{@id}" if @id}", json: payload)
+         raise HTTPError, response.flush unless response.status.ok?
+
+         parse(data: response.parse)
+         self
+       end
+
+       # @raise [OmniAI::Error]
+       # @return [OmniAI::OpenAI::Thread]
+       def destroy!
+         raise OmniAI::Error, 'cannot destroy a non-persisted thread' unless @id
+
+         data = self.class.destroy!(id: @id, client: @client)
+         @deleted = data['deleted']
+         self
+       end
+
+       # @return [OmniAI::OpenAI::Thread::Messages]
+       def messages
+         Messages.new(client: @client, thread: self)
+       end
+
+       # @return [OmniAI::OpenAI::Thread::Runs]
+       def runs
+         Runs.new(client: @client, thread: self)
+       end
+
+       private
+
+       class << self
+         private
+
+         # @param data [Hash] required
+         # @param client [OmniAI::OpenAI::Client] required
+         # @return [OmniAI::OpenAI::Thread]
+         def parse(data:, client: Client.new)
+           new(
+             client:,
+             id: data['id'],
+             metadata: data['metadata'],
+             tool_resources: data['tool_resources']
+           )
+         end
+       end
+
+       # @param data [Hash] required
+       # @return [OmniAI::OpenAI::Thread]
+       def parse(data:)
+         @id = data['id']
+         @metadata = data['metadata']
+         @tool_resources = data['tool_resources']
+       end
+
+       # @return [Hash]
+       def payload
+         {
+           metadata: @metadata,
+           tool_resources: @tool_resources,
+         }.compact
+       end
+     end
+   end
+ end
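
Putting the `Thread` pieces together, a minimal sketch assuming the default client is configured with credentials:

    thread = OmniAI::OpenAI::Thread.new(metadata: { 'purpose' => 'demo' })
    thread.save!                                       # POST /{VERSION}/threads
    thread.messages.build(role: 'user', content: 'Hi there').save!
    thread.destroy!                                    # DELETE /{VERSION}/threads/{id}, populates #deleted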
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # An OpenAI scope for establishing threads.
+     class Threads
+       # @param client [OmniAI::OpenAI::Client] required
+       def initialize(client:)
+         @client = client
+       end
+
+       # @param id [String] required
+       def find(id:)
+         Thread.find(id:, client: @client)
+       end
+
+       # @param id [String] required
+       def destroy!(id:)
+         Thread.destroy!(id:, client: @client)
+       end
+
+       # @param metadata [Hash] optional
+       # @param tool_resources [Hash] optional
+       def build(metadata: {}, tool_resources: {})
+         Thread.new(metadata:, tool_resources:, client: @client)
+       end
+     end
+   end
+ end
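
The `Threads` scope wraps the same operations around an explicit client; for example:

    threads = OmniAI::OpenAI::Threads.new(client: OmniAI::OpenAI::Client.new)
    thread = threads.build(metadata: { 'topic' => 'support' })   # metadata is illustrative
    thread.save!
    threads.destroy!(id: thread.id)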
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module OpenAI
+     # A set of tools.
+     module Tool
+       FILE_SEARCH = { type: 'file_search' }.freeze
+       CODE_INTERPRETER = { type: 'code_interpreter' }.freeze
+     end
+   end
+ end
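
The `Tool` constants are plain frozen hashes; a speculative sketch passing them as a run's `tools` (continuing the placeholder thread and assistant IDs above):

    run = thread.runs.build(
      assistant_id: 'asst_456',
      tools: [OmniAI::OpenAI::Tool::FILE_SEARCH, OmniAI::OpenAI::Tool::CODE_INTERPRETER]
    )
    run.save!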