intelli_agent 0.2.1 → 0.2.2
- checksums.yaml +4 -4
- data/lib/intelli_agent/openai.bkp.rb +145 -0
- data/lib/intelli_agent/openai.rb +52 -39
- metadata +16 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a70f9043e7c97455596b720e803c7ae02274c8bf8e63eff090cf813327e256d7
+  data.tar.gz: b0e4e27b5ea461dbf87e8b7fa551b85de2258f3ffcb56c8fb98ba32d74d6a3a7
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8792b7a3c0fddbbf7969f9d45a3f1a171613ad4f6f5331f88c0e43aa0adbd2c58ea4c7ad563dcd6eff6e5d00f4a4fda33cfaf8568efaacaff7fe242970c8464b
+  data.tar.gz: 7e60f9ddb4c80bfdd642330f1e066e641023f206f72302d9cc07cc27c4e70e4d353030d24600831dae1f97ec7edfd913ba6e37c5e53360c122492d658a4228e0
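The two digests in checksums.yaml cover the metadata.gz and data.tar.gz members of the packaged gem. As a rough Ruby sketch (the local file name is an assumption), the SHA256 values above could be recomputed from a downloaded copy like this:

require 'digest'
require 'rubygems/package'

# Recompute the SHA256 digests that checksums.yaml records for the two
# archive members of a locally downloaded gem file (path is hypothetical).
digests = {}
File.open('intelli_agent-0.2.2.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    digests[entry.full_name] = Digest::SHA256.hexdigest(entry.read)
  end
end
pp digests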
data/lib/intelli_agent/openai.bkp.rb
ADDED
@@ -0,0 +1,145 @@
+module IntelliAgent::OpenAI
+  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
+  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
+  MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
+
+  module ResponseExtender
+    def content
+      dig('choices', 0, 'message', 'content')
+    end
+
+    def message
+      dig('choices', 0, 'message')
+    end
+
+    def content?
+      !content.nil?
+    end
+
+    def tool_calls
+      dig('choices', 0, 'message', 'tool_calls')
+    end
+
+    def tool_calls?
+      !tool_calls.nil?
+    end
+
+    def functions
+      return if tool_calls.nil?
+
+      functions = tool_calls.filter { |tool| tool['type'].eql? 'function' }
+      return if functions.empty?
+
+      functions_list = []
+      functions.map.with_index do |function, function_index|
+        function_def = tool_calls.dig(function_index, 'function')
+        functions_list << { id: function['id'], name: function_def['name'], arguments: Oj.load(function_def['arguments'], symbol_keys: true) }
+      end
+
+      functions_list
+    end
+
+    def functions?
+      !functions.nil?
+    end
+  end
+
+  def self.embed(input, model: 'text-embedding-3-large')
+    response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
+    response.dig('data', 0, 'embedding')
+  end
+
+  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
+    model = select_model(model)
+    messages = [{ type: :text, text: prompt },
+                { type: :image_url, image_url: { url: image_url } }]
+
+    parameters = { model: model, messages: [{ role: :user, content: messages }], max_tokens: }
+    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+
+    response = OpenAI::Client.new.chat(parameters:)
+
+    response.dig('choices', 0, 'message', 'content').strip
+  end
+
+
+
+  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    model = select_model(model)
+
+    parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
+    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+    parameters[:tools] = tools if tools
+    banana(parameters:)
+  end
+
+  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    model = select_model(model)
+    parameters = { model:,
+                   messages: [
+                     { role: 'system', content: system },
+                     { role: 'user', content: user }
+                   ], max_tokens: }
+
+    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+    parameters[:tools] = tools if tools
+
+    banana(parameters:)
+  end
+
+  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    model = select_model(model)
+
+    messages = parse_messages(messages)
+
+    parameters = { model:, messages:, max_tokens: }
+    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+    parameters[:tools] = tools if tools
+
+    response = OpenAI::Client.new.chat(parameters:)
+    response.extend(ResponseExtender)
+
+    if response.functions?
+      parameters[:messages] << response.message
+
+      response.functions.each do |function|
+        parameters[:messages] << {
+          tool_call_id: function[:id],
+          role: :tool,
+          name: function[:name],
+          content: parameters[:function_run_context].send(function[:name], **function[:arguments])
+        }
+      end
+
+      response = OpenAI::Client.new.chat(parameters:)
+      response.extend(ResponseExtender)
+    end
+
+    response
+  end
+
+  def self.models = OpenAI::Client.new.models.list
+
+  def self.select_model(model)
+    case model
+    when :basic
+      BASIC_MODEL
+    when :advanced
+      ADVANCED_MODEL
+    else
+      model
+    end
+  end
+
+  def self.parse_messages(messages)
+    case messages
+    in [{ role: String, content: String }, *]
+      messages
+    else
+      messages.map do |msg|
+        role, content = msg.first
+        { role: role.to_s, content: content }
+      end
+    end
+  end
+end
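The backup file above already carries the message-handling helpers that land in openai.rb below. In particular, parse_messages lets callers pass either the standard role/content hashes or a shorter one-key-per-message form. A small illustration of that normalization, with made-up message text:

# Short form accepted by chat (and built by single_chat in the new code):
short_form = [{ system: 'You are a terse assistant.' },
              { user: 'Name three Ruby web servers.' }]

# What parse_messages turns it into before the API call:
short_form.map do |msg|
  role, content = msg.first
  { role: role.to_s, content: content }
end
# => [{ role: "system", content: "You are a terse assistant." },
#     { role: "user", content: "Name three Ruby web servers." }]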
data/lib/intelli_agent/openai.rb
CHANGED
@@ -8,6 +8,10 @@ module IntelliAgent::OpenAI
       dig('choices', 0, 'message', 'content')
     end
 
+    def message
+      dig('choices', 0, 'message')
+    end
+
     def content?
       !content.nil?
     end
@@ -19,6 +23,25 @@ module IntelliAgent::OpenAI
     def tool_calls?
       !tool_calls.nil?
     end
+
+    def functions
+      return if tool_calls.nil?
+
+      functions = tool_calls.filter { |tool| tool['type'].eql? 'function' }
+      return if functions.empty?
+
+      functions_list = []
+      functions.map.with_index do |function, function_index|
+        function_def = tool_calls.dig(function_index, 'function')
+        functions_list << { id: function['id'], name: function_def['name'], arguments: Oj.load(function_def['arguments'], symbol_keys: true) }
+      end
+
+      functions_list
+    end
+
+    def functions?
+      !functions.nil?
+    end
   end
 
   def self.embed(input, model: 'text-embedding-3-large')
@@ -39,38 +62,17 @@ module IntelliAgent::OpenAI
     response.dig('choices', 0, 'message', 'content').strip
   end
 
-  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil)
-    model
-
-    parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
-    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
-    parameters[:tools] = tools if tools
-
-    response = OpenAI::Client.new.chat(parameters:)
-    response.extend(ResponseExtender)
-    response
+  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:, function_run_context:)
   end
 
-  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
-    model
-    parameters = { model:,
-                   messages: [
-                     { role: 'system', content: system },
-                     { role: 'user', content: user }
-                   ], max_tokens: }
-
-    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
-    parameters[:tools] = tools if tools
-
-    response = OpenAI::Client.new.chat(parameters:)
-    response.extend(ResponseExtender)
-    response
+  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:, function_run_context:)
   end
 
-  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
+  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
     model = select_model(model)
-
-    messages = determine_message_format(messages).eql?(:short_format) ? convert_message_to_standard_format(messages) : messages
+    messages = parse_messages(messages)
 
     parameters = { model:, messages:, max_tokens: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
@@ -78,6 +80,23 @@ module IntelliAgent::OpenAI
 
     response = OpenAI::Client.new.chat(parameters:)
     response.extend(ResponseExtender)
+
+    if response.functions?
+      parameters[:messages] << response.message
+
+      response.functions.each do |function|
+        parameters[:messages] << {
+          tool_call_id: function[:id],
+          role: :tool,
+          name: function[:name],
+          content: parameters[:function_run_context].send(function[:name], **function[:arguments])
+        }
+      end
+
+      response = OpenAI::Client.new.chat(parameters:)
+      response.extend(ResponseExtender)
+    end
+
     response
   end
 
@@ -94,21 +113,15 @@ module IntelliAgent::OpenAI
     end
   end
 
-  def self.
+  def self.parse_messages(messages)
     case messages
     in [{ role: String, content: String }, *]
-
-    in [{ system: String }, { user: String }, *]
-      :short_format
+      messages
     else
-
+      messages.map do |msg|
+        role, content = msg.first
+        { role: role.to_s, content: content }
+      end
     end
   end
-
-  def self.convert_message_to_standard_format(messages)
-    messages.map do |msg|
-      role, content = msg.first
-      { role: role.to_s, content: content }
-    end
-  end
 end
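The headline change above is that chat now extends the raw response with ResponseExtender and, when the model issued tool calls, runs each function on function_run_context and sends the results back for a second completion. A sketch of what the new functions helper extracts; the response hash below is a hand-written stand-in for an API payload, current_weather is a hypothetical tool name, and require 'intelli_agent' assumes the gem's conventional entry point:

require 'oj'
require 'intelli_agent'

# Hand-written example of a chat completion containing one tool call.
response = {
  'choices' => [
    { 'message' => {
        'tool_calls' => [
          { 'id'       => 'call_abc123',
            'type'     => 'function',
            'function' => { 'name' => 'current_weather',
                            'arguments' => '{"city":"Lisbon"}' } }
        ] } }
  ]
}

response.extend(IntelliAgent::OpenAI::ResponseExtender)

response.functions?  # => true
response.functions
# => [{ id: "call_abc123", name: "current_weather", arguments: { city: "Lisbon" } }]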
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.2.2
 platform: ruby
 authors:
 - Gedean Dias
@@ -38,6 +38,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '0.3'
+- !ruby/object:Gem::Dependency
+  name: oj
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3'
 description: A helper layer over Anthropic and OpenAI API
 email: gedean.dias@gmail.com
 executables: []
@@ -49,6 +63,7 @@ files:
 - lib/intelli_agent/agent.rb.disabled
 - lib/intelli_agent/agent_test.rb_
 - lib/intelli_agent/anthropic.rb
+- lib/intelli_agent/openai.bkp.rb
 - lib/intelli_agent/openai.rb
 homepage: https://github.com/gedean/intelli_agent
 licenses:
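The new runtime dependency on oj (~> 3) comes from the functions helper shown above, which parses the model's JSON-encoded arguments with Oj.load so they can be splatted straight into the dispatched method. A one-line illustration with a made-up arguments string:

require 'oj'

# symbol_keys: true yields a hash ready for **-splatting into send(name, **args),
# which is how the tool-call dispatch in chat invokes the named function.
Oj.load('{"city":"Lisbon","units":"metric"}', symbol_keys: true)
# => { city: "Lisbon", units: "metric" }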