intelli_agent 0.2.0 → 0.2.2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 4a8a694f4bb6ba84140e24bdf44962629eb63ff75b8a9ff8bc1ab5ef416db5cb
4
- data.tar.gz: a3ef08dc21d823b19d597f8b1355328f55d6f767ddb21a99fe3bbac5cf9256b3
3
+ metadata.gz: a70f9043e7c97455596b720e803c7ae02274c8bf8e63eff090cf813327e256d7
4
+ data.tar.gz: b0e4e27b5ea461dbf87e8b7fa551b85de2258f3ffcb56c8fb98ba32d74d6a3a7
5
5
  SHA512:
6
- metadata.gz: 7602007cab2f23363d6ed87c9f22d1d262325e2080eb8dc5f2f7e0b8d599d98f7e8666348e3e477660acbf01a2f63bfc98199cd8f4eb5afb143165ef79058b74
7
- data.tar.gz: 49bbef1d5c157d9b4f51aede33b6bc83858a53b4c10281132b0dee3b06117459a144ab3c386496ac23a7c0078fb6f6079d9f0e0906898af1cb2f365d99ffe7af
6
+ metadata.gz: 8792b7a3c0fddbbf7969f9d45a3f1a171613ad4f6f5331f88c0e43aa0adbd2c58ea4c7ad563dcd6eff6e5d00f4a4fda33cfaf8568efaacaff7fe242970c8464b
7
+ data.tar.gz: 7e60f9ddb4c80bfdd642330f1e066e641023f206f72302d9cc07cc27c4e70e4d353030d24600831dae1f97ec7edfd913ba6e37c5e53360c122492d658a4228e0
@@ -0,0 +1,145 @@
1
# Helper layer over the OpenAI API (via the ruby-openai client).
# Provides prompt/chat/vision/embedding helpers plus automatic execution of
# tool (function) calls against a caller-supplied `function_run_context`.
module IntelliAgent::OpenAI
  # Default model identifiers; overridable through environment variables.
  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
  MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i

  # Mixed (via Object#extend) into raw API response hashes to provide
  # convenient accessors over the nested 'choices' structure.
  module ResponseExtender
    # Text content of the first choice, or nil when absent.
    def content
      dig('choices', 0, 'message', 'content')
    end

    # Full message hash of the first choice.
    def message
      dig('choices', 0, 'message')
    end

    def content? = !content.nil?

    # Raw tool_calls array of the first choice, or nil.
    def tool_calls
      dig('choices', 0, 'message', 'tool_calls')
    end

    def tool_calls? = !tool_calls.nil?

    # Tool calls of type 'function', normalized to
    # { id:, name:, arguments: } with symbolized argument keys.
    # Returns nil when the response carries no function calls.
    def functions
      return if tool_calls.nil?

      function_calls = tool_calls.select { |tool| tool['type'].eql?('function') }
      return if function_calls.empty?

      # BUGFIX: read each call's own 'function' entry. The original did
      # tool_calls.dig(function_index, 'function') with an index from the
      # *filtered* list, which misaligns when non-function tool calls exist.
      function_calls.map do |function|
        function_def = function['function']
        { id: function['id'],
          name: function_def['name'],
          arguments: Oj.load(function_def['arguments'], symbol_keys: true) }
      end
    end

    def functions? = !functions.nil?
  end

  # Returns the embedding vector (Array<Float>) for +input+.
  def self.embed(input, model: 'text-embedding-3-large')
    response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
    response.dig('data', 0, 'embedding')
  end

  # Asks +model+ about +image_url+ with +prompt+; returns the stripped
  # text of the first choice. Pass response_format: :json for JSON mode.
  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
    model = select_model(model)
    user_content = [{ type: :text, text: prompt },
                    { type: :image_url, image_url: { url: image_url } }]

    parameters = { model:, messages: [{ role: :user, content: user_content }], max_tokens: }
    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)

    response = OpenAI::Client.new.chat(parameters:)
    response.dig('choices', 0, 'message', 'content').strip
  end

  # Single user prompt. BUGFIX: the original called the undefined method
  # banana(parameters:); delegate to .chat (which also handles tools),
  # matching the behavior of the released 0.2.2 implementation.
  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:, function_run_context:)
  end

  # One system + one user message. BUGFIX: same undefined banana(parameters:)
  # call as single_prompt; delegate to .chat instead.
  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:, function_run_context:)
  end

  # Runs a chat completion. When the model answers with function calls,
  # each function is invoked on +function_run_context+ and the conversation
  # is re-submitted once with the tool results appended.
  # Returns the response hash extended with ResponseExtender.
  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
    model = select_model(model)
    messages = parse_messages(messages)

    parameters = { model:, messages:, max_tokens: }
    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
    parameters[:tools] = tools if tools

    response = OpenAI::Client.new.chat(parameters:)
    response.extend(ResponseExtender)

    if response.functions?
      parameters[:messages] << response.message

      response.functions.each do |function|
        parameters[:messages] << {
          tool_call_id: function[:id],
          role: :tool,
          name: function[:name],
          # BUGFIX: invoke on the supplied context object. The original read
          # parameters[:function_run_context], a key never stored in the
          # parameters hash, so it always called .send on nil.
          content: function_run_context.send(function[:name], **function[:arguments])
        }
      end

      response = OpenAI::Client.new.chat(parameters:)
      response.extend(ResponseExtender)
    end

    response
  end

  # Lists the models available to the configured API key.
  def self.models = OpenAI::Client.new.models.list

  # Maps the :basic/:advanced shorthands to concrete model names;
  # any other value is passed through unchanged.
  def self.select_model(model)
    case model
    when :basic then BASIC_MODEL
    when :advanced then ADVANCED_MODEL
    else model
    end
  end

  # Accepts either the standard format ([{ role:, content: }, ...]) or the
  # short format ([{ system: '...' }, { user: '...' }]) and returns the
  # standard format with string role names.
  def self.parse_messages(messages)
    case messages
    in [{ role: String, content: String }, *]
      messages
    else
      messages.map do |msg|
        role, content = msg.first
        { role: role.to_s, content: content }
      end
    end
  end
end
@@ -8,9 +8,40 @@ module IntelliAgent::OpenAI
8
8
  dig('choices', 0, 'message', 'content')
9
9
  end
10
10
 
11
+ def message
12
+ dig('choices', 0, 'message')
13
+ end
14
+
15
+ def content?
16
+ !content.nil?
17
+ end
18
+
11
19
  def tool_calls
12
20
  dig('choices', 0, 'message', 'tool_calls')
13
21
  end
22
+
23
+ def tool_calls?
24
+ !tool_calls.nil?
25
+ end
26
+
27
+ def functions
28
+ return if tool_calls.nil?
29
+
30
+ functions = tool_calls.filter { |tool| tool['type'].eql? 'function' }
31
+ return if functions.empty?
32
+
33
+ functions_list = []
34
+ functions.map.with_index do |function, function_index|
35
+ function_def = tool_calls.dig(function_index, 'function')
36
+ functions_list << { id: function['id'], name: function_def['name'], arguments: Oj.load(function_def['arguments'], symbol_keys: true) }
37
+ end
38
+
39
+ functions_list
40
+ end
41
+
42
+ def functions?
43
+ !functions.nil?
44
+ end
14
45
  end
15
46
 
16
47
  def self.embed(input, model: 'text-embedding-3-large')
@@ -31,38 +62,17 @@ module IntelliAgent::OpenAI
31
62
  response.dig('choices', 0, 'message', 'content').strip
32
63
  end
33
64
 
34
- def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil)
35
- model = select_model(model)
36
-
37
- parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
38
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
39
- parameters[:tools] = tools if tools
40
-
41
- response = OpenAI::Client.new.chat(parameters:)
42
- response.extend(ResponseExtender)
43
- response
65
+ def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
66
+ chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:, function_run_context:)
44
67
  end
45
68
 
46
- def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
47
- model = select_model(model)
48
- parameters = { model:,
49
- messages: [
50
- { role: 'system', content: system },
51
- { role: 'user', content: user }
52
- ], max_tokens: }
53
-
54
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
55
- parameters[:tools] = tools if tools
56
-
57
- response = OpenAI::Client.new.chat(parameters:)
58
- response.extend(ResponseExtender)
59
- response
69
+ def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
70
+ chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:, function_run_context:)
60
71
  end
61
72
 
62
- def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
73
+ def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
63
74
  model = select_model(model)
64
-
65
- messages = determine_message_format(messages).eql?(:short_format) ? convert_message_to_standard_format(messages) : messages
75
+ messages = parse_messages(messages)
66
76
 
67
77
  parameters = { model:, messages:, max_tokens: }
68
78
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
@@ -70,6 +80,23 @@ module IntelliAgent::OpenAI
70
80
 
71
81
  response = OpenAI::Client.new.chat(parameters:)
72
82
  response.extend(ResponseExtender)
83
+
84
+ if response.functions?
85
+ parameters[:messages] << response.message
86
+
87
+ response.functions.each do |function|
88
+ parameters[:messages] << {
89
+ tool_call_id: function[:id],
90
+ role: :tool,
91
+ name: function[:name],
92
+ content: parameters[:function_run_context].send(function[:name], **function[:arguments])
93
+ }
94
+ end
95
+
96
+ response = OpenAI::Client.new.chat(parameters:)
97
+ response.extend(ResponseExtender)
98
+ end
99
+
73
100
  response
74
101
  end
75
102
 
@@ -86,21 +113,15 @@ module IntelliAgent::OpenAI
86
113
  end
87
114
  end
88
115
 
89
- def self.determine_message_format(messages)
116
+ def self.parse_messages(messages)
90
117
  case messages
91
118
  in [{ role: String, content: String }, *]
92
- :standard_format
93
- in [{ system: String }, { user: String }, *]
94
- :short_format
119
+ messages
95
120
  else
96
- :unknown_format
121
+ messages.map do |msg|
122
+ role, content = msg.first
123
+ { role: role.to_s, content: content }
124
+ end
97
125
  end
98
126
  end
99
-
100
- def self.convert_message_to_standard_format(messages)
101
- messages.map do |msg|
102
- role, content = msg.first
103
- { role: role.to_s, content: content }
104
- end
105
- end
106
127
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.0
4
+ version: 0.2.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
@@ -38,6 +38,20 @@ dependencies:
38
38
  - - "~>"
39
39
  - !ruby/object:Gem::Version
40
40
  version: '0.3'
41
+ - !ruby/object:Gem::Dependency
42
+ name: oj
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: '3'
48
+ type: :runtime
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: '3'
41
55
  description: A helper layer over Anthropic and OpenAI API
42
56
  email: gedean.dias@gmail.com
43
57
  executables: []
@@ -49,6 +63,7 @@ files:
49
63
  - lib/intelli_agent/agent.rb.disabled
50
64
  - lib/intelli_agent/agent_test.rb_
51
65
  - lib/intelli_agent/anthropic.rb
66
+ - lib/intelli_agent/openai.bkp.rb
52
67
  - lib/intelli_agent/openai.rb
53
68
  homepage: https://github.com/gedean/intelli_agent
54
69
  licenses: