intelli_agent 0.2.1 → 0.2.3

Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +57 -41
  3. metadata +16 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: be996d76730695ba38477d0e748affd640eb03bf361892128305e668e32e1a43
-  data.tar.gz: d860d1779e054c68214bf69fe640f2420486e514dded5f0b2b9c328da09a0e32
+  metadata.gz: 8b165ba2e8f4c70fc7b6719f1761e89a44c7646d9018ad55142d0b9ff39c167a
+  data.tar.gz: d618dd1fe8e8cdcacab135f2a59a59aa823875e9e30ec39d4dce3649b962a5af
 SHA512:
-  metadata.gz: 46b6fe360ca8265f5d495b2853f4ea8e3fe9c5834e6e7e2cd072de2196b77e49483d8e67a74c334f526b12ed26d36bb66720241946c9de088b931ef7d5f4eacc
-  data.tar.gz: d61707f97959304260975fc8b9b17f0f1cf355345b2f4db5715db2675645fcb19bec6e521fdf4d5ec54e4f7f2d6f7dd7bcfc636cae885f8b67151df7070aa3a7
+  metadata.gz: c72e622cbba6b3302096f4f32522173b29e0bf7c8921fc75fd6bfb8f231b2bd804c2421f128a605381b7b94d7c43c241b2cfd20b32b86281673623af5fc5dae9
+  data.tar.gz: 3e74939dc68fa5bc007c3a785b327fcffbda1e71efb0fdaaae6fe82b594b5e15a6756a7418764d7ab4e690b0f218d8f8417ae01c1ade8738291495ba7a2234bb
data/lib/intelli_agent/openai.rb CHANGED
@@ -8,6 +8,10 @@ module IntelliAgent::OpenAI
       dig('choices', 0, 'message', 'content')
     end
 
+    def message
+      dig('choices', 0, 'message')
+    end
+
     def content?
       !content.nil?
     end
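The new message reader exposes the full assistant message hash, which matters once a reply carries tool calls instead of plain text. A minimal usage sketch (illustrative prompt; assumes a configured OpenAI access token):

response = IntelliAgent::OpenAI.chat(messages: [{ user: 'Say hi' }])
response.content   # => "Hi there!" (nil when the model answered with tool calls only)
response.content?  # => true or false
response.message   # => the whole 'message' hash, including any 'tool_calls'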
@@ -19,11 +23,31 @@ module IntelliAgent::OpenAI
     def tool_calls?
       !tool_calls.nil?
     end
+
+    def functions
+      return if tool_calls.nil?
+
+      functions = tool_calls.filter { |tool| tool['type'].eql? 'function' }
+      return if functions.empty?
+
+      functions_list = []
+      functions.map.with_index do |function, function_index|
+        function_def = tool_calls.dig(function_index, 'function')
+        functions_list << { id: function['id'], name: function_def['name'], arguments: Oj.load(function_def['arguments'], symbol_keys: true) }
+      end
+
+      functions_list
+    end
+
+    def functions?
+      !functions.nil?
+    end
   end
 
   def self.embed(input, model: 'text-embedding-3-large')
     response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
-    response.dig('data', 0, 'embedding')
+    def response.embedding = dig('data', 0, 'embedding')
+    response
   end
 
   def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
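Two call-site changes follow from this hunk: functions condenses the assistant's tool calls into { id:, name:, arguments: } hashes (arguments decoded by Oj with symbol keys), and embed now returns the whole API response with a singleton embedding reader rather than the bare vector. A hedged sketch with illustrative values:

response = IntelliAgent::OpenAI.embed('A sentence to embed')
response.embedding  # => [0.0123, -0.0456, ...], i.e. dig('data', 0, 'embedding')

# After a chat response that requested tools:
# response.functions  # => [{ id: 'call_abc123', name: 'get_weather', arguments: { city: 'Lisbon' } }]
# response.functions? # => true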
@@ -36,41 +60,22 @@ module IntelliAgent::OpenAI
 
     response = OpenAI::Client.new.chat(parameters:)
 
-    response.dig('choices', 0, 'message', 'content').strip
-  end
+    def response.content = dig('choices', 0, 'message', 'content').strip
 
-  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil)
-    model = select_model(model)
-
-    parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
-    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
-    parameters[:tools] = tools if tools
-
-    response = OpenAI::Client.new.chat(parameters:)
-    response.extend(ResponseExtender)
     response
-  end
-
-  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
-    model = select_model(model)
-    parameters = { model:,
-                   messages: [
-                     { role: 'system', content: system },
-                     { role: 'user', content: user }
-                   ], max_tokens: }
+  end
 
-    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
-    parameters[:tools] = tools if tools
+  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:, function_run_context:)
+  end
 
-    response = OpenAI::Client.new.chat(parameters:)
-    response.extend(ResponseExtender)
-    response
+  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:, function_run_context:)
   end
 
-  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
+  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
     model = select_model(model)
-
-    messages = determine_message_format(messages).eql?(:short_format) ? convert_message_to_standard_format(messages) : messages
+    messages = parse_messages(messages)
 
     parameters = { model:, messages:, max_tokens: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
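single_prompt and single_chat are now thin wrappers over chat, and all three accept the short { role => content } message form plus the new tools: and function_run_context: options. For illustration (prompts are made up):

IntelliAgent::OpenAI.single_prompt(prompt: 'Summarize RFC 2119 in one sentence')

IntelliAgent::OpenAI.single_chat(system: 'Answer strictly in JSON.',
                                 user: 'List three Ruby web servers',
                                 response_format: :json)

# Equivalent explicit call through chat:
IntelliAgent::OpenAI.chat(messages: [{ system: 'Answer strictly in JSON.' },
                                     { user: 'List three Ruby web servers' }],
                          response_format: :json)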
@@ -78,6 +83,23 @@ module IntelliAgent::OpenAI
 
     response = OpenAI::Client.new.chat(parameters:)
     response.extend(ResponseExtender)
+
+    if response.functions?
+      parameters[:messages] << response.message
+
+      response.functions.each do |function|
+        parameters[:messages] << {
+          tool_call_id: function[:id],
+          role: :tool,
+          name: function[:name],
+          content: parameters[:function_run_context].send(function[:name], **function[:arguments])
+        }
+      end
+
+      response = OpenAI::Client.new.chat(parameters:)
+      response.extend(ResponseExtender)
+    end
+
     response
   end
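This branch feeds tool results back into the conversation: the assistant message is appended, each requested function is dispatched by name to the supplied run context, and the chat is re-sent with the tool outputs. A hedged sketch of what a caller might look like, using a hypothetical get_weather tool (tool definition in the OpenAI chat-completions function format):

class WeatherTools
  def get_weather(city:)
    "Sunny and 24°C in #{city}."  # a real implementation would query a weather API
  end
end

weather_tool = {
  type: 'function',
  function: {
    name: 'get_weather',
    description: 'Get the current weather for a city',
    parameters: { type: 'object',
                  properties: { city: { type: 'string' } },
                  required: ['city'] }
  }
}

IntelliAgent::OpenAI.chat(messages: [{ user: 'How is the weather in Lisbon?' }],
                          tools: [weather_tool],
                          function_run_context: WeatherTools.new)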
 
@@ -94,21 +116,15 @@ module IntelliAgent::OpenAI
     end
   end
 
-  def self.determine_message_format(messages)
+  def self.parse_messages(messages)
     case messages
     in [{ role: String, content: String }, *]
-      :standard_format
-    in [{ system: String }, { user: String }, *]
-      :short_format
+      messages
     else
-      :unknown_format
+      messages.map do |msg|
+        role, content = msg.first
+        { role: role.to_s, content: content }
+      end
     end
   end
-
-  def self.convert_message_to_standard_format(messages)
-    messages.map do |msg|
-      role, content = msg.first
-      { role: role.to_s, content: content }
-    end
-  end
 end
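parse_messages replaces the previous format-detection pair: standard role/content hashes pass through unchanged, and anything else is treated as the short form and mapped into them. For illustration:

IntelliAgent::OpenAI.parse_messages([{ system: 'Be terse.' }, { user: 'Hello' }])
# => [{ role: 'system', content: 'Be terse.' }, { role: 'user', content: 'Hello' }]

IntelliAgent::OpenAI.parse_messages([{ role: 'user', content: 'Hello' }])
# => returned unchanged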
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.2.3
 platform: ruby
 authors:
 - Gedean Dias
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2027-09-21 00:00:00.000000000 Z
+date: 2027-09-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -38,6 +38,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '0.3'
+- !ruby/object:Gem::Dependency
+  name: oj
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3'
 description: A helper layer over Anthropic and OpenAI API
 email: gedean.dias@gmail.com
 executables: []