intelli_agent 0.2.7 → 0.2.9
This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/intelli_agent/openai.rb +33 -23
- metadata +4 -3
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 586990e6d1d8fa1fb890bb9530d7f175f4f403ed309ca111517755ed5e07c2f3
+  data.tar.gz: 669c3765fd006605eb86b93a31b90aa4e6101a50820e8e3dd9eaeec54f17f2d0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7af7fe5d68898a89ace33fd71a5eeb519566760900c675782d5620ff29c56013b3e6b92e7074b006668cafa825e5b3ce48db5666b61e5140e14903b54874e739
+  data.tar.gz: dbb26d71d8187090d151436a8ec2229950b13f93f87c228c42cfaaa77ec98762f75f86e9696f1f9a843b0848cfde92a2ca9cd3afdab613cb51d693193ac17fb8
data/lib/intelli_agent/openai.rb
CHANGED

@@ -8,6 +8,8 @@ module IntelliAgent::OpenAI
   MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
 
   module ResponseExtender
+    def chat_params = self[:chat_params]
+
     def message = dig('choices', 0, 'message')
 
     def content = dig('choices', 0, 'message', 'content')
@@ -24,13 +26,29 @@ module IntelliAgent::OpenAI
 
       functions_list = []
       functions.map.with_index do |function, function_index|
-
-
+        function_info = tool_calls.dig(function_index, 'function')
+        function_def = { id: function['id'], name: function_info['name'], arguments: Oj.load(function_info['arguments'], symbol_keys: true) }
+
+        def function_def.run(context:)
+          {
+            tool_call_id: self[:id],
+            role: :tool,
+            name: self[:name],
+            content: context.send(self[:name], **self[:arguments])
+          }
+        end
+
+        functions_list << function_def
       end
 
      functions_list
    end
 
+    def functions_run_all(context:)
+      raise 'No functions to run' if functions.nil?
+      functions.map { |function| function.run(context:) }
+    end
+
     def functions? = !functions.nil?
   end
 
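To make the new `ResponseExtender` helpers concrete, here is a minimal sketch of handling tool calls by hand. Only `functions?`, `functions_run_all(context:)`, and the per-function `run(context:)` come from the diff above; the tool definition, the `WeatherContext` class, and the require path are illustrative assumptions, not part of the gem. The diff for this file continues below.

```ruby
require 'intelli_agent'

# Hypothetical tool definition in the OpenAI function-calling format.
weather_tool = {
  type: 'function',
  function: {
    name: 'current_weather',
    description: 'Look up the current weather for a city',
    parameters: {
      type: 'object',
      properties: { city: { type: 'string' } },
      required: ['city']
    }
  }
}

# Hypothetical context object: it only needs to respond to the tool name,
# since each function is run as context.send(self[:name], **self[:arguments]).
class WeatherContext
  def current_weather(city:) = "Sunny in #{city}"
end

response = IntelliAgent::OpenAI.chat(
  messages: [{ user: 'What is the weather in Lisbon?' }],
  tools: [weather_tool]
)

if response.functions?
  # functions_run_all maps run(context:) over every tool call and returns
  # tool-role messages ready to append to the conversation.
  tool_messages = response.functions_run_all(context: WeatherContext.new)
end
```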
@@ -55,15 +73,15 @@ module IntelliAgent::OpenAI
     response
   end
 
-  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil,
-    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:,
+  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, tools: nil, auto_run_functions: false, function_context: nil)
+    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
   end
 
-  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil,
-    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:,
+  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, tools: nil, auto_run_functions: false, function_context: nil)
+    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
   end
 
-  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil,
+  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, tools: nil, auto_run_functions: false, function_context: nil)
     model = select_model(model)
 
     # o1 models doesn't support max_tokens, instead max_completion_tokens
@@ -72,7 +90,7 @@ module IntelliAgent::OpenAI
 
     messages = parse_messages(messages)
 
-    parameters = { model:, messages: }
+    parameters = { model:, messages:, store: }
 
     parameters[:max_completion_tokens] = max_completion_tokens if is_o1_model
     parameters[:max_tokens] = max_completion_tokens unless is_o1_model
@@ -81,26 +99,18 @@ module IntelliAgent::OpenAI
     parameters[:tools] = tools if tools
 
     response = OpenAI::Client.new.chat(parameters:)
+
+    response[:chat_params] = parameters
     response.extend(ResponseExtender)
 
-    if response.functions?
-      raise 'Function
-
+    if response.functions? && auto_run_functions
+      raise 'Function context not provided for auto-running functions' if function_context.nil?
       parameters[:messages] << response.message
+      parameters[:messages] += response.functions_run_all(context: function_context)
 
-      response
-        parameters[:messages] << {
-          tool_call_id: function[:id],
-          role: :tool,
-          name: function[:name],
-          content: function_run_context.send(function[:name], **function[:arguments])
-        }
-      end
-
-      response = OpenAI::Client.new.chat(parameters:)
-      response.extend(ResponseExtender)
+      response = chat(**parameters.except(:chat_params))
     end
-
+
 
     response
   end
 
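For reference, a hedged end-to-end sketch of the new auto-run path introduced in this file. Only the keyword arguments (`store:`, `tools:`, `auto_run_functions:`, `function_context:`) and the `content`/`chat_params` helpers appear in the diff above; the tool definition, the `MathContext` class, and the require path are assumptions for illustration, not part of the released gem.

```ruby
require 'intelli_agent'

# Hypothetical tool definition in the OpenAI function-calling format.
sum_tool = {
  type: 'function',
  function: {
    name: 'add',
    description: 'Add two integers',
    parameters: {
      type: 'object',
      properties: { a: { type: 'integer' }, b: { type: 'integer' } },
      required: %w[a b]
    }
  }
}

# Hypothetical function context: the receiver for the tool methods, invoked
# as context.send(:add, a:, b:) when the model requests the tool.
class MathContext
  def add(a:, b:) = a + b
end

response = IntelliAgent::OpenAI.chat(
  messages: [{ system: 'You are a calculator.' }, { user: 'What is 2 + 3?' }],
  store: true,                       # new: forwarded to the API as `store`
  tools: [sum_tool],
  auto_run_functions: true,          # new: tool calls are run automatically
  function_context: MathContext.new  # new: object whose methods implement the tools
)

puts response.content     # answer from the follow-up chat call
puts response.chat_params # new helper: the parameters used for the request
```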
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.2.
+  version: 0.2.9
 platform: ruby
 authors:
 - Gedean Dias
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2027-
+date: 2027-10-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -52,7 +52,8 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '3'
-description:
+description: Adds helpers modules, classes and methods to make it easier to use Anthropic
+  and OpenAI API
 email: gedean.dias@gmail.com
 executables: []
 extensions: []