intelli_agent 0.2.8 → 0.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +31 -21
  3. metadata +1 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 8f4f0af0654ac3e8e2e468dd804d3f18b254ebadf5649f6786ef05958279b59d
-  data.tar.gz: 5cd51d0ac3f2b083cea25cc499b87bcdfc46212e9af3934a4bc3f5fed25f1fa5
+  metadata.gz: 586990e6d1d8fa1fb890bb9530d7f175f4f403ed309ca111517755ed5e07c2f3
+  data.tar.gz: 669c3765fd006605eb86b93a31b90aa4e6101a50820e8e3dd9eaeec54f17f2d0
 SHA512:
-  metadata.gz: 3508e10d08f4ffbaa16f096e765168953a65b0831bcfe637932b2a808b8b5d0a62a6f049fddfdfbfad75f30ef02c6753980bf4ef216984c8aa5211555a949005
-  data.tar.gz: 3d0e9ed70b2acdd03e3cd9a1aedd9de0478ff4cd9187efaf3eda5dbba01d03c2744736e55a161a498af7c58b0a5dabd0a8c6b13c70bd3f5ed3b268fc28fe5482
+  metadata.gz: 7af7fe5d68898a89ace33fd71a5eeb519566760900c675782d5620ff29c56013b3e6b92e7074b006668cafa825e5b3ce48db5666b61e5140e14903b54874e739
+  data.tar.gz: dbb26d71d8187090d151436a8ec2229950b13f93f87c228c42cfaaa77ec98762f75f86e9696f1f9a843b0848cfde92a2ca9cd3afdab613cb51d693193ac17fb8
data/lib/intelli_agent/openai.rb CHANGED
@@ -8,6 +8,8 @@ module IntelliAgent::OpenAI
   MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
 
   module ResponseExtender
+    def chat_params = self[:chat_params]
+
     def message = dig('choices', 0, 'message')
 
     def content = dig('choices', 0, 'message', 'content')
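
The new chat_params reader exposes the request parameters that chat now stashes on the response hash (see the response[:chat_params] = parameters hunk below). A minimal usage sketch, assuming a configured OpenAI API key; the prompt text is purely illustrative:

  response = IntelliAgent::OpenAI.single_prompt(prompt: 'Say hello')
  response.content      # assistant reply text
  response.chat_params  # => { model: ..., messages: [...], store: true, ... }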
@@ -24,13 +26,29 @@ module IntelliAgent::OpenAI
 
       functions_list = []
       functions.map.with_index do |function, function_index|
-        function_def = tool_calls.dig(function_index, 'function')
-        functions_list << { id: function['id'], name: function_def['name'], arguments: Oj.load(function_def['arguments'], symbol_keys: true) }
+        function_info = tool_calls.dig(function_index, 'function')
+        function_def = { id: function['id'], name: function_info['name'], arguments: Oj.load(function_info['arguments'], symbol_keys: true) }
+
+        def function_def.run(context:)
+          {
+            tool_call_id: self[:id],
+            role: :tool,
+            name: self[:name],
+            content: context.send(self[:name], **self[:arguments])
+          }
+        end
+
+        functions_list << function_def
       end
 
       functions_list
     end
 
+    def functions_run_all(context:)
+      raise 'No functions to run' if functions.nil?
+      functions.map { |function| function.run(context:) }
+    end
+
     def functions? = !functions.nil?
   end
 
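
Each entry returned by functions now carries a singleton run(context:) method, and functions_run_all maps every pending tool call into a role: :tool message. A hedged sketch of how a caller might use this; WeatherTools and weather_tool_schema are hypothetical stand-ins for a real tool context and OpenAI tools definition:

  class WeatherTools
    # The method name must match the function name declared in the tools schema.
    def get_weather(city:) = "Sunny in #{city}"
  end

  response = IntelliAgent::OpenAI.chat(messages: [{ user: 'Weather in Lisbon?' }],
                                       tools: weather_tool_schema)
  if response.functions?
    tool_messages = response.functions_run_all(context: WeatherTools.new)
    # => [{ tool_call_id: '...', role: :tool, name: 'get_weather', content: 'Sunny in Lisbon' }]
  end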
@@ -55,15 +73,15 @@ module IntelliAgent::OpenAI
     response
   end
 
-  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, auto_run_functions: false, function_context: nil)
-    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:, auto_run_functions:, function_context:)
+  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, tools: nil, auto_run_functions: false, function_context: nil)
+    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
   end
 
-  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, auto_run_functions: false, function_context: nil)
-    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:, auto_run_functions:, function_context:)
+  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, tools: nil, auto_run_functions: false, function_context: nil)
+    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
   end
 
-  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, auto_run_functions: false, function_context: nil)
+  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, tools: nil, auto_run_functions: false, function_context: nil)
     model = select_model(model)
 
     # o1 models doesn't support max_tokens, instead max_completion_tokens
@@ -72,7 +90,7 @@ module IntelliAgent::OpenAI
 
     messages = parse_messages(messages)
 
-    parameters = { model:, messages:, store: true }
+    parameters = { model:, messages:, store: }
 
     parameters[:max_completion_tokens] = max_completion_tokens if is_o1_model
     parameters[:max_tokens] = max_completion_tokens unless is_o1_model
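
store: defaults to true, matching the value that 0.2.8 hard-coded, so existing callers still send store: true to the API; passing store: false opts a single request out of stored completions. A small sketch with illustrative prompt text:

  IntelliAgent::OpenAI.single_chat(system: 'You are terse.',
                                   user: 'One-line summary of RFC 2119?',
                                   store: false)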
@@ -81,26 +99,18 @@ module IntelliAgent::OpenAI
     parameters[:tools] = tools if tools
 
     response = OpenAI::Client.new.chat(parameters:)
+
+    response[:chat_params] = parameters
     response.extend(ResponseExtender)
 
     if response.functions? && auto_run_functions
       raise 'Function context not provided for auto-running functions' if function_context.nil?
-
       parameters[:messages] << response.message
+      parameters[:messages] += response.functions_run_all(context: function_context)
 
-      response.functions.each do |function|
-        parameters[:messages] << {
-          tool_call_id: function[:id],
-          role: :tool,
-          name: function[:name],
-          content: function_context.send(function[:name], **function[:arguments])
-        }
-      end
-
-      response = OpenAI::Client.new.chat(parameters:)
-      response.extend(ResponseExtender)
+      response = chat(**parameters.except(:chat_params))
     end
-
+
     response
   end
 
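
With auto_run_functions: true, chat now appends the assistant message plus the output of functions_run_all to the conversation and recurses through chat(**parameters.except(:chat_params)) instead of rebuilding the client call by hand. A hedged end-to-end sketch, reusing the hypothetical WeatherTools context and weather_tool_schema from the earlier example:

  response = IntelliAgent::OpenAI.chat(messages: [{ user: 'Weather in Lisbon?' }],
                                       tools: weather_tool_schema,
                                       auto_run_functions: true,
                                       function_context: WeatherTools.new)
  response.content  # final answer produced after the tool round-trip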
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.2.8
+  version: 0.2.9
 platform: ruby
 authors:
 - Gedean Dias