intelli_agent 0.1.3 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +30 -31
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 9cb37d7cb52a2574978fbf164f5b9eb0c92e3b71a91114c897e3bae81f51d9ef
4
- data.tar.gz: e52d588661951e958e2d3551f2cfa92f622dacf9a84a00f50590db18d8fb0905
3
+ metadata.gz: 4a8a694f4bb6ba84140e24bdf44962629eb63ff75b8a9ff8bc1ab5ef416db5cb
4
+ data.tar.gz: a3ef08dc21d823b19d597f8b1355328f55d6f767ddb21a99fe3bbac5cf9256b3
5
5
  SHA512:
6
- metadata.gz: 7320d62158d94eca1db5ba269fb1a38e2c611a9772104cf718c2aa602407ff1648bcd0daeea2581c32478121fff351cb08881b8b02d8f14e4be67cbe0e726f40
7
- data.tar.gz: a74ddef7f64bc99e3d87518ee97988dcf47f56a8d9cb3ef28bd6fed1e739ecf41ba94b4822eb1590f1bffda1badfbc5d28ec8433d9bf5e6ffaafab86274fde5d
6
+ metadata.gz: 7602007cab2f23363d6ed87c9f22d1d262325e2080eb8dc5f2f7e0b8d599d98f7e8666348e3e477660acbf01a2f63bfc98199cd8f4eb5afb143165ef79058b74
7
+ data.tar.gz: 49bbef1d5c157d9b4f51aede33b6bc83858a53b4c10281132b0dee3b06117459a144ab3c386496ac23a7c0078fb6f6079d9f0e0906898af1cb2f365d99ffe7af
@@ -1,28 +1,23 @@
1
1
  module IntelliAgent::OpenAI
2
2
  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
3
3
  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
4
- MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16383)
4
+ MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
5
+
6
+ module ResponseExtender
7
+ def content
8
+ dig('choices', 0, 'message', 'content')
9
+ end
10
+
11
+ def tool_calls
12
+ dig('choices', 0, 'message', 'tool_calls')
13
+ end
14
+ end
5
15
 
6
16
  def self.embed(input, model: 'text-embedding-3-large')
7
17
  response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
18
  response.dig('data', 0, 'embedding')
9
19
  end
10
20
 
11
- def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
12
- model = select_model(model)
13
-
14
- parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
15
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
16
-
17
- response = OpenAI::Client.new.chat(parameters:)
18
-
19
- if response_format.nil?
20
- response.dig('choices', 0, 'message', 'content').strip
21
- else
22
- response
23
- end
24
- end
25
-
26
21
  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
27
22
  model = select_model(model)
28
23
  messages = [{ type: :text, text: prompt },
@@ -32,12 +27,20 @@ module IntelliAgent::OpenAI
32
27
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
33
28
 
34
29
  response = OpenAI::Client.new.chat(parameters:)
30
+
31
+ response.dig('choices', 0, 'message', 'content').strip
32
+ end
35
33
 
36
- if response_format.nil?
37
- response.dig('choices', 0, 'message', 'content').strip
38
- else
39
- response
40
- end
34
+ def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil)
35
+ model = select_model(model)
36
+
37
+ parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
38
+ parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
39
+ parameters[:tools] = tools if tools
40
+
41
+ response = OpenAI::Client.new.chat(parameters:)
42
+ response.extend(ResponseExtender)
43
+ response
41
44
  end
42
45
 
43
46
  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
@@ -49,13 +52,11 @@ module IntelliAgent::OpenAI
49
52
  ], max_tokens: }
50
53
 
51
54
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
55
+ parameters[:tools] = tools if tools
52
56
 
53
57
  response = OpenAI::Client.new.chat(parameters:)
54
- if response_format.nil?
55
- response.dig('choices', 0, 'message', 'content').strip
56
- else
57
- response
58
- end
58
+ response.extend(ResponseExtender)
59
+ response
59
60
  end
60
61
 
61
62
  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
@@ -65,13 +66,11 @@ module IntelliAgent::OpenAI
65
66
 
66
67
  parameters = { model:, messages:, max_tokens: }
67
68
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
69
+ parameters[:tools] = tools if tools
68
70
 
69
71
  response = OpenAI::Client.new.chat(parameters:)
70
- if response_format.nil?
71
- response.dig('choices', 0, 'message', 'content').strip
72
- else
73
- response
74
- end
72
+ response.extend(ResponseExtender)
73
+ response
75
74
  end
76
75
 
77
76
  def self.models = OpenAI::Client.new.models.list
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.3
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2027-09-01 00:00:00.000000000 Z
11
+ date: 2027-09-21 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
69
69
  - !ruby/object:Gem::Version
70
70
  version: '0'
71
71
  requirements: []
72
- rubygems_version: 3.5.18
72
+ rubygems_version: 3.5.19
73
73
  signing_key:
74
74
  specification_version: 4
75
75
  summary: A helper layer over Anthropic and OpenAI API