intelli_agent 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +29 -16
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: cb2113329a8f68d1433d8d54846c73eb620818041f617f5140afd00247c53dec
4
- data.tar.gz: ff64fce4f52a30f3d1289925f92ab30767d4e1ee54f75f96e702f32ae4e5354c
3
+ metadata.gz: 4a8a694f4bb6ba84140e24bdf44962629eb63ff75b8a9ff8bc1ab5ef416db5cb
4
+ data.tar.gz: a3ef08dc21d823b19d597f8b1355328f55d6f767ddb21a99fe3bbac5cf9256b3
5
5
  SHA512:
6
- metadata.gz: 784acf6fd048f8f521ebcb4dad4381b48db1e79a32b424d98c790b8abf74ddbf1f3a6b69ae2dffe67dd68a731dda295aa7df627689ba5672837d2be4fe0a0b0e
7
- data.tar.gz: 4ead6219753043957a4728b8808b57a0829a4954671f06895002ffc47c1087e61124a9c1d37a7328fa203e0a1ad5541570247513f79b8b477518ba704bd4f44d
6
+ metadata.gz: 7602007cab2f23363d6ed87c9f22d1d262325e2080eb8dc5f2f7e0b8d599d98f7e8666348e3e477660acbf01a2f63bfc98199cd8f4eb5afb143165ef79058b74
7
+ data.tar.gz: 49bbef1d5c157d9b4f51aede33b6bc83858a53b4c10281132b0dee3b06117459a144ab3c386496ac23a7c0078fb6f6079d9f0e0906898af1cb2f365d99ffe7af
@@ -1,24 +1,23 @@
1
1
  module IntelliAgent::OpenAI
2
2
  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
3
3
  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
4
- MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16383)
4
+ MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
5
+
6
+ module ResponseExtender
7
+ def content
8
+ dig('choices', 0, 'message', 'content')
9
+ end
10
+
11
+ def tool_calls
12
+ dig('choices', 0, 'message', 'tool_calls')
13
+ end
14
+ end
5
15
 
6
16
  def self.embed(input, model: 'text-embedding-3-large')
7
17
  response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
18
  response.dig('data', 0, 'embedding')
9
19
  end
10
20
 
11
- def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
12
- model = select_model(model)
13
-
14
- parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
15
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
16
-
17
- response = OpenAI::Client.new.chat(parameters:)
18
-
19
- response.dig('choices', 0, 'message', 'content').strip
20
- end
21
-
22
21
  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
23
22
  model = select_model(model)
24
23
  messages = [{ type: :text, text: prompt },
@@ -30,6 +29,18 @@ module IntelliAgent::OpenAI
30
29
  response = OpenAI::Client.new.chat(parameters:)
31
30
 
32
31
  response.dig('choices', 0, 'message', 'content').strip
32
+ end
33
+
34
+ def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil)
35
+ model = select_model(model)
36
+
37
+ parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
38
+ parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
39
+ parameters[:tools] = tools if tools
40
+
41
+ response = OpenAI::Client.new.chat(parameters:)
42
+ response.extend(ResponseExtender)
43
+ response
33
44
  end
34
45
 
35
46
  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
@@ -41,10 +52,11 @@ module IntelliAgent::OpenAI
41
52
  ], max_tokens: }
42
53
 
43
54
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
55
+ parameters[:tools] = tools if tools
44
56
 
45
57
  response = OpenAI::Client.new.chat(parameters:)
46
-
47
- response.dig('choices', 0, 'message', 'content').strip
58
+ response.extend(ResponseExtender)
59
+ response
48
60
  end
49
61
 
50
62
  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
@@ -54,10 +66,11 @@ module IntelliAgent::OpenAI
54
66
 
55
67
  parameters = { model:, messages:, max_tokens: }
56
68
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
69
+ parameters[:tools] = tools if tools
57
70
 
58
71
  response = OpenAI::Client.new.chat(parameters:)
59
-
60
- response.dig('choices', 0, 'message', 'content').strip
72
+ response.extend(ResponseExtender)
73
+ response
61
74
  end
62
75
 
63
76
  def self.models = OpenAI::Client.new.models.list
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.4
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2027-09-02 00:00:00.000000000 Z
11
+ date: 2027-09-21 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
69
69
  - !ruby/object:Gem::Version
70
70
  version: '0'
71
71
  requirements: []
72
- rubygems_version: 3.5.18
72
+ rubygems_version: 3.5.19
73
73
  signing_key:
74
74
  specification_version: 4
75
75
  summary: A helper layer over Anthropic and OpenAI API