intelli_agent 0.1.4 → 0.2.1

This diff shows the content of publicly released package versions as they appear in their public registries and is provided for informational purposes only.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +37 -16
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: cb2113329a8f68d1433d8d54846c73eb620818041f617f5140afd00247c53dec
-  data.tar.gz: ff64fce4f52a30f3d1289925f92ab30767d4e1ee54f75f96e702f32ae4e5354c
+  metadata.gz: be996d76730695ba38477d0e748affd640eb03bf361892128305e668e32e1a43
+  data.tar.gz: d860d1779e054c68214bf69fe640f2420486e514dded5f0b2b9c328da09a0e32
 SHA512:
-  metadata.gz: 784acf6fd048f8f521ebcb4dad4381b48db1e79a32b424d98c790b8abf74ddbf1f3a6b69ae2dffe67dd68a731dda295aa7df627689ba5672837d2be4fe0a0b0e
-  data.tar.gz: 4ead6219753043957a4728b8808b57a0829a4954671f06895002ffc47c1087e61124a9c1d37a7328fa203e0a1ad5541570247513f79b8b477518ba704bd4f44d
+  metadata.gz: 46b6fe360ca8265f5d495b2853f4ea8e3fe9c5834e6e7e2cd072de2196b77e49483d8e67a74c334f526b12ed26d36bb66720241946c9de088b931ef7d5f4eacc
+  data.tar.gz: d61707f97959304260975fc8b9b17f0f1cf355345b2f4db5715db2675645fcb19bec6e521fdf4d5ec54e4f7f2d6f7dd7bcfc636cae885f8b67151df7070aa3a7
data/lib/intelli_agent/openai.rb CHANGED
@@ -1,24 +1,31 @@
 module IntelliAgent::OpenAI
   BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
   ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
-  MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16383)
+  MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
+
+  module ResponseExtender
+    def content
+      dig('choices', 0, 'message', 'content')
+    end
+
+    def content?
+      !content.nil?
+    end
+
+    def tool_calls
+      dig('choices', 0, 'message', 'tool_calls')
+    end
+
+    def tool_calls?
+      !tool_calls.nil?
+    end
+  end
 
   def self.embed(input, model: 'text-embedding-3-large')
     response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
     response.dig('data', 0, 'embedding')
   end
 
-  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
-    model = select_model(model)
-
-    parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
-    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
-
-    response = OpenAI::Client.new.chat(parameters:)
-
-    response.dig('choices', 0, 'message', 'content').strip
-  end
-
   def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
     model = select_model(model)
     messages = [{ type: :text, text: prompt },
@@ -30,6 +37,18 @@ module IntelliAgent::OpenAI
     response = OpenAI::Client.new.chat(parameters:)
 
     response.dig('choices', 0, 'message', 'content').strip
+  end
+
+  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil)
+    model = select_model(model)
+
+    parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
+    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+    parameters[:tools] = tools if tools
+
+    response = OpenAI::Client.new.chat(parameters:)
+    response.extend(ResponseExtender)
+    response
   end
 
   def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
@@ -41,10 +60,11 @@ module IntelliAgent::OpenAI
               ], max_tokens: }
 
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+    parameters[:tools] = tools if tools
 
     response = OpenAI::Client.new.chat(parameters:)
-
-    response.dig('choices', 0, 'message', 'content').strip
+    response.extend(ResponseExtender)
+    response
   end
 
   def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
@@ -54,10 +74,11 @@ module IntelliAgent::OpenAI
 
     parameters = { model:, messages:, max_tokens: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
+    parameters[:tools] = tools if tools
 
     response = OpenAI::Client.new.chat(parameters:)
-
-    response.dig('choices', 0, 'message', 'content').strip
+    response.extend(ResponseExtender)
+    response
   end
 
   def self.models = OpenAI::Client.new.models.list
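
For reference, here is a minimal usage sketch of the new interface introduced above. It assumes the gem is installed and the ruby-openai client is already configured with an API key; the get_weather tool definition and the prompt are hypothetical examples, not part of the gem. Since single_prompt now returns the raw chat response extended with ResponseExtender (rather than a pre-stripped string), callers read content or tool_calls from it. The diff also adds parameters[:tools] to single_chat and chat, but their signatures as shown above do not declare a tools: keyword, so the sketch sticks to single_prompt.

  # Hypothetical tool definition in OpenAI's function-calling format.
  tools = [
    { type: 'function',
      function: {
        name: 'get_weather',
        description: 'Return the current weather for a given city',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city']
        } } }
  ]

  response = IntelliAgent::OpenAI.single_prompt(prompt: 'What is the weather in Lisbon?', tools:)

  if response.tool_calls?
    # Each tool call carries the function name and its JSON-encoded arguments.
    response.tool_calls.each { |call| puts call.dig('function', 'name') }
  elsif response.content?
    puts response.content
  end

Note that this is a breaking change relative to 0.1.4: single_prompt, single_chat and chat no longer return a stripped content string, so existing callers must extract the text from the returned response themselves.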
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.1.4
+  version: 0.2.1
 platform: ruby
 authors:
 - Gedean Dias
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2027-09-02 00:00:00.000000000 Z
+date: 2027-09-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.18
+rubygems_version: 3.5.19
 signing_key:
 specification_version: 4
 summary: A helper layer over Anthropic and OpenAI API