intelli_agent 0.1.0 → 0.1.2

Sign up to get free protection for your applications and to get access to all the features.
Files changed (3) hide show
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +34 -17
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: cc52520455ecddb77608867d1b38fcc3e0d5bfa2ad40e6810af9d544b2e0c1fa
4
- data.tar.gz: 7b9df2447bcfb0f842bdf3fdb10aa2d71d0fe6e38425bf0c901f482c7567fd5b
3
+ metadata.gz: 5215cbeeb5f66df2815f4259cad370a962e2fc9cb8df39c55af6b1b73531f47b
4
+ data.tar.gz: 9d41844c690e1ceb733d099ce73fffaba9efbe53d233abcd91963202ac9ea2fe
5
5
  SHA512:
6
- metadata.gz: 87c3853788806cbdda92fe70ef0fdfc4e168fb36433c819ff25a3e94faa46ec28eb6d4700648f6e14d652c9c33048e2225ea2f94ee5ff0f187420487ef6603ea
7
- data.tar.gz: 2f77388f90d653761fec0340f9902e2e9e898548e8bb178c8d5cd1d85a5c11701d7aa84504c34e088baca432823363df13840be454d2e157a3ed76c5d2f61301
6
+ metadata.gz: d26c18ba4f481df7f2d1fc6517dc4195e1f59de1ff130795cf94e75d7c721456464566ee743740cd68728f9e62aea4feb655d5da5a020e16016b10341f03ff9d
7
+ data.tar.gz: 35aedfcd5871657e690e7bb0dbfc4cf208b766872ed882d18ca6c22f9103b0fc94e70b3889f964908012e09ae0b6472357c29714fbc6e0fda7f6b59fbe8aaf64
@@ -1,58 +1,75 @@
1
- # In the future, this may become a bus to more than one AI provider
2
1
  module IntelliAgent::OpenAI
3
- BASIC_MODEL = 'gpt-4o-mini' # ENV.fetch('OPENAI_BASIC_MODEL')
4
- ADVANCED_MODEL = 'gpt-4o' # ENV.fetch('OPENAI_ADVANCED_MODEL')
2
+ BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
3
+ ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
4
+ MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16383)
5
5
 
6
6
  def self.embed(input, model: 'text-embedding-3-large')
7
7
  response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
8
  response.dig('data', 0, 'embedding')
9
9
  end
10
10
 
11
- def self.single_prompt(prompt:, model: :basic, response_format: nil)
11
+ def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
12
12
  model = select_model(model)
13
13
 
14
- parameters = { model:, messages: [{ role: 'user', content: prompt }] }
15
-
14
+ parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
16
15
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
17
-
16
+
18
17
  response = OpenAI::Client.new.chat(parameters:)
19
- response.dig('choices', 0, 'message', 'content').strip
18
+
19
+ if response_format.nil?
20
+ response.dig('choices', 0, 'message', 'content').strip
21
+ else
22
+ response
23
+ end
20
24
  end
21
25
 
22
- def self.vision(prompt:, image_url:, model: :advanced, response_format: nil)
26
+ def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
23
27
  model = select_model(model)
24
28
  messages = [{ type: :text, text: prompt },
25
29
  { type: :image_url, image_url: { url: image_url } }]
26
30
 
27
- parameters = { model: model, messages: [{ role: :user, content: messages }] }
31
+ parameters = { model: model, messages: [{ role: :user, content: messages }], max_tokens: }
28
32
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
29
33
 
30
34
  response = OpenAI::Client.new.chat(parameters:)
31
35
 
32
- response.dig('choices', 0, 'message', 'content').strip
36
+ if response_format.nil?
37
+ response.dig('choices', 0, 'message', 'content').strip
38
+ else
39
+ response
40
+ end
33
41
  end
34
42
 
35
- def self.single_chat(system:, user:, model: :basic, response_format: nil)
43
+ def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
36
44
  model = select_model(model)
37
45
  parameters = { model:,
38
46
  messages: [
39
47
  { role: 'system', content: system },
40
48
  { role: 'user', content: user }
41
- ] }
49
+ ], max_tokens: }
42
50
 
43
51
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
44
52
 
45
53
  response = OpenAI::Client.new.chat(parameters:)
46
- response.dig('choices', 0, 'message', 'content').strip
54
+ if response_format.nil?
55
+ response.dig('choices', 0, 'message', 'content').strip
56
+ else
57
+ response
58
+ end
47
59
  end
48
60
 
49
- def self.chat(messages:, model: :basic, response_format: nil)
61
+ def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
50
62
  model = select_model(model)
51
- parameters = { model:, messages: }
63
+
64
+ parameters = { model:, messages:, max_tokens: }
52
65
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
53
66
 
54
67
  response = OpenAI::Client.new.chat(parameters:)
55
- response.dig('choices', 0, 'message', 'content').strip
68
+ if response_format.nil?
69
+ response.dig('choices', 0, 'message', 'content').strip
70
+ else
71
+ response
72
+ end
56
73
  end
57
74
 
58
75
  def self.models = OpenAI::Client.new.models.list
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0
4
+ version: 0.1.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2027-08-05 00:00:00.000000000 Z
11
+ date: 2027-08-27 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
69
69
  - !ruby/object:Gem::Version
70
70
  version: '0'
71
71
  requirements: []
72
- rubygems_version: 3.5.17
72
+ rubygems_version: 3.5.18
73
73
  signing_key:
74
74
  specification_version: 4
75
75
  summary: A helper layer over Anthropic and OpenAI API