intelli_agent 0.1.1 → 0.1.2

Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +11 -11
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c76bcdd7573088ad3dcc72e5737a72b4bd352c1568c05d849f01007e409d5a09
-  data.tar.gz: 4c84207e8e0a56741825b4a8d8b37ffd8ac9be22025b17b100bca4c2872b0bf6
+  metadata.gz: 5215cbeeb5f66df2815f4259cad370a962e2fc9cb8df39c55af6b1b73531f47b
+  data.tar.gz: 9d41844c690e1ceb733d099ce73fffaba9efbe53d233abcd91963202ac9ea2fe
 SHA512:
-  metadata.gz: be84356ca55dcae68905bbd3b6547b764499406429b980c21acf48d2dc175433c47e85a51728f6b73194c0c7907db0e8d004dda6030966f5c84d4aecb040de12
-  data.tar.gz: 254c5b1c8db19b4b75278813f3a7faae286ec9bc7d1930b2b973cc5b7a0c867c88bac74f9ba7d0abd87fa6e0c206617a537c242681335177d5c6abb0aaeec021
+  metadata.gz: d26c18ba4f481df7f2d1fc6517dc4195e1f59de1ff130795cf94e75d7c721456464566ee743740cd68728f9e62aea4feb655d5da5a020e16016b10341f03ff9d
+  data.tar.gz: 35aedfcd5871657e690e7bb0dbfc4cf208b766872ed882d18ca6c22f9103b0fc94e70b3889f964908012e09ae0b6472357c29714fbc6e0fda7f6b59fbe8aaf64
data/lib/intelli_agent/openai.rb CHANGED
@@ -1,20 +1,19 @@
-# In the future, this became a bus to more than one AI provider
 module IntelliAgent::OpenAI
   BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
   ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
+  MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16383)
 
   def self.embed(input, model: 'text-embedding-3-large')
     response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
     response.dig('data', 0, 'embedding')
   end
 
-  def self.single_prompt(prompt:, model: :basic, response_format: nil)
+  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
     model = select_model(model)
 
-    parameters = { model:, messages: [{ role: 'user', content: prompt }] }
-
+    parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
-
+
     response = OpenAI::Client.new.chat(parameters:)
 
     if response_format.nil?
@@ -24,12 +23,12 @@ module IntelliAgent::OpenAI
     end
   end
 
-  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil)
+  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
     model = select_model(model)
     messages = [{ type: :text, text: prompt },
                 { type: :image_url, image_url: { url: image_url } }]
 
-    parameters = { model: model, messages: [{ role: :user, content: messages }] }
+    parameters = { model: model, messages: [{ role: :user, content: messages }], max_tokens: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
 
     response = OpenAI::Client.new.chat(parameters:)
@@ -41,13 +40,13 @@ module IntelliAgent::OpenAI
     end
   end
 
-  def self.single_chat(system:, user:, model: :basic, response_format: nil)
+  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
     model = select_model(model)
     parameters = { model:,
                    messages: [
                      { role: 'system', content: system },
                      { role: 'user', content: user }
-                   ] }
+                   ], max_tokens: }
 
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
 
@@ -59,9 +58,10 @@ module IntelliAgent::OpenAI
     end
   end
 
-  def self.chat(messages:, model: :basic, response_format: nil)
+  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
     model = select_model(model)
-    parameters = { model:, messages: }
+
+    parameters = { model:, messages:, max_tokens: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
 
     response = OpenAI::Client.new.chat(parameters:)
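
The net effect of this change is that every chat-style helper (single_prompt, vision, single_chat, chat) now forwards a max_tokens value to the API, defaulting to the new MAX_TOKENS constant (ENV['OPENAI_MAX_TOKENS'] if set, otherwise 16383). A minimal usage sketch of the new keyword, assuming the library is loaded via require 'intelli_agent' and ruby-openai is already configured with an API key; the prompt text and the per-call limit of 64 are illustrative only:

  require 'intelli_agent'

  # Uses the default cap taken from MAX_TOKENS
  answer = IntelliAgent::OpenAI.single_prompt(prompt: 'Summarize this release in one sentence.')

  # Overrides the cap for a single call
  short_answer = IntelliAgent::OpenAI.single_prompt(
    prompt: 'Summarize this release in one sentence.',
    max_tokens: 64
  )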
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.2
 platform: ruby
 authors:
 - Gedean Dias
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2027-08-07 00:00:00.000000000 Z
+date: 2027-08-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.17
+rubygems_version: 3.5.18
 signing_key:
 specification_version: 4
 summary: A helper layer over Anthropic and OpenAI API
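
To pick up the 0.1.2 release, a consumer bumps the pinned version and reinstalls; a minimal Gemfile sketch (the pessimistic version constraint is an assumption, not taken from the gemspec):

  # Gemfile
  gem 'intelli_agent', '~> 0.1.2'

followed by bundle update intelli_agent.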