intelli_agent 0.1.1 → 0.1.3

Sign up to get free protection for your applications and to get access to all the features.
Files changed (3) hide show
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +31 -11
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: c76bcdd7573088ad3dcc72e5737a72b4bd352c1568c05d849f01007e409d5a09
4
- data.tar.gz: 4c84207e8e0a56741825b4a8d8b37ffd8ac9be22025b17b100bca4c2872b0bf6
3
+ metadata.gz: 9cb37d7cb52a2574978fbf164f5b9eb0c92e3b71a91114c897e3bae81f51d9ef
4
+ data.tar.gz: e52d588661951e958e2d3551f2cfa92f622dacf9a84a00f50590db18d8fb0905
5
5
  SHA512:
6
- metadata.gz: be84356ca55dcae68905bbd3b6547b764499406429b980c21acf48d2dc175433c47e85a51728f6b73194c0c7907db0e8d004dda6030966f5c84d4aecb040de12
7
- data.tar.gz: 254c5b1c8db19b4b75278813f3a7faae286ec9bc7d1930b2b973cc5b7a0c867c88bac74f9ba7d0abd87fa6e0c206617a537c242681335177d5c6abb0aaeec021
6
+ metadata.gz: 7320d62158d94eca1db5ba269fb1a38e2c611a9772104cf718c2aa602407ff1648bcd0daeea2581c32478121fff351cb08881b8b02d8f14e4be67cbe0e726f40
7
+ data.tar.gz: a74ddef7f64bc99e3d87518ee97988dcf47f56a8d9cb3ef28bd6fed1e739ecf41ba94b4822eb1590f1bffda1badfbc5d28ec8433d9bf5e6ffaafab86274fde5d
@@ -1,20 +1,19 @@
1
- # In the future, this will become a bus to more than one AI provider
2
1
  module IntelliAgent::OpenAI
3
2
  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
4
3
  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
4
+ MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16383)
5
5
 
6
6
  def self.embed(input, model: 'text-embedding-3-large')
7
7
  response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
8
  response.dig('data', 0, 'embedding')
9
9
  end
10
10
 
11
- def self.single_prompt(prompt:, model: :basic, response_format: nil)
11
+ def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
12
12
  model = select_model(model)
13
13
 
14
- parameters = { model:, messages: [{ role: 'user', content: prompt }] }
15
-
14
+ parameters = { model:, messages: [{ role: 'user', content: prompt }], max_tokens: }
16
15
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
17
-
16
+
18
17
  response = OpenAI::Client.new.chat(parameters:)
19
18
 
20
19
  if response_format.nil?
@@ -24,12 +23,12 @@ module IntelliAgent::OpenAI
24
23
  end
25
24
  end
26
25
 
27
- def self.vision(prompt:, image_url:, model: :advanced, response_format: nil)
26
+ def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
28
27
  model = select_model(model)
29
28
  messages = [{ type: :text, text: prompt },
30
29
  { type: :image_url, image_url: { url: image_url } }]
31
30
 
32
- parameters = { model: model, messages: [{ role: :user, content: messages }] }
31
+ parameters = { model: model, messages: [{ role: :user, content: messages }], max_tokens: }
33
32
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
34
33
 
35
34
  response = OpenAI::Client.new.chat(parameters:)
@@ -41,13 +40,13 @@ module IntelliAgent::OpenAI
41
40
  end
42
41
  end
43
42
 
44
- def self.single_chat(system:, user:, model: :basic, response_format: nil)
43
+ def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
45
44
  model = select_model(model)
46
45
  parameters = { model:,
47
46
  messages: [
48
47
  { role: 'system', content: system },
49
48
  { role: 'user', content: user }
50
- ] }
49
+ ], max_tokens: }
51
50
 
52
51
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
53
52
 
@@ -59,9 +58,12 @@ module IntelliAgent::OpenAI
59
58
  end
60
59
  end
61
60
 
62
- def self.chat(messages:, model: :basic, response_format: nil)
61
+ def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS)
63
62
  model = select_model(model)
64
- parameters = { model:, messages: }
63
+
64
+ messages = determine_message_format(messages).eql?(:short_format) ? convert_message_to_standard_format(messages) : messages
65
+
66
+ parameters = { model:, messages:, max_tokens: }
65
67
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
66
68
 
67
69
  response = OpenAI::Client.new.chat(parameters:)
@@ -84,4 +86,22 @@ module IntelliAgent::OpenAI
84
86
  model
85
87
  end
86
88
  end
89
+
90
+ def self.determine_message_format(messages)
91
+ case messages
92
+ in [{ role: String, content: String }, *]
93
+ :standard_format
94
+ in [{ system: String }, { user: String }, *]
95
+ :short_format
96
+ else
97
+ :unknown_format
98
+ end
99
+ end
100
+
101
+ def self.convert_message_to_standard_format(messages)
102
+ messages.map do |msg|
103
+ role, content = msg.first
104
+ { role: role.to_s, content: content }
105
+ end
106
+ end
87
107
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.1
4
+ version: 0.1.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-08-07 00:00:00.000000000 Z
11
+ date: 2024-09-01 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai
@@ -69,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
69
69
  - !ruby/object:Gem::Version
70
70
  version: '0'
71
71
  requirements: []
72
- rubygems_version: 3.5.17
72
+ rubygems_version: 3.5.18
73
73
  signing_key:
74
74
  specification_version: 4
75
75
  summary: A helper layer over Anthropic and OpenAI API