intelli_agent 0.2.5 → 0.2.7

Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/intelli_agent/openai.rb +29 -12
  3. metadata +3 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: dbcf865d2efd8e333b82c32d8f90d7537a6175520d25c1da6d4596f28ae0d5b5
-  data.tar.gz: b135ada1b6204ef416763c4bfdbc6a1e837a1c06bdd0ae97bc42b90562c46055
+  metadata.gz: 56264e1d88fa6a52d6d65f14264646854cc29b6666f513735bf79128f215f2f7
+  data.tar.gz: 598ebf37db6bcf4c5812295d5a256c2b798bccf4a438e106523faa784e56953f
 SHA512:
-  metadata.gz: 19d6ef84e6d8071daa0e041666fd8c9b80462dc9c1b48cecfc39d26384741156d8bd16be8ed7068d0d400bfaa4c67f396bcec34985d3296ac6b8516c5c770025
-  data.tar.gz: 847d5813d222b4b01bb8697040b88e7fa18ef9a02b479dfbf4b4dd86a8a22ef84c32f6358c078551f84fb6c3d6993157983660e3731b25f96122b292a1d3a5ad
+  metadata.gz: d3fe6cf52e0bd87ef18c2eb6a0a8303011f3f1c452725b89e6777893a1d0f9d74a1e70e83e4163debecc9b61da7d89abc3170cdbb0d914792504c63b44e1e41c
+  data.tar.gz: ab6c01af01c1ba607ed8524ec09e401f841ac6c6c0a5d8842f1f7d110710f7eaca29c9a0d784e94ae4927a2495c1e8006a5c3e13d40d7c9cd26da743ba3dc58d
data/lib/intelli_agent/openai.rb CHANGED
@@ -1,6 +1,10 @@
 module IntelliAgent::OpenAI
-  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
-  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
+  GPT_BASIC_MODEL = ENV.fetch('OPENAI_GPT_BASIC_MODEL', 'gpt-4o-mini')
+  GPT_ADVANCED_MODEL = ENV.fetch('OPENAI_GPT_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
+
+  O1_BASIC_MODEL = ENV.fetch('OPENAI_O1_BASIC_MODEL', 'o1-mini')
+  O1_ADVANCED_MODEL = ENV.fetch('OPENAI_O1_ADVANCED_MODEL', 'o1-preview')
+
   MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
 
   module ResponseExtender
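
Both model families now resolve through ENV.fetch, so the defaults above can be overridden per deployment without touching code. A minimal sketch, assuming the gem is required as 'intelli_agent' (the variable names come from the hunk above; the values are only illustrative):

  # Override the defaults before the gem's constants are evaluated.
  ENV['OPENAI_GPT_ADVANCED_MODEL'] = 'gpt-4o-2024-08-06'
  ENV['OPENAI_O1_ADVANCED_MODEL']  = 'o1-preview'

  require 'intelli_agent'  # assumed require path for this gem

  IntelliAgent::OpenAI::GPT_BASIC_MODEL   # => "gpt-4o-mini" (default)
  IntelliAgent::OpenAI::O1_ADVANCED_MODEL # => "o1-preview"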
@@ -32,11 +36,11 @@ module IntelliAgent::OpenAI
 
   def self.embeddings(input, model: 'text-embedding-3-large')
     response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
-    def response.embedding = dig('data', 0, 'embedding')
+    def response.embeddings = dig('data', 0, 'embedding')
     response
   end
 
-  def self.vision(prompt:, image_url:, model: :advanced, response_format: nil, max_tokens: MAX_TOKENS)
+  def self.vision(prompt:, image_url:, model: :gpt_advanced, response_format: nil, max_tokens: MAX_TOKENS)
     model = select_model(model)
     messages = [{ type: :text, text: prompt },
                 { type: :image_url, image_url: { url: image_url } }]
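
Two caller-visible changes in this hunk: the singleton reader on the embeddings response is renamed from embedding to embeddings, and vision now defaults to :gpt_advanced instead of :advanced. A hedged usage sketch (method names and the accessor come from the diff; the inputs are placeholders):

  response = IntelliAgent::OpenAI.embeddings('text to embed')
  vector   = response.embeddings   # was response.embedding in 0.2.5

  IntelliAgent::OpenAI.vision(prompt: 'Describe this image',
                              image_url: 'https://example.com/cat.png')  # default model is now :gpt_advanced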
@@ -51,19 +55,28 @@ module IntelliAgent::OpenAI
     response
   end
 
-  def self.single_prompt(prompt:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
     chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, tools:, function_run_context:)
   end
 
-  def self.single_chat(system:, user:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: nil)
+  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: nil)
     chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, tools:, function_run_context:)
   end
 
-  def self.chat(messages:, model: :basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
+  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, tools: nil, function_run_context: self)
     model = select_model(model)
+
+    # o1 models don't support max_tokens; they take max_completion_tokens instead
+    is_o1_model = model.start_with?('o1')
+    max_completion_tokens = max_tokens if is_o1_model
+
     messages = parse_messages(messages)
 
-    parameters = { model:, messages:, max_tokens: }
+    parameters = { model:, messages: }
+
+    parameters[:max_completion_tokens] = max_completion_tokens if is_o1_model
+    parameters[:max_tokens] = max_tokens unless is_o1_model
+
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
     parameters[:tools] = tools if tools
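
The new branch in chat keys off the model name: anything starting with 'o1' has its token cap sent as max_completion_tokens, everything else keeps max_tokens. A sketch of both paths, assuming ruby-openai is configured with an API key (the prompts are placeholders):

  # GPT path: :gpt_basic resolves to 'gpt-4o-mini', so the request includes max_tokens.
  IntelliAgent::OpenAI.chat(
    messages: [{ system: 'You are terse.' }, { user: 'Summarize Ruby in one sentence.' }],
    model: :gpt_basic
  )

  # o1 path: :o1_basic resolves to 'o1-mini'; model.start_with?('o1') is true,
  # so the same cap is sent as max_completion_tokens instead.
  IntelliAgent::OpenAI.chat(
    messages: [{ user: 'Outline a three-step refactor plan.' }],
    model: :o1_basic
  )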
 
@@ -95,10 +108,14 @@ module IntelliAgent::OpenAI
 
   def self.select_model(model)
     case model
-    when :basic
-      BASIC_MODEL
-    when :advanced
-      ADVANCED_MODEL
+    when :gpt_basic
+      GPT_BASIC_MODEL
+    when :gpt_advanced
+      GPT_ADVANCED_MODEL
+    when :o1_basic
+      O1_BASIC_MODEL
+    when :o1_advanced
+      O1_ADVANCED_MODEL
     else
       model
     end
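
select_model now understands the four family symbols and still falls through to the else branch, so passing an explicit model string keeps working. A quick sketch of the mapping, using the defaults defined at the top of the file:

  IntelliAgent::OpenAI.select_model(:gpt_basic)    # => 'gpt-4o-mini'
  IntelliAgent::OpenAI.select_model(:gpt_advanced) # => 'gpt-4o-2024-08-06'
  IntelliAgent::OpenAI.select_model(:o1_basic)     # => 'o1-mini'
  IntelliAgent::OpenAI.select_model(:o1_advanced)  # => 'o1-preview'

  # Any other value is returned unchanged (hypothetical model name shown):
  IntelliAgent::OpenAI.select_model('gpt-4-turbo') # => 'gpt-4-turbo'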
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.2.5
+  version: 0.2.7
 platform: ruby
 authors:
 - Gedean Dias
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2027-09-23 00:00:00.000000000 Z
+date: 2027-09-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -83,7 +83,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.5.19
+rubygems_version: 3.5.20
 signing_key:
 specification_version: 4
 summary: A helper layer over Anthropic and OpenAI API