intelli_agent 0.0.6 → 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 475b4ea33841f7026d1186e346d0939a40bf80277bada1f00c058f4e966bd513
4
- data.tar.gz: 0fd82b217902cc662e163d08565afe0ff1eee10d6f690be1fb206fe47b216cdd
3
+ metadata.gz: cc52520455ecddb77608867d1b38fcc3e0d5bfa2ad40e6810af9d544b2e0c1fa
4
+ data.tar.gz: 7b9df2447bcfb0f842bdf3fdb10aa2d71d0fe6e38425bf0c901f482c7567fd5b
5
5
  SHA512:
6
- metadata.gz: 6af16bf022d0bb540d9e7748bb3186277d18347facaa0983c29af055382d6d7a88492d8b1dae184b6c3f7544a54f57b773f611a05507a510490648c5a7922791
7
- data.tar.gz: b0cb06584d055c8ea1350da73d71986b2897fb2ac6c9016d6c4b81cb1e40b49d361e80561c80dc9b6a6963edb60bedcb76a160b9dd7908fc2f05b05fb99197a0
6
+ metadata.gz: 87c3853788806cbdda92fe70ef0fdfc4e168fb36433c819ff25a3e94faa46ec28eb6d4700648f6e14d652c9c33048e2225ea2f94ee5ff0f187420487ef6603ea
7
+ data.tar.gz: 2f77388f90d653761fec0340f9902e2e9e898548e8bb178c8d5cd1d85a5c11701d7aa84504c34e088baca432823363df13840be454d2e157a3ed76c5d2f61301
@@ -0,0 +1,67 @@
1
+ require 'openai'
2
+ require 'intelli_agent/anthropic'
3
+ require 'intelli_agent/openai'
4
+
5
+ class IntelliAgent
6
+ extend OpenAI
7
+ extend Anthropic
8
+
9
+ attr_reader :assistant, :thread, :instructions, :vector_store_id
10
+
11
+ def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
12
+ @openai_client = OpenAI::Client.new
13
+
14
+ assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
15
+ @assistant = @openai_client.assistants.retrieve(id: assistant_id)
16
+
17
+ thread_params = {}
18
+
19
+ # Only one vector store can be attached, according to the OpenAI API documentation
20
+ @vector_store_id = vector_store_id
21
+ thread_params = { tool_resources: { file_search: { vector_store_ids: [vector_store_id] } } } if @vector_store_id
22
+
23
+ thread_id ||= @openai_client.threads.create(parameters: thread_params)['id']
24
+ @thread = @openai_client.threads.retrieve(id: thread_id)
25
+
26
+ @instructions = thread_instructions || @assistant['instructions']
27
+ end
28
+
29
+ def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
30
+ def messages = @openai_client.messages.list(thread_id: @thread['id'])
31
+ def last_message = messages['data'].first['content'].first['text']['value']
32
+ def runs = @openai_client.runs.list(thread_id: @thread['id'])
33
+
34
+ def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
35
+ params = { assistant_id: @assistant['id'] }
36
+
37
+ params[:instructions] = instructions || @instructions
38
+ params[:additional_instructions] = additional_instructions unless additional_instructions.nil?
39
+ params[:tool_choice] = tool_choice unless tool_choice.nil?
40
+
41
+ params[:additional_messages] = [{ role: :user, content: additional_message }] unless additional_message.nil?
42
+
43
+ params[:model] = model || @assistant['model']
44
+
45
+ run_id = @openai_client.runs.create(thread_id: @thread['id'], parameters: params)['id']
46
+
47
+ loop do
48
+ response = @openai_client.runs.retrieve(id: run_id, thread_id: @thread['id'])
49
+
50
+ case response['status']
51
+ when 'queued', 'in_progress', 'cancelling'
52
+ puts 'Status: Waiting AI Processing finish'
53
+ sleep 1
54
+ when 'completed'
55
+ puts last_message
56
+ break
57
+ when 'requires_action'
58
+ # Handle tool calls (see below)
59
+ when 'cancelled', 'failed', 'expired'
60
+ puts response['last_error'].inspect
61
+ break # or `exit`
62
+ else
63
+ puts "Unknown status response: #{response['status']}"
64
+ end
65
+ end
66
+ end
67
+ end
@@ -0,0 +1,43 @@
1
+ module IntelliAgent::Anthropic
2
+ BASIC_MODEL = 'claude-3-haiku-20240307' # ENV.fetch('ANTHROPIC_BASIC_MODEL')
3
+ ADVANCED_MODEL = 'claude-3-5-sonnet-20240620' # ENV.fetch('ANTHROPIC_ADVANCED_MODEL')
4
+
5
+ def self.single_prompt(prompt:, model: :basic, max_tokens: 1000)
6
+ model = select_model(model)
7
+
8
+ parameters = { model:, max_tokens:, messages: [{ role: 'user', content: prompt }] }
9
+
10
+ response = Anthropic::Client.new.messages(parameters:)
11
+ response.dig('content', 0, 'text').strip
12
+ end
13
+
14
+ def self.single_chat(system:, user:, model: :basic, max_tokens: 1000)
15
+ model = select_model(model)
16
+
17
+ parameters = { model:, system:, max_tokens:,
18
+ messages: [ { role: 'user', content: user } ] }
19
+
20
+ response = Anthropic::Client.new.messages(parameters:)
21
+ response.dig('content', 0, 'text').strip
22
+ end
23
+
24
+ def self.chat(system:, messages:, model: :basic, max_tokens: 1000)
25
+ model = select_model(model)
26
+
27
+ parameters = { model:, max_tokens:, system:, messages: }
28
+
29
+ response = Anthropic::Client.new.messages(parameters:)
30
+ response.dig('content', 0, 'text').strip
31
+ end
32
+
33
+ def self.select_model(model)
34
+ case model
35
+ when :basic
36
+ BASIC_MODEL
37
+ when :advanced
38
+ ADVANCED_MODEL
39
+ else
40
+ model
41
+ end
42
+ end
43
+ end
@@ -1,14 +1,16 @@
1
1
  # In the future, this may become a bus to more than one AI provider
2
- module AI
2
+ module IntelliAgent::OpenAI
3
3
  BASIC_MODEL = 'gpt-4o-mini' # ENV.fetch('OPENAI_BASIC_MODEL')
4
4
  ADVANCED_MODEL = 'gpt-4o' # ENV.fetch('OPENAI_ADVANCED_MODEL')
5
5
 
6
- def embed(input, model: 'text-embedding-3-large')
6
+ def self.embed(input, model: 'text-embedding-3-large')
7
7
  response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
8
  response.dig('data', 0, 'embedding')
9
9
  end
10
10
 
11
- def single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil)
11
+ def self.single_prompt(prompt:, model: :basic, response_format: nil)
12
+ model = select_model(model)
13
+
12
14
  parameters = { model:, messages: [{ role: 'user', content: prompt }] }
13
15
 
14
16
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
@@ -17,7 +19,8 @@ module AI
17
19
  response.dig('choices', 0, 'message', 'content').strip
18
20
  end
19
21
 
20
- def vision(prompt:, image_url:, model: AI::ADVANCED_MODEL, response_format: nil)
22
+ def self.vision(prompt:, image_url:, model: :advanced, response_format: nil)
23
+ model = select_model(model)
21
24
  messages = [{ type: :text, text: prompt },
22
25
  { type: :image_url, image_url: { url: image_url } }]
23
26
 
@@ -29,7 +32,8 @@ module AI
29
32
  response.dig('choices', 0, 'message', 'content').strip
30
33
  end
31
34
 
32
- def single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil)
35
+ def self.single_chat(system:, user:, model: :basic, response_format: nil)
36
+ model = select_model(model)
33
37
  parameters = { model:,
34
38
  messages: [
35
39
  { role: 'system', content: system },
@@ -42,7 +46,8 @@ module AI
42
46
  response.dig('choices', 0, 'message', 'content').strip
43
47
  end
44
48
 
45
- def chat(messages:, model: AI::BASIC_MODEL, response_format: nil)
49
+ def self.chat(messages:, model: :basic, response_format: nil)
50
+ model = select_model(model)
46
51
  parameters = { model:, messages: }
47
52
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
48
53
 
@@ -50,7 +55,16 @@ module AI
50
55
  response.dig('choices', 0, 'message', 'content').strip
51
56
  end
52
57
 
53
- def models
54
- OpenAI::Client.new.models.list
58
+ def self.models = OpenAI::Client.new.models.list
59
+
60
+ def self.select_model(model)
61
+ case model
62
+ when :basic
63
+ BASIC_MODEL
64
+ when :advanced
65
+ ADVANCED_MODEL
66
+ else
67
+ model
68
+ end
55
69
  end
56
- end
70
+ end
data/lib/intelli_agent.rb CHANGED
@@ -1,64 +1,8 @@
1
1
  require 'openai'
2
- require 'intelli_agent/ai'
2
+ require 'anthropic'
3
3
 
4
- class IntelliAgent
5
- extend AI
6
- attr_reader :assistant, :thread, :instructions, :vector_store_id
7
-
8
- def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
9
- @openai_client = OpenAI::Client.new
10
-
11
- assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
12
- @assistant = @openai_client.assistants.retrieve(id: assistant_id)
13
-
14
- thread_params = {}
15
-
16
- # Only one vector store can be attached, according to the OpenAI API documentation
17
- @vector_store_id = vector_store_id
18
- thread_params = { tool_resources: { file_search: { vector_store_ids: [vector_store_id] } } } if @vector_store_id
19
-
20
- thread_id ||= @openai_client.threads.create(parameters: thread_params)['id']
21
- @thread = @openai_client.threads.retrieve(id: thread_id)
22
-
23
- @instructions = thread_instructions || @assistant['instructions']
24
- end
25
-
26
- def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
27
- def messages = @openai_client.messages.list(thread_id: @thread['id'])
28
- def last_message = messages['data'].first['content'].first['text']['value']
29
- def runs = @openai_client.runs.list(thread_id: @thread['id'])
30
-
31
- def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
32
- params = { assistant_id: @assistant['id'] }
33
-
34
- params[:instructions] = instructions || @instructions
35
- params[:additional_instructions] = additional_instructions unless additional_instructions.nil?
36
- params[:tool_choice] = tool_choice unless tool_choice.nil?
37
-
38
- params[:additional_messages] = [{ role: :user, content: additional_message }] unless additional_message.nil?
39
-
40
- params[:model] = model || @assistant['model']
41
-
42
- run_id = @openai_client.runs.create(thread_id: @thread['id'], parameters: params)['id']
43
-
44
- loop do
45
- response = @openai_client.runs.retrieve(id: run_id, thread_id: @thread['id'])
46
-
47
- case response['status']
48
- when 'queued', 'in_progress', 'cancelling'
49
- puts 'Status: Waiting AI Processing finish'
50
- sleep 1
51
- when 'completed'
52
- puts last_message
53
- break
54
- when 'requires_action'
55
- # Handle tool calls (see below)
56
- when 'cancelled', 'failed', 'expired'
57
- puts response['last_error'].inspect
58
- break # or `exit`
59
- else
60
- puts "Unknown status response: #{status}"
61
- end
62
- end
63
- end
4
+ module IntelliAgent
64
5
  end
6
+
7
+ require 'intelli_agent/anthropic'
8
+ require 'intelli_agent/openai'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.6
4
+ version: 0.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2027-07-23 00:00:00.000000000 Z
11
+ date: 2024-08-05 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai
@@ -24,7 +24,21 @@ dependencies:
24
24
  - - "~>"
25
25
  - !ruby/object:Gem::Version
26
26
  version: '7.1'
27
- description: A helper layer over OpenAI API
27
+ - !ruby/object:Gem::Dependency
28
+ name: anthropic
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '0.3'
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '0.3'
41
+ description: A helper layer over Anthropic and OpenAI API
28
42
  email: gedean.dias@gmail.com
29
43
  executables: []
30
44
  extensions: []
@@ -32,8 +46,10 @@ extra_rdoc_files: []
32
46
  files:
33
47
  - README.md
34
48
  - lib/intelli_agent.rb
49
+ - lib/intelli_agent/agent.rb.disabled
35
50
  - lib/intelli_agent/agent_test.rb_
36
- - lib/intelli_agent/ai.rb
51
+ - lib/intelli_agent/anthropic.rb
52
+ - lib/intelli_agent/openai.rb
37
53
  homepage: https://github.com/gedean/intelli_agent
38
54
  licenses:
39
55
  - MIT
@@ -53,8 +69,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
53
69
  - !ruby/object:Gem::Version
54
70
  version: '0'
55
71
  requirements: []
56
- rubygems_version: 3.5.16
72
+ rubygems_version: 3.5.17
57
73
  signing_key:
58
74
  specification_version: 4
59
- summary: A helper layer over OpenAI API
75
+ summary: A helper layer over Anthropic and OpenAI API
60
76
  test_files: []