intelli_agent 0.0.6 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 475b4ea33841f7026d1186e346d0939a40bf80277bada1f00c058f4e966bd513
4
- data.tar.gz: 0fd82b217902cc662e163d08565afe0ff1eee10d6f690be1fb206fe47b216cdd
3
+ metadata.gz: c76bcdd7573088ad3dcc72e5737a72b4bd352c1568c05d849f01007e409d5a09
4
+ data.tar.gz: 4c84207e8e0a56741825b4a8d8b37ffd8ac9be22025b17b100bca4c2872b0bf6
5
5
  SHA512:
6
- metadata.gz: 6af16bf022d0bb540d9e7748bb3186277d18347facaa0983c29af055382d6d7a88492d8b1dae184b6c3f7544a54f57b773f611a05507a510490648c5a7922791
7
- data.tar.gz: b0cb06584d055c8ea1350da73d71986b2897fb2ac6c9016d6c4b81cb1e40b49d361e80561c80dc9b6a6963edb60bedcb76a160b9dd7908fc2f05b05fb99197a0
6
+ metadata.gz: be84356ca55dcae68905bbd3b6547b764499406429b980c21acf48d2dc175433c47e85a51728f6b73194c0c7907db0e8d004dda6030966f5c84d4aecb040de12
7
+ data.tar.gz: 254c5b1c8db19b4b75278813f3a7faae286ec9bc7d1930b2b973cc5b7a0c867c88bac74f9ba7d0abd87fa6e0c206617a537c242681335177d5c6abb0aaeec021
@@ -0,0 +1,67 @@
1
+ require 'openai'
2
+ require 'intelli_agent/anthropic'
3
+ require 'intelli_agent/openai'
4
+
5
+ class IntelliAgent
6
+ extend OpenAI
7
+ extend Anthropic
8
+
9
+ attr_reader :assistant, :thread, :instructions, :vector_store_id
10
+
11
+ def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
12
+ @openai_client = OpenAI::Client.new
13
+
14
+ assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
15
+ @assistant = @openai_client.assistants.retrieve(id: assistant_id)
16
+
17
+ thread_params = {}
18
+
19
+ # Only one vector store can be attached, according to the OpenAI API documentation
20
+ @vector_store_id = vector_store_id
21
+ thread_params = { tool_resources: { file_search: { vector_store_ids: [vector_store_id] } } } if @vector_store_id
22
+
23
+ thread_id ||= @openai_client.threads.create(parameters: thread_params)['id']
24
+ @thread = @openai_client.threads.retrieve(id: thread_id)
25
+
26
+ @instructions = thread_instructions || @assistant['instructions']
27
+ end
28
+
29
+ def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
30
+ def messages = @openai_client.messages.list(thread_id: @thread['id'])
31
+ def last_message = messages['data'].first['content'].first['text']['value']
32
+ def runs = @openai_client.runs.list(thread_id: @thread['id'])
33
+
34
+ def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
35
+ params = { assistant_id: @assistant['id'] }
36
+
37
+ params[:instructions] = instructions || @instructions
38
+ params[:additional_instructions] = additional_instructions unless additional_instructions.nil?
39
+ params[:tool_choice] = tool_choice unless tool_choice.nil?
40
+
41
+ params[:additional_messages] = [{ role: :user, content: additional_message }] unless additional_message.nil?
42
+
43
+ params[:model] = model || @assistant['model']
44
+
45
+ run_id = @openai_client.runs.create(thread_id: @thread['id'], parameters: params)['id']
46
+
47
+ loop do
48
+ response = @openai_client.runs.retrieve(id: run_id, thread_id: @thread['id'])
49
+
50
+ case response['status']
51
+ when 'queued', 'in_progress', 'cancelling'
52
+ puts 'Status: Waiting AI Processing finish'
53
+ sleep 1
54
+ when 'completed'
55
+ puts last_message
56
+ break
57
+ when 'requires_action'
58
+ # TODO: handle tool calls (the requires_action branch is not yet implemented)
59
+ when 'cancelled', 'failed', 'expired'
60
+ puts response['last_error'].inspect
61
+ break # or `exit`
62
+ else
63
+ puts "Unknown status response: #{status}"
64
+ end
65
+ end
66
+ end
67
+ end
@@ -0,0 +1,43 @@
1
+ module IntelliAgent::Anthropic
2
+ BASIC_MODEL = 'claude-3-haiku-20240307' # ENV.fetch('ANTHROPIC_BASIC_MODEL')
3
+ ADVANCED_MODEL = 'claude-3-5-sonnet-20240620' # ENV.fetch('ANTHROPIC_ADVANCED_MODEL')
4
+
5
+ def self.single_prompt(prompt:, model: :basic, max_tokens: 1000)
6
+ model = select_model(model)
7
+
8
+ parameters = { model:, max_tokens:, messages: [{ role: 'user', content: prompt }] }
9
+
10
+ response = Anthropic::Client.new.messages(parameters:)
11
+ response.dig('content', 0, 'text').strip
12
+ end
13
+
14
+ def self.single_chat(system:, user:, model: :basic, max_tokens: 1000)
15
+ model = select_model(model)
16
+
17
+ parameters = { model:, system:, max_tokens:,
18
+ messages: [ { role: 'user', content: user } ] }
19
+
20
+ response = Anthropic::Client.new.messages(parameters:)
21
+ response.dig('content', 0, 'text').strip
22
+ end
23
+
24
+ def self.chat(system:, messages:, model: :basic, max_tokens: 1000)
25
+ model = select_model(model)
26
+
27
+ parameters = { model:, max_tokens:, system:, messages: }
28
+
29
+ response = Anthropic::Client.new.messages(parameters:)
30
+ response.dig('content', 0, 'text').strip
31
+ end
32
+
33
+ def self.select_model(model)
34
+ case model
35
+ when :basic
36
+ BASIC_MODEL
37
+ when :advanced
38
+ ADVANCED_MODEL
39
+ else
40
+ model
41
+ end
42
+ end
43
+ end
@@ -0,0 +1,87 @@
1
+ # In the future, this may become a bus to more than one AI provider
2
+ module IntelliAgent::OpenAI
3
+ BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
4
+ ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
5
+
6
+ def self.embed(input, model: 'text-embedding-3-large')
7
+ response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
+ response.dig('data', 0, 'embedding')
9
+ end
10
+
11
+ def self.single_prompt(prompt:, model: :basic, response_format: nil)
12
+ model = select_model(model)
13
+
14
+ parameters = { model:, messages: [{ role: 'user', content: prompt }] }
15
+
16
+ parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
17
+
18
+ response = OpenAI::Client.new.chat(parameters:)
19
+
20
+ if response_format.nil?
21
+ response.dig('choices', 0, 'message', 'content').strip
22
+ else
23
+ response
24
+ end
25
+ end
26
+
27
+ def self.vision(prompt:, image_url:, model: :advanced, response_format: nil)
28
+ model = select_model(model)
29
+ messages = [{ type: :text, text: prompt },
30
+ { type: :image_url, image_url: { url: image_url } }]
31
+
32
+ parameters = { model: model, messages: [{ role: :user, content: messages }] }
33
+ parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
34
+
35
+ response = OpenAI::Client.new.chat(parameters:)
36
+
37
+ if response_format.nil?
38
+ response.dig('choices', 0, 'message', 'content').strip
39
+ else
40
+ response
41
+ end
42
+ end
43
+
44
+ def self.single_chat(system:, user:, model: :basic, response_format: nil)
45
+ model = select_model(model)
46
+ parameters = { model:,
47
+ messages: [
48
+ { role: 'system', content: system },
49
+ { role: 'user', content: user }
50
+ ] }
51
+
52
+ parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
53
+
54
+ response = OpenAI::Client.new.chat(parameters:)
55
+ if response_format.nil?
56
+ response.dig('choices', 0, 'message', 'content').strip
57
+ else
58
+ response
59
+ end
60
+ end
61
+
62
+ def self.chat(messages:, model: :basic, response_format: nil)
63
+ model = select_model(model)
64
+ parameters = { model:, messages: }
65
+ parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
66
+
67
+ response = OpenAI::Client.new.chat(parameters:)
68
+ if response_format.nil?
69
+ response.dig('choices', 0, 'message', 'content').strip
70
+ else
71
+ response
72
+ end
73
+ end
74
+
75
+ def self.models = OpenAI::Client.new.models.list
76
+
77
+ def self.select_model(model)
78
+ case model
79
+ when :basic
80
+ BASIC_MODEL
81
+ when :advanced
82
+ ADVANCED_MODEL
83
+ else
84
+ model
85
+ end
86
+ end
87
+ end
data/lib/intelli_agent.rb CHANGED
@@ -1,64 +1,8 @@
1
1
  require 'openai'
2
- require 'intelli_agent/ai'
2
+ require 'anthropic'
3
3
 
4
- class IntelliAgent
5
- extend AI
6
- attr_reader :assistant, :thread, :instructions, :vector_store_id
7
-
8
- def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
9
- @openai_client = OpenAI::Client.new
10
-
11
- assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
12
- @assistant = @openai_client.assistants.retrieve(id: assistant_id)
13
-
14
- thread_params = {}
15
-
16
- # Only one vector store can be attached, according to the OpenAI API documentation
17
- @vector_store_id = vector_store_id
18
- thread_params = { tool_resources: { file_search: { vector_store_ids: [vector_store_id] } } } if @vector_store_id
19
-
20
- thread_id ||= @openai_client.threads.create(parameters: thread_params)['id']
21
- @thread = @openai_client.threads.retrieve(id: thread_id)
22
-
23
- @instructions = thread_instructions || @assistant['instructions']
24
- end
25
-
26
- def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
27
- def messages = @openai_client.messages.list(thread_id: @thread['id'])
28
- def last_message = messages['data'].first['content'].first['text']['value']
29
- def runs = @openai_client.runs.list(thread_id: @thread['id'])
30
-
31
- def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
32
- params = { assistant_id: @assistant['id'] }
33
-
34
- params[:instructions] = instructions || @instructions
35
- params[:additional_instructions] = additional_instructions unless additional_instructions.nil?
36
- params[:tool_choice] = tool_choice unless tool_choice.nil?
37
-
38
- params[:additional_messages] = [{ role: :user, content: additional_message }] unless additional_message.nil?
39
-
40
- params[:model] = model || @assistant['model']
41
-
42
- run_id = @openai_client.runs.create(thread_id: @thread['id'], parameters: params)['id']
43
-
44
- loop do
45
- response = @openai_client.runs.retrieve(id: run_id, thread_id: @thread['id'])
46
-
47
- case response['status']
48
- when 'queued', 'in_progress', 'cancelling'
49
- puts 'Status: Waiting AI Processing finish'
50
- sleep 1
51
- when 'completed'
52
- puts last_message
53
- break
54
- when 'requires_action'
55
- # Handle tool calls (see below)
56
- when 'cancelled', 'failed', 'expired'
57
- puts response['last_error'].inspect
58
- break # or `exit`
59
- else
60
- puts "Unknown status response: #{status}"
61
- end
62
- end
63
- end
4
+ module IntelliAgent
64
5
  end
6
+
7
+ require 'intelli_agent/anthropic'
8
+ require 'intelli_agent/openai'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.6
4
+ version: 0.1.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2027-07-23 00:00:00.000000000 Z
11
+ date: 2027-08-07 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai
@@ -24,7 +24,21 @@ dependencies:
24
24
  - - "~>"
25
25
  - !ruby/object:Gem::Version
26
26
  version: '7.1'
27
- description: A helper layer over OpenAI API
27
+ - !ruby/object:Gem::Dependency
28
+ name: anthropic
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '0.3'
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '0.3'
41
+ description: A helper layer over Anthropic and OpenAI API
28
42
  email: gedean.dias@gmail.com
29
43
  executables: []
30
44
  extensions: []
@@ -32,8 +46,10 @@ extra_rdoc_files: []
32
46
  files:
33
47
  - README.md
34
48
  - lib/intelli_agent.rb
49
+ - lib/intelli_agent/agent.rb.disabled
35
50
  - lib/intelli_agent/agent_test.rb_
36
- - lib/intelli_agent/ai.rb
51
+ - lib/intelli_agent/anthropic.rb
52
+ - lib/intelli_agent/openai.rb
37
53
  homepage: https://github.com/gedean/intelli_agent
38
54
  licenses:
39
55
  - MIT
@@ -53,8 +69,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
53
69
  - !ruby/object:Gem::Version
54
70
  version: '0'
55
71
  requirements: []
56
- rubygems_version: 3.5.16
72
+ rubygems_version: 3.5.17
57
73
  signing_key:
58
74
  specification_version: 4
59
- summary: A helper layer over OpenAI API
75
+ summary: A helper layer over Anthropic and OpenAI API
60
76
  test_files: []
@@ -1,56 +0,0 @@
1
- # In the future, this became a bus to more than one AI provider
2
- module AI
3
- BASIC_MODEL = 'gpt-4o-mini' # ENV.fetch('OPENAI_BASIC_MODEL')
4
- ADVANCED_MODEL = 'gpt-4o' # ENV.fetch('OPENAI_ADVANCED_MODEL')
5
-
6
- def embed(input, model: 'text-embedding-3-large')
7
- response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
- response.dig('data', 0, 'embedding')
9
- end
10
-
11
- def single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil)
12
- parameters = { model:, messages: [{ role: 'user', content: prompt }] }
13
-
14
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
15
-
16
- response = OpenAI::Client.new.chat(parameters:)
17
- response.dig('choices', 0, 'message', 'content').strip
18
- end
19
-
20
- def vision(prompt:, image_url:, model: AI::ADVANCED_MODEL, response_format: nil)
21
- messages = [{ type: :text, text: prompt },
22
- { type: :image_url, image_url: { url: image_url } }]
23
-
24
- parameters = { model: model, messages: [{ role: :user, content: messages }] }
25
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
26
-
27
- response = OpenAI::Client.new.chat(parameters:)
28
-
29
- response.dig('choices', 0, 'message', 'content').strip
30
- end
31
-
32
- def single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil)
33
- parameters = { model:,
34
- messages: [
35
- { role: 'system', content: system },
36
- { role: 'user', content: user }
37
- ] }
38
-
39
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
40
-
41
- response = OpenAI::Client.new.chat(parameters:)
42
- response.dig('choices', 0, 'message', 'content').strip
43
- end
44
-
45
- def chat(messages:, model: AI::BASIC_MODEL, response_format: nil)
46
- parameters = { model:, messages: }
47
- parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
48
-
49
- response = OpenAI::Client.new.chat(parameters:)
50
- response.dig('choices', 0, 'message', 'content').strip
51
- end
52
-
53
- def models
54
- OpenAI::Client.new.models.list
55
- end
56
- end